Unverified commit 23cb7917 authored by Brian Pickrell, committed by GitHub

Merge branch 'develop' into blas_tuning

parents b5fcc0bc ea32ca70
...@@ -5,5 +5,5 @@ ...@@ -5,5 +5,5 @@
!*.txt !*.txt
!*.ini !*.ini
!/tools/*.sh !/tools/*.sh
!/doc/*.txt
!/test/onnx/.onnxrt-commit !/test/onnx/.onnxrt-commit
!/docs/.sphinx/*.txt
...@@ -4,62 +4,7 @@ ...@@ -4,62 +4,7 @@
# are installed, and if so, uses the installed version to format # are installed, and if so, uses the installed version to format
# the staged changes. # the staged changes.
base=clang-format-10
format=""
yapf_base=yapf
yapf_format=""
use_yapf=true
# Redirect output to stderr.
exec 1>&2
# check if clang-format is installed
type "$base" >/dev/null 2>&1 && format="$base"
# no versions of clang-format are installed
if [ -z "$format" ]
then
echo "$base is not installed. Pre-commit hook will not be executed."
exit 0
fi
# check if yapf is installed
type "$yapf_base" >/dev/null 2>&1 && yapf_format="$yapf_base"
# no versions of yapf are installed
if [ -z "$yapf_format" ]
then
echo "$yapf_base is not installed. Pre-commit hook for python files will not be executed"
use_yapf=false
fi
# Do everything from top - level # Do everything from top - level
cd $(git rev-parse --show-toplevel) cd $(git rev-parse --show-toplevel)
if git rev-parse --verify HEAD >/dev/null 2>&1 python3 tools/format.py -q -i HEAD
then
against=HEAD
else
# Initial commit: diff against an empty tree object
against=16bbb57
fi
# do the formatting
for file in $(git diff-index --cached --name-only $against | grep -E '\.h$|\.hpp$|\.cpp$|\.cl$|\.c$|\.h\.in$|\.hpp\.in$|\.cpp\.in$|\.py$')
do
if [ -e "$file" ]
then
if [ $(echo $file | grep -E '\.py$') ]
then
if $use_yapf
then
echo "$yapf_format $file"
"$yapf_format" -i "$file"
fi
else
echo "$format $file"
"$format" -i -style=file "$file"
fi
fi
done
# To get started with Dependabot version updates, you'll need to specify which
# package ecosystems to update and where the package manifests are located.
# Please see the documentation for all configuration options:
# https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates
version: 2
updates:
- package-ecosystem: "pip" # See documentation for possible values
directory: "/docs/.sphinx" # Location of package manifests
schedule:
interval: "daily"
- package-ecosystem: "pip"
directory: "/tools/accuracy"
schedule:
interval: "daily"
- package-ecosystem: "pip"
directory: "/examples/vision/python_unet"
schedule:
interval: "daily"
- package-ecosystem: "pip"
directory: "/examples/vision/python_super_resolution"
schedule:
interval: "daily"
- package-ecosystem: "pip"
directory: "/"
schedule:
interval: "daily"
# Workflows
## `add-to-project.yaml`
<p>
This workflow adds pull requests and issues to a specific GitHub project board when they are opened.
</p>
- ## Trigger
The workflow is triggered by the following events:
- A pull request being opened.
- An issue being opened.
- ## Jobs
The workflow has a single job named `add-to-project`. The following step is executed in this job:
- The `add-to-project` job uses the `actions/add-to-project@v0.4.0` action to add pull requests and issues to a specific project board. The `with` parameters are `project-url` and `github-token`, which specify the URL of the project board and the GitHub token used to authenticate the action.
For more details, please refer to the [add-to-project.yaml](https://github.com/ROCmSoftwarePlatform/AMDMIGraphX/blob/develop/.github/workflows/add-to-project.yaml) file in the repository.
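A rough sketch of what such a workflow can look like is shown below; the workflow name, project URL, and secret name are placeholders rather than values taken from the actual file.

```yaml
name: Add to project

on:
  pull_request:
    types: [opened]
  issues:
    types: [opened]

jobs:
  add-to-project:
    runs-on: ubuntu-latest
    steps:
      # Add the newly opened issue or pull request to the project board.
      - uses: actions/add-to-project@v0.4.0
        with:
          project-url: https://github.com/orgs/EXAMPLE-ORG/projects/1  # placeholder URL
          github-token: ${{ secrets.ADD_TO_PROJECT_TOKEN }}            # placeholder secret name
```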
---
## `benchmark.yaml`
<p>
This workflow runs the MIGraphX performance benchmarks and generates reports by comparing the results with the reference data.
</p>
- ## Trigger
TODO: Update [benchmarks.yml (archived)](https://github.com/ROCmSoftwarePlatform/actions/blob/main/.github/workflows/benchmarks.yml) link after workflow is updated
- The workflow is triggered manually through the "Run workflow" button in the Actions tab of the repository, and it runs the reusable workflow [benchmarks.yml (archived)](https://github.com/ROCmSoftwarePlatform/actions/blob/main/.github/workflows/benchmarks.yml)
- ## Input Parameters
The workflow uses the following input parameters:
- `rocm_version`: the version of ROCm to use for running the benchmarks.
- `script_repo`: repository that contains the benchmark scripts.
- `result_path`: the path where benchmark results will be stored.
- `result_repo`: the repository where the benchmark results will be pushed for comparison.
For more details, please refer to the [benchmark.yaml](https://github.com/ROCmSoftwarePlatform/AMDMIGraphX/blob/develop/.github/workflows/benchmark.yaml) file in the repository.
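A minimal sketch of this manual-trigger-plus-reusable-workflow pattern, assuming the inputs listed above and the archived reusable workflow referenced by the link (the `@main` ref, input descriptions, and wiring are illustrative):

```yaml
on:
  workflow_dispatch:
    inputs:
      rocm_version:
        description: ROCm version used for the benchmarks
        required: true
      script_repo:
        description: Repository that contains the benchmark scripts
        required: true
      result_path:
        description: Path where benchmark results are stored
        required: true
      result_repo:
        description: Repository the results are pushed to for comparison
        required: true

jobs:
  benchmark:
    # Delegate the actual work to the (archived) reusable workflow.
    uses: ROCmSoftwarePlatform/actions/.github/workflows/benchmarks.yml@main
    with:
      rocm_version: ${{ github.event.inputs.rocm_version }}
      script_repo: ${{ github.event.inputs.script_repo }}
      result_path: ${{ github.event.inputs.result_path }}
      result_repo: ${{ github.event.inputs.result_repo }}
```

The `history.yaml` and `rocm-image-release.yaml` workflows described below follow the same `workflow_dispatch` plus reusable-workflow pattern.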
---
## `ci.yaml`
<p>
This workflow automates building and testing the AMDMIGraphX project across multiple platforms and versions.
</p>
- ## Trigger
The workflow is triggered by the following events:
- A pull request being opened, synchronized or closed.
- On push to the `develop`, `master`, and `release/**` branches.
- ## Jobs
The following jobs are executed in the workflow:
- `cancel`: This job cancels any previous runs of the workflow that may still be in progress. It runs on an `ubuntu-latest` runner and uses the `styfle/cancel-workflow-action` action.
- `tidy`: It runs on an `ubuntu-20.04` runner and runs `clang-tidy` for the codebase in a Docker container with the MIGraphX build environment.
- `cppcheck`: It runs on an `ubuntu-20.04` runner and performs static analysis on code in a Docker container, and caches the results for faster subsequent runs.
- `format`: It runs on an `ubuntu-20.04` runner and includes steps for freeing up disk space, caching Docker layers, and checking code formatting.
- `pyflakes`: It runs on an `ubuntu-20.04` runner and runs the Pyflakes static analysis tool to detect and report Python code issues.
- `licensing`: It runs on an `ubuntu-20.04` runner and includes steps to free up space, checkout the code, set up Python and run a license check using a Python script.
We have two jobs with multiple matrix configurations. Both run on the `ubuntu-20.04` runner, but currently only `linux` works on all three configurations (debug, release, codecov); `linux-fpga` works only in debug.
- `linux`: this job runs continuous integration tests for AMDMIGraphX on a Linux operating system. It tests a variety of build configurations to ensure code quality and compatibility.
- `linux-fpga`: this job builds and tests AMDMIGraphX on a Linux operating system with support for FPGA acceleration. It includes additional steps to verify FPGA functionality and performance.
For more details, please refer to the [ci.yaml](https://github.com/ROCmSoftwarePlatform/AMDMIGraphX/blob/develop/.github/workflows/ci.yaml) file in the repository.
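Based on the description above and the `ci.yaml` changes further down in this diff, the triggers and the `cancel` job roughly take the following shape (a sketch; the action version is illustrative and the real job matrix is much larger):

```yaml
on:
  pull_request:
    types: [opened, synchronize, closed]
  push:
    branches: [develop, master, 'release/**']

jobs:
  cancel:
    runs-on: ubuntu-latest
    steps:
      # Cancel any still-running previous runs of this workflow for the same ref.
      - uses: styfle/cancel-workflow-action@0.11.0  # version is illustrative
        with:
          access_token: ${{ github.token }}
```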
---
## `clean-closed-pr-caches.yaml`
<p>
The purpose of this workflow is to clean up any cached data related to a pull request once it is closed.
</p>
- ## Trigger
The workflow is triggered by the following events:
- A pull request being closed.
- ## Jobs
The workflow has a single job named `cleanup`. The following steps are executed in this job:
- `Check out code`: this step checks out the codebase from the repository.
- `Cleanup`: this step performs the actual cache cleanup using a series of commands.
For more details, please refer to the [clean-closed-pr-caches.yaml](https://github.com/ROCmSoftwarePlatform/AMDMIGraphX/blob/develop/.github/workflows/clean-closed-pr-caches.yaml) file in the repository.
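A minimal sketch of such a cleanup job, assuming the commonly documented `gh actions-cache` pattern for deleting caches tied to a closed pull request; the commands in the real workflow may differ:

```yaml
on:
  pull_request:
    types: [closed]

jobs:
  cleanup:
    runs-on: ubuntu-latest
    steps:
      - name: Check out code
        uses: actions/checkout@v3
      - name: Cleanup
        env:
          GH_TOKEN: ${{ github.token }}
        run: |
          gh extension install actions/gh-actions-cache
          REPO=${{ github.repository }}
          BRANCH=refs/pull/${{ github.event.pull_request.number }}/merge
          # Delete every cache that was created for this pull request's merge ref.
          for key in $(gh actions-cache list -R $REPO -B $BRANCH --limit 100 | cut -f 1); do
            gh actions-cache delete "$key" -R $REPO -B $BRANCH --confirm
          done
```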
---
## `history.yaml`
<p>
This workflow generates a report of the MIGraphX benchmark results between two dates and sends it to a specified email address. The report is also uploaded to a specified repository.
</p>
- ## Trigger
- The workflow is triggered manually through the "Run workflow" button in the Actions tab of the repository, and it runs the reusable workflow [history.yml](https://github.com/ROCmSoftwarePlatform/migraphx-benchmark/blob/main/.github/workflows/history.yml)
- ## Input Parameters
The workflow requires the following inputs:
- `start_date`: Start date for results analysis.
- `end_date`: End date for results analysis.
- `history_repo`: Repository for history results between dates.
- `benchmark_utils_repo`: Repository where benchmark utils are stored.
- `organization`: Organization that determines where the relevant files are located.
For more details, please refer to the [history.yaml](https://github.com/ROCmSoftwarePlatform/AMDMIGraphX/blob/develop/.github/workflows/history.yaml) file in the repository.
---
## `performance.yaml`
<p>
This workflow runs performance tests on the MIGraphX repository and generates a report of the results.
</p>
- ## Trigger
The workflow will run reusable workflow [perf-test.yml](https://github.com/ROCmSoftwarePlatform/migraphx-benchmark/blob/main/.github/workflows/perf-test.yml) by the following events:
- Pull requests opened, synchronized or closed on the `develop` branch.
- Schedule: Runs every day of the week from Monday to Saturday at 6:00 AM.
- Manual trigger through the "Run workflow" button in the Actions tab of the repository.
- ## Input Parameters
The workflow requires the following inputs:
- `rocm_release`: ROCm version to use for the performance tests.
- `performance_reports_repo`: Repository where the performance reports are stored.
- `benchmark_utils_repo`: Repository where the benchmark utilities are stored.
- `organization`: Organization that determines where the relevant files are located.
- `result_number`: Last N results.
- `model_timeout`: If a model in the performance test script exceeds this time threshold, it is skipped.
- `flags`: Command line arguments to be passed to the performance test script. Default is `-r`.
For more details, please refer to the [performance.yaml](https://github.com/ROCmSoftwarePlatform/AMDMIGraphX/blob/develop/.github/workflows/performance.yaml) file in the repository.
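Pieced together from this description and the `performance.yaml` diff further down, the triggers and the reusable-workflow call look roughly like this (the cron expression is an illustrative rendering of the stated schedule, and only a subset of inputs is shown):

```yaml
on:
  pull_request:
    branches: [develop]
    types: [opened, synchronize, closed]
  schedule:
    - cron: '0 6 * * 1-6'  # 6:00 AM, Monday through Saturday
  workflow_dispatch:
    inputs:
      rocm_release:
        description: ROCm Version
        required: true
        default: '5.6'

# Allow only one performance run per ref (or per scheduled run) at a time.
concurrency:
  group: "perftest-${{ github.head_ref || github.base_ref || 'schedule' }}"
  cancel-in-progress: true

jobs:
  release:
    uses: ROCmSoftwarePlatform/migraphx-benchmark/.github/workflows/perf-test.yml@main
    with:
      # Fall back to defaults when the run is started by schedule or pull_request.
      rocm_release: ${{ github.event.inputs.rocm_release || '5.6' }}
      flags: ${{ github.event.inputs.flags || '-r' }}
```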
---
## `rocm-image-release.yaml`
<p>
This workflow builds a Docker image for a specified ROCm release version and pushes it to the specified repository. If the image already exists, nothing happens, although there is an option to overwrite the existing image.
</p>
- ## Trigger
- The workflow is triggered manually through the "Run workflow" button in the Actions tab of the repository, and it runs the reusable workflow [rocm-release.yml](https://github.com/ROCmSoftwarePlatform/migraphx-benchmark/blob/main/.github/workflows/rocm-release.yml)
- ## Input Parameters
The workflow requires the following inputs:
- `rocm_release`: ROCm release version to build Docker image for.
- `benchmark_utils_repo`: Repository where benchmark utils are stored.
- `base_image`: Base image for ROCm Docker build.
- `docker_image`: Docker image name for ROCm Docker build.
- `build_navi`: Build number for the Navi architecture.
- `organization`: The organization name used to determine the location of files.
- `overwrite`: Specify whether to overwrite the Docker image if it already exists.
For more details, please refer to the [rocm-image-release.yaml](https://github.com/ROCmSoftwarePlatform/AMDMIGraphX/blob/develop/.github/workflows/rocm-image-release.yaml) file in the repository.
---
## `sync-onnxrt-main.yaml`
<p>
This workflow updates a file with the latest onnxruntime commit hash, then creates a pull request using the updated hash, adding labels, assignees, reviewers, and a title and body that describe the changes.
</p>
- ## Trigger
The workflow is triggered by the following events:
- Schedule: Runs every Friday at 05:07 PM.
- ## Jobs
The workflow has a single job named `Update and create pull request`. The following steps are executed in this job:
- `get_date`: this step sets an environment variable to the current date in the 'YYYY-MM-DD' format.
- `extract_sha1`: this step fetches the latest SHA1 commit hash of the HEAD branch of the `microsoft/onnxruntime` repository and sets it as an environment variable.
- `echo_sha1`: this step prints the SHA1 commit hash set in the `extract_sha1` step.
- `actions/checkout@v3`: this step checks out the codebase from the repository.
- `update_file`: this step updates a file in the repository with the SHA1 commit hash fetched in the `extract_sha1` step.
- `Make changes to pull request`: this step uses the `peter-evans/create-pull-request` action to create a pull request.
For more details, please refer to the [sync-onnxrt-main.yaml](https://github.com/ROCmSoftwarePlatform/AMDMIGraphX/blob/develop/.github/workflows/sync-onnxrt-main.yaml) file in the repository.
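A hedged sketch of how these steps can fit together; the cron expression, the pinned-commit file name (`test/onnx/.onnxrt-commit`), the shell commands, and the action version are assumptions for illustration only:

```yaml
on:
  schedule:
    - cron: '7 17 * * 5'  # Fridays at 05:07 PM (UTC)

jobs:
  update-and-create-pull-request:
    runs-on: ubuntu-latest
    steps:
      - name: Get current date
        id: get_date
        run: echo "date=$(date +'%Y-%m-%d')" >> $GITHUB_ENV
      - name: Extract latest onnxruntime SHA1
        id: extract_sha1
        # git ls-remote prints "<sha>\tHEAD"; keep only the hash.
        run: echo "sha1=$(git ls-remote https://github.com/microsoft/onnxruntime HEAD | cut -f 1)" >> $GITHUB_ENV
      - name: Echo SHA1
        id: echo_sha1
        run: echo "${{ env.sha1 }}"
      - uses: actions/checkout@v3
      - name: Update pinned commit file
        id: update_file
        # The file name is an assumption based on the rest of this diff.
        run: echo "${{ env.sha1 }}" > test/onnx/.onnxrt-commit
      - name: Make changes to pull request
        uses: peter-evans/create-pull-request@v5  # version is illustrative
        with:
          title: "Update onnxruntime commit (${{ env.date }})"
          branch: sync-onnxrt-${{ env.date }}
```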
---
...@@ -18,13 +18,8 @@ jobs: ...@@ -18,13 +18,8 @@ jobs:
with: with:
access_token: ${{ github.token }} access_token: ${{ github.token }}
tidy: tidy:
runs-on: ubuntu-20.04 runs-on: ROCM-Ubuntu
steps:
steps:
- name: Free space
run: |
sudo rm -rf /usr/local/android /usr/share/dotnet /usr/local/share/boost /opt/ghc /usr/local/share/chrom* /usr/share/swift /usr/local/julia* /usr/local/lib/android /usr/local/graalvm /usr/local/aws* /usr/local/lib/heroku
- uses: actions/checkout@v3 - uses: actions/checkout@v3
# In this step, this action saves a list of existing images, # In this step, this action saves a list of existing images,
...@@ -48,7 +43,8 @@ jobs: ...@@ -48,7 +43,8 @@ jobs:
restore-keys: tidy-cache- restore-keys: tidy-cache-
- name: Build the Docker image - name: Build the Docker image
run: docker build . --file hip-clang.docker --tag migraphx run: |
docker build . --file hip-clang.docker --tag migraphx
- name: Clang tidy - name: Clang tidy
shell: bash -c "docker run -i -v=$GITHUB_WORKSPACE:/data -w /data migraphx bash < {0}" shell: bash -c "docker run -i -v=$GITHUB_WORKSPACE:/data -w /data migraphx bash < {0}"
...@@ -59,12 +55,13 @@ jobs: ...@@ -59,12 +55,13 @@ jobs:
-DMIGRAPHX_ENABLE_GPU=On \ -DMIGRAPHX_ENABLE_GPU=On \
-DMIGRAPHX_ENABLE_CPU=On \ -DMIGRAPHX_ENABLE_CPU=On \
-DMIGRAPHX_ENABLE_FPGA=On \ -DMIGRAPHX_ENABLE_FPGA=On \
-DMIGRAPHX_ENABLE_MLIR=On \
-DBUILD_DEV=On \ -DBUILD_DEV=On \
-DROCM_ENABLE_GH_ANNOTATIONS=On \ -DROCM_ENABLE_GH_ANNOTATIONS=On \
-DCLANG_TIDY_DEPEND_ON_TARGET=Off \ -DCLANG_TIDY_DEPEND_ON_TARGET=Off \
-DCLANG_TIDY_CACHE=/data/tidy-cache \ -DCLANG_TIDY_CACHE=/data/tidy-cache \
.. ..
make -j2 -k onnx-proto tf-proto tidy make -j$(nproc) -k onnx-proto tf-proto tidy
# GH actions can not update existing cache, as a workaround clear cache and then save it # GH actions can not update existing cache, as a workaround clear cache and then save it
- name: Clear tidy cache before saving - name: Clear tidy cache before saving
...@@ -86,11 +83,8 @@ jobs: ...@@ -86,11 +83,8 @@ jobs:
cppcheck: cppcheck:
runs-on: ubuntu-20.04 runs-on: ROCM-Ubuntu
steps: steps:
- name: Free space
run: sudo rm -rf /usr/local/android /usr/share/dotnet /usr/local/share/boost /opt/ghc /usr/local/share/chrom* /usr/share/swift /usr/local/julia* /usr/local/lib/android /usr/local/graalvm /usr/local/aws* /usr/local/lib/heroku
- uses: actions/checkout@v3 - uses: actions/checkout@v3
# In this step, this action saves a list of existing images, # In this step, this action saves a list of existing images,
...@@ -126,7 +120,7 @@ jobs: ...@@ -126,7 +120,7 @@ jobs:
-DBUILD_DEV=On \ -DBUILD_DEV=On \
-DROCM_ENABLE_GH_ANNOTATIONS=On \ -DROCM_ENABLE_GH_ANNOTATIONS=On \
.. ..
make -j2 cppcheck make -j$(nproc) cppcheck
# GH actions can not update existing cache, as a workaround clear cache and then save it # GH actions can not update existing cache, as a workaround clear cache and then save it
- name: Clear cppcheck cache before saving - name: Clear cppcheck cache before saving
...@@ -148,12 +142,11 @@ jobs: ...@@ -148,12 +142,11 @@ jobs:
format: format:
runs-on: ubuntu-20.04 runs-on: ROCM-Ubuntu
steps: steps:
- name: Free space
run: sudo rm -rf /usr/local/android /usr/share/dotnet /usr/local/share/boost /opt/ghc /usr/local/share/chrom* /usr/share/swift /usr/local/julia* /usr/local/lib/android /usr/local/graalvm /usr/local/aws* /usr/local/lib/heroku
- uses: actions/checkout@v3 - uses: actions/checkout@v3
with:
fetch-depth: 0
# In this step, this action saves a list of existing images, # In this step, this action saves a list of existing images,
# the cache is created without them in the post run. # the cache is created without them in the post run.
...@@ -174,26 +167,89 @@ jobs: ...@@ -174,26 +167,89 @@ jobs:
shell: bash -c "docker run -i -v=$GITHUB_WORKSPACE:/data -w /data migraphx bash < {0}" shell: bash -c "docker run -i -v=$GITHUB_WORKSPACE:/data -w /data migraphx bash < {0}"
run: | run: |
set -e set -e
find . -iname '*.h' \ git config --global --add safe.directory /data
-o -iname '*.hpp' \ python3 tools/format.py origin/${{ github.event_name == 'pull_request' && github.base_ref || 'develop' }}
-o -iname '*.cpp' \
-o -iname '*.h.in' \ sles:
-o -iname '*.hpp.in' \ runs-on: ROCM-Ubuntu
-o -iname '*.cpp.in' \ steps:
-o -iname '*.cl' \ - uses: actions/checkout@v3
-o -iname '*.c' \ with:
| grep -v 'build/' \ fetch-depth: 0
| xargs -n 1 -P 1 -I{} -t sh -c 'clang-format-10 -style=file {} | diff - {}'
find . -iname '*.py' \ # In this step, this action saves a list of existing images,
| grep -v 'build/' \ # the cache is created without them in the post run.
| xargs -n 1 -P 1 -I{} -t sh -c 'yapf {} | diff - {}' # It also restores the cache if it exists.
- name: Docker layer cache
uses: jpribyl/action-docker-layer-caching@v0.1.1
with:
key: docker-layer-caching-migraphx-sles-${{hashFiles('hip-clang.docker', '**/*requirements.txt', '**/install_prereqs.sh', 'rbuild.ini')}}
restore-keys:
docker-layer-caching-migraphx-sles-
# Ignore the failure of a step and avoid terminating the job.
continue-on-error: true
- name: Build the Docker image
run: docker build . --file tools/docker/sles.docker --tag migraphx-sles
- name: Restore cache files for ccache
uses: actions/cache/restore@v3
id: ccache_restore
with:
path: ${{ github.workspace }}/ccache
key: ccache-sles-${{ github.ref }}
restore-keys: ccache-sles-
- name: Build migraphx
shell: bash -c "docker run -i -v=$GITHUB_WORKSPACE:/data -w /data migraphx-sles bash < {0}"
run: |
set -e
export CCACHE_COMPRESSLEVEL=10
export CCACHE_DIR=/data/ccache
export CCACHE_NOHASHDIR=true
export CCACHE_BASEDIR=/data
export CCACHE_MAXSIZE=1
mkdir build
cd build
CXX=/opt/rocm/llvm/bin/clang++ CC=/opt/rocm/llvm/bin/clang cmake \
-DBUILD_DEV=On \
-DCMAKE_CXX_COMPILER_LAUNCHER=/usr/local/bin/ccache \
-DCMAKE_C_COMPILER_LAUNCHER=/usr/local/bin/ccache \
..
make -j$(nproc) tests driver
- name: Clear ccache cache before saving
if: ${{ steps.ccache_restore.outputs.cache-hit }}
shell: bash
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
set +x
gh extension install actions/gh-actions-cache --pin v1.0.1
gh actions-cache delete ${{ steps.ccache_restore.outputs.cache-matched-key }} --confirm
- name: Save cache files for ccache
uses: actions/cache/save@v3
if: always()
with:
path: ${{ github.workspace }}/ccache
key: ccache-sles-${{ github.ref }}
pyflakes: pyflakes:
runs-on: ubuntu-20.04 runs-on: ubuntu-20.04
steps: steps:
- name: Free space - name: Free space
run: sudo rm -rf /usr/local/android /usr/share/dotnet /usr/local/share/boost /opt/ghc /usr/local/share/chrom* /usr/share/swift /usr/local/julia* /usr/local/lib/android /usr/local/graalvm /usr/local/aws* /usr/local/lib/heroku uses: jlumbroso/free-disk-space@main
with:
tool-cache: true
android: true
dotnet: true
haskell: true
large-packages: true
swap-storage: true
docker-images: true
- uses: actions/checkout@v3 - uses: actions/checkout@v3
- name: Set up Python - name: Set up Python
uses: actions/setup-python@v4 uses: actions/setup-python@v4
...@@ -205,7 +261,7 @@ jobs: ...@@ -205,7 +261,7 @@ jobs:
- name: Run pyflakes - name: Run pyflakes
run: | run: |
pyflakes --version pyflakes --version
pyflakes examples/ tools/ src/ test/ doc/ pyflakes examples/ tools/ src/ test/ docs/
mypy --version mypy --version
mypy tools/api.py mypy tools/api.py
...@@ -214,7 +270,16 @@ jobs: ...@@ -214,7 +270,16 @@ jobs:
steps: steps:
- name: Free space - name: Free space
run: sudo rm -rf /usr/local/android /usr/share/dotnet /usr/local/share/boost /opt/ghc /usr/local/share/chrom* /usr/share/swift /usr/local/julia* /usr/local/lib/android /usr/local/graalvm /usr/local/aws* /usr/local/lib/heroku uses: jlumbroso/free-disk-space@main
with:
tool-cache: true
android: true
dotnet: true
haskell: true
large-packages: true
swap-storage: true
docker-images: true
- uses: actions/checkout@v3 - uses: actions/checkout@v3
- name: Set up Python - name: Set up Python
uses: actions/setup-python@v4 uses: actions/setup-python@v4
...@@ -244,9 +309,19 @@ jobs: ...@@ -244,9 +309,19 @@ jobs:
- codecov - codecov
steps: steps:
- name: Free space and install rbuild, lld - name: Free space
uses: jlumbroso/free-disk-space@main
with:
tool-cache: true
android: true
dotnet: true
haskell: true
large-packages: true
swap-storage: true
docker-images: true
- name : Install rbuild and lld
run: | run: |
sudo rm -rf /usr/local/android /usr/share/dotnet /usr/local/share/boost /opt/ghc /usr/local/share/chrom* /usr/share/swift /usr/local/julia* /usr/local/lib/android /usr/local/graalvm /usr/local/aws* /usr/local/lib/heroku
sudo apt-get install -y lld sudo apt-get install -y lld
python -m pip install --upgrade pip python -m pip install --upgrade pip
pip install https://github.com/RadeonOpenCompute/rbuild/archive/master.tar.gz pip install https://github.com/RadeonOpenCompute/rbuild/archive/master.tar.gz
...@@ -265,11 +340,10 @@ jobs: ...@@ -265,11 +340,10 @@ jobs:
# This path is specific to Ubuntu # This path is specific to Ubuntu
path: ${{ github.workspace }}/cget path: ${{ github.workspace }}/cget
# Look to see if there is a cache hit for the corresponding requirements file # Look to see if there is a cache hit for the corresponding requirements file
key: ${{ matrix.os }}-cget-4-${{ hashFiles('requirements.txt', 'dev-requirements.txt') }} key: ${{ matrix.os }}-cget-4-${{ hashFiles('requirements.txt', 'dev-requirements.txt', 'rbuild.ini') }}
restore-keys: ${{ matrix.os }}-cget-4- restore-keys: ${{ matrix.os }}-cget-4-
- name: Install dependencies - name: Install dependencies
if: steps.deps_cache.outputs.cache-hit != 'true'
run: rbuild prepare -d cget -s gh run: rbuild prepare -d cget -s gh
- name: Restore cache files for ccache - name: Restore cache files for ccache
...@@ -305,6 +379,7 @@ jobs: ...@@ -305,6 +379,7 @@ jobs:
env: env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: | run: |
set +x
gh extension install actions/gh-actions-cache --pin v1.0.1 gh extension install actions/gh-actions-cache --pin v1.0.1
gh actions-cache delete ${{ steps.ccache_restore.outputs.cache-matched-key }} --confirm gh actions-cache delete ${{ steps.ccache_restore.outputs.cache-matched-key }} --confirm
...@@ -352,7 +427,16 @@ jobs: ...@@ -352,7 +427,16 @@ jobs:
steps: steps:
- name: Free space - name: Free space
run: sudo rm -rf /usr/local/android /usr/share/dotnet /usr/local/share/boost /opt/ghc /usr/local/share/chrom* /usr/share/swift /usr/local/julia* /usr/local/lib/android /usr/local/graalvm /usr/local/aws* /usr/local/lib/heroku uses: jlumbroso/free-disk-space@main
with:
tool-cache: true
android: true
dotnet: true
haskell: true
large-packages: true
swap-storage: true
docker-images: true
- uses: actions/checkout@v3 - uses: actions/checkout@v3
- name: Set up Python - name: Set up Python
uses: actions/setup-python@v4 uses: actions/setup-python@v4
...@@ -412,6 +496,7 @@ jobs: ...@@ -412,6 +496,7 @@ jobs:
env: env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: | run: |
set +x
gh extension install actions/gh-actions-cache gh extension install actions/gh-actions-cache
gh actions-cache delete ${{ steps.ccache_restore_fpga.outputs.cache-matched-key }} --confirm gh actions-cache delete ${{ steps.ccache_restore_fpga.outputs.cache-matched-key }} --confirm
continue-on-error: true continue-on-error: true
......
...@@ -12,7 +12,7 @@ on: ...@@ -12,7 +12,7 @@ on:
rocm_release: rocm_release:
description: ROCm Version description: ROCm Version
required: true required: true
default: '5.4.2' default: '5.6'
performance_reports_repo: performance_reports_repo:
description: Repository where performance reports are stored description: Repository where performance reports are stored
required: true required: true
...@@ -33,21 +33,28 @@ on: ...@@ -33,21 +33,28 @@ on:
description: If model in performance test script passes this threshold, it will be skipped description: If model in performance test script passes this threshold, it will be skipped
required: true required: true
default: '30m' default: '30m'
performance_backup_repo:
description: Repository for backup
required: true
default: migraphx-benchmark/performance-backup
flags: flags:
description: -m for Max value; -s for Std dev; -r for Threshold file description: -m for Max value; -s for Std dev; -r for Threshold file
required: true required: true
default: '-r' default: '-r'
concurrency: "perftest-${{ github.head_ref || github.base_ref || 'schedule' }}" concurrency:
group: "perftest-${{ github.head_ref || github.base_ref || 'schedule' }}"
cancel-in-progress: true
jobs: jobs:
release: release:
uses: ROCmSoftwarePlatform/migraphx-benchmark/.github/workflows/perf-test.yml@main uses: ROCmSoftwarePlatform/migraphx-benchmark/.github/workflows/perf-test.yml@main
with: with:
rocm_release: ${{ github.event.inputs.rocm_release || '5.4.2' }} rocm_release: ${{ github.event.inputs.rocm_release || '5.6' }}
result_number: ${{ github.event.inputs.result_number || '10' }} result_number: ${{ github.event.inputs.result_number || '10' }}
flags: ${{ github.event.inputs.flags || '-r' }} flags: ${{ github.event.inputs.flags || '-r' }}
performance_reports_repo: ${{ github.event.inputs.performance_reports_repo || 'ROCmSoftwarePlatform/migraphx-reports' }} performance_reports_repo: ${{ github.event.inputs.performance_reports_repo || 'ROCmSoftwarePlatform/migraphx-reports' }}
performance_backup_repo: ${{ github.event.inputs.performance_backup_repo || 'migraphx-benchmark/performance-backup' }}
benchmark_utils_repo: ${{ github.event.inputs.benchmark_utils_repo || 'ROCmSoftwarePlatform/migraphx-benchmark-utils' }} benchmark_utils_repo: ${{ github.event.inputs.benchmark_utils_repo || 'ROCmSoftwarePlatform/migraphx-benchmark-utils' }}
organization: ${{ github.event.inputs.organization || 'AMD' }} organization: ${{ github.event.inputs.organization || 'AMD' }}
model_timeout: ${{ github.event.inputs.model_timeout || '30m' }} model_timeout: ${{ github.event.inputs.model_timeout || '30m' }}
......
...@@ -22,11 +22,6 @@ on: ...@@ -22,11 +22,6 @@ on:
description: Build navi number description: Build navi number
required: true required: true
default: "0" default: "0"
organization:
type: string
description: Organization based on which location of files will be different
required: true
default: "AMD"
overwrite: overwrite:
type: boolean type: boolean
description: Overwrite image if it already exists description: Overwrite image if it already exists
...@@ -38,7 +33,6 @@ jobs: ...@@ -38,7 +33,6 @@ jobs:
with: with:
rocm_release: ${{ github.event.inputs.rocm_release || '5.1' }} rocm_release: ${{ github.event.inputs.rocm_release || '5.1' }}
benchmark-utils_repo: ${{ github.event.inputs.benchmark-utils_repo || 'ROCmSoftwarePlatform/migraphx-benchmark-utils' }} benchmark-utils_repo: ${{ github.event.inputs.benchmark-utils_repo || 'ROCmSoftwarePlatform/migraphx-benchmark-utils' }}
organization: ${{ github.event.inputs.organization || 'AMD' }}
base_image: ${{ github.event.inputs.base_image || 'rocm/dev-ubuntu-20.04' }} base_image: ${{ github.event.inputs.base_image || 'rocm/dev-ubuntu-20.04' }}
docker_image: ${{ github.event.inputs.docker_image || 'rocm-migraphx' }} docker_image: ${{ github.event.inputs.docker_image || 'rocm-migraphx' }}
build_navi: ${{ github.event.inputs.build_navi || '0' }} build_navi: ${{ github.event.inputs.build_navi || '0' }}
......
...@@ -48,6 +48,8 @@ ...@@ -48,6 +48,8 @@
/CMakeSettings.json /CMakeSettings.json
# documentation artifacts
_toc.yml
#==============================================================================# #==============================================================================#
# Directories to ignore (do not add trailing '/'s, they skip symlinks). # Directories to ignore (do not add trailing '/'s, they skip symlinks).
...@@ -61,3 +63,20 @@ test/onnx/models ...@@ -61,3 +63,20 @@ test/onnx/models
# VS2017 and VSCode config files. # VS2017 and VSCode config files.
.vscode .vscode
.vs .vs
# documentation artifacts
docs/_build
docs/_images
docs/_static
docs/_templates
docs/.doxygen/docBin
docs/.doxygen/DoxygenWarningLog.txt
docs/_doxygen
docs/html
/_readthedocs
# JetBrains config directories (ignoring symlinks)
.idea/
cmake-build*/
build*/
# Read the Docs configuration file
# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
version: 2
sphinx:
configuration: docs/conf.py
formats: [htmlzip]
python:
version: "3.8"
install:
- requirements: docs/.sphinx/requirements.txt
# Change Log for MIGraphX # Change Log for MIGraphX
Full documentation for MIGraphX is available at [MIGraphX Github Pages](https://rocmsoftwareplatform.github.io/AMDMIGraphX/doc/html/). Full documentation for MIGraphX is available at [MIGraphX Documentation](https://rocmdocs.amd.com/projects/AMDMIGraphX/en/latest/).
## MIGraphX 2.5 for ROCm 5.5.0 ## MIGraphX 2.5 for ROCm 5.5.0
......
...@@ -21,35 +21,37 @@ ...@@ -21,35 +21,37 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE. # THE SOFTWARE.
##################################################################################### #####################################################################################
cmake_minimum_required(VERSION 3.5) cmake_minimum_required(VERSION 3.15 FATAL_ERROR)
if("${CMAKE_SOURCE_DIR}" STREQUAL "${CMAKE_BINARY_DIR}") if("${CMAKE_SOURCE_DIR}" STREQUAL "${CMAKE_BINARY_DIR}")
message(FATAL_ERROR "The binary and source directroy cannot be the same") message(FATAL_ERROR "The binary and source directroy cannot be the same")
endif() endif()
get_property(_GENERATOR_IS_MULTI_CONFIG GLOBAL PROPERTY GENERATOR_IS_MULTI_CONFIG)
# This has to be initialized before the project() command appears # This has to be initialized before the project() command appears
# Set the default of CMAKE_BUILD_TYPE to be release, unless user specifies with -D. MSVC_IDE does not use CMAKE_BUILD_TYPE # Set the default of CMAKE_BUILD_TYPE to be release, unless user specifies with -D. MSVC_IDE does not use CMAKE_BUILD_TYPE
if( NOT MSVC_IDE AND NOT CMAKE_BUILD_TYPE ) if(_GENERATOR_IS_MULTI_CONFIG)
set( CMAKE_BUILD_TYPE Release CACHE STRING "Choose the type of build, options are: None Debug Release RelWithDebInfo MinSizeRel." ) if (NOT CMAKE_CONFIGURATION_TYPES)
endif() set(CMAKE_CONFIGURATION_TYPES "Debug;Release;RelWithDebInfo;MinSizeRel" CACHE STRING
"Available build types (configurations) on multi-config generators")
# Setup valid strings for build type endif()
if (NOT CMAKE_CONFIGURATION_TYPES)
set(CMAKE_CONFIGURATION_TYPES "Debug;Release;RelWithDebInfo;MinSizeRel" CACHE STRING "Configs")
endif()
set_property(CACHE CMAKE_BUILD_TYPE PROPERTY STRINGS ${CMAKE_CONFIGURATION_TYPES})
# Default installation path
if(WIN32)
set(CMAKE_INSTALL_PREFIX "/opt/rocm/x86_64-w64-mingw32" CACHE PATH "")
else() else()
set(CMAKE_INSTALL_PREFIX "/opt/rocm" CACHE PATH "") if(NOT CMAKE_BUILD_TYPE)
set(CMAKE_BUILD_TYPE Release CACHE STRING
"Choose the type of build, options are: None Debug Release RelWithDebInfo MinSizeRel.")
endif()
endif() endif()
set(CMAKE_INSTALL_PREFIX "/opt/rocm" CACHE PATH "")
set(CMAKE_BUILD_RPATH "${CMAKE_BINARY_DIR}/lib") set(CMAKE_BUILD_RPATH "${CMAKE_BINARY_DIR}/lib")
project(migraphx) project(migraphx LANGUAGES C CXX)
include(CTest)
find_package(ROCM REQUIRED) find_package(ROCM REQUIRED)
find_package(Threads REQUIRED)
find_path(HALF_INCLUDE_DIR half.hpp PATH_SUFFIXES half) find_path(HALF_INCLUDE_DIR half.hpp PATH_SUFFIXES half)
if (NOT HALF_INCLUDE_DIR) if (NOT HALF_INCLUDE_DIR)
...@@ -69,7 +71,7 @@ include(ROCMSetupVersion) ...@@ -69,7 +71,7 @@ include(ROCMSetupVersion)
option(BUILD_DEV "Build for development purpose only" OFF) option(BUILD_DEV "Build for development purpose only" OFF)
rocm_setup_version(VERSION 2.6.0) rocm_setup_version(VERSION 2.7.0)
set(MIGRAPHX_SO_VERSION ${PROJECT_VERSION_MAJOR}.${PROJECT_VERSION_MINOR}.${PROJECT_VERSION_PATCH}) set(MIGRAPHX_SO_VERSION ${PROJECT_VERSION_MAJOR}.${PROJECT_VERSION_MINOR}.${PROJECT_VERSION_PATCH})
option( BUILD_SHARED_LIBS "Build as a shared library" ON ) option( BUILD_SHARED_LIBS "Build as a shared library" ON )
...@@ -92,12 +94,6 @@ set(MIGRAPHX_ENABLE_FPGA Off CACHE BOOL "") ...@@ -92,12 +94,6 @@ set(MIGRAPHX_ENABLE_FPGA Off CACHE BOOL "")
set(CMAKE_CXX_STANDARD_DEFAULT "") set(CMAKE_CXX_STANDARD_DEFAULT "")
add_compile_options($<$<COMPILE_LANGUAGE:CXX>:-std=c++17>) add_compile_options($<$<COMPILE_LANGUAGE:CXX>:-std=c++17>)
if(${CMAKE_VERSION} VERSION_LESS "3.12.0")
set(CONFIGURE_DEPENDS)
else()
set(CONFIGURE_DEPENDS CONFIGURE_DEPENDS)
endif()
list(APPEND CMAKE_MODULE_PATH ${CMAKE_CURRENT_SOURCE_DIR}/cmake) list(APPEND CMAKE_MODULE_PATH ${CMAKE_CURRENT_SOURCE_DIR}/cmake)
include(EnableCompilerWarnings) include(EnableCompilerWarnings)
include(ROCMClangTidy) include(ROCMClangTidy)
...@@ -121,6 +117,7 @@ rocm_enable_clang_tidy( ...@@ -121,6 +117,7 @@ rocm_enable_clang_tidy(
llvm-namespace-comment llvm-namespace-comment
misc-* misc-*
-misc-confusable-identifiers -misc-confusable-identifiers
-misc-use-anonymous-namespace
modernize-* modernize-*
performance-* performance-*
readability-* readability-*
...@@ -128,6 +125,7 @@ rocm_enable_clang_tidy( ...@@ -128,6 +125,7 @@ rocm_enable_clang_tidy(
-bugprone-implicit-widening-of-multiplication-result -bugprone-implicit-widening-of-multiplication-result
-bugprone-macro-parentheses -bugprone-macro-parentheses
-bugprone-signed-char-misuse -bugprone-signed-char-misuse
-bugprone-unchecked-optional-access
# Disable the aliased reserved identifiers # Disable the aliased reserved identifiers
-cert-dcl37-c -cert-dcl37-c
-cert-dcl51-cpp -cert-dcl51-cpp
...@@ -151,6 +149,8 @@ rocm_enable_clang_tidy( ...@@ -151,6 +149,8 @@ rocm_enable_clang_tidy(
-clang-diagnostic-extern-c-compat -clang-diagnostic-extern-c-compat
-clang-diagnostic-disabled-macro-expansion -clang-diagnostic-disabled-macro-expansion
-clang-diagnostic-unused-command-line-argument -clang-diagnostic-unused-command-line-argument
-cppcoreguidelines-avoid-do-while
-cppcoreguidelines-avoid-const-or-ref-data-members
-cppcoreguidelines-explicit-virtual-functions -cppcoreguidelines-explicit-virtual-functions
-cppcoreguidelines-init-variables -cppcoreguidelines-init-variables
-cppcoreguidelines-pro-bounds-array-to-pointer-decay -cppcoreguidelines-pro-bounds-array-to-pointer-decay
...@@ -198,7 +198,6 @@ rocm_enable_clang_tidy( ...@@ -198,7 +198,6 @@ rocm_enable_clang_tidy(
EXTRA_ARGS EXTRA_ARGS
-UNDEBUG -UNDEBUG
-DMIGRAPHX_USE_CLANG_TIDY -DMIGRAPHX_USE_CLANG_TIDY
"-Dmain\\\\(...\\\\)=main\\\\(__VA_ARGS__\\\\) // NOLINT"
CLANG_ARGS CLANG_ARGS
-analyzer-max-loop 10 -analyzer-max-loop 10
-analyzer-inline-max-stack-depth 10 -analyzer-inline-max-stack-depth 10
...@@ -229,14 +228,12 @@ rocm_enable_cppcheck( ...@@ -229,14 +228,12 @@ rocm_enable_cppcheck(
shadowVar shadowVar
shadowVariable shadowVariable
unsafeClassDivZero unsafeClassDivZero
# Disable because of too many FPs
arithOperationsOnVoidPointer
definePrefix:*test/include/test.hpp definePrefix:*test/include/test.hpp
ctuOneDefinitionRuleViolation:*test/* ctuOneDefinitionRuleViolation:*test/*
useSmartPointer:*src/api/api.cpp useSmartPointer:*src/api/api.cpp
useSmartPointer:*make_shared_array.hpp useSmartPointer:*make_shared_array.hpp
constParameter:*src/targets/gpu/*.cpp
constParameter:*src/targets/gpu/*.hpp
# Suppress mlir_conv.cpp since this file will be deleted
*:*src/targets/gpu/mlir_conv.cpp
FORCE FORCE
INCONCLUSIVE INCONCLUSIVE
RULE_FILE RULE_FILE
...@@ -253,10 +250,13 @@ rocm_enable_cppcheck( ...@@ -253,10 +250,13 @@ rocm_enable_cppcheck(
${CMAKE_CURRENT_SOURCE_DIR}/src/targets/gpu/kernels/include ${CMAKE_CURRENT_SOURCE_DIR}/src/targets/gpu/kernels/include
${CMAKE_CURRENT_SOURCE_DIR}/test/include ${CMAKE_CURRENT_SOURCE_DIR}/test/include
DEFINE DEFINE
MIGRAPHX_MLIR=1
CPPCHECK=1 CPPCHECK=1
__device__= __device__=
__host__= __host__=
__global__= __global__=
UNDEFINE
MIGRAPHX_USE_CLANG_TIDY
) )
enable_testing() enable_testing()
...@@ -267,8 +267,10 @@ set(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/lib) ...@@ -267,8 +267,10 @@ set(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/lib)
set(CMAKE_ARCHIVE_OUTPUT_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/lib) set(CMAKE_ARCHIVE_OUTPUT_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/lib)
set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/bin) set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/bin)
add_subdirectory(src) add_subdirectory(src)
add_subdirectory(doc) add_subdirectory(docs)
add_subdirectory(test) if(BUILD_TESTING)
add_subdirectory(test)
endif()
add_subdirectory(tools) add_subdirectory(tools)
set(DEST_DIR ${CMAKE_BINARY_DIR}) set(DEST_DIR ${CMAKE_BINARY_DIR})
......
...@@ -7,16 +7,18 @@ RUN dpkg --add-architecture i386 ...@@ -7,16 +7,18 @@ RUN dpkg --add-architecture i386
# Install rocm key # Install rocm key
RUN apt-get update && apt-get install -y gnupg2 --no-install-recommends curl && \ RUN apt-get update && apt-get install -y gnupg2 --no-install-recommends curl && \
curl -sL http://repo.radeon.com/rocm/rocm.gpg.key | apt-key add - curl -sL http://repo.radeon.com/rocm/rocm.gpg.key | apt-key add -
# Add rocm repository # Add rocm repository
RUN sh -c 'echo deb [arch=amd64 trusted=yes] http://repo.radeon.com/rocm/apt/5.4.2/ ubuntu main > /etc/apt/sources.list.d/rocm.list' RUN sh -c 'echo deb [arch=amd64 trusted=yes] http://repo.radeon.com/rocm/apt/5.6/ focal main > /etc/apt/sources.list.d/rocm.list'
# From docs.amd.com for installing rocm. Needed to install properly
RUN sh -c "echo 'Package: *\nPin: release o=repo.radeon.com\nPin-priority: 600' > /etc/apt/preferences.d/rocm-pin-600"
# Install dependencies # Install dependencies
RUN apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -y --allow-unauthenticated \ RUN apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -y --allow-unauthenticated \
apt-utils \ apt-utils \
build-essential \ build-essential \
clang-format-10 \
cmake \ cmake \
curl \ curl \
doxygen \ doxygen \
...@@ -75,6 +77,9 @@ ADD dev-requirements.txt /dev-requirements.txt ...@@ -75,6 +77,9 @@ ADD dev-requirements.txt /dev-requirements.txt
ADD requirements.txt /requirements.txt ADD requirements.txt /requirements.txt
ADD rbuild.ini /rbuild.ini ADD rbuild.ini /rbuild.ini
# Temporarily install a new cmake until switching to ubuntu 22.04
RUN pip3 install cmake==3.22.1
COPY ./tools/install_prereqs.sh / COPY ./tools/install_prereqs.sh /
RUN /install_prereqs.sh /usr/local / && rm /install_prereqs.sh RUN /install_prereqs.sh /usr/local / && rm /install_prereqs.sh
RUN test -f /usr/local/hash || exit 1 RUN test -f /usr/local/hash || exit 1
...@@ -83,7 +88,7 @@ RUN test -f /usr/local/hash || exit 1 ...@@ -83,7 +88,7 @@ RUN test -f /usr/local/hash || exit 1
RUN pip3 install yapf==0.28.0 RUN pip3 install yapf==0.28.0
# Install doc requirements # Install doc requirements
ADD doc/requirements.txt /doc-requirements.txt ADD docs/.sphinx/requirements.txt /doc-requirements.txt
RUN pip3 install -r /doc-requirements.txt RUN pip3 install -r /doc-requirements.txt
# Download real models to run onnx unit tests # Download real models to run onnx unit tests
...@@ -94,7 +99,11 @@ RUN /download_models.sh && rm /download_models.sh ...@@ -94,7 +99,11 @@ RUN /download_models.sh && rm /download_models.sh
# Install latest ccache version # Install latest ccache version
RUN cget -p $PREFIX install facebook/zstd@v1.4.5 -X subdir -DCMAKE_DIR=build/cmake RUN cget -p $PREFIX install facebook/zstd@v1.4.5 -X subdir -DCMAKE_DIR=build/cmake
RUN cget -p $PREFIX install ccache@v4.1 -DENABLE_TESTING=OFF RUN cget -p $PREFIX install ccache@v4.1 -DENABLE_TESTING=OFF
RUN cget -p /opt/cmake install kitware/cmake@v3.24.3 RUN cget -p /opt/cmake install kitware/cmake@v3.26.4
# Install MLIR
ADD mlir-requirements.txt /mlir-requirements.txt
RUN cget -p /usr/local install -f /mlir-requirements.txt
COPY ./test/onnx/.onnxrt-commit / COPY ./test/onnx/.onnxrt-commit /
...@@ -110,8 +119,6 @@ RUN git clone --single-branch --branch ${ONNXRUNTIME_BRANCH} --recursive ${ONNXR ...@@ -110,8 +119,6 @@ RUN git clone --single-branch --branch ${ONNXRUNTIME_BRANCH} --recursive ${ONNXR
ADD tools/build_and_test_onnxrt.sh /onnxruntime/build_and_test_onnxrt.sh ADD tools/build_and_test_onnxrt.sh /onnxruntime/build_and_test_onnxrt.sh
RUN cget -p /usr/local install ROCmSoftwarePlatform/rocMLIR@55c6ee66cc7502db7950693b3e845676cbf400b1 -DBUILD_MIXR_TARGET=On -DLLVM_ENABLE_ZSTD=Off -DLLVM_ENABLE_THREADS=Off
ENV MIOPEN_FIND_DB_PATH=/tmp/miopen/find-db ENV MIOPEN_FIND_DB_PATH=/tmp/miopen/find-db
ENV MIOPEN_USER_DB_PATH=/tmp/miopen/user-db ENV MIOPEN_USER_DB_PATH=/tmp/miopen/user-db
ENV LD_LIBRARY_PATH=$PREFIX/lib ENV LD_LIBRARY_PATH=$PREFIX/lib
...@@ -120,4 +127,3 @@ ENV LD_LIBRARY_PATH=$PREFIX/lib ...@@ -120,4 +127,3 @@ ENV LD_LIBRARY_PATH=$PREFIX/lib
ENV UBSAN_OPTIONS=print_stacktrace=1 ENV UBSAN_OPTIONS=print_stacktrace=1
ENV ASAN_OPTIONS=detect_stack_use_after_return=1:check_initialization_order=1:strict_init_order=1 ENV ASAN_OPTIONS=detect_stack_use_after_return=1:check_initialization_order=1:strict_init_order=1
RUN ln -s /opt/rocm/llvm/bin/llvm-symbolizer /usr/bin/llvm-symbolizer RUN ln -s /opt/rocm/llvm/bin/llvm-symbolizer /usr/bin/llvm-symbolizer
...@@ -15,21 +15,25 @@ def rocmtestnode(Map conf) { ...@@ -15,21 +15,25 @@ def rocmtestnode(Map conf) {
def compiler = bconf.get("compiler", "/opt/rocm/llvm/bin/clang++") def compiler = bconf.get("compiler", "/opt/rocm/llvm/bin/clang++")
def flags = bconf.get("flags", "") def flags = bconf.get("flags", "")
def gpu_debug = bconf.get("gpu_debug", "0") def gpu_debug = bconf.get("gpu_debug", "0")
def hiprtc_workarounds = bconf.get("hiprtc_workarounds", "0")
def cmd = """ def cmd = """
ulimit -c unlimited ulimit -c unlimited
echo "leak:dnnl::impl::malloc" > suppressions.txt echo "leak:dnnl::impl::malloc" > suppressions.txt
export LSAN_OPTIONS="suppressions=\$(pwd)/suppressions.txt" export LSAN_OPTIONS="suppressions=\$(pwd)/suppressions.txt"
export MIGRAPHX_GPU_DEBUG=${gpu_debug} export MIGRAPHX_GPU_DEBUG=${gpu_debug}
export MIGRAPHX_ENABLE_HIPRTC_WORKAROUNDS=${hiprtc_workarounds}
export CXX=${compiler} export CXX=${compiler}
export CXXFLAGS='-Werror' export CXXFLAGS='-Werror'
env env
rm -rf build rm -rf build
mkdir build mkdir build
cd build cd build
cmake -DCMAKE_C_COMPILER_LAUNCHER=ccache -DCMAKE_CXX_COMPILER_LAUNCHER=ccache -DBUILD_DEV=On ${flags} .. cmake -DCMAKE_C_COMPILER_LAUNCHER=ccache -DCMAKE_CXX_COMPILER_LAUNCHER=ccache -DBUILD_DEV=On -DCMAKE_EXECUTE_PROCESS_COMMAND_ECHO=STDOUT ${flags} ..
make -j\$(nproc) generate all doc package check VERBOSE=1 git diff
git diff-index --quiet HEAD || (echo "Git repo is not clean after running cmake." && exit 1)
make -j\$(nproc) generate VERBOSE=1
git diff
git diff-index --quiet HEAD || (echo "Generated files are different. Please run make generate and commit the changes." && exit 1)
make -j\$(nproc) all doc package check VERBOSE=1
md5sum ./*.deb
""" """
echo cmd echo cmd
sh cmd sh cmd
...@@ -83,8 +87,12 @@ def rocmnodename(name) { ...@@ -83,8 +87,12 @@ def rocmnodename(name) {
node_name = "${rocmtest_name} && vega"; node_name = "${rocmtest_name} && vega";
} else if(name == "navi21") { } else if(name == "navi21") {
node_name = "${rocmtest_name} && navi21"; node_name = "${rocmtest_name} && navi21";
} else if(name == "mi100+") {
node_name = "${rocmtest_name} && (gfx908 || gfx90a) && !vm";
} else if(name == "cdna") {
node_name = "${rocmtest_name} && (gfx908 || gfx90a || vega20) && !vm";
} else if(name == "nogpu") { } else if(name == "nogpu") {
return rocmtest_name; node_name = "${rocmtest_name} && nogpu";
} }
return node_name return node_name
} }
...@@ -95,31 +103,39 @@ def rocmnode(name, body) { ...@@ -95,31 +103,39 @@ def rocmnode(name, body) {
} }
} }
rocmtest clang_debug: rocmnode('vega') { cmake_build -> rocmtest clang_debug: rocmnode('cdna') { cmake_build ->
stage('Hip Clang Debug') { stage('hipRTC Debug') {
def sanitizers = "undefined" def sanitizers = "undefined"
def debug_flags = "-g -O2 -fsanitize=${sanitizers} -fno-sanitize-recover=${sanitizers}" def debug_flags = "-g -O2 -fsanitize=${sanitizers} -fno-sanitize-recover=${sanitizers}"
cmake_build(flags: "-DCMAKE_BUILD_TYPE=debug -DMIGRAPHX_ENABLE_PYTHON=Off -DCMAKE_CXX_FLAGS_DEBUG='${debug_flags}' -DCMAKE_C_FLAGS_DEBUG='${debug_flags}'") cmake_build(flags: "-DCMAKE_BUILD_TYPE=debug -DMIGRAPHX_ENABLE_PYTHON=Off -DCMAKE_CXX_FLAGS_DEBUG='${debug_flags}' -DCMAKE_C_FLAGS_DEBUG='${debug_flags}' -DMIGRAPHX_USE_HIPRTC=On", gpu_debug: true)
} }
}, clang_gpu_debug: rocmnode('vega') { cmake_build -> }, clang_release: rocmnode('cdna') { cmake_build ->
stage('Hip Clang GPU Debug') {
cmake_build(flags: "-DCMAKE_BUILD_TYPE=release", gpu_debug: true)
}
}, clang_release: rocmnode('vega') { cmake_build ->
stage('Hip Clang Release') { stage('Hip Clang Release') {
cmake_build(flags: "-DCMAKE_BUILD_TYPE=release") cmake_build(flags: "-DCMAKE_BUILD_TYPE=release")
stash includes: 'build/*.deb', name: 'migraphx-package' stash includes: 'build/*.deb', name: 'migraphx-package'
} }
}, hiprtc_gpu_debug: rocmnode('vega') { cmake_build -> }, hidden_symbols: rocmnode('cdna') { cmake_build ->
stage('HipRTC GPU Debug') { stage('Hidden symbols') {
cmake_build(flags: "-DCMAKE_BUILD_TYPE=release -DMIGRAPHX_USE_HIPRTC=On", gpu_debug: true, hiprtc_workarounds: true) cmake_build(flags: "-DMIGRAPHX_ENABLE_PYTHON=Off -DMIGRAPHX_ENABLE_GPU=On -DMIGRAPHX_ENABLE_CPU=On -DCMAKE_CXX_VISIBILITY_PRESET=hidden -DCMAKE_C_VISIBILITY_PRESET=hidden")
}
}, all_targets_debug : rocmnode('cdna') { cmake_build ->
stage('All targets Release') {
cmake_build(flags: "-DCMAKE_BUILD_TYPE=release -DMIGRAPHX_ENABLE_GPU=On -DMIGRAPHX_ENABLE_CPU=On -DMIGRAPHX_ENABLE_FPGA=On")
} }
}, mlir_debug: rocmnode('vega') { cmake_build -> }, mlir_debug: rocmnode('cdna') { cmake_build ->
stage('MLIR Debug') { stage('MLIR Debug') {
withEnv(['MIGRAPHX_ENABLE_MLIR=1']) { withEnv(['MIGRAPHX_ENABLE_MLIR=1']) {
def sanitizers = "undefined" def sanitizers = "undefined"
def debug_flags = "-g -O2 -fsanitize=${sanitizers} -fno-sanitize-recover=${sanitizers}" // Note: the -fno-sanitize= is copied from upstream LLVM_UBSAN_FLAGS.
cmake_build(flags: "-DCMAKE_BUILD_TYPE=debug -DMIGRAPHX_ENABLE_PYTHON=Off -DMIGRAPHX_ENABLE_MLIR=On -DCMAKE_CXX_FLAGS_DEBUG='${debug_flags}' -DCMAKE_C_FLAGS_DEBUG='${debug_flags}'") def debug_flags_cxx = "-g -O2 -fsanitize=${sanitizers} -fno-sanitize=vptr,function -fno-sanitize-recover=${sanitizers}"
def debug_flags = "-g -O2 -fsanitize=${sanitizers} -fno-sanitize=vptr -fno-sanitize-recover=${sanitizers}"
cmake_build(flags: "-DCMAKE_BUILD_TYPE=debug -DMIGRAPHX_ENABLE_PYTHON=Off -DMIGRAPHX_ENABLE_MLIR=On -DCMAKE_CXX_FLAGS_DEBUG='${debug_flags_cxx}' -DCMAKE_C_FLAGS_DEBUG='${debug_flags}'")
}
}
}, ck_release: rocmnode('mi100+') { cmake_build ->
stage('CK Release') {
withEnv(['MIGRAPHX_ENABLE_CK=1', 'MIGRAPHX_TUNE_CK=1']) {
cmake_build(flags: "-DCMAKE_BUILD_TYPE=release")
} }
} }
}, clang_asan: rocmnode('nogpu') { cmake_build -> }, clang_asan: rocmnode('nogpu') { cmake_build ->
...@@ -143,11 +159,12 @@ def onnxnode(name, body) { ...@@ -143,11 +159,12 @@ def onnxnode(name, body) {
} }
} }
rocmtest onnx: onnxnode('rocmtest') { cmake_build -> rocmtest onnx: onnxnode('cdna') { cmake_build ->
stage("Onnx runtime") { stage("Onnx runtime") {
sh ''' sh '''
apt install half apt install half
ls -lR #ls -lR
md5sum ./build/*.deb
dpkg -i ./build/*.deb dpkg -i ./build/*.deb
cd /onnxruntime && ./build_and_test_onnxrt.sh cd /onnxruntime && ./build_and_test_onnxrt.sh
''' '''
......
...@@ -203,13 +203,19 @@ HTML and PDF documentation can be built using: ...@@ -203,13 +203,19 @@ HTML and PDF documentation can be built using:
`cmake --build . --config Release --target doc` **OR** `make doc` `cmake --build . --config Release --target doc` **OR** `make doc`
This will build a local searchable web site inside the doc/html folder. This will build a local searchable web site inside the docs/html folder.
Documentation is built using [Doxygen](http://www.stack.nl/~dimitri/doxygen/download.html), [Sphinx](http://www.sphinx-doc.org/en/stable/index.html), and [Breathe](https://breathe.readthedocs.io/en/latest/) Documentation is built using [Doxygen](http://www.stack.nl/~dimitri/doxygen/download.html) and [rocm-docs-core](https://github.com/RadeonOpenCompute/rocm-docs-core)
Requirements for both Sphinx and Breathe can be installed with: Run the steps below to build documentation locally.
`pip install -r doc/requirements.txt` ```
cd docs
pip3 install -r .sphinx/requirements.txt
python3 -m sphinx -T -E -b html -d _build/doctrees -D language=en . _build/html
```
Depending on your setup `sudo` may be required for the pip install. Depending on your setup `sudo` may be required for the pip install.
......
...@@ -24,10 +24,40 @@ ...@@ -24,10 +24,40 @@
find_program(EMBED_LD ld) find_program(EMBED_LD ld)
find_program(EMBED_OBJCOPY objcopy) find_program(EMBED_OBJCOPY objcopy)
option(EMBED_USE_LD "Use ld to embed data files" OFF)
function(wrap_string)
set(options)
set(oneValueArgs VARIABLE AT_COLUMN)
set(multiValueArgs)
cmake_parse_arguments(PARSE "${options}" "${oneValueArgs}" "${multiValueArgs}" ${ARGN})
cmake_parse_arguments(WRAP_STRING "${options}" "${oneValueArgs}" "" ${ARGN})
string(LENGTH ${${PARSE_VARIABLE}} string_length)
math(EXPR offset "0")
while(string_length GREATER 0)
if(string_length GREATER ${PARSE_AT_COLUMN})
math(EXPR length "${PARSE_AT_COLUMN}")
else()
math(EXPR length "${string_length}")
endif()
string(SUBSTRING ${${PARSE_VARIABLE}} ${offset} ${length} line)
set(lines "${lines}\n${line}")
math(EXPR string_length "${string_length} - ${length}")
math(EXPR offset "${offset} + ${length}")
endwhile()
set(${PARSE_VARIABLE} "${lines}" PARENT_SCOPE)
endfunction()
function(generate_embed_source EMBED_NAME) function(generate_embed_source EMBED_NAME)
set(options) set(options)
set(oneValueArgs SRC HEADER) set(oneValueArgs SRC HEADER RELATIVE)
set(multiValueArgs OBJECTS SYMBOLS) set(multiValueArgs OBJECTS SYMBOLS FILES)
cmake_parse_arguments(PARSE "${options}" "${oneValueArgs}" "${multiValueArgs}" ${ARGN}) cmake_parse_arguments(PARSE "${options}" "${oneValueArgs}" "${multiValueArgs}" ${ARGN})
...@@ -44,16 +74,27 @@ function(generate_embed_source EMBED_NAME) ...@@ -44,16 +74,27 @@ function(generate_embed_source EMBED_NAME)
foreach(idx RANGE ${LEN}) foreach(idx RANGE ${LEN})
list(GET PARSE_SYMBOLS ${idx} SYMBOL) list(GET PARSE_SYMBOLS ${idx} SYMBOL)
list(GET PARSE_OBJECTS ${idx} OBJECT) list(GET PARSE_OBJECTS ${idx} OBJECT)
list(GET PARSE_FILES ${idx} FILE)
set(START_SYMBOL "_binary_${SYMBOL}_start") set(START_SYMBOL "_binary_${SYMBOL}_start")
set(END_SYMBOL "_binary_${SYMBOL}_end") set(END_SYMBOL "_binary_${SYMBOL}_end")
string(APPEND EXTERNS " if(EMBED_USE_LD)
extern const char ${START_SYMBOL}[]; string(APPEND EXTERNS "
extern const char ${END_SYMBOL}[]; extern const char ${START_SYMBOL}[];
") extern const char ${END_SYMBOL}[];
")
else()
string(APPEND EXTERNS "
extern const char ${START_SYMBOL}[];
extern const char* ${END_SYMBOL};
")
endif()
# TODO: Should use NAME_WLE if(PARSE_RELATIVE)
get_filename_component(BASE_NAME "${OBJECT}" NAME) file(RELATIVE_PATH BASE_NAME ${PARSE_RELATIVE} "${FILE}")
string(REGEX REPLACE ".[A-Za-z0-9_]$" "" BASE_NAME ${BASE_NAME}) else()
get_filename_component(BASE_NAME "${FILE}" NAME)
endif()
string(APPEND INIT_KERNELS " string(APPEND INIT_KERNELS "
{ \"${BASE_NAME}\", { ${START_SYMBOL}, ${END_SYMBOL}} }, { \"${BASE_NAME}\", { ${START_SYMBOL}, ${END_SYMBOL}} },
...@@ -86,21 +127,46 @@ function(embed_file OUTPUT_FILE OUTPUT_SYMBOL FILE) ...@@ -86,21 +127,46 @@ function(embed_file OUTPUT_FILE OUTPUT_SYMBOL FILE)
string(MAKE_C_IDENTIFIER "${REL_FILE}" SYMBOL) string(MAKE_C_IDENTIFIER "${REL_FILE}" SYMBOL)
get_filename_component(OUTPUT_FILE_DIR "${REL_FILE}" DIRECTORY) get_filename_component(OUTPUT_FILE_DIR "${REL_FILE}" DIRECTORY)
file(MAKE_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}/${OUTPUT_FILE_DIR}") file(MAKE_DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}/${OUTPUT_FILE_DIR}")
set(OUT_FILE "${CMAKE_CURRENT_BINARY_DIR}/${REL_FILE}.o") if(EMBED_USE_LD)
set(OUT_FILE "${CMAKE_CURRENT_BINARY_DIR}/${REL_FILE}.o")
else()
set(OUT_FILE "${CMAKE_CURRENT_BINARY_DIR}/${REL_FILE}.cpp")
endif()
set(${OUTPUT_SYMBOL} ${SYMBOL} PARENT_SCOPE) set(${OUTPUT_SYMBOL} ${SYMBOL} PARENT_SCOPE)
set(${OUTPUT_FILE} "${OUT_FILE}" PARENT_SCOPE) set(${OUTPUT_FILE} "${OUT_FILE}" PARENT_SCOPE)
add_custom_command( if(EMBED_USE_LD)
OUTPUT "${OUT_FILE}" add_custom_command(
COMMAND ${EMBED_LD} -r -o "${OUT_FILE}" -z noexecstack --format=binary "${REL_FILE}" OUTPUT "${OUT_FILE}"
COMMAND ${EMBED_OBJCOPY} --rename-section .data=.rodata,alloc,load,readonly,data,contents "${OUT_FILE}" COMMAND ${EMBED_LD} -r -o "${OUT_FILE}" -z noexecstack --format=binary "${REL_FILE}"
WORKING_DIRECTORY ${WORKING_DIRECTORY} COMMAND ${EMBED_OBJCOPY} --rename-section .data=.rodata,alloc,load,readonly,data,contents "${OUT_FILE}"
DEPENDS ${FILE} WORKING_DIRECTORY ${WORKING_DIRECTORY}
VERBATIM DEPENDS ${FILE}
) VERBATIM
)
else()
set_property(DIRECTORY APPEND PROPERTY CMAKE_CONFIGURE_DEPENDS ${FILE})
# reads source file contents as hex string
file(READ ${FILE} HEX_STRING HEX)
# wraps the hex string into multiple lines
wrap_string(VARIABLE HEX_STRING AT_COLUMN 80)
# adds '0x' prefix and comma suffix before and after every byte respectively
string(REGEX REPLACE "([0-9a-f][0-9a-f])" "0x\\1, " ARRAY_VALUES ${HEX_STRING})
# removes trailing comma
string(REGEX REPLACE ", $" "" ARRAY_VALUES ${ARRAY_VALUES})
file(WRITE "${OUT_FILE}" "
extern const char _binary_${SYMBOL}_start[] = { ${ARRAY_VALUES} };
extern const char* _binary_${SYMBOL}_end = _binary_${SYMBOL}_start + sizeof(_binary_${SYMBOL}_start);
\n")
endif()
endforeach() endforeach()
endfunction() endfunction()
function(add_embed_library EMBED_NAME) function(add_embed_library EMBED_NAME)
set(options)
set(oneValueArgs RELATIVE)
set(multiValueArgs)
cmake_parse_arguments(PARSE "${options}" "${oneValueArgs}" "${multiValueArgs}" ${ARGN})
file(MAKE_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/embed) file(MAKE_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/embed)
file(MAKE_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/embed/${EMBED_NAME}) file(MAKE_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/embed/${EMBED_NAME})
set(EMBED_DIR ${CMAKE_CURRENT_BINARY_DIR}/embed/${EMBED_NAME}) set(EMBED_DIR ${CMAKE_CURRENT_BINARY_DIR}/embed/${EMBED_NAME})
...@@ -110,15 +176,26 @@ function(add_embed_library EMBED_NAME) ...@@ -110,15 +176,26 @@ function(add_embed_library EMBED_NAME)
set(OUTPUT_FILES) set(OUTPUT_FILES)
set(SYMBOLS) set(SYMBOLS)
message(STATUS "Embedding files") message(STATUS "Embedding files")
foreach(FILE ${ARGN}) foreach(FILE ${PARSE_UNPARSED_ARGUMENTS})
embed_file(OUTPUT_FILE OUTPUT_SYMBOL ${FILE}) embed_file(OUTPUT_FILE OUTPUT_SYMBOL ${FILE})
list(APPEND OUTPUT_FILES ${OUTPUT_FILE}) list(APPEND OUTPUT_FILES ${OUTPUT_FILE})
list(APPEND SYMBOLS ${OUTPUT_SYMBOL}) list(APPEND SYMBOLS ${OUTPUT_SYMBOL})
endforeach() endforeach()
message(STATUS "Generating embedding library ${EMBED_NAME}") message(STATUS "Generating embedding library ${EMBED_NAME}")
generate_embed_source(${EMBED_NAME} SRC ${SRC_FILE} HEADER ${HEADER_FILE} OBJECTS ${OUTPUT_FILES} SYMBOLS ${SYMBOLS}) generate_embed_source(${EMBED_NAME} SRC ${SRC_FILE} HEADER ${HEADER_FILE} OBJECTS ${OUTPUT_FILES} SYMBOLS ${SYMBOLS} RELATIVE ${PARSE_RELATIVE} FILES ${PARSE_UNPARSED_ARGUMENTS})
add_library(${EMBED_NAME} STATIC ${OUTPUT_FILES} "${SRC_FILE}")
target_include_directories(${EMBED_NAME} PUBLIC "${EMBED_DIR}/include") set(INTERNAL_EMBED_LIB embed_lib_${EMBED_NAME})
target_compile_options(${EMBED_NAME} PRIVATE -Wno-reserved-identifier) add_library(${INTERNAL_EMBED_LIB} OBJECT "${SRC_FILE}")
set_target_properties(${EMBED_NAME} PROPERTIES POSITION_INDEPENDENT_CODE On) target_include_directories(${INTERNAL_EMBED_LIB} PRIVATE "${EMBED_DIR}/include")
target_compile_options(${INTERNAL_EMBED_LIB} PRIVATE -Wno-reserved-identifier -Wno-extern-initializer -Wno-missing-variable-declarations)
set_target_properties(${INTERNAL_EMBED_LIB} PROPERTIES POSITION_INDEPENDENT_CODE On)
add_library(${EMBED_NAME} INTERFACE)
if(EMBED_USE_LD)
target_sources(${EMBED_NAME} INTERFACE ${OUTPUT_FILES})
else()
target_sources(${INTERNAL_EMBED_LIB} PRIVATE ${OUTPUT_FILES})
endif()
target_sources(${EMBED_NAME} INTERFACE $<TARGET_OBJECTS:${INTERNAL_EMBED_LIB}>)
target_include_directories(${EMBED_NAME} INTERFACE "${EMBED_DIR}/include")
endfunction()
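For context, a minimal usage sketch of the helper above; the target and file names here are hypothetical rather than taken from the repository. The call produces an INTERFACE target whose objects and generated include directory propagate to anything that links it.

```cmake
# Hypothetical example: embed two data files into an embed target.
add_embed_library(example_kernels
    RELATIVE ${CMAKE_CURRENT_SOURCE_DIR}/kernels
    kernels/add.cpp
    kernels/mul.cpp)

# Consumers pick up the generated objects and the embed include directory.
target_link_libraries(example_lib PRIVATE example_kernels)
```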
#####################################################################################
# The MIT License (MIT)
#
# Copyright (c) 2015-2022 Advanced Micro Devices, Inc. All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#####################################################################################
if(COMMAND migraphx_generate_export_header)
return()
endif()
include(GenerateExportHeader)
function(migraphx_generate_export_header TARGET)
set(options)
set(oneValueArgs DIRECTORY)
set(multiValueArgs)
cmake_parse_arguments(PARSE "${options}" "${oneValueArgs}" "${multiValueArgs}" ${ARGN})
if(PARSE_DIRECTORY)
set(__directory ${PARSE_DIRECTORY})
else()
string(REPLACE "_" "/" __directory ${TARGET})
string(TOLOWER ${__directory} __directory)
endif()
set(__file_name ${CMAKE_CURRENT_BINARY_DIR}/include/${__directory}/export.h)
generate_export_header(${TARGET} EXPORT_FILE_NAME ${__file_name})
target_include_directories(${TARGET} PUBLIC $<BUILD_INTERFACE:${CMAKE_CURRENT_BINARY_DIR}/include>)
rocm_install(FILES ${__file_name} DESTINATION include/${__directory})
endfunction()
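As a usage sketch of the function above (the target name and source file are only illustrative of the underscore-to-slash rule, not a statement about the repository's actual targets):

```cmake
# Hypothetical target: "migraphx_gpu" maps to include/migraphx/gpu/export.h,
# and GenerateExportHeader defines a MIGRAPHX_GPU_EXPORT macro inside it.
add_library(migraphx_gpu SHARED lowering.cpp)
migraphx_generate_export_header(migraphx_gpu)
```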
...@@ -38,12 +38,22 @@ macro(find_python version)
find_program(PYTHON_CONFIG_${version} python${version}-config)
if(EXISTS ${PYTHON_CONFIG_${version}})
py_exec(COMMAND ${PYTHON_CONFIG_${version}} --includes OUTPUT_VARIABLE _python_include_args)
execute_process(COMMAND ${PYTHON_CONFIG_${version}} --ldflags --embed OUTPUT_VARIABLE _python_ldflags_args RESULT_VARIABLE _python_ldflags_result)
if(NOT _python_ldflags_result EQUAL 0)
py_exec(COMMAND ${PYTHON_CONFIG_${version}} --ldflags OUTPUT_VARIABLE _python_ldflags_args)
endif()
separate_arguments(_python_includes UNIX_COMMAND "${_python_include_args}")
separate_arguments(_python_ldflags UNIX_COMMAND "${_python_ldflags_args}")
string(REPLACE "-I" "" _python_includes "${_python_includes}")
add_library(python${version}::headers INTERFACE IMPORTED GLOBAL)
set_target_properties(python${version}::headers PROPERTIES
INTERFACE_INCLUDE_DIRECTORIES "${_python_includes}"
)
add_library(python${version}::runtime INTERFACE IMPORTED GLOBAL)
set_target_properties(python${version}::runtime PROPERTIES
INTERFACE_LINK_OPTIONS "${_python_ldflags}"
INTERFACE_LINK_LIBRARIES python${version}::headers
)
py_exec(COMMAND ${PYTHON_CONFIG_${version}} --prefix OUTPUT_VARIABLE _python_prefix)
string(STRIP "${_python_prefix}" _python_prefix)
set(PYTHON_${version}_EXECUTABLE "${_python_prefix}/bin/python${version}" CACHE PATH "")
...
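A hedged consumer-side sketch of the imported targets created above; the executable name, source file, and Python version are assumptions for illustration only:

```cmake
# Link an embedded-interpreter test against the Python 3.8 runtime target,
# which carries python3.8-config's ldflags and, via python3.8::headers,
# the interpreter's include directories.
find_python(3.8)
add_executable(py_embed_test main.cpp)
target_link_libraries(py_embed_test PRIVATE python3.8::runtime)
```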
...@@ -107,6 +107,24 @@
<summary>Use make_shared or make_unique instead of new</summary>
</message>
</rule>
<rule>
<tokenlist>raw</tokenlist>
<pattern><![CDATA[ [^\(,;{}:]+ \w+ && (\w+|\(|\+|\-|\*)]]></pattern>
<message>
<id>UseNamedLogicOperator</id>
<severity>style</severity>
<summary>Use 'and' instead of &&</summary>
</message>
</rule>
<rule>
<tokenlist>raw</tokenlist>
<pattern><![CDATA[ (if|while) \([^\)&]+&& ]]></pattern>
<message>
<id>UseNamedLogicOperator</id>
<severity>style</severity>
<summary>Use 'and' instead of &&</summary>
</message>
</rule>
<rule>
<tokenlist>raw</tokenlist>
<pattern><![CDATA[ \|\| ]]></pattern>
...
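To show what the two added rules are meant to catch, here is a small hand-written C++ snippet (not from the codebase); the raw-token patterns match the `&&` form, and the summary points at the alternative-token spelling:

```cpp
#include <string>

// Would be reported by UseNamedLogicOperator:
//     if(!name.empty() && name.front() != '_') { ... }
// Preferred spelling under the rule:
bool is_valid_identifier(const std::string& name)
{
    return not name.empty() and name.front() != '_';
}
```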
...@@ -25,6 +25,6 @@ ROCmSoftwarePlatform/rocm-recipes
facebook/zstd@v1.4.5 -X subdir -DCMAKE_DIR=build/cmake
ccache@v4.1 -DENABLE_TESTING=OFF
pcre,pfultz2/pcre@8.45 -H sha256:d6f7182602a775a7d500a0cedca6449af0400c6493951513046d17615ed0bf11
danmar/cppcheck@bb2711c22a0be09efe7f1a8da3030876471026c8 -DHAVE_RULES=1 # 2.11
RadeonOpenCompute/rocm-cmake@189d497ed185683154ae9766393b9a10ff21201f --build
-f requirements.txt
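These pinned entries use the cget requirements format; assuming cget is installed, a file like this is normally resolved with its --file option. The invocation below is a sketch of that workflow, and the dev-requirements.txt name is an assumption about which file this hunk belongs to, not the project's documented build command:

```sh
# Install the pinned build dependencies (and the nested requirements.txt)
# into the active cget prefix.
cget install -f dev-requirements.txt
```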
#####################################################################################
# The MIT License (MIT)
#
# Copyright (c) 2015-2022 Advanced Micro Devices, Inc. All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#####################################################################################
# -*- coding: utf-8 -*-
#
# MIGraphX documentation build configuration file, created by
# sphinx-quickstart on Thu Jul 19 11:38:13 2018.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
from datetime import date
import re
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'breathe', 'sphinx.ext.mathjax', 'sphinx.ext.viewcode', 'sphinx_rtd_theme',
'sphinx.ext.autosectionlabel'
]
autosectionlabel_prefix_document = True
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'MIGraphX'
copyright = u'2018-{}, AMD'.format(date.today().year)
author = u'AMD'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
with open('../../CMakeLists.txt') as file:
version = next((re.findall('[0-9.]+', line)[0]
for line in file.readlines()
if 'rocm_setup_version' in line))
# The full version, including alpha/beta/rc tags.
release = version
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = []
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
highlight_language = 'cpp'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'MIGraphXdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'MIGraphX.tex', u'MIGraphX Documentation', u'AMD', 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [(master_doc, 'migraphx', u'MIGraphX Documentation', [author], 1)]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'MIGraphX', u'MIGraphX Documentation', author, 'MIGraphX',
'One line description of project.', 'Miscellaneous'),
]
breathe_default_members = ('members', 'undoc-members')
cpp_index_common_prefix = ['migraphx::']
default_role = 'any'
primary_domain = 'cpp'
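As a quick sanity check of the version-scraping logic above, the regular expression grabs the first run of digits and dots from the `rocm_setup_version` line; the CMake line shown is illustrative, not the project's actual version:

```python
import re

# Hypothetical line as it might appear in ../../CMakeLists.txt
line = "rocm_setup_version(VERSION 2.5)"

# Same expression used in conf.py above.
version = re.findall('[0-9.]+', line)[0]
print(version)  # prints: 2.5
```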