Commit 32b83c9c authored by Khalique Ahmed

Merge branch 'develop' of https://github.com/ROCmSoftwarePlatform/AMDMIGraphX into inner_bcast_fix

parents 92f5a6cd 434a06cf
@@ -8,6 +8,12 @@ on:
      - master
      - 'release/**'
env:
DOCKER_USER: ${{secrets.DOCKERHUB_USERID}}
DOCKER_TOKEN: ${{secrets.DOCKERHUB_TOKEN}}
DOCKER_IMAGE_UBUNTU: "rocm/migraphx-ci-ubuntu"
DOCKER_IMAGE_SLES: "rocm/migraphx-ci-sles"
jobs:
  cancel:
@@ -17,22 +23,102 @@ jobs:
      uses: styfle/cancel-workflow-action@0.11.0
      with:
        access_token: ${{ github.token }}
-  tidy:
check_image:
name: Check if image exists in registry
runs-on: ubuntu-latest
outputs:
imageexists: ${{ steps.check_image.outputs.imageexists }}
imagetag: ${{ steps.image_hash.outputs.imagetag }}
imageexists_sles: ${{ steps.check_image.outputs.imageexists_sles }}
imagetag_sles: ${{ steps.image_hash.outputs.imagetag_sles }}
steps:
- name: Checkout Code
uses: actions/checkout@v3
- name: Create Image Tag
id: image_hash
run: |
echo "imagetag=hip-clang-${{hashFiles('**/hip-clang.docker', '**/*requirements.txt', '**/install_prereqs.sh', '**/rbuild.ini')}}" >> $GITHUB_OUTPUT
echo "imagetag_sles=hip-clang-${{hashFiles('**/tools/docker/sles.docker', '**/*requirements.txt', '**/install_prereqs.sh', '**/rbuild.ini')}}" >> $GITHUB_OUTPUT
- name: Check if image is built already
id: check_image
env:
DOCKER_TAG_UBUNTU: ${{ steps.image_hash.outputs.imagetag }}
DOCKER_TAG_SLES: ${{ steps.image_hash.outputs.imagetag_sles }}
run: |
if [[ "$(docker manifest inspect $DOCKER_IMAGE_UBUNTU:$DOCKER_TAG_UBUNTU 2> /dev/null)" != "" ]]; then
echo "imageexists=true" >> $GITHUB_OUTPUT
echo "Image already exists, skip building available"
else
echo "imageexists=false" >> $GITHUB_OUTPUT
echo "Tag does not exist, build and publishing required"
fi
if [[ "$(docker manifest inspect $DOCKER_IMAGE_SLES:$DOCKER_TAG_SLES 2> /dev/null)" != "" ]]; then
echo "imageexists_sles=true" >> $GITHUB_OUTPUT
echo "SLES Image already exists, skip building available"
else
echo "imageexists_sles=false" >> $GITHUB_OUTPUT
echo "SLES Tag does not exist, build and publishing required"
fi
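
The check_image step above boils down to asking the registry whether a manifest already exists for the computed tag. A minimal stand-alone sketch of the same test, assuming the Docker CLI is available and using a placeholder tag rather than the workflow's hashFiles-derived value:

    #!/usr/bin/env bash
    # Query the registry for a manifest; success means the tag is already published.
    image="rocm/migraphx-ci-ubuntu"   # same repository the workflow's DOCKER_IMAGE_UBUNTU points at
    tag="hip-clang-example"           # placeholder; CI derives this from hashFiles(...)
    if docker manifest inspect "$image:$tag" > /dev/null 2>&1; then
        echo "imageexists=true"       # CI would skip build_image
    else
        echo "imageexists=false"      # CI would rebuild and push the image
    fi
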
build_image:
name: Build image
    runs-on: ROCM-Ubuntu
    needs: check_image
    if: ${{ needs.check_image.outputs.imageexists != 'true' }}
    steps:
    - uses: actions/checkout@v3
-    # In this step, this action saves a list of existing images,
-    # the cache is created without them in the post run.
-    # It also restores the cache if it exists.
-    - name: Docker layer cache
-      uses: jpribyl/action-docker-layer-caching@v0.1.1
-      with:
-        key: docker-layer-caching-migraphx-${{hashFiles('hip-clang.docker', '**/*requirements.txt', '**/install_prereqs.sh', 'rbuild.ini')}}
-        restore-keys:
-          docker-layer-caching-migraphx-
-      # Ignore the failure of a step and avoid terminating the job.
-      continue-on-error: true
    - name: Build and publish
      env:
        DOCKER_TAG_UBUNTU: ${{ needs.check_image.outputs.imagetag }}
      run: |
        # The TOKEN and USERID are github secrets, Action failures at this step
        # can come from a PR from a fork changing a file which forces a rebuild
        # Resolve by making an internal PR of the Forked PR
        echo $DOCKER_TOKEN | docker login -u $DOCKER_USER --password-stdin
        docker pull $DOCKER_IMAGE_UBUNTU:latest || true
        docker build . --file hip-clang.docker --cache-from $DOCKER_IMAGE_UBUNTU:latest --tag $DOCKER_IMAGE_UBUNTU:$DOCKER_TAG_UBUNTU --tag $DOCKER_IMAGE_UBUNTU:latest;
        docker push $DOCKER_IMAGE_UBUNTU:$DOCKER_TAG_UBUNTU;
        docker push $DOCKER_IMAGE_UBUNTU:latest;
build_SLES_image:
name: Build SLES image
runs-on: ROCM-Ubuntu
needs: check_image
if: ${{ needs.check_image.outputs.imageexists_sles != 'true' }}
steps:
- uses: actions/checkout@v3
- name: Build and publish SLES
env:
DOCKER_TAG_SLES: ${{ needs.check_image.outputs.imagetag_sles }}
run: |
# The TOKEN and USERID are github secrets, Action failures at this step
        # can come from a PR from a fork changing a file which forces a rebuild
# Resolve by making an internal PR of the Forked PR
echo $DOCKER_TOKEN | docker login -u $DOCKER_USER --password-stdin
docker pull $DOCKER_IMAGE_SLES:latest || true
docker build . --file ./tools/docker/sles.docker --cache-from $DOCKER_IMAGE_SLES:latest --tag $DOCKER_IMAGE_SLES:$DOCKER_TAG_SLES --tag $DOCKER_IMAGE_SLES:latest;
docker push $DOCKER_IMAGE_SLES:$DOCKER_TAG_SLES;
docker push $DOCKER_IMAGE_SLES:latest;
tidy:
runs-on: ROCM-Ubuntu
needs: [ build_image, check_image ]
env:
DOCKER_TAG_UBUNTU: ${{ needs.check_image.outputs.imagetag }}
if: ${{ !cancelled() && (needs.build_image.result == 'success' || needs.build_image.result == 'skipped') }}
steps:
- uses: actions/checkout@v3
    - name: Restore cache files for tidy
      uses: actions/cache/restore@v3
@@ -42,12 +128,8 @@ jobs:
        key: tidy-cache-${{ github.ref }}
        restore-keys: tidy-cache-
-    - name: Build the Docker image
-      run: |
-        docker build . --file hip-clang.docker --tag migraphx
-    - name: Clang tidy
-      shell: bash -c "docker run -i -v=$GITHUB_WORKSPACE:/data -w /data migraphx bash < {0}"
    - name: Clang Tidy
      shell: bash -c "docker run -i -v=$GITHUB_WORKSPACE:/data -w /data $DOCKER_IMAGE_UBUNTU:$DOCKER_TAG_UBUNTU bash < {0}"
      run: |
        mkdir build
        cd build
@@ -55,6 +137,7 @@ jobs:
        -DMIGRAPHX_ENABLE_GPU=On \
        -DMIGRAPHX_ENABLE_CPU=On \
        -DMIGRAPHX_ENABLE_FPGA=On \
        -DMIGRAPHX_ENABLE_MLIR=On \
        -DBUILD_DEV=On \
        -DROCM_ENABLE_GH_ANNOTATIONS=On \
        -DCLANG_TIDY_DEPEND_ON_TARGET=Off \
@@ -64,6 +147,7 @@ jobs:
      # GH actions can not update existing cache, as a workaround clear cache and then save it
    - name: Clear tidy cache before saving
      continue-on-error: true
      if: ${{ steps.tidy_restore.outputs.cache-hit }}
      shell: bash
      env:
@@ -71,7 +155,6 @@ jobs:
      run: |
        gh extension install actions/gh-actions-cache --pin v1.0.1
        gh actions-cache delete ${{ steps.tidy_restore.outputs.cache-matched-key }} --confirm
-      continue-on-error: true
    - name: Save cache files for tidy
      uses: actions/cache/save@v3
@@ -83,21 +166,14 @@ jobs:
  cppcheck:
    runs-on: ROCM-Ubuntu
needs: [ build_image, check_image ]
env:
DOCKER_TAG_UBUNTU: ${{ needs.check_image.outputs.imagetag }}
if: ${{ !cancelled() && (needs.build_image.result == 'success' || needs.build_image.result == 'skipped') }}
    steps:
    - uses: actions/checkout@v3
-    # In this step, this action saves a list of existing images,
-    # the cache is created without them in the post run.
-    # It also restores the cache if it exists.
-    - name: Docker layer cache
-      uses: jpribyl/action-docker-layer-caching@v0.1.1
-      with:
-        key: docker-layer-caching-migraphx-${{hashFiles('hip-clang.docker', '**/*requirements.txt', '**/install_prereqs.sh', 'rbuild.ini')}}
-        restore-keys:
-          docker-layer-caching-migraphx-
-      # Ignore the failure of a step and avoid terminating the job.
-      continue-on-error: true
    - name: Restore cache files for cppcheck
      id: cppcheck_restore
      uses: actions/cache/restore@v3
@@ -106,11 +182,8 @@ jobs:
        key: cppcheck-cache-${{ hashFiles('cppcheck.rules', 'CMakeLists.txt') }}-${{ github.ref }}
        restore-keys: cppcheck-cache-${{ hashFiles('cppcheck.rules', 'CMakeLists.txt') }}-
-    - name: Build the Docker image
-      run: docker build . --file hip-clang.docker --tag migraphx
    - name: Cppcheck
-      shell: bash -c "docker run -i -v=$GITHUB_WORKSPACE:/data -w /data migraphx bash < {0}"
      shell: bash -c "docker run -i -v=$GITHUB_WORKSPACE:/data -w /data $DOCKER_IMAGE_UBUNTU:$DOCKER_TAG_UBUNTU bash < {0}"
      run: |
        mkdir build
        cd build
@@ -123,6 +196,7 @@ jobs:
      # GH actions can not update existing cache, as a workaround clear cache and then save it
    - name: Clear cppcheck cache before saving
      continue-on-error: true
      if: ${{ steps.cppcheck_restore.outputs.cache-hit }}
      shell: bash
      env:
@@ -130,7 +204,6 @@ jobs:
      run: |
        gh extension install actions/gh-actions-cache --pin v1.0.1
        gh actions-cache delete ${{ steps.cppcheck_restore.outputs.cache-matched-key }} --confirm
-      continue-on-error: true
    - name: Save cache files for cppcheck
      uses: actions/cache/save@v3
@@ -141,34 +214,91 @@ jobs:
  format:
-    runs-on: ROCM-Ubuntu
    runs-on: ubuntu-latest
needs: [ build_image, check_image ]
env:
DOCKER_TAG_UBUNTU: ${{ needs.check_image.outputs.imagetag }}
if: ${{ !cancelled() && (needs.build_image.result == 'success' || needs.build_image.result == 'skipped') }}
    steps:
    - uses: actions/checkout@v3
      with:
        fetch-depth: 0
-    # In this step, this action saves a list of existing images,
-    # the cache is created without them in the post run.
-    # It also restores the cache if it exists.
-    - name: Docker layer cache
-      uses: jpribyl/action-docker-layer-caching@v0.1.1
-      with:
-        key: docker-layer-caching-migraphx-${{hashFiles('hip-clang.docker', '**/*requirements.txt', '**/install_prereqs.sh', 'rbuild.ini')}}
-        restore-keys:
-          docker-layer-caching-migraphx-
-      # Ignore the failure of a step and avoid terminating the job.
-      continue-on-error: true
-    - name: Build the Docker image
-      run: docker build . --file hip-clang.docker --tag migraphx
    - name: Free space
      uses: jlumbroso/free-disk-space@main
      with:
        tool-cache: true
        android: true
        dotnet: true
        haskell: true
        large-packages: true
        swap-storage: true
        docker-images: true
    - name: Check formatting
-      shell: bash -c "docker run -i -v=$GITHUB_WORKSPACE:/data -w /data migraphx bash < {0}"
      shell: bash -c "docker run -i -v=$GITHUB_WORKSPACE:/data -w /data $DOCKER_IMAGE_UBUNTU:$DOCKER_TAG_UBUNTU bash < {0}"
      run: |
        set -e
        git config --global --add safe.directory /data
        python3 tools/format.py origin/${{ github.event_name == 'pull_request' && github.base_ref || 'develop' }}
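
The formatting check can also be reproduced outside of Actions by running the CI image by hand; a rough sketch, assuming the image has been pulled and substituting a real tag for the placeholder:

    #!/usr/bin/env bash
    # Run tools/format.py inside the CI container, mirroring the Check formatting step above.
    image="rocm/migraphx-ci-ubuntu:latest"   # placeholder tag; CI uses the hashFiles-derived tag
    docker run --rm -v "$PWD":/data -w /data "$image" bash -c \
        "git config --global --add safe.directory /data && python3 tools/format.py origin/develop"
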
sles:
runs-on: ROCM-Ubuntu
needs: [ build_SLES_image, check_image ]
env:
DOCKER_TAG_SLES: ${{ needs.check_image.outputs.imagetag_sles }}
if: ${{ !cancelled() && (needs.build_SLES_image.result == 'success' || needs.build_SLES_image.result == 'skipped') }}
steps:
- uses: actions/checkout@v3
with:
fetch-depth: 0
- name: Restore cache files for ccache
uses: actions/cache/restore@v3
id: ccache_restore
with:
path: ${{ github.workspace }}/ccache
key: ccache-sles-${{ github.ref }}
restore-keys: ccache-sles-
- name: Build migraphx
shell: bash -c "docker run -i -v=$GITHUB_WORKSPACE:/data -w /data $DOCKER_IMAGE_SLES:$DOCKER_TAG_SLES bash < {0}"
run: |
set -e
export CCACHE_COMPRESSLEVEL=10
export CCACHE_DIR=/data/ccache
export CCACHE_NOHASHDIR=true
export CCACHE_BASEDIR=/data
export CCACHE_MAXSIZE=1
mkdir build
cd build
CXX=/opt/rocm/llvm/bin/clang++ CC=/opt/rocm/llvm/bin/clang cmake \
-DMIGRAPHX_DISABLE_LARGE_BUFFER_TESTS=On \
-DBUILD_DEV=On \
-DCMAKE_CXX_COMPILER_LAUNCHER=/usr/local/bin/ccache \
-DCMAKE_C_COMPILER_LAUNCHER=/usr/local/bin/ccache \
..
make -j$(nproc) tests driver
- name: Clear ccache cache before saving
continue-on-error: true
if: ${{ steps.ccache_restore.outputs.cache-hit }}
shell: bash
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
gh extension install actions/gh-actions-cache --pin v1.0.1
gh actions-cache delete ${{ steps.ccache_restore.outputs.cache-matched-key }} --confirm
- name: Save cache files for ccache
uses: actions/cache/save@v3
if: always()
with:
path: ${{ github.workspace }}/ccache
key: ccache-sles-${{ github.ref }}
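
The clear-then-save pattern used by the tidy, cppcheck and sles jobs exists because Actions cannot overwrite an existing cache key. The same cleanup can be driven by hand with the gh-actions-cache extension; a sketch, assuming GH_TOKEN is exported and using an example key in place of a real one:

    #!/usr/bin/env bash
    # Delete a stale cache entry so a later cache/save step can publish a fresh one.
    repo="ROCmSoftwarePlatform/AMDMIGraphX"
    key="ccache-sles-refs/heads/develop"   # example key matching the ccache-sles-${{ github.ref }} pattern
    gh extension install actions/gh-actions-cache
    gh actions-cache list -R "$repo"
    gh actions-cache delete "$key" -R "$repo" --confirm
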
  pyflakes:
    runs-on: ubuntu-20.04
@@ -274,11 +404,10 @@ jobs:
        # This path is specific to Ubuntu
        path: ${{ github.workspace }}/cget
        # Look to see if there is a cache hit for the corresponding requirements file
-        key: ${{ matrix.os }}-cget-4-${{ hashFiles('requirements.txt', 'dev-requirements.txt') }}
        key: ${{ matrix.os }}-cget-4-${{ hashFiles('requirements.txt', 'dev-requirements.txt', 'rbuild.ini') }}
        restore-keys: ${{ matrix.os }}-cget-4-
    - name: Install dependencies
-      if: steps.deps_cache.outputs.cache-hit != 'true'
      run: rbuild prepare -d cget -s gh
    - name: Restore cache files for ccache
@@ -300,6 +429,7 @@ jobs:
        rbuild build -d cget -s gh -T check \
          -DCMAKE_BUILD_TYPE=${{matrix.configuration}} \
          -DMIGRAPHX_ENABLE_PYTHON=${{matrix.configuration == 'release' && 'On' || 'Off'}} \
          -DMIGRAPHX_DISABLE_LARGE_BUFFER_TESTS=On \
          -DBUILD_DEV=On \
          -DCMAKE_CXX_FLAGS_DEBUG="-g1 -Os -fdebug-prefix-map=$PWD=. -fdebug-types-section -fno-omit-frame-pointer -fsanitize=undefined -fno-sanitize-recover=undefined" \
          -DCMAKE_CXX_FLAGS_CODECOV="-g1 -Og -fdebug-prefix-map=$PWD=. -fdebug-types-section -fprofile-arcs -ftest-coverage -fno-omit-frame-pointer" \
@@ -309,12 +439,12 @@ jobs:
      # GH actions can not update existing cache, as a workaround clear cache and then save it
    - name: Clear ccache cache before saving
      continue-on-error: true
      if: ${{ steps.ccache_restore.outputs.cache-hit }}
      shell: bash
      env:
        GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      run: |
-        set +x
        gh extension install actions/gh-actions-cache --pin v1.0.1
        gh actions-cache delete ${{ steps.ccache_restore.outputs.cache-matched-key }} --confirm
@@ -416,6 +546,7 @@ jobs:
        rbuild build -d cget -s gh -T check \
          -DCMAKE_BUILD_TYPE=${{matrix.configuration}} \
          -DMIGRAPHX_ENABLE_PYTHON=${{matrix.configuration == 'release' && 'On' || 'Off'}} \
          -DMIGRAPHX_DISABLE_LARGE_BUFFER_TESTS=On \
          -DBUILD_DEV=On \
          -DCMAKE_CXX_FLAGS_DEBUG="-g1 -Os -fdebug-prefix-map=$PWD=. -fdebug-types-section -fno-omit-frame-pointer -fsanitize=undefined -fno-sanitize-recover=undefined" \
          -DCMAKE_CXX_FLAGS_CODECOV="-g1 -Og -fdebug-prefix-map=$PWD=. -fdebug-types-section -fprofile-arcs -ftest-coverage -fno-omit-frame-pointer" \
@@ -426,15 +557,14 @@ jobs:
      # this is a workaround, with GH actions can not update existing cache
    - name: Clear ccache cache before saving
      continue-on-error: true
      if: ${{ steps.ccache_restore_fpga.outputs.cache-hit }}
      shell: bash
      env:
        GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
      run: |
-        set +x
        gh extension install actions/gh-actions-cache
        gh actions-cache delete ${{ steps.ccache_restore_fpga.outputs.cache-matched-key }} --confirm
-      continue-on-error: true
    - name: Save cache files for ccache
      uses: actions/cache/save@v3
......
@@ -47,6 +47,7 @@ jobs:
          onnxruntime
          dependancies
          automated
          skip bot checks
        assignees: TedThemistokleous
        reviewers: TedThemistokleous causten
        draft: false
......
@@ -9,6 +9,10 @@ sphinx:
formats: [htmlzip]
python:
-  version: "3.8"
  install:
    - requirements: docs/.sphinx/requirements.txt
build:
os: ubuntu-20.04
tools:
python: "3.8"
@@ -2,8 +2,49 @@
Full documentation for MIGraphX is available at [MIGraphX Documentation](https://rocmdocs.amd.com/projects/AMDMIGraphX/en/latest/).
-## MIGraphX 2.5 for ROCm 5.5.0
## MIGraphX 2.7 for ROCm 5.7.0
### Added
- Enabled hipRTC to not require dev packages for migraphx runtime and allow the ROCm install to be in a different directory than it was during build time
- Add support for multi-target execution
- Added Dynamic Batch support with C++/Python APIs
- Add migraphx.create_argument to python API
- Added dockerfile example for Ubuntu 22.04
- Add TensorFlow supported ops in driver similar to the existing onnx operator list
- Add a MIGRAPHX_TRACE_MATCHES_FOR env variable to filter the matcher trace
- Improved debugging by printing max,min,mean and stddev values for TRACE_EVAL = 2
- use fast_math flag instead of ENV flag for GELU
- Print message from driver if offload copy is set for compiled program
### Optimizations
- Optimized for ONNX Runtime 1.14.0
- Improved compile times by only building for the GPU on the system
- Improve performance of pointwise/reduction kernels when using NHWC layouts
- Load specific version of the migraphx_py library
- Annotate functions with the block size so the compiler can do a better job of optimizing
- Enable reshape on nonstandard shapes
- Use half HIP APIs to compute max and min
- Added support for broadcasted scalars to unsqueeze operator
- Improved multiplies with dot operator
- Handle broadcasts across dot and concat
- Add verify namespace for better symbol resolution
### Fixed
- Resolved accuracy issues with FP16 resnet50
- Update cpp generator to handle inf from float
- Fix assertion error during verify and make DCE work with tuples
- Fix convert operation for NaNs
- Fix shape typo in API test
- Fix compile warnings for shadowing variable names
- Add missing specialization for the `nullptr` for the hash function
### Changed
- Bumped version of half library to 5.6.0
- Bumped CI to support rocm 5.6
- Make building tests optional
- replace np.bool with bool as per numpy request
### Removed
- Removed int8x4 rocBlas calls due to deprecation
- Removed std::reduce usage since not all OSes support it
## MIGraphX 2.5 for ROCm 5.5.0
### Added
- Y-Model feature to store tuning information with the optimized model
- Added Python 3.10 bindings
@@ -12,15 +53,11 @@
- Build support for ROCm MLIR
- Added migraphx-driver flag to print optimizations in python (--python)
- Added JIT implementation of the Gather and Pad operator which results in better handling of larger tensor sizes.
### Optimizations
- Improved performance of Transformer based models
- Improved performance of the Pad, Concat, Gather, and Pointwise operators
- Improved onnx/pb file loading speed
- Added general optimize pass which runs several passes such as simplify_reshapes/algebra and DCE in loop.
### Fixed
- Improved parsing Tensorflow Protobuf files
- Resolved various accuracy issues with some onnx models
@@ -29,6 +66,5 @@
- Use --offload-arch instead of --cuda-gpu-arch for the HIP compiler
- Changes inside JIT to use float accumulator for large reduce ops of half type to avoid overflow.
- Changes inside JIT to temporarily use cosine to compute sine function.
### Changed
- Changed version/location of 3rd party build dependencies to pick up fixes
@@ -51,6 +51,7 @@ project(migraphx LANGUAGES C CXX)
include(CTest)
find_package(ROCM REQUIRED)
find_package(Threads REQUIRED)
find_path(HALF_INCLUDE_DIR half.hpp PATH_SUFFIXES half)
if (NOT HALF_INCLUDE_DIR)
@@ -227,14 +228,12 @@ rocm_enable_cppcheck(
    shadowVar
    shadowVariable
    unsafeClassDivZero
    # Disable because of too many FPs
    arithOperationsOnVoidPointer
    definePrefix:*test/include/test.hpp
    ctuOneDefinitionRuleViolation:*test/*
    useSmartPointer:*src/api/api.cpp
    useSmartPointer:*make_shared_array.hpp
-    constParameter:*src/targets/gpu/*.cpp
-    constParameter:*src/targets/gpu/*.hpp
-    # Suppress mlir_conv.cpp since this file will be deleted
-    *:*src/targets/gpu/mlir_conv.cpp
  FORCE
  INCONCLUSIVE
  RULE_FILE
@@ -251,15 +250,19 @@ rocm_enable_cppcheck(
    ${CMAKE_CURRENT_SOURCE_DIR}/src/targets/gpu/kernels/include
    ${CMAKE_CURRENT_SOURCE_DIR}/test/include
  DEFINE
    MIGRAPHX_MLIR=1
    CPPCHECK=1
    __device__=
    __host__=
    __global__=
  UNDEFINE
    MIGRAPHX_USE_CLANG_TIDY
)
enable_testing()
include(ROCMCreatePackage)
include(ROCMTest)
set(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/lib)
set(CMAKE_ARCHIVE_OUTPUT_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/lib)
@@ -267,6 +270,7 @@ set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/bin)
add_subdirectory(src)
add_subdirectory(docs)
if(BUILD_TESTING)
  rocm_enable_test_package(migraphx)
  add_subdirectory(test)
endif()
add_subdirectory(tools)
......
@@ -77,6 +77,9 @@ ADD dev-requirements.txt /dev-requirements.txt
ADD requirements.txt /requirements.txt
ADD rbuild.ini /rbuild.ini
# Temporarily install a new cmake until switching to ubuntu 22.04
RUN pip3 install cmake==3.22.1
COPY ./tools/install_prereqs.sh /
RUN /install_prereqs.sh /usr/local / && rm /install_prereqs.sh
RUN test -f /usr/local/hash || exit 1
@@ -98,6 +101,9 @@
RUN cget -p $PREFIX install ccache@v4.1 -DENABLE_TESTING=OFF
RUN cget -p /opt/cmake install kitware/cmake@v3.26.4
# Install MLIR
ADD mlir-requirements.txt /mlir-requirements.txt
RUN cget -p /usr/local install -f /mlir-requirements.txt
COPY ./test/onnx/.onnxrt-commit /
@@ -113,9 +119,6 @@
ADD tools/build_and_test_onnxrt.sh /onnxruntime/build_and_test_onnxrt.sh
-# Use the /opt/cmake install because LLVM/MLIR need cmake >= 3.20
-RUN env PATH=/opt/cmake/bin:$PATH cget -p /usr/local install ROCmSoftwarePlatform/rocMLIR@1ad9d6df32acc6d29d58e8ed6710e36746d0a4d6 -DBUILD_FAT_LIBROCKCOMPILER=On
ENV MIOPEN_FIND_DB_PATH=/tmp/miopen/find-db
ENV MIOPEN_USER_DB_PATH=/tmp/miopen/user-db
ENV LD_LIBRARY_PATH=$PREFIX/lib
......
@@ -114,6 +114,10 @@ rocmtest clang_debug: rocmnode('cdna') { cmake_build ->
        cmake_build(flags: "-DCMAKE_BUILD_TYPE=release")
        stash includes: 'build/*.deb', name: 'migraphx-package'
    }
// }, hidden_symbols: rocmnode('cdna') { cmake_build ->
//     stage('Hidden symbols') {
//         cmake_build(flags: "-DMIGRAPHX_ENABLE_PYTHON=Off -DMIGRAPHX_ENABLE_GPU=On -DMIGRAPHX_ENABLE_CPU=On -DCMAKE_CXX_VISIBILITY_PRESET=hidden -DCMAKE_C_VISIBILITY_PRESET=hidden")
//     }
}, all_targets_debug : rocmnode('cdna') { cmake_build ->
    stage('All targets Release') {
        cmake_build(flags: "-DCMAKE_BUILD_TYPE=release -DMIGRAPHX_ENABLE_GPU=On -DMIGRAPHX_ENABLE_CPU=On -DMIGRAPHX_ENABLE_FPGA=On")
......
@@ -29,7 +29,10 @@ endif()
include(GenerateExportHeader)
function(migraphx_generate_export_header TARGET)
-    cmake_parse_arguments(PARSE "" "DIRECTORY" "" ${ARGN})
    set(options)
    set(oneValueArgs DIRECTORY)
    set(multiValueArgs)
    cmake_parse_arguments(PARSE "${options}" "${oneValueArgs}" "${multiValueArgs}" ${ARGN})
    if(PARSE_DIRECTORY)
        set(__directory ${PARSE_DIRECTORY})
    else()
......
@@ -86,7 +86,7 @@ function(py_add_module NAME)
)
endfunction()
-set(PYTHON_SEARCH_VERSIONS 2.7 3.5 3.6 3.7 3.8 3.9 3.10)
set(PYTHON_SEARCH_VERSIONS 3.5 3.6 3.7 3.8 3.9 3.10)
set(PYTHON_DISABLE_VERSIONS "" CACHE STRING "")
foreach(PYTHON_DISABLE_VERSION ${PYTHON_DISABLE_VERSIONS})
    list(REMOVE_ITEM PYTHON_SEARCH_VERSIONS ${PYTHON_DISABLE_VERSION})
......
@@ -107,6 +107,24 @@
    <summary>Use make_shared or make_unique instead of new</summary>
  </message>
</rule>
<rule>
<tokenlist>raw</tokenlist>
<pattern><![CDATA[ [^\(,;{}:]+ \w+ && (\w+|\(|\+|\-|\*)]]></pattern>
<message>
<id>UseNamedLogicOperator</id>
<severity>style</severity>
<summary>Use 'and' instead of &&</summary>
</message>
</rule>
<rule>
<tokenlist>raw</tokenlist>
<pattern><![CDATA[ (if|while) \([^\)&]+&& ]]></pattern>
<message>
<id>UseNamedLogicOperator</id>
<severity>style</severity>
<summary>Use 'and' instead of &&</summary>
</message>
</rule>
<rule>
  <tokenlist>raw</tokenlist>
  <pattern><![CDATA[ \|\| ]]></pattern>
......
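The two UseNamedLogicOperator rules added above only fire when cppcheck is built with rule support, which is why dev-requirements pins a cppcheck build with -DHAVE_RULES=1. A small local sketch of exercising the rule file, assuming it is run from the repository root:

    #!/usr/bin/env bash
    # Write a tiny translation unit that should trip the new '&&' patterns,
    # then run cppcheck against it with the project's rule file.
    printf '%s\n' \
        'bool both(bool a, bool b)' \
        '{' \
        '    if(a && b) // expected to be reported: use named logic operator' \
        '        return true;' \
        '    return a and b;' \
        '}' > /tmp/named_logic_example.cpp
    cppcheck --rule-file=cppcheck.rules /tmp/named_logic_example.cpp
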
@@ -25,6 +25,6 @@ ROCmSoftwarePlatform/rocm-recipes
facebook/zstd@v1.4.5 -X subdir -DCMAKE_DIR=build/cmake
ccache@v4.1 -DENABLE_TESTING=OFF
pcre,pfultz2/pcre@8.45 -H sha256:d6f7182602a775a7d500a0cedca6449af0400c6493951513046d17615ed0bf11
-danmar/cppcheck@2.9 -DHAVE_RULES=1
danmar/cppcheck@bb2711c22a0be09efe7f1a8da3030876471026c8 -DHAVE_RULES=1 # 2.11
-RadeonOpenCompute/rocm-cmake@027404a8326da6e7e9338e0b81f9428660190724 --build
RadeonOpenCompute/rocm-cmake@5a34e72d9f113eb5d028e740c2def1f944619595 --build
-f requirements.txt
-rocm-docs-core==0.11.0
rocm-docs-core>=0.20.0
# Copyright (c) 2023 Advanced Micro Devices, Inc. All rights reserved.
-# Permission is hereby granted, free of charge, to any person obtaining a copy
-# of this software and associated documentation files (the "Software"), to deal
-# in the Software without restriction, including without limitation the rights
-# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-# copies of the Software, and to permit persons to whom the Software is
-# furnished to do so, subject to the following conditions:
-# The above copyright notice and this permission notice shall be included in
-# all copies or substantial portions of the Software.
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-# THE SOFTWARE.
-#
-# This file is autogenerated by pip-compile with Python 3.8
-# by the following command:
-#
-#   pip-compile requirements.in
-#
accessible-pygments==0.0.4
    # via pydata-sphinx-theme
alabaster==0.7.13
@@ -46,7 +21,7 @@ charset-normalizer==3.1.0
    # via requests
click==8.1.3
    # via sphinx-external-toc
-cryptography==40.0.2
cryptography==41.0.3
    # via pyjwt
deprecated==1.2.13
    # via pygithub
@@ -60,22 +35,16 @@ fastjsonschema==2.16.3
    # via rocm-docs-core
gitdb==4.0.10
    # via gitpython
-gitpython==3.1.31
gitpython==3.1.32
    # via rocm-docs-core
idna==3.4
    # via requests
imagesize==1.4.1
    # via sphinx
-importlib-metadata==6.4.1
-    # via sphinx
-importlib-resources==5.12.0
-    # via rocm-docs-core
jinja2==3.1.2
    # via
    #   myst-parser
    #   sphinx
-linkify-it-py==1.0.3
-    # via myst-parser
markdown-it-py==2.2.0
    # via
    #   mdit-py-plugins
@@ -86,7 +55,7 @@ mdit-py-plugins==0.3.5
    # via myst-parser
mdurl==0.1.2
    # via markdown-it-py
-myst-parser[linkify]==1.0.0
myst-parser==1.0.0
    # via rocm-docs-core
packaging==23.1
    # via
@@ -109,8 +78,6 @@ pyjwt[crypto]==2.6.0
    # via pygithub
pynacl==1.5.0
    # via pygithub
-pytz==2023.3
-    # via babel
pyyaml==6.0
    # via
    #   myst-parser
@@ -120,7 +87,7 @@ requests==2.28.2
    # via
    #   pygithub
    #   sphinx
-rocm-docs-core==0.11.0
rocm-docs-core>=0.20.0
    # via -r requirements.in
smmap==5.0.0
    # via gitdb
@@ -163,13 +130,7 @@ sphinxcontrib-serializinghtml==1.1.5
    # via sphinx
typing-extensions==4.5.0
    # via pydata-sphinx-theme
-uc-micro-py==1.0.1
-    # via linkify-it-py
urllib3==1.26.15
    # via requests
wrapt==1.15.0
    # via deprecated
-zipp==3.15.0
-    # via
-    #   importlib-metadata
-    #   importlib-resources
@@ -114,7 +114,7 @@ int main(int argc, char** argv)
char* getCmdOption(char** begin, char** end, const std::string& option)
{
    char** itr = std::find(begin, end, option);
-    if(itr != end && ++itr != end)
    if(itr != end and ++itr != end)
    {
        return *itr;
    }
......
@@ -161,7 +161,7 @@ int main(int argc, char** argv)
    auto num_results =
        std::accumulate(lengths.begin(), lengths.end(), 1, std::multiplies<size_t>());
    float* results = reinterpret_cast<float*>(outputs[0].data());
-    float* max = std::max_element(results, results + num_results);
    const float* max = std::max_element(results, results + num_results);
    int answer = max - results;
    std::cout << std::endl
@@ -192,12 +192,12 @@ void read_nth_digit(const int n, std::vector<float>& digit)
    for(int i = 0; i < HEIGHT * WIDTH; ++i)
    {
        unsigned char temp = 0;
-        file.read((char*)&temp, sizeof(temp));
        file.read(reinterpret_cast<char*>(&temp), sizeof(temp));
        if(d == n)
        {
            float data = temp / 255.0;
            digit.push_back(data);
-            std::cout << SYMBOLS[(int)(data * 10) % 11];
            std::cout << SYMBOLS[static_cast<int>(data * 10) % 11];
            if((i + 1) % WIDTH == 0)
                std::cout << std::endl;
        }
......
@@ -54,5 +54,12 @@ ADD dev-requirements.txt /dev-requirements.txt
ADD requirements.txt /requirements.txt
ADD rbuild.ini /rbuild.ini
# Temporarily install a new cmake until switching to ubuntu 22.04
RUN pip3 install cmake==3.22.1
COPY ./tools/install_prereqs.sh /
RUN /install_prereqs.sh /usr/local / && rm /install_prereqs.sh
# Install MLIR
ADD mlir-requirements.txt /mlir-requirements.txt
RUN cget -p /usr/local install -f /mlir-requirements.txt
#####################################################################################
# The MIT License (MIT)
#
# Copyright (c) 2015-2022 Advanced Micro Devices, Inc. All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#####################################################################################
ROCmSoftwarePlatform/rocMLIR@2c519c48eaa278d13e6c40bc0941119826d71512 -DBUILD_FAT_LIBROCKCOMPILER=On
@@ -6,7 +6,9 @@ deps =
    -f requirements.txt
[gh]
-ignore = danmar/cppcheck
ignore =
    danmar/cppcheck
    ROCmSoftwarePlatform/rocMLIR
deps =
    -f dev-requirements.txt
    oneapi-src/oneDNN@v1.7
......
#####################################################################################
# The MIT License (MIT)
#
-# Copyright (c) 2015-2022 Advanced Micro Devices, Inc. All rights reserved.
# Copyright (c) 2015-2023 Advanced Micro Devices, Inc. All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
@@ -29,6 +29,7 @@ include(ROCMPackageConfigHelpers)
include(RegisterOp)
include(CheckCXXLinkerFlag)
add_library(migraphx
    adjust_allocation.cpp
    analyze_streams.cpp
@@ -36,6 +37,7 @@ add_library(migraphx
    argument.cpp
    auto_contiguous.cpp
    common.cpp
    common_dims.cpp
    compile_src.cpp
    convert_to_json.cpp
    cpp_generator.cpp
@@ -94,6 +96,7 @@ add_library(migraphx
    serialize.cpp
    shape.cpp
    simplify_algebra.cpp
    simplify_dyn_ops.cpp
    simplify_reshapes.cpp
    split_single_dyn_dim.cpp
    target.cpp
@@ -140,6 +143,7 @@ register_migraphx_ops(
    equal
    erf
    exp
    fill
    flatten
    floor
    fmod
@@ -183,6 +187,8 @@ register_migraphx_ops(
    quant_convolution
    quant_dot
    quantizelinear
    random_uniform
    random_seed
    recip
    reduce_max
    reduce_mean
@@ -248,8 +254,6 @@ endif()
target_link_libraries(migraphx PRIVATE -ldl)
target_include_directories(migraphx SYSTEM PUBLIC $<BUILD_INTERFACE:${HALF_INCLUDE_DIR}>)
-find_package(Threads)
target_link_libraries(migraphx PUBLIC Threads::Threads)
find_package(nlohmann_json 3.8.0 REQUIRED)
......
@@ -899,7 +899,7 @@ migraphx_dynamic_dimensions_assign_to(migraphx_dynamic_dimensions_t output,
extern "C" migraphx_status
migraphx_dynamic_dimensions_create(migraphx_dynamic_dimensions_t* dynamic_dimensions,
-                                   const_migraphx_dynamic_dimension_t* ptr,
                                    const const_migraphx_dynamic_dimension_t* ptr,
                                    size_t size)
{
    auto api_error_result = migraphx::try_([&] {
@@ -1432,7 +1432,7 @@
}
extern "C" migraphx_status migraphx_instructions_create(migraphx_instructions_t* instructions,
-                                                        const_migraphx_instruction_t* ptr,
                                                         const const_migraphx_instruction_t* ptr,
                                                         size_t size)
{
    auto api_error_result = migraphx::try_([&] {
......