Unverified commit 2ba401f0, authored by Ted Themistokleous, committed by GitHub

Merge branch 'simplify_1_mul_div_ops' into divide_by_zero_check

parents a330d428 8398fb19
......@@ -46,7 +46,7 @@ else
fi
# do the formatting
for file in $(git diff-index --cached --name-only $against | grep -E '\.h$|\.hpp$|\.cpp$|\.cl$|\.h\.in$|\.hpp\.in$|\.cpp\.in$|\.py$')
for file in $(git diff-index --cached --name-only $against | grep -E '\.h$|\.hpp$|\.cpp$|\.cl$|\.c$|\.h\.in$|\.hpp\.in$|\.cpp\.in$|\.py$')
do
if [ -e "$file" ]
then
......
name: MiGraphX Benchmark
on:
workflow_dispatch:
jobs:
benchmark:
uses: ROCmSoftwarePlatform/actions/.github/workflows/benchmarks.yml@main
with:
rocm_version: 5.2
script_repo: migraphx-benchmark/benchmark-utils
result_path: /usr/share/migraphx/test-results
result_repo: ROCmSoftwarePlatform/comparison-results
secrets:
gh_token: ${{ secrets.MIGRAPHX_BOT_TOKEN }}
......@@ -133,6 +133,7 @@ jobs:
-o -iname '*.hpp.in' \
-o -iname '*.cpp.in' \
-o -iname '*.cl' \
-o -iname '*.c' \
| grep -v 'build/' \
| xargs -n 1 -P 1 -I{} -t sh -c 'clang-format-10 -style=file {} | diff - {}'
find . -iname '*.py' \
......@@ -269,4 +270,98 @@ jobs:
curl -s https://codecov.io/bash | bash
echo "Uploaded"
linux-fpga:
continue-on-error: true
runs-on: ${{ matrix.os }}
env:
CCACHE_COMPRESSLEVEL: 10
CCACHE_DIR: ${{github.workspace}}/ccache
CCACHE_NOHASHDIR: true
CCACHE_BASEDIR: ${{github.workspace}}
CCACHE_MAXSIZE: 1
strategy:
matrix:
os:
- ubuntu-18.04
- ubuntu-20.04
configuration:
- debug
#- release Uncomment when ready to test release builds
#- codecov Uncomment when ready for codecov
steps:
- name: Free space
run: sudo rm -rf /usr/local/android /usr/share/dotnet /usr/local/share/boost /opt/ghc /usr/local/share/chrom* /usr/share/swift /usr/local/julia* /usr/local/lib/android
- uses: actions/checkout@v2
- name: Set up Python
uses: actions/setup-python@v2
with:
python-version: 3.6
- name: Cache dependencies
# Ignore the failure of a step and avoid terminating the job.
continue-on-error: true
uses: actions/cache@v2
with:
# This path is specific to Ubuntu
path: ${{ github.workspace }}/cget
# Look to see if there is a cache hit for the corresponding requirements file
key:
${{ matrix.os }}-cget-4-${{ hashFiles('requirements.txt', 'dev-requirements.txt') }}
${{ matrix.os }}-cget-4-
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install https://github.com/RadeonOpenCompute/rbuild/archive/master.tar.gz
rbuild prepare -d cget -s gh
- name: Prepare timestamp
id: cache_timestamp
shell: cmake -P {0}
run: |
string(TIMESTAMP current_date "%Y-%m-%d-%H;%M;%S" UTC)
message("::set-output name=timestamp::${current_date}")
- name: Cache files for ccache
# Ignore the failure of a step and avoid terminating the job.
continue-on-error: true
uses: pat-s/always-upload-cache@v2.1.3
with:
path: ccache
key: ${{ matrix.os }}-${{ matrix.configuration }}-ccache-${{ steps.cache_timestamp.outputs.timestamp }}
restore-keys: |
${{ matrix.os }}-${{ matrix.configuration }}-ccache-${{ steps.cache_timestamp.outputs.timestamp }}
${{ matrix.os }}-${{ matrix.configuration }}-ccache-
- name: Build and test
env:
CMAKE_PREFIX_PATH: ${{ github.workspace }}/cget
CCACHE_LOGFILE: /tmp/ccache.log
CXXFLAGS: -Werror -pthread --param ggc-min-expand=5 --param ggc-min-heapsize=8192
run: |
echo "leak:dnnl::impl::malloc" > suppressions.txt
export LSAN_OPTIONS="suppressions=$(pwd)/suppressions.txt"
rbuild build -d cget -s gh -T check \
-DCMAKE_BUILD_TYPE=${{matrix.configuration}} \
-DMIGRAPHX_ENABLE_PYTHON=${{matrix.configuration == 'release' && 'On' || 'Off'}} \
-DCMAKE_CXX_FLAGS_DEBUG="-g1 -Os -fdebug-prefix-map=$PWD=. -fdebug-types-section -fno-omit-frame-pointer -fsanitize=undefined -fno-sanitize-recover=undefined" \
-DCMAKE_CXX_FLAGS_CODECOV="-g1 -Og -fdebug-prefix-map=$PWD=. -fdebug-types-section -fprofile-arcs -ftest-coverage -fno-omit-frame-pointer" \
-DCMAKE_EXE_LINKER_FLAGS='-fuse-ld=gold' \
-DCMAKE_SHARED_LINKER_FLAGS='-fuse-ld=gold' \
-DMIGRAPHX_ENABLE_FPGA=On
${{ github.workspace }}/cget/bin/ccache -s
#- name: Upload code coverage
# if: "matrix.configuration == 'codecov'"
# env:
# CODECOV_TOKEN: "8545af1c-f90b-4345-92a5-0d075503ca56"
# run: |
# sudo apt-get install -y lcov
# cd build
# lcov --directory . --capture --output-file $(pwd)/coverage.info
# lcov --remove $(pwd)/coverage.info '/usr/*' --output-file $(pwd)/coverage.info
# lcov --list $(pwd)/coverage.info
# curl -s https://codecov.io/bash | bash
# echo "Uploaded"
\ No newline at end of file
name: MIGraphX Performance Tests
on:
push:
branches: [develop]
pull_request:
branches: [develop]
schedule:
- cron: "0 5 * * 1-6"
workflow_dispatch:
inputs:
rocm_release:
description: ROCm Version
required: true
default: '5.2'
performance_reports_repo:
description: Result repository
required: true
default: 'ROCmSoftwarePlatform/migraphx-reports'
result_number:
description: Last N results
required: true
default: '10'
flags:
description: -m for Max value; -s for Std dev; -r for Threshold file
required: true
default: '-s'
concurrency: benchmark
jobs:
release:
uses: rocmsoftwareplatform/migraphx-benchmark/.github/workflows/perf-test.yml@main
with:
rocm_release: ${{ github.event.inputs.rocm_release || '5.2' }}
result_number: ${{ github.event.inputs.result_number || '10' }}
flags: ${{ github.event.inputs.flags || '-s' }}
performance_reports_repo: ${{ github.event.inputs.performance_reports_repo || 'ROCmSoftwarePlatform/migraphx-reports' }}
secrets:
gh_token: ${{ secrets.MIGRAPHX_BOT_TOKEN }}
mail_user: ${{ secrets.MAIL_USERNAME }}
mail_pass: ${{ secrets.MAIL_PASSWORD }}
name: ROCM Docker image build
on:
workflow_dispatch:
inputs:
rocm_release:
description: ROCm release version
required: true
jobs:
release:
uses: ROCmSoftwarePlatform/actions/.github/workflows/rocm-release.yml@main
with:
rocm_release: ${{ github.event.inputs.rocm_release }}
secrets:
gh_token: ${{ secrets.MIGRAPHX_BOT_TOKEN }}
......@@ -82,8 +82,11 @@ endif()
# Disable cpu backend by default
set(MIGRAPHX_ENABLE_CPU Off CACHE BOOL "")
# Disable fpga backend by default
set(MIGRAPHX_ENABLE_FPGA Off CACHE BOOL "")
set(CMAKE_CXX_STANDARD_DEFAULT "")
add_compile_options(-std=c++17)
add_compile_options($<$<COMPILE_LANGUAGE:CXX>:-std=c++17>)
if(${CMAKE_VERSION} VERSION_LESS "3.12.0")
set(CONFIGURE_DEPENDS)
......@@ -253,14 +256,6 @@ rocm_enable_cppcheck(
enable_testing()
include(ROCMCreatePackage)
rocm_create_package(
NAME MIGraphX
DESCRIPTION "AMD's graph optimizer"
MAINTAINER "Paul Fultz II <paul.fultz@amd.com>"
LDCONFIG
PTH
DEPENDS miopen-hip rocblas hip-rocclr hip-base half
)
set(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/lib)
set(CMAKE_ARCHIVE_OUTPUT_DIRECTORY ${CMAKE_CURRENT_BINARY_DIR}/lib)
......@@ -277,3 +272,12 @@ foreach(py_file ${backend_files})
configure_file(${py_file} ${DEST_DIR}/lib/onnx_migraphx/. COPYONLY)
endforeach(py_file)
configure_file(${CMAKE_SOURCE_DIR}/test/py/onnx_backend_test.py ${DEST_DIR}/onnx_backend_test.py COPYONLY)
rocm_create_package(
NAME MIGraphX
DESCRIPTION "AMD's graph optimizer"
MAINTAINER "AMDMIGraphX Maintainer <migraphx-lib.support@amd.com>"
LDCONFIG
PTH
DEPENDS miopen-hip rocblas hip-rocclr hip-base half
)
......@@ -86,7 +86,7 @@ RUN git clone --single-branch --branch ${ONNXRUNTIME_BRANCH} --recursive ${ONNXR
ADD tools/build_and_test_onnxrt.sh /onnxruntime/build_and_test_onnxrt.sh
RUN PATH=/opt/cmake/bin:$PATH cget -p /usr/local install ROCmSoftwarePlatform/llvm-project-mlir@02078ce236ad90e3aec04c0c770ef5bfc99e49c2
RUN cget -p /usr/local install ROCmSoftwarePlatform/llvm-project-mlir@26a4b3cfc0a1a15181490f24ae461608fef1b04e -DBUILD_MIXR_TARGET=On
ENV MIOPEN_FIND_DB_PATH=/tmp/miopen/find-db
ENV MIOPEN_USER_DB_PATH=/tmp/miopen/user-db
......
......@@ -93,7 +93,7 @@ rocmtest clang_debug: rocmnode('vega') { cmake_build ->
stage('Hip Clang Debug') {
def sanitizers = "undefined"
def debug_flags = "-g -O2 -fsanitize=${sanitizers} -fno-sanitize-recover=${sanitizers}"
cmake_build("/opt/rocm/llvm/bin/clang++", "-DCMAKE_BUILD_TYPE=debug -DMIGRAPHX_ENABLE_PYTHON=Off -DCMAKE_CXX_FLAGS_DEBUG='${debug_flags}'")
cmake_build("/opt/rocm/llvm/bin/clang++", "-DCMAKE_BUILD_TYPE=debug -DMIGRAPHX_ENABLE_PYTHON=Off -DCMAKE_CXX_FLAGS_DEBUG='${debug_flags}' -DCMAKE_C_FLAGS_DEBUG='${debug_flags}'")
}
}, clang_release: rocmnode('vega') { cmake_build ->
stage('Hip Clang Release') {
......@@ -104,13 +104,13 @@ rocmtest clang_debug: rocmnode('vega') { cmake_build ->
stage('MLIR Debug') {
def sanitizers = "undefined"
def debug_flags = "-g -O2 -fsanitize=${sanitizers} -fno-sanitize-recover=${sanitizers}"
cmake_build("/opt/rocm/llvm/bin/clang++", "-DCMAKE_BUILD_TYPE=debug -DMIGRAPHX_ENABLE_PYTHON=Off -DMIGRAPHX_ENABLE_MLIR=On -DCMAKE_CXX_FLAGS_DEBUG='${debug_flags}'")
cmake_build("/opt/rocm/llvm/bin/clang++", "-DCMAKE_BUILD_TYPE=debug -DMIGRAPHX_ENABLE_PYTHON=Off -DMIGRAPHX_ENABLE_MLIR=On -DCMAKE_CXX_FLAGS_DEBUG='${debug_flags}' -DCMAKE_C_FLAGS_DEBUG='${debug_flags}'")
}
}, clang_asan: rocmnode('nogpu') { cmake_build ->
stage('Clang ASAN') {
def sanitizers = "undefined,address"
def debug_flags = "-g -O2 -fno-omit-frame-pointer -fsanitize=${sanitizers} -fno-sanitize-recover=${sanitizers}"
cmake_build("/opt/rocm/llvm/bin/clang++", "-DCMAKE_BUILD_TYPE=debug -DMIGRAPHX_ENABLE_PYTHON=Off -DMIGRAPHX_ENABLE_GPU=Off -DMIGRAPHX_ENABLE_CPU=On -DCMAKE_CXX_FLAGS_DEBUG='${debug_flags}'")
cmake_build("/opt/rocm/llvm/bin/clang++", "-DCMAKE_BUILD_TYPE=debug -DMIGRAPHX_ENABLE_PYTHON=Off -DMIGRAPHX_ENABLE_GPU=Off -DMIGRAPHX_ENABLE_CPU=On -DCMAKE_CXX_FLAGS_DEBUG='${debug_flags}' -DCMAKE_C_FLAGS_DEBUG='${debug_flags}'")
}
}//, clang_release_navi: rocmnode('navi21') { cmake_build ->
// stage('HIP Clang Release Navi') {
......
......@@ -23,7 +23,7 @@
#####################################################################################
google/protobuf@v3.11.0 -DCMAKE_POSITION_INDEPENDENT_CODE=On -X subdir -Dprotobuf_BUILD_TESTS=Off
nlohmann/json@v3.8.0
blaze,https://bitbucket.org/blaze-lib/blaze/get/f0755dea0e03.tar.gz -X header -DHEADER_DIR=blaze
live-clones/blaze@v3.8 -X header -DHEADER_DIR=blaze -H sha256:d0ff011f47538285178908ea5f2cab46bb6a8f55b1edb6e03224a82dbc1a3212
half,https://github.com/pfultz2/half/archive/1.12.0.tar.gz -X header -H sha256:0a08660b68abb176ebc2a0cdf8de46e3182a7f46c66443bb80dbfaaec98cf969
pybind/pybind11@d159a563383d10c821ba7b2a71905d1207db6de4 --build
msgpack/msgpack-c@cpp-3.3.0 -DMSGPACK_BUILD_TESTS=Off
......@@ -88,6 +88,7 @@ add_library(migraphx
shape.cpp
simplify_algebra.cpp
simplify_reshapes.cpp
target_assignments.cpp
tmp_dir.cpp
value.cpp
verify_args.cpp
......@@ -266,6 +267,11 @@ add_subdirectory(targets/gpu)
target_link_libraries(migraphx_all_targets INTERFACE migraphx_gpu)
target_compile_definitions(migraphx_all_targets INTERFACE -DHAVE_GPU)
endif()
if(MIGRAPHX_ENABLE_FPGA)
add_subdirectory(targets/fpga)
target_link_libraries(migraphx_all_targets INTERFACE migraphx_fpga)
target_compile_definitions(migraphx_all_targets INTERFACE -DHAVE_FPGA)
endif()
if(HAVE_HALF_EXPR)
target_compile_definitions(migraphx PUBLIC -DHAS_HALF_V1)
......
......@@ -39,34 +39,47 @@
#include <migraphx/convert_to_json.hpp>
#include <algorithm>
#include <cstdarg>
namespace migraphx {
static thread_local bool disable_exception_catch = false; // NOLINT
extern "C" void migraphx_test_private_disable_exception_catch(bool b)
{
disable_exception_catch = b;
}
template <class F>
migraphx_status try_(F f, bool output = true) // NOLINT
{
try
if(disable_exception_catch)
{
f();
}
catch(const migraphx::exception& ex)
else
{
if(output)
std::cerr << "MIGraphX Error: " << ex.what() << std::endl;
if(ex.error > 0)
return migraphx_status(ex.error);
else
try
{
f();
}
catch(const migraphx::exception& ex)
{
if(output)
std::cerr << "MIGraphX Error: " << ex.what() << std::endl;
if(ex.error > 0)
return migraphx_status(ex.error);
else
return migraphx_status_unknown_error;
}
catch(const std::exception& ex)
{
if(output)
std::cerr << "MIGraphX Error: " << ex.what() << std::endl;
return migraphx_status_unknown_error;
}
catch(const std::exception& ex)
{
if(output)
std::cerr << "MIGraphX Error: " << ex.what() << std::endl;
return migraphx_status_unknown_error;
}
catch(...)
{
return migraphx_status_unknown_error;
}
catch(...)
{
return migraphx_status_unknown_error;
}
}
return migraphx_status_success;
}
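For context on the restructured control flow above, here is a minimal standalone sketch of the same pattern: a thread_local flag lets tests bypass the catch-all wrapper so an exception propagates to the caller (for example, to get a backtrace under a debugger). The names below are illustrative, not the real MIGraphX symbols.

#include <iostream>
#include <stdexcept>

namespace demo {
static thread_local bool disable_exception_catch = false; // NOLINT

void set_disable_exception_catch(bool b) { disable_exception_catch = b; }

template <class F>
int try_(F f) // returns 0 on success, 1 on error
{
    if(disable_exception_catch)
    {
        f(); // let the exception escape so the test framework can see it
        return 0;
    }
    try
    {
        f();
        return 0;
    }
    catch(const std::exception& ex)
    {
        std::cerr << "Error: " << ex.what() << std::endl;
        return 1;
    }
    catch(...)
    {
        return 1;
    }
}
} // namespace demo

int main()
{
    std::cout << demo::try_([] { throw std::runtime_error("boom"); }) << std::endl; // prints 1
    // demo::set_disable_exception_catch(true); // after this, the throw above would terminate the program instead
}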
......@@ -305,6 +318,7 @@ void destroy(T* x)
{
delete x; // NOLINT
}
// TODO: Move to interface preamble
template <class C, class D>
struct manage_generic_ptr
......@@ -313,30 +327,35 @@ struct manage_generic_ptr
manage_generic_ptr(std::nullptr_t) {}
manage_generic_ptr(void* pdata, C pcopier, D pdeleter)
: data(nullptr), copier(pcopier), deleter(pdeleter)
manage_generic_ptr(void* pdata, const char* obj_tname, C pcopier, D pdeleter)
: data(nullptr), obj_typename(obj_tname), copier(pcopier), deleter(pdeleter)
{
copier(&data, pdata);
}
manage_generic_ptr(const manage_generic_ptr& rhs)
: data(nullptr), copier(rhs.copier), deleter(rhs.deleter)
: data(nullptr), obj_typename(rhs.obj_typename), copier(rhs.copier), deleter(rhs.deleter)
{
if(copier)
copier(&data, rhs.data);
}
manage_generic_ptr(manage_generic_ptr&& other) noexcept
: data(other.data), copier(other.copier), deleter(other.deleter)
: data(other.data),
obj_typename(other.obj_typename),
copier(other.copier),
deleter(other.deleter)
{
other.data = nullptr;
other.copier = nullptr;
other.deleter = nullptr;
other.data = nullptr;
other.obj_typename = "";
other.copier = nullptr;
other.deleter = nullptr;
}
manage_generic_ptr& operator=(manage_generic_ptr rhs)
{
std::swap(data, rhs.data);
std::swap(obj_typename, rhs.obj_typename);
std::swap(copier, rhs.copier);
std::swap(deleter, rhs.deleter);
return *this;
......@@ -348,9 +367,10 @@ struct manage_generic_ptr
deleter(data);
}
void* data = nullptr;
C copier = nullptr;
D deleter = nullptr;
void* data = nullptr;
const char* obj_typename = "";
C copier = nullptr;
D deleter = nullptr;
};
extern "C" struct migraphx_shape;
......@@ -580,8 +600,9 @@ struct migraphx_experimental_custom_op
migraphx_experimental_custom_op(void* p,
migraphx_experimental_custom_op_copy c,
migraphx_experimental_custom_op_delete d,
const char* obj_typename,
Ts&&... xs)
: object_ptr(p, c, d), xobject(std::forward<Ts>(xs)...)
: object_ptr(p, obj_typename, c, d), xobject(std::forward<Ts>(xs)...)
{
}
manage_generic_ptr<migraphx_experimental_custom_op_copy, migraphx_experimental_custom_op_delete>
......@@ -595,13 +616,21 @@ struct migraphx_experimental_custom_op
std::remove_pointer_t<migraphx_argument_t> out;
if(compute_f == nullptr)
throw std::runtime_error("compute function is missing.");
std::array<char, 256> exception_msg;
exception_msg.front() = '\0';
auto api_error_result = compute_f(&out,
object_ptr.data,
exception_msg.data(),
exception_msg.size(),
object_cast<migraphx_context_t>(&(ctx)),
object_cast<migraphx_shape_t>(&(output)),
object_cast<migraphx_arguments_t>(&(inputs)));
if(api_error_result != migraphx_status_success)
throw std::runtime_error("Error in compute.");
{
const std::string exception_str(exception_msg.data());
throw std::runtime_error("Error in compute of: " +
std::string(object_ptr.obj_typename) + ": " + exception_str);
}
return (&out)->object;
}
......@@ -611,10 +640,19 @@ struct migraphx_experimental_custom_op
std::remove_pointer_t<migraphx_shape_t> out;
if(compute_shape_f == nullptr)
throw std::runtime_error("compute_shape function is missing.");
auto api_error_result =
compute_shape_f(&out, object_ptr.data, object_cast<migraphx_shapes_t>(&(inputs)));
std::array<char, 256> exception_msg;
exception_msg.front() = '\0';
auto api_error_result = compute_shape_f(&out,
object_ptr.data,
exception_msg.data(),
exception_msg.size(),
object_cast<migraphx_shapes_t>(&(inputs)));
if(api_error_result != migraphx_status_success)
throw std::runtime_error("Error in compute_shape.");
{
const std::string exception_str(exception_msg.data());
throw std::runtime_error("Error in compute_shape of: " +
std::string(object_ptr.obj_typename) + ": " + exception_str);
}
return (&out)->object;
}
};
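The compute and compute_shape wrappers above follow the same convention: the C callback writes any error text into a caller-provided, fixed-size buffer, and the caller rebuilds a C++ exception that includes the recorded type name. A condensed, standalone sketch of that round trip (illustrative names, not the library API):

#include <array>
#include <cstddef>
#include <cstring>
#include <stdexcept>
#include <string>

// Callee side: run f, copy any exception text into the buffer, return a status code.
template <class F>
int run_callback(F f, char* exception_msg, std::size_t exception_msg_size)
{
    try
    {
        f();
        return 0;
    }
    catch(const std::exception& ex)
    {
        std::strncpy(exception_msg, ex.what(), exception_msg_size);
        exception_msg[exception_msg_size - 1] = '\0';
        return 1;
    }
}

// Caller side: on failure, rethrow with the stored type name and the buffered message.
void call_with_message(const std::string& obj_typename)
{
    std::array<char, 256> exception_msg;
    exception_msg.front() = '\0';
    auto rc = run_callback([] { throw std::runtime_error("bad input shape"); },
                           exception_msg.data(),
                           exception_msg.size());
    if(rc != 0)
        throw std::runtime_error("Error in compute of: " + obj_typename + ": " +
                                 std::string(exception_msg.data()));
}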
......@@ -743,6 +781,16 @@ migraphx_shape_equal(bool* out, const_migraphx_shape_t shape, const_migraphx_sha
return api_error_result;
}
extern "C" migraphx_status migraphx_shape_standard(bool* out, const_migraphx_shape_t shape)
{
auto api_error_result = migraphx::try_([&] {
if(shape == nullptr)
MIGRAPHX_THROW(migraphx_status_bad_param, "Bad parameter shape: Null pointer");
*out = (shape->object).standard();
});
return api_error_result;
}
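A short, hypothetical usage of the new query from both API levels, assuming a shape handle or migraphx::shape object obtained elsewhere (for example, from a compiled program's parameter shapes, which is not shown here):

#include <migraphx/migraphx.h>
#include <migraphx/migraphx.hpp>

// C API: `shape_handle` is a const_migraphx_shape_t owned by the caller.
bool c_api_is_standard(const_migraphx_shape_t shape_handle)
{
    bool out = false;
    if(migraphx_shape_standard(&out, shape_handle) != migraphx_status_success)
        return false; // real code would surface the error instead
    return out;
}

// C++ API: standard() reports whether the shape uses the default packed, non-transposed layout.
bool cpp_api_is_standard(const migraphx::shape& s) { return s.standard(); }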
extern "C" migraphx_status migraphx_argument_destroy(migraphx_argument_t argument)
{
auto api_error_result = migraphx::try_([&] { destroy((argument)); });
......@@ -1806,11 +1854,12 @@ migraphx_experimental_custom_op_create(migraphx_experimental_custom_op_t* experi
void* obj,
migraphx_experimental_custom_op_copy c,
migraphx_experimental_custom_op_delete d,
const char* obj_typename,
const char* name)
{
auto api_error_result = migraphx::try_([&] {
*experimental_custom_op =
allocate<migraphx_experimental_custom_op_t>((obj), (c), (d), (name));
allocate<migraphx_experimental_custom_op_t>((obj), (c), (d), (obj_typename), (name));
});
return api_error_result;
}
......
......@@ -25,6 +25,7 @@
#define MIGRAPHX_GUARD_C_API_MIGRAPHX_H
#include <stdlib.h>
#include <stdbool.h>
// Add new types here
// clang-format off
......@@ -131,12 +132,16 @@ typedef const struct migraphx_experimental_custom_op* const_migraphx_experimenta
typedef migraphx_status (*migraphx_experimental_custom_op_compute)(migraphx_argument_t out,
void* obj,
char* exception_msg,
size_t exception_msg_size,
migraphx_context_t ctx,
migraphx_shape_t output,
migraphx_arguments_t inputs);
typedef migraphx_status (*migraphx_experimental_custom_op_compute_shape)(migraphx_shape_t out,
void* obj,
char* exception_msg,
size_t exception_msg_size,
migraphx_shapes_t inputs);
typedef migraphx_status (*migraphx_experimental_custom_op_copy)(void** out, void* input);
......@@ -175,6 +180,8 @@ migraphx_status migraphx_shape_bytes(size_t* out, const_migraphx_shape_t shape);
migraphx_status
migraphx_shape_equal(bool* out, const_migraphx_shape_t shape, const_migraphx_shape_t x);
migraphx_status migraphx_shape_standard(bool* out, const_migraphx_shape_t shape);
migraphx_status migraphx_argument_destroy(migraphx_argument_t argument);
migraphx_status migraphx_argument_assign_to(migraphx_argument_t output,
......@@ -485,6 +492,7 @@ migraphx_experimental_custom_op_create(migraphx_experimental_custom_op_t* experi
void* obj,
migraphx_experimental_custom_op_copy c,
migraphx_experimental_custom_op_delete d,
const char* obj_typename,
const char* name);
migraphx_status
......
......@@ -25,6 +25,7 @@
#define MIGRAPHX_GUARD_API_RTGLIB_MIGRAPHX_HPP
#include "migraphx.h"
#include <cstring>
#include <initializer_list>
#include <migraphx/migraphx.h>
#include <memory>
......@@ -58,6 +59,42 @@ struct rank<0>
{
};
template <class PrivateMigraphTypeNameProbe>
std::string compute_type_name()
{
std::string name;
#ifdef _MSC_VER
name = typeid(PrivateMigraphTypeNameProbe).name();
name = name.substr(7);
#else
const char parameter_name[] = "PrivateMigraphTypeNameProbe ="; // NOLINT
name = __PRETTY_FUNCTION__;
auto begin = name.find(parameter_name) + sizeof(parameter_name);
#if(defined(__GNUC__) && !defined(__clang__) && __GNUC__ == 4 && __GNUC_MINOR__ < 7)
auto length = name.find_last_of(",") - begin;
#else
auto length = name.find_first_of("];", begin) - begin;
#endif
name = name.substr(begin, length);
#endif
return name;
}
template <class T>
const std::string& get_type_name()
{
static const std::string name = compute_type_name<T>();
return name;
}
template <class T>
const std::string& get_type_name(const T&)
{
return get_type_name<T>();
}
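compute_type_name relies on the compiler embedding the template argument in __PRETTY_FUNCTION__, so a human-readable type name is available without RTTI. A trimmed, standalone sketch of the same trick for GCC/Clang (the library version above additionally handles MSVC and very old GCC releases):

#include <iostream>
#include <string>

template <class PrivateTypeNameProbe>
std::string compute_type_name_demo()
{
    const char parameter_name[] = "PrivateTypeNameProbe ="; // NOLINT
    std::string name            = __PRETTY_FUNCTION__;
    // Skip past "PrivateTypeNameProbe = " and keep everything up to the next ']' or ';'.
    auto begin  = name.find(parameter_name) + sizeof(parameter_name);
    auto length = name.find_first_of("];", begin) - begin;
    return name.substr(begin, length);
}

struct my_custom_op
{
};

int main()
{
    // Prints "my_custom_op" (the exact formatting of __PRETTY_FUNCTION__ is compiler-specific).
    std::cout << compute_type_name_demo<my_custom_op>() << std::endl;
}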
template <class T, class F, class... Ts>
T* make(F f, Ts&&... xs)
{
......@@ -310,13 +347,22 @@ struct interface_base : Base
protected:
template <class F>
static migraphx_status try_(F f) // NOLINT
static migraphx_status try_(F f, char* ex_msg = nullptr, size_t ex_msg_size = 0) // NOLINT
{
try
{
f();
return migraphx_status_success;
}
catch(const std::exception& ex)
{
if(ex_msg)
{
std::strncpy(ex_msg, ex.what(), ex_msg_size);
ex_msg[ex_msg_size - 1] = '\0';
}
return migraphx_status_unknown_error;
}
catch(...)
{
return migraphx_status_unknown_error;
......@@ -349,9 +395,13 @@ struct interface_base : Base
{
static F f = pf;
(void)f; // avoid warning on gcc
call(setter, this->get_handle_ptr(), [](auto... xs) -> migraphx_status {
return try_([&] { call_cast_arg<T>(rank<1>{}, f, xs...); });
});
call(setter,
this->get_handle_ptr(),
[](auto out, void* obj, char* ex_msg, size_t ex_msg_size, auto... xs)
-> migraphx_status {
return try_(
[&] { call_cast_arg<T>(rank<1>{}, f, out, obj, xs...); }, ex_msg, ex_msg_size);
});
}
template <class T, class Setter, class F>
......@@ -524,6 +574,13 @@ struct shape : MIGRAPHX_CONST_HANDLE_BASE(shape)
return pout;
}
bool standard() const
{
bool result = false;
call(&migraphx_shape_standard, &result, this->get_handle_ptr());
return result;
}
friend bool operator==(const shape& px, const shape& py)
{
bool pout;
......@@ -1206,7 +1263,10 @@ struct experimental_custom_op : interface_base<MIGRAPHX_HANDLE_BASE(experimental
template <class T>
experimental_custom_op(T& obj)
{
this->make_interface(&migraphx_experimental_custom_op_create, obj, obj.name().c_str());
this->make_interface(&migraphx_experimental_custom_op_create,
obj,
get_type_name(obj).c_str(),
obj.name().c_str());
MIGRAPHX_INTERFACE_LIFT(T, experimental_custom_op, compute_shape);
MIGRAPHX_INTERFACE_LIFT(T, experimental_custom_op, compute);
}
......
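Putting the pieces together, a hypothetical custom op illustrates what the type-name plumbing buys: the wrapper records get_type_name(obj) at construction, so an exception thrown from compute() or compute_shape() surfaces as "Error in compute of: <type>: <message>" instead of a generic failure. The member signatures below are a sketch inferred from this diff, not a verbatim copy of the library headers.

#include <migraphx/migraphx.hpp>
#include <stdexcept>
#include <string>

struct silly_identity_op
{
    std::string name() const { return "silly_identity"; }

    migraphx::shape compute_shape(migraphx::shapes inputs) const
    {
        if(inputs.size() != 1)
            throw std::runtime_error("expects exactly one input"); // carried back via exception_msg
        return inputs[0];
    }

    migraphx::argument
    compute(migraphx::context ctx, migraphx::shape output, migraphx::arguments inputs) const
    {
        (void)ctx;
        (void)output;
        return inputs[0]; // pass the input straight through
    }
};

// silly_identity_op op;
// migraphx::experimental_custom_op wrapped(op); // records both op.name() and its C++ type name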
......@@ -121,6 +121,7 @@ def shape(h):
invoke='migraphx::equal($@)',
returns='bool',
const=True)
h.method('standard', returns='bool', const=True)
@auto_handle()
......@@ -439,7 +440,8 @@ def context(h):
@api.interface('migraphx_experimental_custom_op',
'migraphx::experimental_custom_op')
def experimental_custom_op(h):
h.constructor('create', api.params(name='const char*'))
h.constructor('create',
api.params(obj_typename='const char*', name='const char*'))
h.virtual('compute',
api.params(ctx='migraphx::context',
output='migraphx::shape',
......
......@@ -93,9 +93,11 @@ static bool try_compute_shape(instruction_ref ins,
return try_compute_shape(ins, inputs, mods);
}
void eliminate_contiguous::apply(module& m) const
template <class F>
static void remove_contiguous(const std::string& op_name, module& m, F f)
{
std::vector<instruction_ref> const_instruction;
auto last = std::prev(m.end());
std::vector<instruction_ref> const_instructions;
for(auto ins : iterator_for(m))
{
......@@ -103,6 +105,12 @@ void eliminate_contiguous::apply(module& m) const
if(ins->name() == "@return")
continue;
if(ins != last and ins->outputs().empty())
continue;
if(not f(ins))
continue;
// Make a copy so we can modify it while we iterate
auto args = ins->inputs();
auto new_args = args;
......@@ -110,36 +118,46 @@ void eliminate_contiguous::apply(module& m) const
for(auto arg : ins->inputs())
{
if(arg->name() == op_name)
if(arg->name() != op_name)
continue;
auto prev = arg->inputs().front();
replace(new_args, arg, prev);
if(try_compute_shape(ins, new_args, mod_args))
{
instruction::replace_argument(ins, arg, prev);
}
else if(prev->can_eval())
{
auto prev = arg->inputs().front();
replace(new_args, arg, prev);
if(try_compute_shape(ins, new_args, mod_args))
{
instruction::replace_argument(ins, arg, prev);
}
else if(prev->can_eval())
{
const_instruction.push_back(arg);
}
const_instructions.push_back(arg);
}
}
}
// Perform evaluations in parallel
std::vector<argument> literals(const_instruction.size());
par_for(const_instruction.size(), 1, [&](const auto i) {
std::vector<argument> literals(const_instructions.size());
par_for(const_instructions.size(), 1, [&](const auto i) {
auto c = op::contiguous{};
auto prev = const_instruction[i]->inputs().front();
auto prev = const_instructions[i]->inputs().front();
literals[i] = c.compute(c.compute_shape({prev->get_shape()}), {prev->eval()});
});
for(size_t i = 0; i < const_instruction.size(); i++)
for(size_t i = 0; i < const_instructions.size(); i++)
{
auto l = m.add_literal(literals[i].get_shape(), literals[i].data());
m.replace_instruction(const_instruction[i], l);
m.replace_instruction(const_instructions[i], l);
}
}
void eliminate_contiguous::apply(module& m) const
{
// Skip contiguous from splits first
remove_contiguous(op_name, m, [](auto ins) {
if(ins->name() != "slice")
return true;
return (ins->inputs().front()->outputs().size() == 1);
});
remove_contiguous(op_name, m, [](auto) { return true; });
}
} // namespace MIGRAPHX_INLINE_NS
} // namespace migraphx
/*
* The MIT License (MIT)
*
* Copyright (c) 2015-2022 Advanced Micro Devices, Inc. All rights reserved.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
#ifndef MIGRAPHX_GUARD_RTGLIB_ASSIGNMENT_OPTIONS_HPP
#define MIGRAPHX_GUARD_RTGLIB_ASSIGNMENT_OPTIONS_HPP
#include <migraphx/support_metric.hpp>
namespace migraphx {
inline namespace MIGRAPHX_INLINE_NS {
struct assignment_options
{
support_metric metric = support_metric::latency;
};
} // namespace MIGRAPHX_INLINE_NS
} // namespace migraphx
#endif // MIGRAPHX_GUARD_RTGLIB_ASSIGNMENT_OPTIONS_HPP
......@@ -38,20 +38,34 @@ struct check_shapes
const shape* begin;
const shape* end;
const std::string name;
const bool dynamic_allowed;
check_shapes(const shape* b, const shape* e, const std::string& n) : begin(b), end(e), name(n)
check_shapes(const shape* b, const shape* e, const std::string& n, const bool d = false)
: begin(b), end(e), name(n), dynamic_allowed(d)
{
check_dynamic();
}
template <class Op>
check_shapes(const shape* b, const shape* e, const Op& op) : begin(b), end(e), name(op.name())
check_shapes(const shape* b, const shape* e, const Op& op, const bool d = false)
: begin(b), end(e), name(op.name()), dynamic_allowed(d)
{
check_dynamic();
}
template <class Op>
check_shapes(const std::vector<shape>& s, const Op& op)
: begin(s.data()), end(s.data() + s.size()), name(op.name())
check_shapes(const std::vector<shape>& s, const Op& op, const bool d = false)
: begin(s.data()), end(s.data() + s.size()), name(op.name()), dynamic_allowed(d)
{
check_dynamic();
}
void check_dynamic() const
{
if(not dynamic_allowed and this->any_of([&](const shape& s) { return s.dynamic(); }))
{
MIGRAPHX_THROW(prefix() + "Dynamic shapes not supported");
}
}
std::string prefix() const
......@@ -71,6 +85,11 @@ struct check_shapes
return end - begin;
}
/*!
* Check if the number of shape objects is equal to at least one of the
* given sizes.
* \param ns template parameter pack of sizes to check against
*/
template <class... Ts>
const check_shapes& has(Ts... ns) const
{
......@@ -87,44 +106,62 @@ struct check_shapes
return *this;
}
/*!
* Check that the first shape has exactly n dimensions.
* Do nothing if the container is empty.
* \param n number of dimensions
*/
const check_shapes& only_dims(std::size_t n) const
{
assert(begin != nullptr);
assert(end != nullptr);
if(begin != end)
{
if(begin->lens().size() != n)
if(begin->max_lens().size() != n)
MIGRAPHX_THROW(prefix() + "Only " + std::to_string(n) + "d supported");
}
return *this;
}
/*!
* Check that the first shape has a maximum of n dimensions.
* Do nothing if the container is empty.
* \param n number of dimensions
*/
const check_shapes& max_ndims(std::size_t n) const
{
assert(begin != nullptr);
assert(end != nullptr);
if(begin != end)
{
if(begin->lens().size() > n)
if(begin->max_lens().size() > n)
MIGRAPHX_THROW(prefix() + "Shape must have at most " + std::to_string(n) +
" dimensions");
}
return *this;
}
/*!
* Check that the first shape has a minimum of n dimensions.
* Do nothing if the container is empty.
* \param n number of dimensions
*/
const check_shapes& min_ndims(std::size_t n) const
{
assert(begin != nullptr);
assert(end != nullptr);
if(begin != end)
{
if(begin->lens().size() < n)
if(begin->max_lens().size() < n)
MIGRAPHX_THROW(prefix() + "Shape must have at least " + std::to_string(n) +
" dimensions");
}
return *this;
}
/*!
* Check all shapes have the same shape.
*/
const check_shapes& same_shape() const
{
if(!this->same([](const shape& s) { return s; }))
......@@ -132,6 +169,9 @@ struct check_shapes
return *this;
}
/*!
* Check all shapes have the same type.
*/
const check_shapes& same_type() const
{
if(!this->same([](const shape& s) { return s.type(); }))
......@@ -139,20 +179,32 @@ struct check_shapes
return *this;
}
/*!
* Check all shapes have the same lens.
*/
const check_shapes& same_dims() const
{
if(!this->same([](const shape& s) { return s.lens(); }))
if(!this->same([](const shape& s) { return s.max_lens(); }))
MIGRAPHX_THROW(prefix() + "Dimensions do not match");
if(this->any_of([&](const shape& s) { return s.dynamic(); }))
if(!this->same([](const shape& s) { return s.min_lens(); }))
MIGRAPHX_THROW(prefix() + "Min dynamic dimensions do not match");
return *this;
}
/*!
* Check all shapes have the same number of dimensions.
*/
const check_shapes& same_ndims() const
{
if(!this->same([](const shape& s) { return s.lens().size(); }))
if(!this->same([](const shape& s) { return s.max_lens().size(); }))
MIGRAPHX_THROW(prefix() + "Number of dimensions do not match");
return *this;
}
/*!
* Check all shapes are standard.
*/
const check_shapes& standard() const
{
if(!this->all_of([](const shape& s) { return s.standard(); }))
......@@ -160,6 +212,9 @@ struct check_shapes
return *this;
}
/*!
* Check all shapes are standard or scalar.
*/
const check_shapes& standard_or_scalar() const
{
if(!this->all_of([](const shape& s) { return s.standard() or s.scalar(); }))
......@@ -167,6 +222,9 @@ struct check_shapes
return *this;
}
/*!
* Check all shapes are packed.
*/
const check_shapes& packed() const
{
if(!this->all_of([](const shape& s) { return s.packed(); }))
......@@ -174,6 +232,9 @@ struct check_shapes
return *this;
}
/*!
* Check all shapes are packed or broadcasted.
*/
const check_shapes& packed_or_broadcasted() const
{
if(!this->all_of([](const shape& s) { return s.packed() or s.broadcasted(); }))
......@@ -181,6 +242,9 @@ struct check_shapes
return *this;
}
/*!
* Check all shapes are tuples.
*/
const check_shapes& tuple_type() const
{
if(!this->all_of([](const shape& s) { return s.type() == shape::tuple_type; }))
......@@ -188,6 +252,9 @@ struct check_shapes
return *this;
}
/*!
* Check all shapes are not transposed.
*/
const check_shapes& not_transposed() const
{
if(!this->all_of([](const shape& s) { return not s.transposed(); }))
......@@ -195,6 +262,9 @@ struct check_shapes
return *this;
}
/*!
* Check all shapes are not broadcasted.
*/
const check_shapes& not_broadcasted() const
{
if(!this->all_of([](const shape& s) { return not s.broadcasted(); }))
......@@ -202,6 +272,10 @@ struct check_shapes
return *this;
}
/*!
* Check all shapes have exactly n elements.
* \param n number of elements
*/
const check_shapes& elements(std::size_t n) const
{
if(!this->all_of([&](const shape& s) { return s.elements() == n; }))
......@@ -209,6 +283,9 @@ struct check_shapes
return *this;
}
/*!
* Check the batches of all the shapes do not have transposed strides.
*/
const check_shapes& batch_not_transposed() const
{
if(!this->all_of([&](const shape& s) { return batch_not_transposed_strides(s.strides()); }))
......@@ -237,6 +314,16 @@ struct check_shapes
return std::all_of(begin, end, p);
}
template <class Predicate>
bool any_of(Predicate p) const
{
if(begin == end)
return false;
assert(begin != nullptr);
assert(end != nullptr);
return std::any_of(begin, end, p);
}
const shape* get(long i) const
{
if(i >= size())
......
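For reference, a sketch of how an operator inside the library opts in to dynamic shapes with the new constructor flag (the operator itself is hypothetical; only the check_shapes calls come from this diff):

shape compute_shape(std::vector<shape> inputs) const
{
    // The trailing `true` sets dynamic_allowed; without it, check_dynamic()
    // throws "Dynamic shapes not supported" as soon as a dynamic input appears.
    check_shapes{inputs, *this, true}.has(1).same_type();
    return inputs.front();
}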
......@@ -349,25 +349,27 @@ match::matcher_result find_match(module& modl, M&& m)
MIGRAPHX_DECLARE_ENV_VAR(MIGRAPHX_TRACE_MATCHES)
/// Find matches for an instruction in the module
template <class... Ms>
void find_matches(module& mod, instruction_ref ins, Ms&&... ms)
template <class Mod, class... Ms>
void find_matches(Mod& mod, instruction_ref ins, Ms&&... ms)
{
#if !defined(__GNUC__) || defined(__clang__) || __GNUC__ > 5
const
#endif
bool trace = enabled(MIGRAPHX_TRACE_MATCHES{});
bool match = false;
int trace = value_of(MIGRAPHX_TRACE_MATCHES{});
bool match = false;
each_args(
[&](auto&& m) {
if(match)
return;
auto r = match_instruction(mod, ins, m.matcher());
if(r.result == mod.end())
if(trace > 1)
std::cout << "Match: " << get_type_name(m) << std::endl;
auto r = match_instruction(get_module(mod), ins, m.matcher());
if(r.result == get_module(mod).end())
return;
if(trace)
if(trace > 0)
{
std::cout << "Matched by " << get_type_name(m) << std::endl;
mod.debug_print(ins);
get_module(mod).debug_print(ins);
}
m.apply(mod, r);
match = true;
......@@ -376,10 +378,10 @@ void find_matches(module& mod, instruction_ref ins, Ms&&... ms)
}
/// Find matches in a module
template <class... Ms>
void find_matches(module& mod, Ms&&... ms)
template <class Mod, class... Ms>
void find_matches(Mod& mod, Ms&&... ms)
{
for(auto ins : iterator_for(mod))
for(auto ins : iterator_for(get_module(mod)))
{
find_matches(mod, ins, ms...);
}
......
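With value_of in place of enabled, the trace output becomes two-level: MIGRAPHX_TRACE_MATCHES=1 prints only instructions that matched, while MIGRAPHX_TRACE_MATCHES=2 also prints every matcher as it is tried ("Match: <type name>"). A sketch of a rewriter used with find_matches follows; the matcher() and apply() member names come from this diff, while the matcher expression and replacement are illustrative.

struct remove_contiguous_example
{
    // Match any "contiguous" instruction; find_matches calls this via m.matcher().
    auto matcher() const { return match::name("contiguous"); }

    // On a match, replace the instruction with its input; called via m.apply(mod, r).
    void apply(module& m, const match::matcher_result& r) const
    {
        m.replace_instruction(r.result, r.result->inputs().front());
    }
};

// void my_pass::apply(module& m) const { match::find_matches(m, remove_contiguous_example{}); }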
......@@ -124,7 +124,7 @@ struct module
std::unordered_map<instruction_ref, instruction_ref> map_ins = {});
std::vector<instruction_ref>
add_instructions(module_ref m,
add_instructions(const_module_ref m,
std::unordered_map<instruction_ref, instruction_ref> map_ins = {});
std::vector<instruction_ref>
......@@ -139,7 +139,7 @@ struct module
std::vector<instruction_ref>
insert_instructions(instruction_ref ins,
module_ref m,
const_module_ref m,
std::unordered_map<instruction_ref, instruction_ref> map_ins = {});
std::vector<instruction_ref>
......@@ -168,6 +168,10 @@ struct module
instruction_ref add_divzero(std::vector<instruction_ref> args, shape s);
instruction_ref insert_literal(instruction_ref ins, literal l);
instruction_ref insert_parameter(instruction_ref ins, std::string name, shape s);
std::vector<std::string> get_parameter_names() const;
shape get_parameter_shape(std::string name) const;
......@@ -232,6 +236,8 @@ struct module
std::unique_ptr<module_impl> impl;
};
inline module& get_module(module& m) { return m; }
} // namespace MIGRAPHX_INLINE_NS
} // namespace migraphx
......
......@@ -32,7 +32,8 @@ namespace migraphx {
inline namespace MIGRAPHX_INLINE_NS {
struct module;
using module_ref = module*;
using module_ref = module*;
using const_module_ref = const module*;
} // namespace MIGRAPHX_INLINE_NS
} // namespace migraphx
......