Commit cd4ab535 authored by Khalique Ahmed

manual merge

parents 3891ee58 a0fa3742
@@ -3,18 +3,10 @@
You can adapt this file completely to your liking, but it should at least
contain the root `toctree` directive.
-Welcome to AMD MIGraphX's documentation!
-========================================
+AMD MIGraphX documentation
+==========================
-.. toctree::
-:maxdepth: 3
-:caption: Contents:
-py_user_guide
-cpp_user_guide
-driver
-contributor_guide
+AMD MIGraphX is AMD's graph inference engine that accelerates machine learning model inference.
Indices and tables
==================
......
@@ -6,9 +6,9 @@ Python Reference
shape
-----
-.. py:class:: shape(type, lens, strides=None)
+.. py:class:: shape(type, lens, strides=None, dyn_dims)
-Describes the shape of a tensor. This includes size, layout, and data type/
+Describes the shape of a tensor. This includes size, layout, and data type. Can be a dynamic shape by using dyn_dims.
.. py:method:: type()
@@ -34,6 +34,12 @@ shape
:rtype: int
+.. py:method:: dyn_dims()
+The dynamic dimensions of the shape.
+:rtype: list[dynamic_dimension]
.. py:method:: bytes()
The number of bytes the shape uses.
@@ -46,6 +52,12 @@ shape
:rtype: int
+.. py:method:: ndim()
+The number of dimensions for the shape.
+:rtype: int
.. py:method:: packed()
Returns true if the shape is packed.
@@ -64,6 +76,12 @@ shape
:rtype: bool
+.. py:method:: dynamic()
+Returns true if the shape is dynamic.
+:rtype: bool
.. py:method:: standard()
Returns true if the shape is a standard shape. That is, the shape is both packed and not transposed.
@@ -76,6 +94,18 @@ shape
:rtype: bool
+dynamic_dimension
+-----------------
+.. py:class:: dynamic_dimension(min, max, optimals)
+Construct a dynamic_dimension from a minimum, a maximum, and optionally a set of optimals.
+.. py:method:: is_fixed()
+Returns true if the dynamic_dimension is fixed.
+:rtype: bool
argument
--------
@@ -292,8 +322,10 @@ parse_onnx
Load and parse an onnx file.
:param str filename: Path to file.
-:param str default_dim_value: default batch size to use (if not specified in onnx file).
+:param str default_dim_value: default dimension to use (if not specified in onnx file).
+:param dynamic_dimension default_dyn_dim_value: default dynamic_dimension value to use.
:param str map_input_dims: Explicitly specify the dims of an input.
+:param list[dynamic_dimension] map_dyn_input_dims: Explicitly specify the dynamic_dimensions of an input.
:param str skip_unknown_operators: Continue parsing onnx file if an unknown operator is found.
:param str print_program_on_error: Print program if an error occurs.
:param int max_loop_iterations: Maximum iteration number for the loop operator.
......
#####################################################################################
# The MIT License (MIT)
#
# Copyright (c) 2015-2023 Advanced Micro Devices, Inc. All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#####################################################################################
cmake_minimum_required(VERSION 3.5)
project (cpp_dynamic_batch)
set (CMAKE_CXX_STANDARD 14)
set (EXAMPLE dynamic_batch)
list (APPEND CMAKE_PREFIX_PATH /opt/rocm)
find_package (migraphx)
message("source file: " ${EXAMPLE}.cpp " ---> bin: " ${EXAMPLE})
add_executable(${EXAMPLE} ${EXAMPLE}.cpp)
target_link_libraries(${EXAMPLE} migraphx::c)
# Running ONNX model with dynamic batch
## Description
This example demonstrates how to run a graph program with a dynamic batch size using the MIGraphX C++ API.
## Creating dynamic dimension objects
`dynamic_dimension` objects are used in MIGraphX to specify the range of values a dimension can take at model evaluation time, from a minimum value to a maximum value, along with optional optimal values.
A dynamic shape is defined by a list of `dynamic_dimension` objects, while a static shape has only fixed dimension values.
For example, a `dynamic_dimension` with `{min:1, max:10, optimals:{1, 4, 10}}` means that the dimension can be any value from 1 through 10, with 1, 4, and 10 being the optimal values.
Supplying optimal values may allow MIGraphX to optimize the program for those specific shapes.
A fixed `dynamic_dimension` can be specified by setting `min` and `max` to the same value (for example, `{min:3, max:3}`).
A dynamic shape specified solely by fixed `dynamic_dimension` objects will be converted to a static shape during parsing.
This can be useful for setting a static shape using the `set_dyn_input_parameter_shape()` method discussed later in this document.
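As a concrete sketch, the snippet below constructs the `dynamic_dimension` objects described above with the MIGraphX C++ API; the specific values (a batch range of 1 to 4 with optimals 2 and 4, and fixed spatial dimensions) are placeholders chosen for illustration:
```
#include <migraphx/migraphx.hpp>

int main()
{
    // Batch dimension that may be anywhere from 1 to 4 at evaluation time,
    // with 2 and 4 given as optimal values.
    migraphx::dynamic_dimension batch_dim{1, 4, {2, 4}};

    // Fixed dimensions: min == max, so these behave like static dimensions.
    migraphx::dynamic_dimension channels{3, 3};
    migraphx::dynamic_dimension height{4, 4};
    migraphx::dynamic_dimension width{5, 5};

    // A dynamic shape is described by a list of dynamic_dimension objects.
    migraphx::dynamic_dimensions dyn_dims = {batch_dim, channels, height, width};
    return 0;
}
```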
## Parsing
[ONNX](https://onnx.ai/get-started.html) graphs can be parsed by MIGraphX to create a program that runs with dynamic batch sizes.
The dynamic batch range must be specified with a `dynamic_dimension` object.
One way to set the `dynamic_dimension`, which works for ONNX files that only use symbolic variables for the batch dimension, is to set a default dynamic dimension value:
```
migraphx::program p;
migraphx::onnx_options options;
options.set_default_dyn_dim_value(migraphx::dynamic_dimension{1, 4, {2, 4}});
p = parse_onnx(input_file, options);
```
Another option, which works for any ONNX model with dynamic batch sizes, is to use the dynamic input map, where the entire shape of the input parameter is supplied:
```
migraphx::program p;
migraphx::onnx_options options;
migraphx::dynamic_dimensions dyn_dims = {migraphx::dynamic_dimension{1, 4, {2, 4}},
migraphx::dynamic_dimension{3, 3},
migraphx::dynamic_dimension{4, 4},
migraphx::dynamic_dimension{4, 4}};
options.set_dyn_input_parameter_shape("input", dyn_dims);
p = parse_onnx(input_file, options);
```
## Compiling
Currently the MIGraphX C/C++ API requires that `offload_copy` be enabled for compiling dynamic batch programs.
Here is a snippet of compiling a model with `offload_copy` enabled:
```
migraphx::compile_options c_options;
c_options.set_offload_copy();
p.compile(migraphx::target("gpu"), c_options);
```
where `p` is the `migraphx::program`.
## Saving and Loading
A dynamic batch MIGraphX program can be saved to and loaded from an MXR file the same way as a fully static shape program.
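A minimal sketch, assuming the `migraphx::save` and `migraphx::load` helpers exposed by the C++ API (check `migraphx.hpp` for the exact signatures in your release), where `p` is the compiled `migraphx::program`:
```
// Sketch only: migraphx::save/migraphx::load are assumed here; verify the exact
// signatures in migraphx.hpp for your MIGraphX release.
migraphx::save(p, "dynamic_batch.mxr");                          // write the program to an MXR file
migraphx::program loaded = migraphx::load("dynamic_batch.mxr");  // read it back
```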
## Executing the dynamic batch model
The compiled dynamic batch model can be executed the same way as a static model by supplying the input data as `arguments` in a `program_parameters` object.
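The fragment below sketches this for a runtime batch size of 2, mirroring the full example in `dynamic_batch.cpp`; the parameter names `"0"` and `"1"` and the uint8 shapes come from that example:
```
// Input data for a runtime batch size of 2 (shape {2, 3, 4, 5}).
std::vector<uint8_t> a(2 * 3 * 4 * 5, 3);
std::vector<uint8_t> b = {2};

migraphx::program_parameters pp;
migraphx::shape s = migraphx::shape(migraphx_shape_uint8_type, {2, 3, 4, 5});
pp.add("0", migraphx::argument(s, a.data()));
pp.add("1", migraphx::argument(migraphx::shape(migraphx_shape_uint8_type, {1}, {0}), b.data()));

auto outputs = p.eval(pp);  // p is the compiled migraphx::program
auto result  = outputs[0];
```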
## Running the Example
If ROCm is installed in a location other than the one specified in CMakeLists.txt, set `LD_LIBRARY_PATH` or `CMAKE_PREFIX_PATH` to that location so that this example still builds.
The provided example is [`dynamic_batch.cpp`](./dynamic_batch.cpp).
To compile and run the example from this directory:
```
$ mkdir build
$ cd build
$ cmake ..
$ make
```
There will now be an executable named `dynamic_batch` with the following usage:
```
$ ./dynamic_batch
```
/*
* The MIT License (MIT)
*
* Copyright (c) 2015-2023 Advanced Micro Devices, Inc. All rights reserved.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
#include <iostream>
#include <fstream>
#include <vector>
#include <string>
#include <algorithm>
// MIGraphX C++ API
#include <migraphx/migraphx.hpp>
int main(int argc, char** argv)
{
migraphx::onnx_options o_options;
migraphx::dynamic_dimensions dyn_dims = {migraphx::dynamic_dimension{1, 4, {2, 4}},
migraphx::dynamic_dimension{3, 3},
migraphx::dynamic_dimension{4, 4},
migraphx::dynamic_dimension{5, 5}};
o_options.set_dyn_input_parameter_shape("0", dyn_dims);
auto p = migraphx::parse_onnx("../add_scalar_test.onnx", o_options);
migraphx::compile_options c_options;
c_options.set_offload_copy();
p.compile(migraphx::target("gpu"), c_options);
// batch size = 2
std::vector<uint8_t> a(2 * 3 * 4 * 5, 3);
std::vector<uint8_t> b = {2};
migraphx::program_parameters pp;
migraphx::shape s = migraphx::shape(migraphx_shape_uint8_type, {2, 3, 4, 5});
pp.add("0", migraphx::argument(s, a.data()));
pp.add("1", migraphx::argument(migraphx::shape(migraphx_shape_uint8_type, {1}, {0}), b.data()));
auto outputs = p.eval(pp);
auto result = outputs[0];
std::vector<uint8_t> c(2 * 3 * 4 * 5, 5);
if(bool{result == migraphx::argument(s, c.data())})
{
std::cout << "Successfully executed dynamic batch add\n";
}
else
{
std::cout << "Failed dynamic batch add\n";
}
return 0;
}
@@ -27,7 +27,7 @@ project (PLS)
set (CMAKE_CXX_STANDARD 14)
set (EXAMPLE parse_load_save)
-list (APPEND CMAKE_PREFIX_PATH /opt/rocm/hip /opt/rocm)
+list (APPEND CMAKE_PREFIX_PATH /opt/rocm)
find_package (migraphx)
message("source file: " ${EXAMPLE}.cpp " ---> bin: " ${EXAMPLE})
......
@@ -27,7 +27,7 @@ project (custom_miopen_kernel)
set (CMAKE_CXX_STANDARD 14)
set (EXAMPLE custom_op_miopen_kernel)
-list (APPEND CMAKE_PREFIX_PATH /opt/rocm/hip /opt/rocm)
+list (APPEND CMAKE_PREFIX_PATH /opt/rocm)
find_package (migraphx REQUIRED)
find_package (miopen REQUIRED)
......
@@ -28,7 +28,7 @@ set (CMAKE_CXX_STANDARD 14)
set (EXAMPLE custom_op_rocblas_kernel)
-list (APPEND CMAKE_PREFIX_PATH /opt/rocm/hip /opt/rocm)
+list (APPEND CMAKE_PREFIX_PATH /opt/rocm)
find_package (migraphx REQUIRED)
find_package (rocblas REQUIRED)
......
@@ -29,7 +29,7 @@ See below for a comprehensive list of commands and option arguments, as well as
| --tf | Load file as a tensorflow graph |
| --migraphx | Load file as a migraphx graph |
| --migraphx-json | Load file as a migraphx JSON graph |
-| --batch | Set batch size for the model |
+| --batch | Sets the batch size for a static model. For a dynamic batch model, sets the batch size used at runtime. |
| --nhwc | Treat tensorflow format as nhwc |
| --nchw | Treat tensorflow format as nchw |
| --skip-unknown-operators | Skip unknown operators when parsing and continue to parse |
@@ -44,6 +44,9 @@ See below for a comprehensive list of commands and option arguments, as well as
| --output \| -o | Output to file |
| --fill0 | Fill parameter with 0s |
| --fill1 | Fill parameter with 1s |
+| --input-dim | Set static dimensions of a parameter |
+| --dyn-input-dim | Set dynamic dimensions of a parameter |
+| --default-dyn-dim | Set default dynamic dimension |
| --gpu | Compile on the gpu |
| --cpu | Compile on the cpu |
| --ref | Compile on the reference implementation |
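As a quick illustration of the batch option, a model could be compiled and benchmarked at a batch size of 4 with a command along these lines (`resnet50.onnx` is a placeholder file name):
```
$ /opt/rocm/bin/migraphx-driver perf --onnx resnet50.onnx --gpu --batch 4
```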
@@ -88,7 +91,7 @@ batch_norm_inference
broadcast
capture
ceil
-check_context::migraphx::version_1::gpu::context
+check_context::migraphx::gpu::context
clip
concat
contiguous
@@ -304,7 +307,7 @@ $ /opt/rocm/bin/migraphx-driver run --onnx simple_graph.onnx
```
Compiling ...
Reading: simple_graph.onnx
-@0 = check_context::migraphx::version_1::gpu::context -> float_type, {}, {}
+@0 = check_context::migraphx::gpu::context -> float_type, {}, {}
@1 = hip::hip_allocate_memory[shape=float_type, {256}, {1},id=scratch] -> float_type, {256}, {1}
@2 = hip::hip_copy_literal[id=@literal:1] -> float_type, {784, 128}, {128, 1}
x:0 = @param:x:0 -> float_type, {1, 28, 28}, {784, 28, 1}
@@ -327,7 +330,7 @@ x:0 = @param:x:0 -> float_type, {1, 28, 28}, {784, 28, 1}
@18 = @return(@17)
Allocating params ...
-@0 = check_context::migraphx::version_1::gpu::context -> float_type, {}, {}
+@0 = check_context::migraphx::gpu::context -> float_type, {}, {}
@1 = hip::hip_allocate_memory[shape=float_type, {256}, {1},id=scratch] -> float_type, {256}, {1}
@2 = hip::hip_copy_literal[id=@literal:1] -> float_type, {784, 128}, {128, 1}
x:0 = @param:x:0 -> float_type, {1, 28, 28}, {784, 28, 1}
@@ -399,7 +402,7 @@ $ /opt/rocm/bin/migraphx-driver compile --gpu --fp16 simple_graph.pb
```
Compiling ...
Reading: simple_graph.pb
-@0 = check_context::migraphx::version_1::gpu::context -> float_type, {}, {}
+@0 = check_context::migraphx::gpu::context -> float_type, {}, {}
@1 = hip::hip_allocate_memory[shape=float_type, {456}, {1},id=scratch] -> float_type, {456}, {1}
@2 = hip::hip_copy_literal[id=@literal:0] -> half_type, {784, 128}, {128, 1}
@3 = load[offset=256,end=1824](@1) -> half_type, {1, 28, 28}, {784, 28, 1}
@@ -502,7 +505,7 @@ x = @param:x -> float_type, {1, 28, 28}, {784, 28, 1}
@18 = ref::softmax[axis=1](@17) -> float_type, {1, 10}, {10, 1}
@19 = ref::identity(@18) -> float_type, {1, 10}, {10, 1}
-@0 = check_context::migraphx::version_1::gpu::context -> float_type, {}, {}
+@0 = check_context::migraphx::gpu::context -> float_type, {}, {}
@1 = hip::hip_allocate_memory[shape=float_type, {256}, {1},id=scratch] -> float_type, {256}, {1}
@2 = hip::hip_copy_literal[id=@literal:3] -> float_type, {784, 128}, {128, 1}
x = @param:x -> float_type, {1, 28, 28}, {784, 28, 1}
@@ -538,7 +541,7 @@ $ /opt/rocm/bin/migraphx-driver perf simple_graph.pb
```
Compiling ...
Reading: simple_graph.pb
-@0 = check_context::migraphx::version_1::gpu::context -> float_type, {}, {}
+@0 = check_context::migraphx::gpu::context -> float_type, {}, {}
@1 = hip::hip_allocate_memory[shape=float_type, {256}, {1},id=scratch] -> float_type, {256}, {1}
@2 = hip::hip_copy_literal[id=@literal:3] -> float_type, {784, 128}, {128, 1}
@3 = load[offset=0,end=512](@1) -> float_type, {1, 128}, {128, 1}
@@ -561,7 +564,7 @@ output = @param:output -> float_type, {1, 10}, {10, 1}
Allocating params ...
Running performance report ...
-@0 = check_context::migraphx::version_1::gpu::context -> float_type, {}, {}: 0.00057782ms, 1%
+@0 = check_context::migraphx::gpu::context -> float_type, {}, {}: 0.00057782ms, 1%
@1 = hip::hip_allocate_memory[shape=float_type, {256}, {1},id=scratch] -> float_type, {256}, {1}: 0.000295ms, 1%
@2 = hip::hip_copy_literal[id=@literal:3] -> float_type, {784, 128}, {128, 1}: 0.00027942ms, 1%
@3 = load[offset=0,end=512](@1) -> float_type, {1, 128}, {128, 1}: 0.000232ms, 1%
@@ -591,7 +594,7 @@ hip::hip_copy_literal: 0.00186824ms, 1%
load: 0.0016288ms, 1%
@param: 0.0013428ms, 1%
broadcast: 0.00118042ms, 1%
-check_context::migraphx::version_1::gpu::context: 0.00057782ms, 1%
+check_context::migraphx::gpu::context: 0.00057782ms, 1%
reshape: 0.00033842ms, 1%
hip::hip_allocate_memory: 0.000295ms, 1%
......
@@ -21,6 +21,6 @@
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#####################################################################################
-tensorflow==2.9.3
+tensorflow==2.11.1
onnxruntime
tokenizers
\ No newline at end of file
@@ -27,7 +27,7 @@ project (CAI)
set (CMAKE_CXX_STANDARD 14)
set (EXAMPLE mnist_inference)
-list (APPEND CMAKE_PREFIX_PATH /opt/rocm/hip /opt/rocm)
+list (APPEND CMAKE_PREFIX_PATH /opt/rocm)
find_package (migraphx)
message("source file: " ${EXAMPLE}.cpp " ---> bin: " ${EXAMPLE})
......
@@ -6,7 +6,7 @@ ARG PREFIX=/usr/local
RUN dpkg --add-architecture i386
# Add rocm repository
-RUN sh -c 'echo deb [arch=amd64 trusted=yes] http://repo.radeon.com/rocm/apt/5.4.2/ ubuntu main > /etc/apt/sources.list.d/rocm.list'
+RUN sh -c 'echo deb [arch=amd64 trusted=yes] http://repo.radeon.com/rocm/apt/5.5/ focal main > /etc/apt/sources.list.d/rocm.list'
# Install dependencies
RUN apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -y --allow-unauthenticated \
@@ -47,7 +47,7 @@ ENV LANG=C.UTF-8
RUN pip3 install yapf==0.28.0
# Install doc requirements
-ADD doc/requirements.txt /doc-requirements.txt
+ADD docs/.sphinx/requirements.txt /doc-requirements.txt
RUN pip3 install -r /doc-requirements.txt
# Install dependencies
@@ -57,4 +57,3 @@ ADD rbuild.ini /rbuild.ini
COPY ./tools/install_prereqs.sh /
RUN /install_prereqs.sh /usr/local / && rm /install_prereqs.sh
@@ -14,6 +14,7 @@ define =
CMAKE_C_COMPILER_LAUNCHER=${deps_dir}/bin/ccache
CMAKE_CXX_COMPILER_LAUNCHER=${deps_dir}/bin/ccache
MIGRAPHX_ENABLE_CPU=On
+BUILD_DEV=On
[develop]
cxx = ${rocm_path}/llvm/bin/clang++
@@ -25,3 +26,4 @@ define =
CMAKE_C_COMPILER_LAUNCHER=${deps_dir}/bin/ccache
CMAKE_CXX_COMPILER_LAUNCHER=${deps_dir}/bin/ccache
MIGRAPHX_ENABLE_CPU=On
+BUILD_DEV=On
@@ -28,3 +28,4 @@ ROCmSoftwarePlatform/half@rocm-5.4.2
pybind/pybind11@d159a563383d10c821ba7b2a71905d1207db6de4 --build
msgpack/msgpack-c@cpp-3.3.0 -DMSGPACK_BUILD_TESTS=Off
sqlite3@3.17 -DCMAKE_POSITION_INDEPENDENT_CODE=On
+ROCmSoftwarePlatform/composable_kernel@ac580f77a84c705c678816ef7195adfcc02bdda5 -DCK_BUILD_JIT_LIB=On -DCMAKE_POSITION_INDEPENDENT_CODE=On
@@ -51,6 +51,7 @@ add_library(migraphx
env.cpp
file_buffer.cpp
fuse_pointwise.cpp
+fuse_reduce.cpp
generate.cpp
inline_module.cpp
insert_pad.cpp
@@ -74,6 +75,7 @@ add_library(migraphx
process.cpp
program.cpp
propagate_constant.cpp
+promote_literals.cpp
quantization.cpp
quantize_fp16.cpp
quantize_int8.cpp
@@ -92,6 +94,7 @@ add_library(migraphx
shape.cpp
simplify_algebra.cpp
simplify_reshapes.cpp
+split_single_dyn_dim.cpp
tmp_dir.cpp
value.cpp
verify_args.cpp
@@ -193,6 +196,7 @@ register_migraphx_ops(
roialign
round
rsqrt
+run_on_target
scalar
scatter_add
scatter_mul
......
@@ -24,6 +24,7 @@
#include <migraphx/execution_environment.hpp>
#include <migraphx/migraphx.h>
#include <migraphx/rank.hpp>
+#include <migraphx/ranges.hpp>
#include <migraphx/shape.hpp>
#include <migraphx/program.hpp>
#include <migraphx/onnx.hpp>
@@ -145,6 +146,11 @@ void set_default_dim_value(onnx_options& options, size_t value)
options.default_dim_value = value;
}
void set_default_dyn_dim_value(onnx_options& options, const shape::dynamic_dimension& dd)
{
options.default_dyn_dim_value = dd;
}
void set_default_loop_iterations(onnx_options& options, int64_t value)
{
options.max_loop_iterations = value;
@@ -161,6 +167,13 @@ void set_input_parameter_shape(onnx_options& options,
options.map_input_dims[std::string(name)] = std::move(dims);
}
void set_dyn_input_parameter_shape(onnx_options& options,
const char* name,
std::vector<shape::dynamic_dimension> dyn_dims)
{
options.map_dyn_input_dims[std::string(name)] = std::move(dyn_dims);
}
void set_input_parameter_shape(tf_options& options, const char* name, std::vector<std::size_t> dims)
{
options.map_input_dims[std::string(name)] = std::move(dims);
@@ -187,6 +200,12 @@ std::vector<const char*> get_names(const std::unordered_map<std::string, Value>&
return result;
}
template <class T>
std::set<T> make_set(const T* x, std::size_t n)
{
return {x, x + n};
}
void quantize_fp16_with_op_names(program& prog, std::vector<std::string>& names)
{
if(names.empty())
@@ -346,6 +365,9 @@ const Target* object_cast(const U* x)
template <class T, class... Ts, class Target = std::remove_pointer_t<T>>
Target* allocate(Ts&&... xs)
{
if constexpr(std::is_aggregate<Target>{})
return new Target{std::forward<Ts>(xs)...}; // NOLINT
else
return new Target(std::forward<Ts>(xs)...); // NOLINT
}
@@ -409,6 +431,39 @@ struct manage_generic_ptr
D deleter = nullptr;
};
extern "C" struct migraphx_optimals;
struct migraphx_optimals
{
template <class... Ts>
migraphx_optimals(Ts&&... xs)
: object(std::forward<Ts>(xs)...) // NOLINT(readability-redundant-member-init)
{
}
std::set<size_t> object;
};
extern "C" struct migraphx_dynamic_dimension;
struct migraphx_dynamic_dimension
{
template <class... Ts>
migraphx_dynamic_dimension(Ts&&... xs)
: object(std::forward<Ts>(xs)...) // NOLINT(readability-redundant-member-init)
{
}
migraphx::shape::dynamic_dimension object;
};
extern "C" struct migraphx_dynamic_dimensions;
struct migraphx_dynamic_dimensions
{
template <class... Ts>
migraphx_dynamic_dimensions(Ts&&... xs)
: object(std::forward<Ts>(xs)...) // NOLINT(readability-redundant-member-init)
{
}
std::vector<migraphx::shape::dynamic_dimension> object;
};
extern "C" struct migraphx_shape; extern "C" struct migraphx_shape;
struct migraphx_shape struct migraphx_shape
{ {
...@@ -736,6 +791,152 @@ struct migraphx_experimental_custom_op ...@@ -736,6 +791,152 @@ struct migraphx_experimental_custom_op
} }
}; };
extern "C" migraphx_status migraphx_optimals_destroy(migraphx_optimals_t optimals)
{
auto api_error_result = migraphx::try_([&] { destroy((optimals)); });
return api_error_result;
}
extern "C" migraphx_status migraphx_optimals_assign_to(migraphx_optimals_t output,
const_migraphx_optimals_t input)
{
auto api_error_result = migraphx::try_([&] { *output = *input; });
return api_error_result;
}
extern "C" migraphx_status
migraphx_optimals_create(migraphx_optimals_t* optimals, const size_t* ptr, size_t size)
{
auto api_error_result = migraphx::try_([&] {
*optimals = object_cast<migraphx_optimals_t>(
allocate<std::set<size_t>>(migraphx::make_set<size_t>((ptr), (size))));
});
return api_error_result;
}
extern "C" migraphx_status
migraphx_dynamic_dimension_destroy(migraphx_dynamic_dimension_t dynamic_dimension)
{
auto api_error_result = migraphx::try_([&] { destroy((dynamic_dimension)); });
return api_error_result;
}
extern "C" migraphx_status
migraphx_dynamic_dimension_assign_to(migraphx_dynamic_dimension_t output,
const_migraphx_dynamic_dimension_t input)
{
auto api_error_result = migraphx::try_([&] { *output = *input; });
return api_error_result;
}
extern "C" migraphx_status migraphx_dynamic_dimension_create_min_max(
migraphx_dynamic_dimension_t* dynamic_dimension, size_t min, size_t max)
{
auto api_error_result = migraphx::try_([&] {
*dynamic_dimension = object_cast<migraphx_dynamic_dimension_t>(
allocate<migraphx::shape::dynamic_dimension>((min), (max)));
});
return api_error_result;
}
extern "C" migraphx_status
migraphx_dynamic_dimension_create_min_max_optimals(migraphx_dynamic_dimension_t* dynamic_dimension,
size_t min,
size_t max,
migraphx_optimals_t optimals)
{
auto api_error_result = migraphx::try_([&] {
if(optimals == nullptr)
MIGRAPHX_THROW(migraphx_status_bad_param, "Bad parameter optimals: Null pointer");
*dynamic_dimension = object_cast<migraphx_dynamic_dimension_t>(
allocate<migraphx::shape::dynamic_dimension>((min), (max), (optimals->object)));
});
return api_error_result;
}
extern "C" migraphx_status
migraphx_dynamic_dimension_is_fixed(bool* out, const_migraphx_dynamic_dimension_t dynamic_dimension)
{
auto api_error_result = migraphx::try_([&] {
if(dynamic_dimension == nullptr)
MIGRAPHX_THROW(migraphx_status_bad_param,
"Bad parameter dynamic_dimension: Null pointer");
*out = (dynamic_dimension->object).is_fixed();
});
return api_error_result;
}
extern "C" migraphx_status
migraphx_dynamic_dimension_equal(bool* out,
const_migraphx_dynamic_dimension_t dynamic_dimension,
const_migraphx_dynamic_dimension_t x)
{
auto api_error_result = migraphx::try_([&] {
if(dynamic_dimension == nullptr)
MIGRAPHX_THROW(migraphx_status_bad_param,
"Bad parameter dynamic_dimension: Null pointer");
if(x == nullptr)
MIGRAPHX_THROW(migraphx_status_bad_param, "Bad parameter x: Null pointer");
*out = migraphx::equal((dynamic_dimension->object), (x->object));
});
return api_error_result;
}
extern "C" migraphx_status
migraphx_dynamic_dimensions_destroy(migraphx_dynamic_dimensions_t dynamic_dimensions)
{
auto api_error_result = migraphx::try_([&] { destroy((dynamic_dimensions)); });
return api_error_result;
}
extern "C" migraphx_status
migraphx_dynamic_dimensions_assign_to(migraphx_dynamic_dimensions_t output,
const_migraphx_dynamic_dimensions_t input)
{
auto api_error_result = migraphx::try_([&] { *output = *input; });
return api_error_result;
}
extern "C" migraphx_status
migraphx_dynamic_dimensions_create(migraphx_dynamic_dimensions_t* dynamic_dimensions,
const_migraphx_dynamic_dimension_t* ptr,
size_t size)
{
auto api_error_result = migraphx::try_([&] {
*dynamic_dimensions = object_cast<migraphx_dynamic_dimensions_t>(
allocate<std::vector<migraphx::shape::dynamic_dimension>>(
migraphx::to_obj_vector<const_migraphx_dynamic_dimension_t>((ptr), (size))));
});
return api_error_result;
}
extern "C" migraphx_status
migraphx_dynamic_dimensions_size(size_t* out, migraphx_dynamic_dimensions_t dynamic_dimensions)
{
auto api_error_result = migraphx::try_([&] {
if(dynamic_dimensions == nullptr)
MIGRAPHX_THROW(migraphx_status_bad_param,
"Bad parameter dynamic_dimensions: Null pointer");
*out = (dynamic_dimensions->object).size();
});
return api_error_result;
}
extern "C" migraphx_status
migraphx_dynamic_dimensions_get(const_migraphx_dynamic_dimension_t* out,
migraphx_dynamic_dimensions_t dynamic_dimensions,
size_t idx)
{
auto api_error_result = migraphx::try_([&] {
if(dynamic_dimensions == nullptr)
MIGRAPHX_THROW(migraphx_status_bad_param,
"Bad parameter dynamic_dimensions: Null pointer");
*out = object_cast<const_migraphx_dynamic_dimension_t>(
&((dynamic_dimensions->object).at((idx))));
});
return api_error_result;
}
extern "C" migraphx_status migraphx_shape_destroy(migraphx_shape_t shape) extern "C" migraphx_status migraphx_shape_destroy(migraphx_shape_t shape)
{ {
auto api_error_result = migraphx::try_([&] { destroy((shape)); }); auto api_error_result = migraphx::try_([&] { destroy((shape)); });
...@@ -794,6 +995,19 @@ extern "C" migraphx_status migraphx_shape_create_scalar(migraphx_shape_t* shape, ...@@ -794,6 +995,19 @@ extern "C" migraphx_status migraphx_shape_create_scalar(migraphx_shape_t* shape,
return api_error_result; return api_error_result;
} }
extern "C" migraphx_status migraphx_shape_create_dynamic(migraphx_shape_t* shape,
migraphx_shape_datatype_t type,
migraphx_dynamic_dimensions_t dims)
{
auto api_error_result = migraphx::try_([&] {
if(dims == nullptr)
MIGRAPHX_THROW(migraphx_status_bad_param, "Bad parameter dims: Null pointer");
*shape = object_cast<migraphx_shape_t>(
allocate<migraphx::shape>((migraphx::to_shape_type(type)), (dims->object)));
});
return api_error_result;
}
extern "C" migraphx_status extern "C" migraphx_status
migraphx_shape_lengths(const size_t** out, size_t* out_size, const_migraphx_shape_t shape) migraphx_shape_lengths(const size_t** out, size_t* out_size, const_migraphx_shape_t shape)
{ {
...@@ -824,6 +1038,17 @@ migraphx_shape_strides(const size_t** out, size_t* out_size, const_migraphx_shap ...@@ -824,6 +1038,17 @@ migraphx_shape_strides(const size_t** out, size_t* out_size, const_migraphx_shap
return api_error_result; return api_error_result;
} }
extern "C" migraphx_status migraphx_shape_dyn_dims(migraphx_dynamic_dimensions_t* out,
const_migraphx_shape_t shape)
{
auto api_error_result = migraphx::try_([&] {
if(shape == nullptr)
MIGRAPHX_THROW(migraphx_status_bad_param, "Bad parameter shape: Null pointer");
*out = allocate<migraphx_dynamic_dimensions_t>((shape->object).dyn_dims());
});
return api_error_result;
}
extern "C" migraphx_status migraphx_shape_type(migraphx_shape_datatype_t* out, extern "C" migraphx_status migraphx_shape_type(migraphx_shape_datatype_t* out,
const_migraphx_shape_t shape) const_migraphx_shape_t shape)
{ {
...@@ -857,6 +1082,16 @@ extern "C" migraphx_status migraphx_shape_bytes(size_t* out, const_migraphx_shap ...@@ -857,6 +1082,16 @@ extern "C" migraphx_status migraphx_shape_bytes(size_t* out, const_migraphx_shap
return api_error_result; return api_error_result;
} }
extern "C" migraphx_status migraphx_shape_ndim(size_t* out, const_migraphx_shape_t shape)
{
auto api_error_result = migraphx::try_([&] {
if(shape == nullptr)
MIGRAPHX_THROW(migraphx_status_bad_param, "Bad parameter shape: Null pointer");
*out = (shape->object).ndim();
});
return api_error_result;
}
extern "C" migraphx_status extern "C" migraphx_status
migraphx_shape_equal(bool* out, const_migraphx_shape_t shape, const_migraphx_shape_t x) migraphx_shape_equal(bool* out, const_migraphx_shape_t shape, const_migraphx_shape_t x)
{ {
...@@ -880,6 +1115,16 @@ extern "C" migraphx_status migraphx_shape_standard(bool* out, const_migraphx_sha ...@@ -880,6 +1115,16 @@ extern "C" migraphx_status migraphx_shape_standard(bool* out, const_migraphx_sha
return api_error_result; return api_error_result;
} }
extern "C" migraphx_status migraphx_shape_dynamic(bool* out, const_migraphx_shape_t shape)
{
auto api_error_result = migraphx::try_([&] {
if(shape == nullptr)
MIGRAPHX_THROW(migraphx_status_bad_param, "Bad parameter shape: Null pointer");
*out = (shape->object).dynamic();
});
return api_error_result;
}
extern "C" migraphx_status migraphx_shape_index(size_t* out, const_migraphx_shape_t shape, size_t i) extern "C" migraphx_status migraphx_shape_index(size_t* out, const_migraphx_shape_t shape, size_t i)
{ {
auto api_error_result = migraphx::try_([&] { auto api_error_result = migraphx::try_([&] {
...@@ -915,6 +1160,17 @@ migraphx_argument_create(migraphx_argument_t* argument, const_migraphx_shape_t s ...@@ -915,6 +1160,17 @@ migraphx_argument_create(migraphx_argument_t* argument, const_migraphx_shape_t s
return api_error_result; return api_error_result;
} }
extern "C" migraphx_status migraphx_argument_create_empty(migraphx_argument_t* argument,
const_migraphx_shape_t shape)
{
auto api_error_result = migraphx::try_([&] {
if(shape == nullptr)
MIGRAPHX_THROW(migraphx_status_bad_param, "Bad parameter shape: Null pointer");
*argument = object_cast<migraphx_argument_t>(allocate<migraphx::argument>((shape->object)));
});
return api_error_result;
}
extern "C" migraphx_status migraphx_argument_shape(const_migraphx_shape_t* out, extern "C" migraphx_status migraphx_argument_shape(const_migraphx_shape_t* out,
const_migraphx_argument_t argument) const_migraphx_argument_t argument)
{ {
...@@ -1590,6 +1846,19 @@ extern "C" migraphx_status migraphx_onnx_options_set_input_parameter_shape( ...@@ -1590,6 +1846,19 @@ extern "C" migraphx_status migraphx_onnx_options_set_input_parameter_shape(
return api_error_result; return api_error_result;
} }
extern "C" migraphx_status migraphx_onnx_options_set_dyn_input_parameter_shape(
migraphx_onnx_options_t onnx_options, const char* name, migraphx_dynamic_dimensions_t dims)
{
auto api_error_result = migraphx::try_([&] {
if(onnx_options == nullptr)
MIGRAPHX_THROW(migraphx_status_bad_param, "Bad parameter onnx_options: Null pointer");
if(dims == nullptr)
MIGRAPHX_THROW(migraphx_status_bad_param, "Bad parameter dims: Null pointer");
migraphx::set_dyn_input_parameter_shape((onnx_options->object), (name), (dims->object));
});
return api_error_result;
}
extern "C" migraphx_status extern "C" migraphx_status
migraphx_onnx_options_set_default_dim_value(migraphx_onnx_options_t onnx_options, size_t value) migraphx_onnx_options_set_default_dim_value(migraphx_onnx_options_t onnx_options, size_t value)
{ {
...@@ -1601,6 +1870,20 @@ migraphx_onnx_options_set_default_dim_value(migraphx_onnx_options_t onnx_options ...@@ -1601,6 +1870,20 @@ migraphx_onnx_options_set_default_dim_value(migraphx_onnx_options_t onnx_options
return api_error_result; return api_error_result;
} }
extern "C" migraphx_status
migraphx_onnx_options_set_default_dyn_dim_value(migraphx_onnx_options_t onnx_options,
const_migraphx_dynamic_dimension_t dd)
{
auto api_error_result = migraphx::try_([&] {
if(onnx_options == nullptr)
MIGRAPHX_THROW(migraphx_status_bad_param, "Bad parameter onnx_options: Null pointer");
if(dd == nullptr)
MIGRAPHX_THROW(migraphx_status_bad_param, "Bad parameter dd: Null pointer");
migraphx::set_default_dyn_dim_value((onnx_options->object), (dd->object));
});
return api_error_result;
}
extern "C" migraphx_status extern "C" migraphx_status
migraphx_onnx_options_set_default_loop_iterations(migraphx_onnx_options_t onnx_options, migraphx_onnx_options_set_default_loop_iterations(migraphx_onnx_options_t onnx_options,
int64_t value) int64_t value)
......
@@ -66,6 +66,15 @@ typedef enum
} migraphx_shape_datatype_t;
#undef MIGRAPHX_SHAPE_GENERATE_ENUM_TYPES
typedef struct migraphx_optimals* migraphx_optimals_t;
typedef const struct migraphx_optimals* const_migraphx_optimals_t;
typedef struct migraphx_dynamic_dimension* migraphx_dynamic_dimension_t;
typedef const struct migraphx_dynamic_dimension* const_migraphx_dynamic_dimension_t;
typedef struct migraphx_dynamic_dimensions* migraphx_dynamic_dimensions_t;
typedef const struct migraphx_dynamic_dimensions* const_migraphx_dynamic_dimensions_t;
typedef struct migraphx_shape* migraphx_shape_t;
typedef const struct migraphx_shape* const_migraphx_shape_t;
@@ -157,6 +166,55 @@ typedef migraphx_status (*migraphx_experimental_custom_op_copy)(void** out, void
typedef migraphx_status (*migraphx_experimental_custom_op_delete)(void* input);
migraphx_status migraphx_optimals_destroy(migraphx_optimals_t optimals);
migraphx_status migraphx_optimals_assign_to(migraphx_optimals_t output,
const_migraphx_optimals_t input);
migraphx_status
migraphx_optimals_create(migraphx_optimals_t* optimals, const size_t* ptr, size_t size);
migraphx_status migraphx_dynamic_dimension_destroy(migraphx_dynamic_dimension_t dynamic_dimension);
migraphx_status migraphx_dynamic_dimension_assign_to(migraphx_dynamic_dimension_t output,
const_migraphx_dynamic_dimension_t input);
migraphx_status migraphx_dynamic_dimension_create_min_max(
migraphx_dynamic_dimension_t* dynamic_dimension, size_t min, size_t max);
migraphx_status
migraphx_dynamic_dimension_create_min_max_optimals(migraphx_dynamic_dimension_t* dynamic_dimension,
size_t min,
size_t max,
migraphx_optimals_t optimals);
migraphx_status
migraphx_dynamic_dimension_is_fixed(bool* out,
const_migraphx_dynamic_dimension_t dynamic_dimension);
migraphx_status
migraphx_dynamic_dimension_equal(bool* out,
const_migraphx_dynamic_dimension_t dynamic_dimension,
const_migraphx_dynamic_dimension_t x);
migraphx_status
migraphx_dynamic_dimensions_destroy(migraphx_dynamic_dimensions_t dynamic_dimensions);
migraphx_status migraphx_dynamic_dimensions_assign_to(migraphx_dynamic_dimensions_t output,
const_migraphx_dynamic_dimensions_t input);
migraphx_status
migraphx_dynamic_dimensions_create(migraphx_dynamic_dimensions_t* dynamic_dimensions,
const_migraphx_dynamic_dimension_t* ptr,
size_t size);
migraphx_status migraphx_dynamic_dimensions_size(size_t* out,
migraphx_dynamic_dimensions_t dynamic_dimensions);
migraphx_status migraphx_dynamic_dimensions_get(const_migraphx_dynamic_dimension_t* out,
migraphx_dynamic_dimensions_t dynamic_dimensions,
size_t idx);
migraphx_status migraphx_shape_destroy(migraphx_shape_t shape);
migraphx_status migraphx_shape_assign_to(migraphx_shape_t output, const_migraphx_shape_t input);
@@ -176,23 +234,34 @@ migraphx_status migraphx_shape_create_with_strides(migraphx_shape_t* shape,
migraphx_status migraphx_shape_create_scalar(migraphx_shape_t* shape,
migraphx_shape_datatype_t type);
migraphx_status migraphx_shape_create_dynamic(migraphx_shape_t* shape,
migraphx_shape_datatype_t type,
migraphx_dynamic_dimensions_t dims);
migraphx_status
migraphx_shape_lengths(const size_t** out, size_t* out_size, const_migraphx_shape_t shape);
migraphx_status
migraphx_shape_strides(const size_t** out, size_t* out_size, const_migraphx_shape_t shape);
migraphx_status migraphx_shape_dyn_dims(migraphx_dynamic_dimensions_t* out,
const_migraphx_shape_t shape);
migraphx_status migraphx_shape_type(migraphx_shape_datatype_t* out, const_migraphx_shape_t shape);
migraphx_status migraphx_shape_elements(size_t* out, const_migraphx_shape_t shape);
migraphx_status migraphx_shape_bytes(size_t* out, const_migraphx_shape_t shape);
migraphx_status migraphx_shape_ndim(size_t* out, const_migraphx_shape_t shape);
migraphx_status
migraphx_shape_equal(bool* out, const_migraphx_shape_t shape, const_migraphx_shape_t x);
migraphx_status migraphx_shape_standard(bool* out, const_migraphx_shape_t shape);
migraphx_status migraphx_shape_dynamic(bool* out, const_migraphx_shape_t shape);
migraphx_status migraphx_shape_index(size_t* out, const_migraphx_shape_t shape, size_t i);
migraphx_status migraphx_argument_destroy(migraphx_argument_t argument);
@@ -203,6 +272,9 @@ migraphx_status migraphx_argument_assign_to(migraphx_argument_t output,
migraphx_status
migraphx_argument_create(migraphx_argument_t* argument, const_migraphx_shape_t shape, void* buffer);
migraphx_status migraphx_argument_create_empty(migraphx_argument_t* argument,
const_migraphx_shape_t shape);
migraphx_status migraphx_argument_shape(const_migraphx_shape_t* out,
const_migraphx_argument_t argument);
@@ -397,9 +469,16 @@ migraphx_status migraphx_onnx_options_create(migraphx_onnx_options_t* onnx_optio
migraphx_status migraphx_onnx_options_set_input_parameter_shape(
migraphx_onnx_options_t onnx_options, const char* name, size_t* dims, size_t dims_size);
migraphx_status migraphx_onnx_options_set_dyn_input_parameter_shape(
migraphx_onnx_options_t onnx_options, const char* name, migraphx_dynamic_dimensions_t dims);
migraphx_status migraphx_onnx_options_set_default_dim_value(migraphx_onnx_options_t onnx_options,
size_t value);
migraphx_status
migraphx_onnx_options_set_default_dyn_dim_value(migraphx_onnx_options_t onnx_options,
const_migraphx_dynamic_dimension_t dd);
migraphx_status
migraphx_onnx_options_set_default_loop_iterations(migraphx_onnx_options_t onnx_options,
int64_t value);
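To make the new declarations above concrete, here is a minimal sketch (error handling omitted) of how the raw C API added in this commit could be used to describe a dynamic input; the parameter name `"input"` and the specific dimension values are placeholders:
```
#include <migraphx/migraphx.h>

void set_dynamic_input(migraphx_onnx_options_t options)
{
    // Batch dimension {min:1, max:4, optimals:{2, 4}}.
    const size_t opt_vals[] = {2, 4};
    migraphx_optimals_t opts;
    migraphx_optimals_create(&opts, opt_vals, 2);

    migraphx_dynamic_dimension_t batch_dd;
    migraphx_dynamic_dimension_create_min_max_optimals(&batch_dd, 1, 4, opts);

    // A fixed dimension: min == max.
    migraphx_dynamic_dimension_t fixed_dd;
    migraphx_dynamic_dimension_create_min_max(&fixed_dd, 3, 3);

    // Collect the dimensions and attach them to an input parameter of the ONNX options.
    const_migraphx_dynamic_dimension_t dims[] = {batch_dd, fixed_dd};
    migraphx_dynamic_dimensions_t dyn_dims;
    migraphx_dynamic_dimensions_create(&dyn_dims, dims, 2);
    migraphx_onnx_options_set_dyn_input_parameter_shape(options, "input", dyn_dims);

    // The options keep their own copy, so the handles created here can be released.
    migraphx_dynamic_dimensions_destroy(dyn_dims);
    migraphx_dynamic_dimension_destroy(fixed_dd);
    migraphx_dynamic_dimension_destroy(batch_dd);
    migraphx_optimals_destroy(opts);
}
```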
......