Commit 11e155c2 authored by Paul

Merge

parents 8a9c5bce aa7ff911
......@@ -12,31 +12,31 @@ shape
.. py:method:: type()
An integer that represents the type
An integer that represents the type.
:rtype: int
.. py:method:: lens()
A list of the lengths of the shape
A list of the lengths of the shape.
:rtype: list[int]
.. py:method:: strides()
A list of the strides of the shape
A list of the strides of the shape.
:rtype: list[int]
.. py:method:: elements()
The number of elements in the shape
The number of elements in the shape.
:rtype: int
.. py:method:: bytes()
The number of bytes the shape uses
The number of bytes the shape uses.
:rtype: int
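
A minimal usage sketch for these accessors; it assumes a program `p` has already been parsed (see `parse_onnx` below), since `shape` objects are normally obtained from a program rather than constructed directly, and the parameter name "input" is illustrative.

```
import migraphx

p = migraphx.parse_onnx("model.onnx")      # hypothetical model file
s = p.get_parameter_shapes()["input"]      # assumes an input named "input"
print(s.type(), s.lens(), s.strides())
print(s.elements(), s.bytes())
```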
......@@ -102,30 +102,73 @@ argument
Generate an argument with random data.
:param shape s: Shape of argument to generate.
:param int seed: The seed used for random number generation
:param int seed: The seed used for random number generation.
:rtype: argument
.. py:function:: fill_argument(s, value)
Fill an argument of shape s with the given value.
:param shape s: Shape of argument to fill.
:param int value: Value to fill in the argument.
:rtype: argument
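
A short sketch of the two helpers above, assuming a shape taken from a parsed program as in the earlier example; the parameter name "input" is a placeholder.

```
s = p.get_parameter_shapes()["input"]            # shape from a parsed program (assumption)
rand_arg = migraphx.generate_argument(s, seed=0) # random data
ones_arg = migraphx.fill_argument(s, 1)          # constant data
```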
target
------
.. py:class:: target()
This represents the compiliation target.
This represents the compilation target.
.. py:function:: get_target(name)
Constructs the target.
:param str name: The name of the target to construct. This can either be 'cpu' or 'gpu'.
:param str name: The name of the target to construct. This can either be 'gpu' or 'ref'.
:rtype: target
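
For example, assuming a ROCm-enabled build, a target can be constructed by name and later passed to `program.compile` (documented below):

```
gpu_target = migraphx.get_target("gpu")
ref_target = migraphx.get_target("ref")   # reference (CPU) implementation
```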
module
------
.. py:method:: print()
Prints the contents of the module as a list of instructions.
.. py:method:: add_instruction(op, args, mod_args=[])
Adds an instruction to the module.
:param operation op: 'migraphx.op' to be added as an instruction.
:param list[instruction] args: list of inputs to the op.
:param list[module] mod_args: optional list of module arguments to the operator.
:rtype: instruction
.. py:method:: add_literal(data)
Adds constant or literal data to the module from a Python buffer, such as a numpy array.
:param py::buffer data: Python buffer or numpy array.
:rtype: instruction
.. py:method:: add_parameter(name, shape)
Adds a parameter to the module with the provided name and shape.
:param str name: name of the parameter.
:param shape shape: shape of the parameter.
:rtype: instruction
.. py:method:: add_return(args)
Adds a return instruction to the module.
:param list[instruction] args: instructions to be returned from the module.
:rtype: instruction
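
Putting these methods together, here is a hedged sketch that builds a small graph by hand; the `migraphx.program()` and `migraphx.shape(...)` constructors and the `"add"` operator name are assumptions based on the rest of this reference, not a verified recipe for a specific release.

```
import migraphx
import numpy as np

p = migraphx.program()
mm = p.get_main_module()

s = migraphx.shape(type="float_type", lens=[2, 2])       # assumed shape constructor
x = mm.add_parameter("x", s)                              # graph input
one = mm.add_literal(np.ones((2, 2), dtype=np.float32))   # constant from a numpy buffer
y = mm.add_instruction(migraphx.op("add"), [x, one])      # y = x + 1
mm.add_return([y])
```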
program
-------
......@@ -135,21 +178,27 @@ program
.. py:method:: clone()
Make a copy of the program
Make a copy of the program.
:rtype: program
.. py:method:: get_parameter_names()
Get the names of all the input parameters of the program as a list.
:rtype: list[str]
.. py:method:: get_parameter_shapes()
Get the shapes of all the input parameters in the program.
:rtype: dict[str, shape]
.. py:method:: get_shape()
.. py:method:: get_output_shapes()
Get the shape of the final output of the program.
Get the shapes of the final outputs of the program.
:rtype: shape
:rtype: list[shape]
.. py:method:: compile(t, offload_copy=True, fast_math=True)
......@@ -159,6 +208,19 @@ program
:param bool offload_copy: For targets with offloaded memory (such as the GPU), this inserts instructions during compilation to copy the input parameters to the offloaded memory and to copy the final result from the offloaded memory back to main memory.
:param bool fast_math: Optimize math functions to use faster approximate versions. There may be a slight accuracy degradation when enabled.
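
A minimal compile sketch, assuming `p` is a program obtained from `parse_onnx` and a GPU target is available:

```
t = migraphx.get_target("gpu")
p.compile(t, offload_copy=True, fast_math=True)
```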
.. py:method:: get_main_module()
Get the main module of the program.
:rtype: module
.. py:method:: create_module(name)
Create and add a module with the provided name to the program.
:param str name: name of the new module.
:rtype: module
.. py:method:: run(params)
Run the program.
......@@ -167,7 +229,11 @@ program
:type params: dict[str, argument]
:return: The result of the last instruction.
:rtype: argument
:rtype: list[argument]
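
A hedged end-to-end sketch of `run`: every parameter is filled with random data via `generate_argument`, and the first result is converted with numpy (treat the `np.array` conversion of an argument as an assumption here).

```
import numpy as np

params = {}
for name, s in p.get_parameter_shapes().items():
    params[name] = migraphx.generate_argument(s, seed=0)

results = p.run(params)          # list[argument]
output = np.array(results[0])    # first output as a numpy array (assumed buffer support)
```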
.. py:method:: sort()
Sort the modules of the program such that instructions appear in topologically sorted order.
.. py:function:: quantize_fp16(prog, ins_names=["all"])
......@@ -190,10 +256,22 @@ program
:type ins_names: list[str]
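
For example, quantization is applied before compiling; by default all instructions are considered (hedged sketch, assuming a parsed program `p`):

```
migraphx.quantize_fp16(p)                 # ins_names defaults to ["all"]
p.compile(migraphx.get_target("gpu"), offload_copy=True)
```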
op
--
.. py:class:: op(name, kwargs)
Construct an operation with a name and arguments.
:param str name: name of the operation; must be supported by MIGraphX.
:param dict[str, any] kwargs: arguments to the operation.
:rtype: operation
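
For instance, an elementwise or convolution operation can be constructed by name; the attribute names below follow MIGraphX conventions but should be checked against the operator reference.

```
add_op = migraphx.op("add")
conv_op = migraphx.op("convolution", padding=[1, 1], stride=[1, 1], dilation=[1, 1])
```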
parse_onnx
----------
.. py:function:: parse_onnx(filename, default_dim_value=1, map_input_dims={}, skip_unknown_operators=false, print_program_on_error=false)
.. py:function:: parse_onnx(filename, default_dim_value=1, map_input_dims={}, skip_unknown_operators=false, print_program_on_error=false, max_loop_iterations=10)
Load and parse an onnx file.
......@@ -202,20 +280,21 @@ parse_onnx
:param dict[str, list[int]] map_input_dims: Explicitly specify the dims of an input.
:param bool skip_unknown_operators: Continue parsing the onnx file if an unknown operator is found.
:param bool print_program_on_error: Print the program if an error occurs.
:param int max_loop_iterations: Maximum number of iterations for the loop operator.
:rtype: program
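
A usage sketch with an illustrative file name and input name:

```
import migraphx

p = migraphx.parse_onnx("model.onnx",
                        default_dim_value=1,
                        map_input_dims={"input": [1, 3, 224, 224]})
```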
parse_tf
--------
.. py:function:: parse_tf(filename, is_nhwc=True, batch_size=1)
.. py:function:: parse_tf(filename, is_nhwc=True, batch_size=1, map_input_dims=dict(), output_names=[])
Load and parse a TensorFlow protobuf file.
:param str filename: Path to file.
:param bool is_nhwc: Use nhwc as the default format.
:param int batch_size: Default batch size to use (if not specified in the protobuf).
:param dict[str, list[int]] map_input_dims: Optional argument to explicitly specify the dimensions of the inputs.
:param list[str] output_names: Optional argument to specify the names of the output nodes.
:rtype: program
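
A usage sketch; the file and node names are placeholders:

```
p = migraphx.parse_tf("frozen_graph.pb",
                      is_nhwc=True,
                      batch_size=1,
                      output_names=["logits"])
```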
load
......@@ -223,7 +302,7 @@ load
.. py:function:: load(filename, format='msgpack')
Load a MIGraphX program
Load a MIGraphX program.
:param str filename: Path to file.
:param str format: Format of file. Valid options are msgpack or json.
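
For example, a program previously serialized with `save` (documented next) can be reloaded; the `.mxr` extension mirrors the notebooks later in this commit:

```
p = migraphx.load("model.mxr", format="msgpack")
```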
......@@ -235,7 +314,7 @@ save
.. py:function:: save(p, filename, format='msgpack')
Save a MIGraphX program
Save a MIGraphX program.
:param program p: Program to save.
:param str filename: Path to file.
......
......@@ -15,7 +15,7 @@ p = parse_onnx(input_file, options);
```
## Saving
An instantiated migraphx::program object can then be serialized to MessagePack (.msgpack) format and saved so that it can be loaded for future uses.
An instantiated migraphx::program object can then be serialized to MessagePack (.mxr) format and saved so that it can be loaded for future uses.
A program can be saved with either of the following:
```
......
......@@ -77,7 +77,7 @@ int main(int argc, char** argv)
std::cout << "Saving program..." << std::endl;
std::string output_file;
output_file = save_arg == nullptr ? "out" : save_arg;
output_file.append(".msgpack");
output_file.append(".mxr");
migraphx::file_options options;
options.set_file_format("msgpack");
......
......@@ -62,7 +62,7 @@
"metadata": {},
"outputs": [],
"source": [
"!wget -nc https://github.com/onnx/models/raw/master/text/machine_comprehension/bert-squad/model/bertsquad-10.onnx"
"!wget -nc https://github.com/onnx/models/raw/main/text/machine_comprehension/bert-squad/model/bertsquad-10.onnx"
]
},
{
......
......@@ -23,7 +23,7 @@ unzip uncased_L-12_H-768_A-12.zip
```
5) Get BERT ONNX model (bertsquad-10.onnx):
```
wget https://github.com/onnx/models/raw/master/text/machine_comprehension/bert-squad/model/bertsquad-10.onnx
wget https://github.com/onnx/models/raw/main/text/machine_comprehension/bert-squad/model/bertsquad-10.onnx
```
6) Run the inference; it will compile and run the model on three questions and the small dataset provided in `inputs.json`:
```
......
tensorflow==2.5.2
tensorflow==2.7.2
onnxruntime
tokenizers
\ No newline at end of file
# Modifications Copyright (C) 2022, Advanced Micro Devices, Inc. All rights reserved
# Copyright 2018 The Google AI Language Team Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
......
......@@ -106,8 +106,8 @@
"outputs": [],
"source": [
"if not path.exists(\"./resnet50.onnx\"):\n",
" !wget https://github.com/onnx/models/blob/master/vision/classification/resnet/model/resnet50-v2-7.onnx?raw=true\n",
" !mv 'resnet50-v2-7.onnx?raw=true' resnet50.onnx"
" !wget https://github.com/onnx/models/raw/main/vision/classification/resnet/model/resnet50-v2-7.onnx",
" !mv resnet50-v2-7.onnx resnet50.onnx"
]
},
{
......
......@@ -24,16 +24,16 @@
"import os.path\n",
"\n",
"if not os.path.exists(\"./utilities/coco.names\"):\n",
" !wget https://github.com/onnx/models/raw/master/vision/object_detection_segmentation/yolov4/dependencies/coco.names -P ./utilities/\n",
" !wget https://github.com/onnx/models/raw/main/vision/object_detection_segmentation/yolov4/dependencies/coco.names -P ./utilities/\n",
"if not os.path.exists(\"./utilities/yolov4_anchors.txt\"):\n",
" !wget https://github.com/onnx/models/raw/master/vision/object_detection_segmentation/yolov4/dependencies/yolov4_anchors.txt -P ./utilities/\n",
" !wget https://github.com/onnx/models/raw/main/vision/object_detection_segmentation/yolov4/dependencies/yolov4_anchors.txt -P ./utilities/\n",
"if not os.path.exists(\"./utilities/input.jpg\"):\n",
" # The image used is from the COCO dataset (https://cocodataset.org/#explore)\n",
" # Other images can be tested by replacing the link below\n",
" image_link = \"https://farm3.staticflickr.com/2009/2306189268_88cc86b30f_z.jpg\"\n",
" !wget -O ./utilities/input.jpg $image_link\n",
"if not os.path.exists(\"./utilities/yolov4.onnx\"):\n",
" !wget https://github.com/onnx/models/raw/master/vision/object_detection_segmentation/yolov4/model/yolov4.onnx -P ./utilities/"
" !wget https://github.com/onnx/models/raw/main/vision/object_detection_segmentation/yolov4/model/yolov4.onnx -P ./utilities/"
]
},
{
......@@ -50,10 +50,10 @@
"metadata": {},
"outputs": [],
"source": [
"if not os.path.exists(\"yolov4_fp16.msgpack\"):\n",
" !/opt/rocm/bin/migraphx-driver compile ./utilities/yolov4.onnx --gpu --enable-offload-copy --fp16ref --binary -o yolov4_fp16.msgpack\n",
"if not os.path.exists(\"yolov4.msgpack\"):\n",
" !/opt/rocm/bin/migraphx-driver compile ./utilities/yolov4.onnx --gpu --enable-offload-copy --binary -o yolov4.msgpack"
"if not os.path.exists(\"yolov4_fp16.mxr\"):\n",
" !/opt/rocm/bin/migraphx-driver compile ./utilities/yolov4.onnx --gpu --enable-offload-copy --fp16ref --binary -o yolov4_fp16.mxr\n",
"if not os.path.exists(\"yolov4.mxr\"):\n",
" !/opt/rocm/bin/migraphx-driver compile ./utilities/yolov4.onnx --gpu --enable-offload-copy --binary -o yolov4.mxr"
]
},
{
......@@ -115,8 +115,8 @@
"outputs": [],
"source": [
"# Load serialized model (either single- or half-precision)\n",
"model = migraphx.load(\"yolov4.msgpack\", format=\"msgpack\")\n",
"#model = migraphx.load(\"yolov4_fp16.msgpack\", format=\"msgpack\")\n",
"model = migraphx.load(\"yolov4.mxr\", format=\"msgpack\")\n",
"#model = migraphx.load(\"yolov4_fp16.mxr\", format=\"msgpack\")\n",
"\n",
"# Get the name of the input parameter and convert image data to an MIGraphX argument\n",
"input_name = next(iter(model.get_parameter_shapes()))\n",
......@@ -192,4 +192,4 @@
},
"nbformat": 4,
"nbformat_minor": 2
}
\ No newline at end of file
}
FROM ubuntu:18.04
FROM ubuntu:20.04
ARG PREFIX=/usr/local
......@@ -6,13 +6,13 @@ ARG PREFIX=/usr/local
RUN dpkg --add-architecture i386
# Add rocm repository
RUN sh -c 'echo deb [arch=amd64 trusted=yes] http://repo.radeon.com/rocm/apt/4.5/ ubuntu main > /etc/apt/sources.list.d/rocm.list'
RUN sh -c 'echo deb [arch=amd64 trusted=yes] http://repo.radeon.com/rocm/apt/5.0.2/ ubuntu main > /etc/apt/sources.list.d/rocm.list'
# Install dependencies
RUN apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -y --allow-unauthenticated \
apt-utils \
build-essential \
clang-format-5.0 \
clang-format-10 \
cmake \
curl \
doxygen \
......@@ -20,9 +20,6 @@ RUN apt-get update && DEBIAN_FRONTEND=noninteractive apt-get install -y --allow-
git \
lcov \
pkg-config \
python \
python-dev \
python-pip \
python3 \
python3-dev \
python3-pip \
......
......@@ -38,6 +38,7 @@ add_library(migraphx
msgpack.cpp
normalize_attributes.cpp
normalize_ops.cpp
op_enums.cpp
operation.cpp
opt/memory_coloring.cpp
opt/memory_coloring_impl.cpp
......@@ -108,12 +109,14 @@ register_migraphx_ops(
flatten
floor
gather
gathernd
get_tuple_elem
greater
gru
identity
if_op
im2col
isnan
leaky_relu
less
load
......@@ -160,7 +163,12 @@ register_migraphx_ops(
round
rsqrt
scalar
scatter
scatter_add
scatter_mul
scatter_none
scatternd_add
scatternd_mul
scatternd_none
sigmoid
sign
sinh
......@@ -211,7 +219,6 @@ target_link_libraries(migraphx PRIVATE msgpackc-cxx)
target_link_libraries(migraphx INTERFACE $<BUILD_INTERFACE:msgpackc-cxx>)
add_library(migraphx_all_targets INTERFACE)
target_link_libraries(migraphx_all_targets INTERFACE migraphx_ref)
set(PACKAGE_DEPENDS)
......@@ -222,6 +229,7 @@ add_subdirectory(tf)
add_subdirectory(py)
add_subdirectory(targets/ref)
target_link_libraries(migraphx_all_targets INTERFACE migraphx_ref)
if(MIGRAPHX_ENABLE_CPU)
add_subdirectory(targets/cpu)
target_link_libraries(migraphx_all_targets INTERFACE migraphx_cpu)
......@@ -239,7 +247,7 @@ if(HAVE_HALF_EXPR)
endif()
rocm_export_targets(
TARGETS migraphx::migraphx migraphx_all_targets
TARGETS migraphx::migraphx_c
NAMESPACE migraphx::
DEPENDS
Threads
......
......@@ -8,9 +8,9 @@
namespace migraphx {
inline namespace MIGRAPHX_INLINE_NS {
void adjust_allocation::apply(module& p) const
void adjust_allocation::apply(module& m) const
{
for(auto ins : iterator_for(p))
for(auto ins : iterator_for(m))
{
// skip instruction with no input
if(ins->inputs().empty())
......@@ -27,13 +27,13 @@ void adjust_allocation::apply(module& p) const
// of the instruction, reallocate and replace the previous one
if(alias_ins->get_shape() == ins->get_shape())
continue;
auto alloc_ins = p.insert_instruction(ins, model.allocate(ins->get_shape()));
p.replace_instruction(alias_ins, alloc_ins);
auto alloc_ins = m.insert_instruction(ins, model.allocate(ins->get_shape()));
m.replace_instruction(alias_ins, alloc_ins);
// If the memory is an output parameter then copy the memory to the parameter
if(alias_ins->name() == "@param")
{
auto copy = p.insert_instruction(std::next(ins), make_op(model.copy()), ins, alias_ins);
auto tail = range(std::next(copy), p.end());
auto copy = m.insert_instruction(std::next(ins), make_op(model.copy()), ins, alias_ins);
auto tail = range(std::next(copy), m.end());
for(auto i : iterator_for(tail))
{
if(contains(i->inputs(), ins))
......
......@@ -14,31 +14,31 @@ bool happens_before(const std::vector<std::size_t>& e1, const std::vector<std::s
not std::equal(e1.begin(), e1.end(), e2.begin(), e2.end(), std::greater_equal<>{});
}
std::vector<stream_race> analyze_streams(const module& p, const stream_model& m)
std::vector<stream_race> analyze_streams(const module& m, const stream_model& strmm)
{
using vector_clock = std::vector<std::size_t>;
std::vector<stream_race> races;
auto nstream = m.get_nstream();
auto nstream = strmm.get_nstream();
std::vector<vector_clock> vclock(nstream, vector_clock(nstream));
std::unordered_map<instruction_ref, vector_clock> timestamp;
std::unordered_map<std::size_t, vector_clock> events;
for(auto ins : iterator_for(p))
for(auto ins : iterator_for(m))
{
if(not m.has_stream(ins))
if(not strmm.has_stream(ins))
continue;
std::size_t s = m.get_stream(ins);
std::size_t s = strmm.get_stream(ins);
assert(s < nstream);
assert(vclock.size() == nstream);
assert(vclock[s].size() == nstream);
if(m.is_record(ins))
if(strmm.is_record(ins))
{
vclock[s][s]++;
auto event = m.get_event_id(ins);
auto event = strmm.get_event_id(ins);
events[event] = vclock[s];
}
else if(m.is_wait(ins))
else if(strmm.is_wait(ins))
{
auto event = m.get_event_id(ins);
auto event = strmm.get_event_id(ins);
if(not contains(events, event))
MIGRAPHX_THROW("Event is waited on before being recorded: " +
std::to_string(event));
......@@ -57,21 +57,21 @@ std::vector<stream_race> analyze_streams(const module& p, const stream_model& m)
}
timestamp[ins] = vclock[s];
}
for(auto ins : iterator_for(p))
for(auto ins : iterator_for(m))
{
if(not m.has_stream(ins))
if(not strmm.has_stream(ins))
continue;
if(ins->inputs().empty())
continue;
std::size_t s = m.get_stream(ins);
std::size_t s = strmm.get_stream(ins);
// Find inputs from different streams
std::vector<instruction_ref> inputs;
fix([&](auto self, auto start) {
for(auto input : start->inputs())
{
if(not m.has_stream(input))
if(not strmm.has_stream(input))
self(input);
else if(m.get_stream(input) != s)
else if(strmm.get_stream(input) != s)
inputs.push_back(input);
}
})(ins);
......
......@@ -25,7 +25,8 @@ extern "C" {
#endif
// return code, more to be added later
typedef enum {
typedef enum
{
migraphx_status_success = 0,
migraphx_status_bad_param = 1,
migraphx_status_unknown_target = 3,
......@@ -35,7 +36,8 @@ typedef enum {
#define MIGRAPHX_SHAPE_GENERATE_ENUM_TYPES(x, t) migraphx_shape_##x,
/// An enum to represent the different data type inputs
typedef enum {
typedef enum
{
migraphx_shape_tuple_type,
MIGRAPHX_SHAPE_VISIT_TYPES(MIGRAPHX_SHAPE_GENERATE_ENUM_TYPES)
} migraphx_shape_datatype_t;
......@@ -62,6 +64,15 @@ typedef const struct migraphx_arguments* const_migraphx_arguments_t;
typedef struct migraphx_shapes* migraphx_shapes_t;
typedef const struct migraphx_shapes* const_migraphx_shapes_t;
typedef struct migraphx_instruction* migraphx_instruction_t;
typedef const struct migraphx_instruction* const_migraphx_instruction_t;
typedef struct migraphx_instructions* migraphx_instructions_t;
typedef const struct migraphx_instructions* const_migraphx_instructions_t;
typedef struct migraphx_modules* migraphx_modules_t;
typedef const struct migraphx_modules* const_migraphx_modules_t;
typedef struct migraphx_module* migraphx_module_t;
typedef const struct migraphx_module* const_migraphx_module_t;
......@@ -89,8 +100,24 @@ typedef const struct migraphx_quantize_op_names* const_migraphx_quantize_op_name
typedef struct migraphx_quantize_int8_options* migraphx_quantize_int8_options_t;
typedef const struct migraphx_quantize_int8_options* const_migraphx_quantize_int8_options_t;
typedef struct migraphx_context* migraphx_context_t;
typedef const struct migraphx_context* const_migraphx_context_t;
typedef struct migraphx_experimental_custom_op* migraphx_experimental_custom_op_t;
typedef const struct migraphx_experimental_custom_op* const_migraphx_experimental_custom_op_t;
typedef migraphx_status (*migraphx_experimental_custom_op_compute_shape)(migraphx_shape_t out,
void* obj,
migraphx_shapes_t inputs);
typedef migraphx_status (*migraphx_experimental_custom_op_copy)(void** out, void* input);
typedef migraphx_status (*migraphx_experimental_custom_op_delete)(void* input);
migraphx_status migraphx_shape_destroy(migraphx_shape_t shape);
migraphx_status migraphx_shape_assign_to(migraphx_shape_t output, const_migraphx_shape_t input);
migraphx_status migraphx_shape_create(migraphx_shape_t* shape,
migraphx_shape_datatype_t type,
size_t* lengths,
......@@ -121,6 +148,9 @@ migraphx_shape_equal(bool* out, const_migraphx_shape_t shape, const_migraphx_sha
migraphx_status migraphx_argument_destroy(migraphx_argument_t argument);
migraphx_status migraphx_argument_assign_to(migraphx_argument_t output,
const_migraphx_argument_t input);
migraphx_status
migraphx_argument_create(migraphx_argument_t* argument, const_migraphx_shape_t shape, void* buffer);
......@@ -137,11 +167,17 @@ migraphx_argument_generate(migraphx_argument_t* out, const_migraphx_shape_t s, s
migraphx_status migraphx_target_destroy(migraphx_target_t target);
migraphx_status migraphx_target_assign_to(migraphx_target_t output, const_migraphx_target_t input);
migraphx_status migraphx_target_create(migraphx_target_t* target, const char* name);
migraphx_status migraphx_program_parameter_shapes_destroy(
migraphx_program_parameter_shapes_t program_parameter_shapes);
migraphx_status
migraphx_program_parameter_shapes_assign_to(migraphx_program_parameter_shapes_t output,
const_migraphx_program_parameter_shapes_t input);
migraphx_status migraphx_program_parameter_shapes_size(
size_t* out, migraphx_program_parameter_shapes_t program_parameter_shapes);
......@@ -156,6 +192,9 @@ migraphx_status migraphx_program_parameter_shapes_names(
migraphx_status
migraphx_program_parameters_destroy(migraphx_program_parameters_t program_parameters);
migraphx_status migraphx_program_parameters_assign_to(migraphx_program_parameters_t output,
const_migraphx_program_parameters_t input);
migraphx_status
migraphx_program_parameters_create(migraphx_program_parameters_t* program_parameters);
......@@ -165,6 +204,9 @@ migraphx_status migraphx_program_parameters_add(migraphx_program_parameters_t pr
migraphx_status migraphx_arguments_destroy(migraphx_arguments_t arguments);
migraphx_status migraphx_arguments_assign_to(migraphx_arguments_t output,
const_migraphx_arguments_t input);
migraphx_status migraphx_arguments_size(size_t* out, migraphx_arguments_t arguments);
migraphx_status
......@@ -172,18 +214,78 @@ migraphx_arguments_get(const_migraphx_argument_t* out, migraphx_arguments_t argu
migraphx_status migraphx_shapes_destroy(migraphx_shapes_t shapes);
migraphx_status migraphx_shapes_assign_to(migraphx_shapes_t output, const_migraphx_shapes_t input);
migraphx_status migraphx_shapes_size(size_t* out, migraphx_shapes_t shapes);
migraphx_status
migraphx_shapes_get(const_migraphx_shape_t* out, migraphx_shapes_t shapes, size_t idx);
migraphx_status migraphx_instruction_destroy(migraphx_instruction_t instruction);
migraphx_status migraphx_instruction_assign_to(migraphx_instruction_t output,
const_migraphx_instruction_t input);
migraphx_status migraphx_instructions_destroy(migraphx_instructions_t instructions);
migraphx_status migraphx_instructions_assign_to(migraphx_instructions_t output,
const_migraphx_instructions_t input);
migraphx_status migraphx_instructions_create(migraphx_instructions_t* instructions,
const_migraphx_instruction_t* ptr,
size_t size);
migraphx_status migraphx_modules_destroy(migraphx_modules_t modules);
migraphx_status migraphx_modules_assign_to(migraphx_modules_t output,
const_migraphx_modules_t input);
migraphx_status
migraphx_modules_create(migraphx_modules_t* modules, migraphx_module_t* ptr, size_t size);
migraphx_status migraphx_module_create(migraphx_module_t* module, char* name);
migraphx_status migraphx_module_print(const_migraphx_module_t module);
migraphx_status migraphx_module_add_instruction(migraphx_instruction_t* out,
migraphx_module_t module,
migraphx_operation_t op,
migraphx_instructions_t args);
migraphx_status migraphx_module_add_instruction_with_mod_args(migraphx_instruction_t* out,
migraphx_module_t module,
migraphx_operation_t op,
migraphx_instructions_t args,
migraphx_modules_t module_refs);
migraphx_status migraphx_module_add_literal(migraphx_instruction_t* out,
migraphx_module_t module,
const_migraphx_shape_t shape,
const char* buffer);
migraphx_status migraphx_module_add_parameter(migraphx_instruction_t* out,
migraphx_module_t module,
const char* name,
const_migraphx_shape_t shape);
migraphx_status migraphx_module_add_return(migraphx_instruction_t* out,
migraphx_module_t module,
migraphx_instructions_t args);
migraphx_status migraphx_program_destroy(migraphx_program_t program);
migraphx_status migraphx_program_assign_to(migraphx_program_t output,
const_migraphx_program_t input);
migraphx_status migraphx_program_create(migraphx_program_t* program);
migraphx_status migraphx_program_get_main_module(migraphx_module_t* out,
migraphx_program_t program);
migraphx_status migraphx_program_create_module(migraphx_module_t* out,
migraphx_program_t program,
const char* name);
migraphx_status migraphx_program_compile(migraphx_program_t program,
migraphx_target_t target,
migraphx_compile_options_t options);
......@@ -205,8 +307,14 @@ migraphx_status migraphx_program_run(migraphx_arguments_t* out,
migraphx_status
migraphx_program_equal(bool* out, const_migraphx_program_t program, const_migraphx_program_t x);
migraphx_status migraphx_program_experimental_get_context(migraphx_context_t* out,
const_migraphx_program_t program);
migraphx_status migraphx_operation_destroy(migraphx_operation_t operation);
migraphx_status migraphx_operation_assign_to(migraphx_operation_t output,
const_migraphx_operation_t input);
migraphx_status migraphx_operation_create(migraphx_operation_t* operation,
const char* name,
const char* attributes,
......@@ -222,6 +330,9 @@ migraphx_save(migraphx_program_t p, const char* name, migraphx_file_options_t op
migraphx_status migraphx_onnx_options_destroy(migraphx_onnx_options_t onnx_options);
migraphx_status migraphx_onnx_options_assign_to(migraphx_onnx_options_t output,
const_migraphx_onnx_options_t input);
migraphx_status migraphx_onnx_options_create(migraphx_onnx_options_t* onnx_options);
migraphx_status migraphx_onnx_options_set_input_parameter_shape(
......@@ -236,6 +347,9 @@ migraphx_onnx_options_set_default_loop_iterations(migraphx_onnx_options_t onnx_o
migraphx_status migraphx_file_options_destroy(migraphx_file_options_t file_options);
migraphx_status migraphx_file_options_assign_to(migraphx_file_options_t output,
const_migraphx_file_options_t input);
migraphx_status migraphx_file_options_create(migraphx_file_options_t* file_options);
migraphx_status migraphx_file_options_set_file_format(migraphx_file_options_t file_options,
......@@ -243,6 +357,9 @@ migraphx_status migraphx_file_options_set_file_format(migraphx_file_options_t fi
migraphx_status migraphx_compile_options_destroy(migraphx_compile_options_t compile_options);
migraphx_status migraphx_compile_options_assign_to(migraphx_compile_options_t output,
const_migraphx_compile_options_t input);
migraphx_status migraphx_compile_options_create(migraphx_compile_options_t* compile_options);
migraphx_status
......@@ -261,6 +378,9 @@ migraphx_status migraphx_parse_onnx_buffer(migraphx_program_t* out,
migraphx_status migraphx_tf_options_destroy(migraphx_tf_options_t tf_options);
migraphx_status migraphx_tf_options_assign_to(migraphx_tf_options_t output,
const_migraphx_tf_options_t input);
migraphx_status migraphx_tf_options_create(migraphx_tf_options_t* tf_options);
migraphx_status migraphx_tf_options_set_nhwc(migraphx_tf_options_t tf_options, bool is_nhwc);
......@@ -282,6 +402,9 @@ migraphx_parse_tf(migraphx_program_t* out, const char* name, migraphx_tf_options
migraphx_status migraphx_quantize_op_names_destroy(migraphx_quantize_op_names_t quantize_op_names);
migraphx_status migraphx_quantize_op_names_assign_to(migraphx_quantize_op_names_t output,
const_migraphx_quantize_op_names_t input);
migraphx_status migraphx_quantize_op_names_create(migraphx_quantize_op_names_t* quantize_op_names);
migraphx_status migraphx_quantize_op_names_add(migraphx_quantize_op_names_t quantize_op_names,
......@@ -295,6 +418,10 @@ migraphx_status migraphx_quantize_fp16(migraphx_program_t prog);
migraphx_status
migraphx_quantize_int8_options_destroy(migraphx_quantize_int8_options_t quantize_int8_options);
migraphx_status
migraphx_quantize_int8_options_assign_to(migraphx_quantize_int8_options_t output,
const_migraphx_quantize_int8_options_t input);
migraphx_status
migraphx_quantize_int8_options_create(migraphx_quantize_int8_options_t* quantize_int8_options);
......@@ -309,6 +436,30 @@ migraphx_status migraphx_quantize_int8(migraphx_program_t prog,
migraphx_target_t target,
migraphx_quantize_int8_options_t options);
migraphx_status migraphx_context_finish(const_migraphx_context_t context);
migraphx_status migraphx_context_get_queue(void** out, migraphx_context_t context);
migraphx_status
migraphx_experimental_custom_op_destroy(migraphx_experimental_custom_op_t experimental_custom_op);
migraphx_status
migraphx_experimental_custom_op_assign_to(migraphx_experimental_custom_op_t output,
const_migraphx_experimental_custom_op_t input);
migraphx_status
migraphx_experimental_custom_op_create(migraphx_experimental_custom_op_t* experimental_custom_op,
void* obj,
migraphx_experimental_custom_op_copy c,
migraphx_experimental_custom_op_delete d,
const char* name);
migraphx_status migraphx_experimental_custom_op_set_compute_shape(
migraphx_experimental_custom_op_t obj, migraphx_experimental_custom_op_compute_shape input);
migraphx_status
migraphx_experimental_custom_op_register(migraphx_experimental_custom_op_t experimental_custom_op);
#ifdef __cplusplus
}
#endif
......
......@@ -178,14 +178,58 @@ def shapes(h):
returns='const migraphx::shape&')
@api.handle('migraphx_instruction', 'migraphx::instruction_ref')
def instruction(h):
pass
@api.handle('migraphx_instructions', 'std::vector<migraphx::instruction_ref>')
def instructions(h):
h.constructor(
'create',
api.params(ptr='const_migraphx_instruction_t*', size='size_t'),
fname='migraphx::to_obj_vector<const_migraphx_instruction_t>')
@api.handle('migraphx_modules', 'std::vector<migraphx::module*>')
def modules(h):
h.constructor('create',
api.params(ptr='migraphx_module_t*', size='size_t'),
fname='migraphx::to_objptr_vector<migraphx::module*>')
@auto_handle(ref=True)
def module(h):
h.constructor('create', api.params(name='std::string'))
h.method('print', invoke='migraphx::print_module($@)', const=True)
h.method('add_instruction',
api.params(op='migraphx::operation',
args='std::vector<migraphx::instruction_ref>'),
returns='migraphx::instruction_ref')
h.method('add_instruction_with_mod_args',
api.params(op='migraphx::operation',
args='std::vector<migraphx::instruction_ref>',
module_refs='std::vector<migraphx::module*>'),
fname='add_instruction',
returns='migraphx::instruction_ref')
h.method('add_literal',
api.params(shape='const migraphx::shape&', buffer='const char*'),
returns='migraphx::instruction_ref')
h.method('add_parameter',
api.params(name='const char*', shape='const migraphx::shape&'),
returns='migraphx::instruction_ref')
h.method('add_return',
api.params(args='std::vector<migraphx::instruction_ref>'),
returns='migraphx::instruction_ref')
@auto_handle()
def program(h):
h.constructor('create')
h.method('get_main_module', returns='migraphx::module*')
h.method('create_module',
api.params(name='const char*'),
returns='migraphx::module*')
h.method(
'compile',
api.params(target='migraphx::target',
......@@ -207,6 +251,10 @@ def program(h):
invoke='migraphx::equal($@)',
returns='bool',
const=True)
h.method('experimental_get_context',
invoke='migraphx::get_context($@)',
const=True,
returns='migraphx::context')
@auto_handle()
......@@ -353,3 +401,19 @@ api.add_function('migraphx_quantize_int8',
target='migraphx::target',
options='migraphx::quantize_int8_options'),
fname='migraphx::quantize_int8_wrap')
@auto_handle(ref=True)
def context(h):
h.method('finish', const=True)
h.method('get_queue', returns='void*', fname='get_queue().unsafe_get')
@api.interface('migraphx_experimental_custom_op',
'migraphx::experimental_custom_op')
def experimental_custom_op(h):
h.constructor('create', api.params(name='const char*'))
h.virtual('compute_shape',
api.params(inputs='std::vector<migraphx::shape>'),
returns='migraphx::shape')
h.method('register', invoke='migraphx::register_custom_op($@)')
......@@ -29,7 +29,6 @@ void argument::assign_buffer(std::function<char*()> d)
// Collect all shapes
std::unordered_map<std::size_t, shape> shapes;
{
// cppcheck-suppress variableScope
std::size_t i = 0;
fix([&](auto self, auto ss) {
if(ss.sub_shapes().empty())
......@@ -60,7 +59,6 @@ void argument::assign_buffer(std::function<char*()> d)
}
assert(offset == s.bytes());
// cppcheck-suppress variableScope
std::size_t i = 0;
m_data = fix<data_t>([&](auto self, auto ss) {
data_t result;
......@@ -106,7 +104,11 @@ bool argument::empty() const { return not m_data.get and m_data.sub.empty(); }
const shape& argument::get_shape() const { return this->m_shape; }
argument argument::reshape(const shape& s) const { return {s, this->m_data}; }
argument argument::reshape(const shape& s) const
{
assert(s.element_space() <= this->get_shape().element_space());
return {s, this->m_data};
}
argument::data_t argument::data_t::share() const
{
......
......@@ -8,15 +8,42 @@
namespace migraphx {
inline namespace MIGRAPHX_INLINE_NS {
void auto_contiguous::apply(module& p) const
void auto_contiguous::apply(module& m) const
{
for(auto ins : iterator_for(p))
std::string key = "require_std_shape";
for(auto ins : reverse_iterator_for(m))
{
auto&& attr = ins->get_operator().attributes();
if((attr.get(key, false)))
{
auto args = ins->inputs();
auto new_args = args;
std::transform(args.begin(), args.end(), new_args.begin(), [&](auto in) {
if(in->name() == "contiguous")
{
return in;
}
return m.insert_instruction(ins, make_op("contiguous"), in);
});
if(new_args != args)
{
m.replace_instruction(ins, ins->get_operator(), new_args);
}
}
}
auto last = std::prev(m.end());
for(auto ins : iterator_for(m))
{
// for last instruction that is NOT a return
if(ins->outputs().empty() and ins != last)
continue;
shape s = ins->get_shape();
if(not s.standard() and s.elements() != 0)
{
auto c = p.insert_instruction(std::next(ins), make_op("contiguous"), ins);
p.replace_instruction(ins, c);
auto c = m.insert_instruction(std::next(ins), make_op("contiguous"), ins);
m.replace_instruction(ins, c);
}
}
}
......
......@@ -28,13 +28,20 @@ std::vector<char> src_compiler::compile(const std::vector<src_file>& srcs) const
{
params += " " + src.path.filename().string();
if(out.empty())
out = src.path.stem().string() + ".o";
out = src.path.stem().string() + out_ext;
}
}
params += " -o " + out;
td.execute(compiler, params);
if(not launcher.empty())
{
td.execute(launcher, compiler + " " + params);
}
else
{
td.execute(compiler, params);
}
auto out_path = td.path / out;
if(not fs::exists(out_path))
......