Commit 4a39a0f7, authored by Shucai Xiao

Merge branch 'develop' of github.com:ROCmSoftwarePlatform/AMDMIGraphX into add-conv_bn_add-test

parents 5564172e bb827865
......@@ -73,26 +73,22 @@ migraphx_shape_datatype_t to_shape_type(shape::type_t t)
target get_target(const std::string& name) { return make_target(name); }
migraphx::compile_options to_compile_options(const migraphx_compile_options& options)
{
migraphx::compile_options result{};
result.offload_copy = options.offload_copy;
result.fast_math = options.fast_math;
return result;
}
void set_offload_copy(compile_options& options, bool value) { options.offload_copy = value; }
migraphx::file_options to_file_options(const migraphx_file_options& options)
{
migraphx::file_options result{};
result.format = options.format;
return result;
}
void set_fast_math(compile_options& options, bool value) { options.fast_math = value; }
void set_file_format(file_options& options, const char* format) { options.format = format; }
void set_default_dim_value(onnx_options& options, size_t value)
{
options.default_dim_value = value;
}
void set_default_loop_iterations(onnx_options& options, int64_t value)
{
options.max_loop_iterations = value;
}
void set_nhwc(tf_options& options, bool is_nhwc) { options.is_nhwc = is_nhwc; }
void set_default_dim_value(tf_options& options, size_t value) { options.batch_size = value; }
......@@ -320,6 +316,26 @@ struct migraphx_onnx_options
migraphx::onnx_options object;
};
extern "C" struct migraphx_file_options;
// C-API handle type: wraps a migraphx::file_options object so it can be passed
// through the extern "C" interface as an opaque pointer.
struct migraphx_file_options
{
// Forward any constructor arguments to the wrapped C++ object.
template <class... Ts>
migraphx_file_options(Ts&&... xs) : object(std::forward<Ts>(xs)...)
{
}
migraphx::file_options object;
};
extern "C" struct migraphx_compile_options;
// C-API handle type: wraps a migraphx::compile_options object so it can be
// passed through the extern "C" interface as an opaque pointer.
struct migraphx_compile_options
{
// Forward any constructor arguments to the wrapped C++ object.
template <class... Ts>
migraphx_compile_options(Ts&&... xs) : object(std::forward<Ts>(xs)...)
{
}
migraphx::compile_options object;
};
extern "C" struct migraphx_tf_options;
struct migraphx_tf_options
{
......@@ -678,17 +694,16 @@ extern "C" migraphx_status migraphx_program_get_main_module(migraphx_module_t* o
extern "C" migraphx_status migraphx_program_compile(migraphx_program_t program,
migraphx_target_t target,
migraphx_compile_options* options)
migraphx_compile_options_t options)
{
return migraphx::try_([&] {
if(program == nullptr)
MIGRAPHX_THROW(migraphx_status_bad_param, "Bad parameter program: Null pointer");
if(target == nullptr)
MIGRAPHX_THROW(migraphx_status_bad_param, "Bad parameter target: Null pointer");
(program->object)
.compile((target->object),
(options == nullptr ? migraphx::compile_options{}
: migraphx::to_compile_options(*options)));
if(options == nullptr)
MIGRAPHX_THROW(migraphx_status_bad_param, "Bad parameter options: Null pointer");
(program->object).compile((target->object), (options->object));
});
}
......@@ -786,25 +801,24 @@ migraphx_operation_name(char* out, size_t out_size, migraphx_operation_t operati
}
extern "C" migraphx_status
migraphx_load(migraphx_program_t* out, const char* name, migraphx_file_options* options)
migraphx_load(migraphx_program_t* out, const char* name, migraphx_file_options_t options)
{
return migraphx::try_([&] {
*out = allocate<migraphx_program_t>(migraphx::load(
(name),
(options == nullptr ? migraphx::file_options{} : migraphx::to_file_options(*options))));
if(options == nullptr)
MIGRAPHX_THROW(migraphx_status_bad_param, "Bad parameter options: Null pointer");
*out = allocate<migraphx_program_t>(migraphx::load((name), (options->object)));
});
}
extern "C" migraphx_status
migraphx_save(migraphx_program_t p, const char* name, migraphx_file_options* options)
migraphx_save(migraphx_program_t p, const char* name, migraphx_file_options_t options)
{
return migraphx::try_([&] {
if(p == nullptr)
MIGRAPHX_THROW(migraphx_status_bad_param, "Bad parameter p: Null pointer");
migraphx::save(
(p->object),
(name),
(options == nullptr ? migraphx::file_options{} : migraphx::to_file_options(*options)));
if(options == nullptr)
MIGRAPHX_THROW(migraphx_status_bad_param, "Bad parameter options: Null pointer");
migraphx::save((p->object), (name), (options->object));
});
}
......@@ -843,6 +857,76 @@ migraphx_onnx_options_set_default_dim_value(migraphx_onnx_options_t onnx_options
});
}
// C-API entry point: sets the maximum loop iteration count on an onnx_options
// handle. Returns migraphx_status_bad_param for a null handle; other errors
// are converted to a status code by try_.
extern "C" migraphx_status
migraphx_onnx_options_set_default_loop_iterations(migraphx_onnx_options_t onnx_options,
int64_t value)
{
return migraphx::try_([&] {
if(onnx_options == nullptr)
MIGRAPHX_THROW(migraphx_status_bad_param, "Bad parameter onnx_options: Null pointer");
migraphx::set_default_loop_iterations((onnx_options->object), (value));
});
}
// C-API entry point: destroys a handle created by migraphx_file_options_create.
extern "C" migraphx_status migraphx_file_options_destroy(migraphx_file_options_t file_options)
{
return migraphx::try_([&] { destroy((file_options)); });
}
// C-API entry point: allocates a default-constructed migraphx::file_options and
// returns it through *file_options as an opaque handle owned by the caller.
extern "C" migraphx_status migraphx_file_options_create(migraphx_file_options_t* file_options)
{
return migraphx::try_([&] {
*file_options = object_cast<migraphx_file_options_t>(allocate<migraphx::file_options>());
});
}
// C-API entry point: sets the file format string on a file_options handle.
// Returns migraphx_status_bad_param for a null handle.
extern "C" migraphx_status
migraphx_file_options_set_file_format(migraphx_file_options_t file_options, const char* format)
{
return migraphx::try_([&] {
if(file_options == nullptr)
MIGRAPHX_THROW(migraphx_status_bad_param, "Bad parameter file_options: Null pointer");
migraphx::set_file_format((file_options->object), (format));
});
}
// C-API entry point: destroys a handle created by migraphx_compile_options_create.
extern "C" migraphx_status
migraphx_compile_options_destroy(migraphx_compile_options_t compile_options)
{
return migraphx::try_([&] { destroy((compile_options)); });
}
// C-API entry point: allocates a default-constructed migraphx::compile_options
// and returns it through *compile_options as an opaque handle owned by the caller.
extern "C" migraphx_status
migraphx_compile_options_create(migraphx_compile_options_t* compile_options)
{
return migraphx::try_([&] {
*compile_options =
object_cast<migraphx_compile_options_t>(allocate<migraphx::compile_options>());
});
}
// C-API entry point: sets the offload_copy flag on a compile_options handle.
// Returns migraphx_status_bad_param for a null handle.
extern "C" migraphx_status
migraphx_compile_options_set_offload_copy(migraphx_compile_options_t compile_options, bool value)
{
return migraphx::try_([&] {
if(compile_options == nullptr)
MIGRAPHX_THROW(migraphx_status_bad_param,
"Bad parameter compile_options: Null pointer");
migraphx::set_offload_copy((compile_options->object), (value));
});
}
// C-API entry point: sets the fast_math flag on a compile_options handle.
// Returns migraphx_status_bad_param for a null handle.
extern "C" migraphx_status
migraphx_compile_options_set_fast_math(migraphx_compile_options_t compile_options, bool value)
{
return migraphx::try_([&] {
if(compile_options == nullptr)
MIGRAPHX_THROW(migraphx_status_bad_param,
"Bad parameter compile_options: Null pointer");
migraphx::set_fast_math((compile_options->object), (value));
});
}
extern "C" migraphx_status
migraphx_parse_onnx(migraphx_program_t* out, const char* name, migraphx_onnx_options_t options)
{
......
......@@ -41,26 +41,6 @@ typedef enum {
} migraphx_shape_datatype_t;
#undef MIGRAPHX_SHAPE_GENERATE_ENUM_TYPES
/// Options to be passed when compiling
typedef struct
{
/// For targets with offloaded memory(such as the gpu), this will insert
/// instructions during compilation to copy the input parameters to the
/// offloaded memory and to copy the final result from the offloaded
/// memory back to main memory.
bool offload_copy;
/// Optimize math functions to use faster approximate versions. There may
/// be slight accuracy degredation when enabled.
bool fast_math;
} migraphx_compile_options;
/// Options for saving and loading files
typedef struct
{
/// Format to be used for file. It can either be json or msgpack
const char* format;
} migraphx_file_options;
typedef struct migraphx_shape* migraphx_shape_t;
typedef const struct migraphx_shape* const_migraphx_shape_t;
......@@ -94,6 +74,12 @@ typedef const struct migraphx_operation* const_migraphx_operation_t;
typedef struct migraphx_onnx_options* migraphx_onnx_options_t;
typedef const struct migraphx_onnx_options* const_migraphx_onnx_options_t;
typedef struct migraphx_file_options* migraphx_file_options_t;
typedef const struct migraphx_file_options* const_migraphx_file_options_t;
typedef struct migraphx_compile_options* migraphx_compile_options_t;
typedef const struct migraphx_compile_options* const_migraphx_compile_options_t;
typedef struct migraphx_tf_options* migraphx_tf_options_t;
typedef const struct migraphx_tf_options* const_migraphx_tf_options_t;
......@@ -200,7 +186,7 @@ migraphx_status migraphx_program_get_main_module(migraphx_module_t* out,
migraphx_status migraphx_program_compile(migraphx_program_t program,
migraphx_target_t target,
migraphx_compile_options* options);
migraphx_compile_options_t options);
migraphx_status migraphx_program_get_parameter_shapes(migraphx_program_parameter_shapes_t* out,
migraphx_program_t program);
......@@ -228,10 +214,10 @@ migraphx_status migraphx_operation_create(migraphx_operation_t* operation,
migraphx_status migraphx_operation_name(char* out, size_t out_size, migraphx_operation_t operation);
migraphx_status
migraphx_load(migraphx_program_t* out, const char* name, migraphx_file_options* options);
migraphx_load(migraphx_program_t* out, const char* name, migraphx_file_options_t options);
migraphx_status
migraphx_save(migraphx_program_t p, const char* name, migraphx_file_options* options);
migraphx_save(migraphx_program_t p, const char* name, migraphx_file_options_t options);
migraphx_status migraphx_onnx_options_destroy(migraphx_onnx_options_t onnx_options);
......@@ -243,6 +229,27 @@ migraphx_status migraphx_onnx_options_set_input_parameter_shape(
migraphx_status migraphx_onnx_options_set_default_dim_value(migraphx_onnx_options_t onnx_options,
size_t value);
migraphx_status
migraphx_onnx_options_set_default_loop_iterations(migraphx_onnx_options_t onnx_options,
int64_t value);
migraphx_status migraphx_file_options_destroy(migraphx_file_options_t file_options);
migraphx_status migraphx_file_options_create(migraphx_file_options_t* file_options);
migraphx_status migraphx_file_options_set_file_format(migraphx_file_options_t file_options,
const char* format);
migraphx_status migraphx_compile_options_destroy(migraphx_compile_options_t compile_options);
migraphx_status migraphx_compile_options_create(migraphx_compile_options_t* compile_options);
migraphx_status
migraphx_compile_options_set_offload_copy(migraphx_compile_options_t compile_options, bool value);
migraphx_status migraphx_compile_options_set_fast_math(migraphx_compile_options_t compile_options,
bool value);
migraphx_status
migraphx_parse_onnx(migraphx_program_t* out, const char* name, migraphx_onnx_options_t options);
......
......@@ -494,6 +494,29 @@ struct module
void print() const { call(&migraphx_module_print, mm); }
};
/// Options to be passed when compiling a program; owns a C-API
/// migraphx_compile_options handle.
struct compile_options : MIGRAPHX_HANDLE_BASE(compile_options)
{
/// Create an owned handle with default option values
compile_options() { this->make_handle(&migraphx_compile_options_create); }
/// Take ownership of an existing C-API handle
compile_options(migraphx_compile_options* p, own) { this->set_handle(p, own()); }
/// For targets with offloaded memory(such as the gpu), this will insert
/// instructions during compilation to copy the input parameters to the
/// offloaded memory and to copy the final result from the offloaded
/// memory back to main memory.
void set_offload_copy(bool value = true)
{
call(&migraphx_compile_options_set_offload_copy, this->get_handle_ptr(), value);
}
/// Optimize math functions to use faster approximate versions. There may
/// be slight accuracy degradation when enabled.
void set_fast_math(bool value = true)
{
call(&migraphx_compile_options_set_fast_math, this->get_handle_ptr(), value);
}
};
/// A program represents the all computation graphs to be compiled and executed
struct program : MIGRAPHX_HANDLE_BASE(program)
{
......@@ -504,16 +527,21 @@ struct program : MIGRAPHX_HANDLE_BASE(program)
program(migraphx_program* p, borrow) { this->set_handle(p, borrow{}); }
/// Compile the program for a specific target to be ran on
void compile(const target& ptarget, migraphx_compile_options poptions) const
void compile(const target& ptarget, const compile_options& poptions) const
{
call(
&migraphx_program_compile, this->get_handle_ptr(), ptarget.get_handle_ptr(), &poptions);
call(&migraphx_program_compile,
this->get_handle_ptr(),
ptarget.get_handle_ptr(),
poptions.get_handle_ptr());
}
/// Compile the program for a specific target to be ran on
void compile(const target& ptarget) const
{
call(&migraphx_program_compile, this->get_handle_ptr(), ptarget.get_handle_ptr(), nullptr);
call(&migraphx_program_compile,
this->get_handle_ptr(),
ptarget.get_handle_ptr(),
migraphx::compile_options{}.get_handle_ptr());
}
/// Return the shapes for the input parameters
......@@ -584,28 +612,45 @@ struct operation : MIGRAPHX_HANDLE_BASE(operation)
}
};
// Options for the migraphx file format used when saving/loading programs;
// owns a C-API migraphx_file_options handle.
struct file_options : MIGRAPHX_HANDLE_BASE(file_options)
{
// Create an owned handle with default option values
file_options() { this->make_handle(&migraphx_file_options_create); }
// Take ownership of an existing C-API handle
file_options(migraphx_file_options* p, own) { this->set_handle(p, own()); }
// Set the file format (the C header documents "json" and "msgpack" as valid)
void set_file_format(const char* format)
{
call(&migraphx_file_options_set_file_format, this->get_handle_ptr(), format);
}
};
/// Load a saved migraphx program from a file
inline program load(const char* filename, migraphx_file_options options)
inline program load(const char* filename, const file_options& options)
{
return program(make<migraphx_program>(&migraphx_load, filename, &options), own{});
return program(make<migraphx_program>(&migraphx_load, filename, options.get_handle_ptr()),
own{});
}
/// Load a saved migraphx program from a file
inline program load(const char* filename)
{
return program(make<migraphx_program>(&migraphx_load, filename, nullptr), own{});
return program(
make<migraphx_program>(&migraphx_load, filename, migraphx::file_options{}.get_handle_ptr()),
own{});
}
/// Save a program to a file
inline void save(const program& p, const char* filename, migraphx_file_options options)
inline void save(const program& p, const char* filename, const file_options& options)
{
call(&migraphx_save, p.get_handle_ptr(), filename, &options);
call(&migraphx_save, p.get_handle_ptr(), filename, options.get_handle_ptr());
}
/// Save a program to a file
inline void save(const program& p, const char* filename)
{
call(&migraphx_save, p.get_handle_ptr(), filename, nullptr);
call(&migraphx_save, p.get_handle_ptr(), filename, migraphx::file_options{}.get_handle_ptr());
}
/// Options for parsing onnx options
......@@ -630,6 +675,12 @@ struct onnx_options : MIGRAPHX_HANDLE_BASE(onnx_options)
{
call(&migraphx_onnx_options_set_default_dim_value, this->get_handle_ptr(), value);
}
/// Set the default maximum number of iterations for the onnx loop operator
void set_default_loop_iterations(int64_t value)
{
call(&migraphx_onnx_options_set_default_loop_iterations, this->get_handle_ptr(), value);
}
};
/// Parse an onnx file into a migraphx program
......
......@@ -243,6 +243,30 @@ def onnx_options(h):
api.params(value='size_t'),
invoke='migraphx::set_default_dim_value($@)',
)
h.method(
'set_default_loop_iterations',
api.params(value='int64_t'),
invoke='migraphx::set_default_loop_iterations($@)',
)
@auto_handle()
def file_options(h):
h.constructor('create')
h.method('set_file_format',
api.params(format='const char*'),
invoke='migraphx::set_file_format($@)')
@auto_handle()
def compile_options(h):
h.constructor('create')
h.method('set_offload_copy',
api.params(value='bool'),
invoke='migraphx::set_offload_copy($@)')
h.method('set_fast_math',
api.params(value='bool'),
invoke='migraphx::set_fast_math($@)')
api.add_function('migraphx_parse_onnx',
......
#include <migraphx/instruction.hpp>
#include <migraphx/make_op.hpp>
#include <migraphx/common.hpp>
#include <migraphx/apply_alpha_beta.hpp>
namespace migraphx {
inline namespace MIGRAPHX_INLINE_NS {
// Inserts instructions computing op(alpha * args[0], args[1]) before pos and,
// when a third argument is present, adds beta * args[2] to that result.
// Returns the last inserted instruction. The alpha/beta multiplies are elided
// when they would be no-ops (alpha == 1, beta == 0, or args[2] is empty).
// Fix: the function mixed `!`/`&&` with `not`/`and`; normalized to the
// `not`/`and` spelling used everywhere else in this file.
instruction_ref insert_apply_alpha_beta(module& m,
                                        instruction_ref pos,
                                        const std::vector<instruction_ref>& args,
                                        const operation& op,
                                        const literal& alpha,
                                        const literal& beta)
{
    auto a          = args[0];
    auto b          = args[1];
    auto input_type = a->get_shape().type();
    // Multiply A by alpha unless alpha == 1, which would be a no-op.
    if(not float_equal(alpha.at<float>(0), 1.0))
    {
        auto alpha_literal = m.add_literal(alpha);
        a = insert_common_op(m, pos, migraphx::make_op("mul"), {alpha_literal, a});
        // insert_common_op may promote the type; convert back so the op sees
        // the original input type.
        if(a->get_shape().type() != input_type)
        {
            a = m.insert_instruction(pos, make_op("convert", {{"target_type", input_type}}), a);
        }
    }
    auto op_res = m.insert_instruction(pos, op, a, b);
    if(args.size() == 3)
    {
        // beta == 0 or an empty C tensor contributes nothing, so skip the add.
        if(not float_equal(beta.at<float>(0), 0.0) and args[2]->get_shape().elements() > 0)
        {
            auto out_lens = op_res->get_shape().lens();
            auto c        = args[2];
            auto c_lens   = c->get_shape().lens();
            input_type    = c->get_shape().type();
            // Broadcast C up to the op result dims when they differ.
            if(out_lens != c_lens)
            {
                c = m.insert_instruction(
                    pos, migraphx::make_op("multibroadcast", {{"out_lens", out_lens}}), args[2]);
            }
            auto beta_literal = m.add_literal(beta);
            auto beta_c = insert_common_op(m, pos, migraphx::make_op("mul"), {c, beta_literal});
            // Convert the scaled C back to its original type if it was promoted.
            if(beta_c->get_shape().type() != input_type)
            {
                beta_c = m.insert_instruction(
                    pos, migraphx::make_op("convert", {{"target_type", input_type}}), beta_c);
            }
            return m.insert_instruction(pos, migraphx::make_op("add"), op_res, beta_c);
        }
    }
    return op_res;
}
} // namespace MIGRAPHX_INLINE_NS
} // namespace migraphx
......@@ -8,7 +8,7 @@ inline namespace MIGRAPHX_INLINE_NS {
argument::argument(const shape& s) : m_shape(s)
{
auto buffer = make_shared_array<char>(s.bytes());
m_data = {[=]() mutable { return buffer.get(); }};
assign_buffer({[=]() mutable { return buffer.get(); }});
}
argument::argument(shape s, std::nullptr_t)
......@@ -18,10 +18,14 @@ argument::argument(shape s, std::nullptr_t)
argument::argument(const shape& s, const argument::data_t& d) : m_shape(s), m_data(d) {}
argument argument::load(const shape& s, char* buffer)
void argument::assign_buffer(std::function<char*()> d)
{
const shape& s = m_shape;
if(s.type() != shape::tuple_type)
return argument{s, buffer};
{
m_data = {std::move(d)};
return;
}
// Collect all shapes
std::unordered_map<std::size_t, shape> shapes;
{
......@@ -58,19 +62,22 @@ argument argument::load(const shape& s, char* buffer)
// cppcheck-suppress variableScope
std::size_t i = 0;
return fix<argument>([&](auto self, auto ss) {
m_data = fix<data_t>([&](auto self, auto ss) {
data_t result;
if(ss.sub_shapes().empty())
{
argument r{shapes[i], buffer + offsets[i]};
auto n = offsets[i];
result = {[d, n]() mutable { return d() + n; }};
i++;
return r;
return result;
}
std::vector<argument> subs;
std::vector<data_t> subs;
std::transform(ss.sub_shapes().begin(),
ss.sub_shapes().end(),
std::back_inserter(subs),
[&](auto child) { return self(child); });
return argument{subs};
result.sub = subs;
return result;
})(s);
}
......@@ -124,6 +131,14 @@ argument::data_t argument::data_t::from_args(const std::vector<argument>& args)
return result;
}
argument argument::copy() const
{
argument result{this->get_shape()};
auto* src = this->data();
std::copy(src, src + this->get_shape().bytes(), result.data());
return result;
}
argument argument::share() const { return {m_shape, m_data.share()}; }
std::vector<argument> argument::get_sub_objects() const
......
#include <migraphx/common.hpp>
#include <migraphx/module.hpp>
#include <migraphx/make_op.hpp>
#include <migraphx/algorithm.hpp>
#include <migraphx/stringutils.hpp>
#include <migraphx/instruction.hpp>
namespace migraphx {
inline namespace MIGRAPHX_INLINE_NS {
// Example:
// s0 = (3,2,4,5) and s1 = (2,1,1)
//
// In this case we need to broadcast (:,1,1) portion of
// s1 plus broadcast the 1st dimension of s1
// giving output_lens = (3,2,4,5)
//
// Another example:
// s0 = (3,2,1,5) and s1 = (2,7,5)
// In this case we need to broadcast the (:,:,1:,:) axis
// of s0 plus the 1st dimension of s1 giving
// output_lens = (3,2,7,5)
// Computes the broadcasted dimensions of two dimension vectors: the shorter
// vector is right-aligned against the longer one and each aligned pair must
// either match or contain a 1; the result takes the max of each pair.
// Throws when a pair of aligned dimensions is incompatible.
std::vector<std::size_t> compute_broadcasted_lens(std::vector<std::size_t> s0,
                                                  std::vector<std::size_t> s1)
{
    if(s0 == s1)
        return s0;
    // Make s0 the shorter (or equal-length) vector so it aligns to s1's tail.
    if(s0.size() > s1.size())
        s0.swap(s1);
    const auto offset             = s1.size() - s0.size();
    std::vector<std::size_t> out_lens = s1;
    for(std::size_t i = 0; i < s0.size(); i++)
    {
        const auto a = s0[i];
        const auto b = s1[i + offset];
        if(a != b and a != 1 and b != 1)
        {
            MIGRAPHX_THROW("COMPUTE_BROADCASTLEN: shape {" + to_string_range(s0) + "} and {" +
                           to_string_range(s1) + "} mismatch!");
        }
        out_lens[i + offset] = std::max(a, b);
    }
    return out_lens;
}
// Folds compute_broadcasted_lens across the dims of every shape in the list to
// obtain the common broadcasted dims. Requires a non-empty list.
std::vector<std::size_t> compute_common_lens(const std::vector<shape>& shapes)
{
assert(not shapes.empty());
return transform_accumulate(shapes.begin() + 1,
shapes.end(),
shapes.front().lens(),
&compute_broadcasted_lens,
[](auto s) { return s.lens(); });
}
// Computes the promoted type of two shape types by visiting both, mapping each
// to its underlying C++ type and taking std::common_type of the pair.
shape::type_t compute_common_type(shape::type_t t1, shape::type_t t2)
{
if(t1 == t2)
return t1;
shape::type_t result;
shape::visit(t1, [&](auto x) {
shape::visit(t2, [&](auto y) {
// Workaround broken warning on gcc 5
(void)x;
(void)y;
using type = std::common_type_t<decltype(x()), decltype(y())>;
result = shape::get_type<type>{};
});
});
return result;
}
// Folds compute_common_type across the types of every shape in the list.
// Requires a non-empty list.
shape::type_t compute_common_types(const std::vector<shape>& shapes)
{
assert(not shapes.empty());
return transform_accumulate(
shapes.begin() + 1, shapes.end(), shapes.front().type(), &compute_common_type, [&](auto s) {
return s.type();
});
}
// Computes the common shape (promoted type + broadcasted dims) for a list of
// shapes; an empty list yields a default-constructed shape.
shape common_shape(const std::vector<shape>& shapes)
{
    if(not shapes.empty())
        return {compute_common_types(shapes), compute_common_lens(shapes)};
    return {};
}
// Inserts op before ins after first broadcasting and converting every input to
// the common shape/type of all inputs, so elementwise ops can accept mixed
// shapes and datatypes. Returns the inserted op instruction.
instruction_ref insert_common_op(module& m,
instruction_ref ins,
const operation& op,
std::vector<instruction_ref> inputs)
{
auto common = common_shape(to_shapes(inputs));
std::transform(inputs.begin(), inputs.end(), inputs.begin(), [&](auto input) {
// Broadcast inputs whose dims differ from the common dims.
if(input->get_shape().lens() != common.lens())
{
input = m.insert_instruction(
ins, make_op("multibroadcast", {{"out_lens", common.lens()}}), input);
}
// Convert inputs whose type differs from the common type.
if(input->get_shape().type() != common.type())
{
input = m.insert_instruction(
ins, make_op("convert", {{"target_type", common.type()}}), input);
}
return input;
});
return m.insert_instruction(ins, op, inputs);
}
// Appends op at the end of the module with the same common-shape/type handling
// as insert_common_op.
instruction_ref add_common_op(module& m, const operation& op, std::vector<instruction_ref> inputs)
{
return insert_common_op(m, m.end(), op, std::move(inputs));
}
} // namespace MIGRAPHX_INLINE_NS
} // namespace migraphx
......@@ -32,7 +32,7 @@ std::vector<char> src_compiler::compile(const std::vector<src_file>& srcs) const
}
}
params += " -o" + out;
params += " -o " + out;
td.execute(compiler, params);
......
......@@ -6,6 +6,7 @@
#include <migraphx/errors.hpp>
#include <migraphx/ranges.hpp>
#include <migraphx/convert_to_json.hpp>
#include <migraphx/stringutils.hpp>
namespace migraphx {
inline namespace MIGRAPHX_INLINE_NS {
......@@ -77,6 +78,18 @@ std::vector<token> json_tokenize(const std::string& s)
return ++start;
});
// Line comments
lexers.push_back([](const char* start, const char* end) {
if(*start == '#')
start++;
else if((start + 1) < end and start[0] == '/' and start[1] == '/')
start += 2;
else
return start;
return std::find_if(start, end, [&](char c) { return c == '\n'; });
});
// Whitespace
lexers.push_back(lex_while(&isspace));
// Punctation
......@@ -98,6 +111,8 @@ std::string convert_to_json(const std::string& str)
for(auto& token : tokens)
{
std::string s(token.first, token.second);
if(starts_with(s, "#") or starts_with(s, "//"))
continue;
if(std::isalpha(s.front()) != 0 and
not contains({"null", "nan", "true", "false", "inf"}, s))
{
......
......@@ -60,7 +60,8 @@ void dead_code_elimination::apply(module& m) const
leaf->clear_arguments();
assert(bidistance(m, last, leaf) < 0);
assert(leaf != ins);
m.move_instruction(leaf, m.end());
if(leaf->name() != "@param")
m.move_instruction(leaf, m.end());
for(auto arg : args)
self(arg);
}
......
#include <migraphx/decompose.hpp>
#include <migraphx/program.hpp>
#include <migraphx/instruction.hpp>
#include <migraphx/iterator_for.hpp>
#include <migraphx/functional.hpp>
#include <migraphx/ranges.hpp>
#include <migraphx/float_equal.hpp>
#include <migraphx/matcher.hpp>
#include <migraphx/op/dot.hpp>
#include <migraphx/make_op.hpp>
namespace migraphx {
inline namespace MIGRAPHX_INLINE_NS {
namespace {
// Holds the alpha/beta scaling attributes read from a dot operation's value.
struct alpha_beta
{
float alpha = 0.0;
float beta = 0.0;
};
// Reads the alpha and beta attributes out of an operation's value form.
alpha_beta get_alpha_beta(const operation& op)
{
    const auto val   = op.to_value();
    const auto alpha = val.at("alpha").to<float>();
    const auto beta  = val.at("beta").to<float>();
    return {alpha, beta};
}
// Matcher pass: rewrites a 3-argument dot/quant_dot into explicit instructions
// computing dot(alpha * A, B) + beta * C, so the dot itself runs with beta=0.
struct find_dot_add
{
auto matcher() const { return match::name("dot", "quant_dot")(match::nargs(3)); }
void apply(module& p, const match::matcher_result& r) const
{
auto ins = r.result;
auto dot = get_alpha_beta(ins->get_operator());
auto a_ins = ins->inputs()[0];
auto b_ins = ins->inputs()[1];
// Multiply A by alpha only when alpha != 1 (multiplying by 1 is a no-op).
if(not float_equal(dot.alpha, 1))
{
auto alpha = p.add_literal(literal{shape{a_ins->get_shape().type()}, {dot.alpha}});
// NOTE(review): sibling code in this change uses the key "out_lens" for
// multibroadcast; confirm "output_lens" is the key accepted here.
auto alpha_broadcast = p.insert_instruction(
ins,
make_op("multibroadcast", {{"output_lens", a_ins->get_shape().lens()}}),
alpha);
a_ins = p.insert_instruction(ins, make_op("mul"), a_ins, alpha_broadcast);
}
auto dot_ins = p.insert_instruction(ins, make_op(ins->name(), {{"beta", 0}}), a_ins, b_ins);
auto c_ins = ins->inputs()[2];
// Multiply C by beta only when beta != 1.
if(not float_equal(dot.beta, 1))
{
auto beta = p.add_literal(literal{shape{c_ins->get_shape().type()}, {dot.beta}});
auto beta_broadcast = p.insert_instruction(
ins, make_op("multibroadcast", {{"output_lens", ins->get_shape().lens()}}), beta);
c_ins = p.insert_instruction(ins, make_op("mul"), c_ins, beta_broadcast);
}
p.replace_instruction(ins, make_op("add"), dot_ins, c_ins);
}
};
// Matcher pass: rewrites a 2-argument dot/quant_dot so alpha scaling becomes an
// explicit mul instruction and the dot itself runs with beta=0.
struct find_dot_alpha
{
auto matcher() const { return match::name("dot", "quant_dot")(match::nargs(2)); }
void apply(module& p, const match::matcher_result& r) const
{
auto ins = r.result;
auto dot = get_alpha_beta(ins->get_operator());
auto a_ins = ins->inputs()[0];
auto b_ins = ins->inputs()[1];
// Multiply A by alpha only when alpha != 1 (multiplying by 1 is a no-op).
if(not float_equal(dot.alpha, 1))
{
auto alpha = p.add_literal(literal{shape{a_ins->get_shape().type()}, {dot.alpha}});
// NOTE(review): sibling code in this change uses the key "out_lens" for
// multibroadcast; confirm "output_lens" is the key accepted here.
auto alpha_broadcast = p.insert_instruction(
ins,
make_op("multibroadcast", {{"output_lens", a_ins->get_shape().lens()}}),
alpha);
a_ins = p.insert_instruction(ins, make_op("mul"), a_ins, alpha_broadcast);
}
p.replace_instruction(ins, make_op(ins->name(), {{"beta", 0}}), a_ins, b_ins);
}
};
} // namespace
void decompose::apply(module& p) const { match::find_matches(p, find_dot_add{}, find_dot_alpha{}); }
} // namespace MIGRAPHX_INLINE_NS
} // namespace migraphx
#include <migraphx/dom_info.hpp>
#include <migraphx/program.hpp>
#include <migraphx/iterator_for.hpp>
#include <migraphx/erase.hpp>
#include <migraphx/ranges.hpp>
#include <unordered_set>
namespace migraphx {
inline namespace MIGRAPHX_INLINE_NS {
// Returns true when ins1 strictly dominates ins2: ins1 != ins2 and ins1 is
// reached by repeatedly following the immediate-dominator chain from ins2.
bool dominator_info::strictly_dominate(instruction_ref ins1, instruction_ref ins2)
{
    if(ins1 == ins2)
        return false;
    for(auto iter = ins2idom.find(ins2); iter != ins2idom.end();
        iter      = ins2idom.find(iter->second))
    {
        if(ins1 == iter->second)
            return true;
        // The idom chain must make progress or the walk would never terminate.
        assert(iter != ins2idom.find(iter->second));
    }
    return false;
}
// Adapter that lets compute_dominator_generic walk a module: the nodes are the
// module's instructions, and a node's children are its input instructions.
struct module_visitor
{
module* mm;
module& get_nodes() const { return *mm; }
const std::vector<instruction_ref>& get_children(instruction_ref ins) { return ins->inputs(); }
};
// Computes immediate-dominator information for the graph described by the
// visitor. For each node it maintains the full set of dominators, derived by
// intersecting the dominator sets of the node's children, then selects the
// member that strictly dominates no other member as the immediate dominator.
template <class Visitor>
dominator_info compute_dominator_generic(Visitor v)
{
dominator_info info;
// Full dominator set per instruction (every node dominates itself).
std::unordered_map<instruction_ref, std::unordered_set<instruction_ref>> instr2_doms;
for(instruction_ref ins : iterator_for(v.get_nodes()))
{
const std::vector<instruction_ref>& children = v.get_children(ins);
if(children.size() == 1)
{
// One child: that child is trivially the immediate dominator.
info.ins2idom[ins] = children.front();
instr2_doms[ins].insert(children.front());
}
else if(children.size() > 1)
{
// Dominators of ins = intersection of all children's dominator sets.
auto&& doms = instr2_doms[ins];
doms = instr2_doms[children.front()];
std::for_each(children.begin() + 1, children.end(), [&](instruction_ref child) {
auto&& child_doms = instr2_doms[child];
erase_if(doms, [&](auto x) { return not contains(child_doms, x); });
});
// Pick the member of doms that strictly dominates no other member;
// by the code's selection rule this is the immediate dominator.
auto iter = std::find_if(doms.begin(), doms.end(), [&](auto dom1) {
return std::none_of(doms.begin(), doms.end(), [&](auto dom2) {
if(dom1 == dom2)
return false;
return info.strictly_dominate(dom1, dom2);
});
});
if(iter != doms.end())
info.ins2idom[ins] = *iter;
}
instr2_doms[ins].insert(ins);
}
return info;
}
// Builds dominator information for all instructions in module m.
dominator_info compute_dominator(module& m)
{
return compute_dominator_generic(module_visitor{&m});
}
} // namespace MIGRAPHX_INLINE_NS
} // namespace migraphx
#include <migraphx/operators.hpp>
#include <migraphx/program.hpp>
#include <migraphx/generate.hpp>
#include <migraphx/apply_alpha_beta.hpp>
#include "models.hpp"
namespace migraphx {
......@@ -144,10 +145,10 @@ migraphx::program alexnet(unsigned batch) // NOLINT(readability-function-size)
migraphx::op::multibroadcast multibroadcast42;
multibroadcast42.output_lens = {batch, 4096};
auto mx42 = mm->add_instruction(multibroadcast42, mx4);
migraphx::op::dot dot43;
dot43.alpha = 1;
dot43.beta = 1;
auto mx43 = mm->add_instruction(dot43, mx40, mx41, mx42);
float dot43_alpha = 1;
float dot43_beta = 1;
auto mx43 = migraphx::add_apply_alpha_beta(
*mm, {mx40, mx41, mx42}, migraphx::make_op("dot"), dot43_alpha, dot43_beta);
migraphx::op::relu relu44;
auto mx44 = mm->add_instruction(relu44, mx43);
migraphx::op::identity identity45;
......@@ -158,10 +159,10 @@ migraphx::program alexnet(unsigned batch) // NOLINT(readability-function-size)
migraphx::op::multibroadcast multibroadcast47;
multibroadcast47.output_lens = {batch, 4096};
auto mx47 = mm->add_instruction(multibroadcast47, mx2);
migraphx::op::dot dot48;
dot48.alpha = 1;
dot48.beta = 1;
auto mx48 = mm->add_instruction(dot48, mx45, mx46, mx47);
float dot48_alpha = 1;
float dot48_beta = 1;
auto mx48 = migraphx::add_apply_alpha_beta(
*mm, {mx45, mx46, mx47}, migraphx::make_op("dot"), dot48_alpha, dot48_beta);
migraphx::op::relu relu49;
auto mx49 = mm->add_instruction(relu49, mx48);
migraphx::op::transpose transpose50;
......@@ -170,10 +171,10 @@ migraphx::program alexnet(unsigned batch) // NOLINT(readability-function-size)
migraphx::op::multibroadcast multibroadcast51;
multibroadcast51.output_lens = {batch, 1000};
auto mx51 = mm->add_instruction(multibroadcast51, mx0);
migraphx::op::dot dot52;
dot52.alpha = 1;
dot52.beta = 1;
mm->add_instruction(dot52, mx49, mx50, mx51);
float dot52_alpha = 1;
float dot52_beta = 1;
migraphx::add_apply_alpha_beta(
*mm, {mx49, mx50, mx51}, migraphx::make_op("dot"), dot52_alpha, dot52_beta);
return p;
}
......
#include <migraphx/operators.hpp>
#include <migraphx/program.hpp>
#include <migraphx/generate.hpp>
#include <migraphx/apply_alpha_beta.hpp>
#include "models.hpp"
namespace migraphx {
......@@ -2225,10 +2226,10 @@ migraphx::program inceptionv3(unsigned batch) // NOLINT(readability-function-siz
migraphx::op::multibroadcast multibroadcast798;
multibroadcast798.output_lens = {batch, 1000};
auto mx798 = mm->add_instruction(multibroadcast798, mx0);
migraphx::op::dot dot799;
dot799.alpha = 1;
dot799.beta = 1;
mm->add_instruction(dot799, mx796, mx797, mx798);
float dot799_alpha = 1;
float dot799_beta = 1;
migraphx::add_apply_alpha_beta(
*mm, {mx796, mx797, mx798}, migraphx::make_op("dot"), dot799_alpha, dot799_beta);
return p;
}
......
......@@ -503,6 +503,26 @@ struct op : command<op>
}
};
// Driver "onnx" subcommand. With --list/-l it prints every onnx operator name
// reported by get_onnx_operators(), one per line; otherwise it does nothing.
struct onnx : command<onnx>
{
bool show_ops = false;
void parse(argument_parser& ap)
{
ap(show_ops,
{"--list", "-l"},
ap.help("List all onnx operators supported by MIGraphX"),
ap.set_value(true));
}
void run() const
{
// No-op unless --list was given.
if(show_ops)
{
for(const auto& name : get_onnx_operators())
std::cout << name << std::endl;
}
}
};
struct main_command
{
static std::string get_command_help()
......
#include <migraphx/operators.hpp>
#include <migraphx/program.hpp>
#include <migraphx/generate.hpp>
#include <migraphx/apply_alpha_beta.hpp>
#include "models.hpp"
namespace migraphx {
......@@ -1228,10 +1229,10 @@ migraphx::program resnet50(unsigned batch) // NOLINT(readability-function-size)
migraphx::op::multibroadcast multibroadcast442;
multibroadcast442.output_lens = {batch, 1000};
auto mx442 = mm->add_instruction(multibroadcast442, mx0);
migraphx::op::dot dot443;
dot443.alpha = 1;
dot443.beta = 1;
mm->add_instruction(dot443, mx440, mx441, mx442);
float dot443_alpha = 1;
float dot443_beta = 1;
migraphx::add_apply_alpha_beta(
*mm, {mx440, mx441, mx442}, migraphx::make_op("dot"), dot443_alpha, dot443_beta);
return p;
}
......
......@@ -3,17 +3,20 @@
#include <migraphx/iterator_for.hpp>
#include <migraphx/make_op.hpp>
#include <migraphx/instruction.hpp>
#include <migraphx/ranges.hpp>
namespace migraphx {
inline namespace MIGRAPHX_INLINE_NS {
void eliminate_data_type::apply(module& m) const
{
static const std::vector<std::string> skip_op_names = {
"convert", "get_tuple_elem", "if", "loop"};
for(auto ins : iterator_for(m))
{
if(ins->name()[0] == '@')
continue;
if(ins->name() == "convert")
if(contains(skip_op_names, ins->name()))
continue;
auto inputs = ins->inputs();
std::transform(inputs.begin(), inputs.end(), inputs.begin(), [&](auto i) {
......
......@@ -5,76 +5,86 @@
#include <migraphx/op/im2col.hpp>
#include <migraphx/op/pooling.hpp>
#include <migraphx/op/pad.hpp>
#include <migraphx/make_op.hpp>
#include <migraphx/iterator_for.hpp>
#include <migraphx/stringutils.hpp>
namespace migraphx {
inline namespace MIGRAPHX_INLINE_NS {
// Walk the module and fold symmetric "pad" producers into the padding
// attribute of their consuming convolution/im2col/pooling instruction.
void eliminate_pad::apply(module& p) const
{
    for(auto ins : iterator_for(p))
    {
        const std::string& name = ins->name();
        // Only convolution-like ops and pooling can absorb an explicit pad.
        bool conv_like = (name == "convolution" or name == "im2col");
        if(not conv_like and name != "pooling")
            continue;
        auto pad_ins = ins->inputs().front();
        if(pad_ins->name() != "pad")
            continue;
        if(conv_like)
            update_op(pad_ins, ins, p);
        else
            update_pooling(pad_ins, ins, p);
    }
}
void eliminate_pad::update_op(const instruction_ref& input,
const instruction_ref& ins,
module& p) const
static void update_op(const instruction_ref& input, const instruction_ref& ins, module& m)
{
auto pad_op = any_cast<op::pad>(input->get_operator());
if(!pad_op.symmetric())
return;
auto kdims = input->get_shape().lens().size() - 2;
auto kdims_it = pad_op.pads.begin() + 2;
std::vector<size_t> new_pads(kdims_it, kdims_it + kdims);
std::vector<size_t> pads_l(kdims_it, kdims_it + kdims);
std::vector<size_t> pads_r(kdims_it + kdims + 2, pad_op.pads.end());
auto op = ins->get_operator();
op.from_value({{"padding", new_pads}});
std::vector<size_t> padding(kdims * 2, 0);
std::transform(
pads_l.begin(), pads_l.end(), padding.begin(), padding.begin(), std::plus<size_t>());
std::transform(pads_r.begin(),
pads_r.end(),
padding.begin() + kdims,
padding.begin() + kdims,
std::plus<size_t>());
op.from_value({{"padding", padding}});
std::vector<instruction_ref> new_inputs{ins->inputs()};
new_inputs.front() = input->inputs().front();
p.replace_instruction(ins, op, new_inputs);
m.replace_instruction(ins, op, new_inputs);
}
void eliminate_pad::update_pooling(const instruction_ref& input,
const instruction_ref& ins,
module& p) const
static void update_pooling(const instruction_ref& input, const instruction_ref& ins, module& m)
{
auto pad_op = any_cast<op::pad>(input->get_operator());
if(!pad_op.symmetric())
return;
auto kdims = input->get_shape().lens().size() - 2;
auto kdims_it = pad_op.pads.begin() + 2;
std::vector<size_t> new_pads(kdims_it, kdims_it + kdims);
auto op = any_cast<op::pooling>(ins->get_operator());
if(op.mode == "average")
{
return;
}
auto pad_op = any_cast<op::pad>(input->get_operator());
auto kdims = input->get_shape().lens().size() - 2;
auto kdims_it = pad_op.pads.begin() + 2;
std::vector<size_t> pads_l(kdims_it, kdims_it + kdims);
std::vector<size_t> pads_r(kdims_it + kdims + 2, pad_op.pads.end());
op.padding = new_pads;
std::transform(
pads_l.begin(), pads_l.end(), op.padding.begin(), op.padding.begin(), std::plus<size_t>());
std::transform(pads_r.begin(),
pads_r.end(),
op.padding.begin() + kdims,
op.padding.begin() + kdims,
std::plus<size_t>());
std::vector<instruction_ref> new_inputs{ins->inputs()};
new_inputs.front() = input->inputs().front();
p.replace_instruction(ins, op, new_inputs);
m.replace_instruction(ins, op, new_inputs);
}
// Scan every instruction in the module; when a convolution, im2col or
// pooling consumes the output of a "pad" instruction, delegate to the
// matching helper which merges the padding into the op itself.
void eliminate_pad::apply(module& m) const
{
    for(auto ins : iterator_for(m))
    {
        const std::string& current = ins->name();
        bool is_pooling = (current == "pooling");
        bool is_conv    = (current == "convolution" or current == "im2col");
        if(not(is_conv or is_pooling))
            continue;
        auto producer = ins->inputs().front();
        if(producer->name() != "pad")
            continue;
        if(is_pooling)
            update_pooling(producer, ins, m);
        else
            update_op(producer, ins, m);
    }
}
} // namespace MIGRAPHX_INLINE_NS
......
......@@ -6,7 +6,8 @@
namespace migraphx {
inline namespace MIGRAPHX_INLINE_NS {
std::vector<char> read_buffer(const std::string& filename)
template <class T>
T generic_read_file(const std::string& filename)
{
std::ifstream is(filename, std::ios::binary | std::ios::ate);
std::streamsize size = is.tellg();
......@@ -14,12 +15,22 @@ std::vector<char> read_buffer(const std::string& filename)
MIGRAPHX_THROW("Invalid size for: " + filename);
is.seekg(0, std::ios::beg);
std::vector<char> buffer(size);
if(!is.read(buffer.data(), size))
T buffer(size, 0);
if(!is.read(&buffer[0], size))
MIGRAPHX_THROW("Error reading file: " + filename);
return buffer;
}
// Read the entire contents of a file into a byte vector.
std::vector<char> read_buffer(const std::string& filename)
{
    using byte_buffer = std::vector<char>;
    return generic_read_file<byte_buffer>(filename);
}
// Read the entire contents of a file into a std::string.
std::string read_string(const std::string& filename)
{
    auto contents = generic_read_file<std::string>(filename);
    return contents;
}
void write_buffer(const std::string& filename, const char* buffer, std::size_t size)
{
std::ofstream os(filename);
......
......@@ -6,32 +6,59 @@ inline namespace MIGRAPHX_INLINE_NS {
argument fill_argument(shape s, unsigned long value)
{
argument result;
s.visit_type([&](auto as) {
using type = typename decltype(as)::type;
auto v = fill_tensor_data<type>(s, value);
result = {s, v};
});
if(s.type() == shape::tuple_type)
{
std::vector<argument> sub_args;
const auto& sub_ss = s.sub_shapes();
std::transform(sub_ss.begin(), sub_ss.end(), std::back_inserter(sub_args), [&](auto ss) {
return fill_argument(ss, value);
});
result = argument(sub_args);
}
else
{
s.visit_type([&](auto as) {
using type = typename decltype(as)::type;
auto v = fill_tensor_data<type>(s, value);
result = {s, v};
});
}
return result;
}
argument generate_argument(shape s, unsigned long seed)
{
argument result;
s.visit_type([&](auto as) {
// we use char type to store bool type internally, so bool_type
// needs special processing to generate data
if(s.type() == shape::bool_type)
{
auto v = generate_tensor_data<bool>(s, seed);
result = {s, v};
}
else
{
using type = typename decltype(as)::type;
auto v = generate_tensor_data<type>(s, seed);
result = {s, v};
}
});
if(s.type() == shape::tuple_type)
{
const auto& sub_ss = s.sub_shapes();
std::vector<argument> sub_args;
std::transform(sub_ss.begin(), sub_ss.end(), std::back_inserter(sub_args), [&](auto ss) {
return generate_argument(ss, seed);
});
result = argument(sub_args);
}
else
{
s.visit_type([&](auto as) {
// we use char type to store bool type internally, so bool_type
// needs special processing to generate data
if(s.type() == shape::bool_type)
{
auto v = generate_tensor_data<bool>(s, seed);
result = {s, v};
}
else
{
using type = typename decltype(as)::type;
auto v = generate_tensor_data<type>(s, seed);
result = {s, v};
}
});
}
return result;
}
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment