Commit fb75dfaf authored by Paul's avatar Paul
Browse files

Only use no-cache on jenkins

parents e596eec2 f0604d78
#include <migraph/program.hpp>
#include <migraph/argument.hpp>
#include <migraph/shape.hpp>
struct sum_op
{
    std::string name() const { return "sum"; }
    // Adds two single-element 1-d arguments and returns the scalar result.
    migraph::argument
    compute(migraph::context&, migraph::shape, std::vector<migraph::argument> args) const
    {
        // Every invalid-input case raises the same "Wrong args" error, so the
        // four checks fold into one short-circuiting condition. Evaluation
        // order is preserved: the size check guards the element accesses.
        if(args.size() != 2 || args[0].get_shape() != args[1].get_shape() ||
           args[0].get_shape().lens().size() != 1 || args[0].get_shape().lens().front() != 1)
            MIGRAPH_THROW("Wrong args");
        migraph::argument result;
        // Nested visits dispatch on the runtime element types of both inputs.
        args[0].visit_at([&](auto a) {
            args[1].visit_at([&](auto b) { result = migraph::literal{a + b}.get_argument(); });
        });
        return result;
    }
    migraph::shape compute_shape(std::vector<migraph::shape> inputs) const
    {
        if(inputs.size() != 2)
            MIGRAPH_THROW("Wrong inputs");
        return inputs.front();
    }
};
struct minus_op
{
    std::string name() const { return "minus"; }
    // Subtracts the second single-element 1-d argument from the first.
    migraph::argument
    compute(migraph::context&, migraph::shape, std::vector<migraph::argument> args) const
    {
        // Every invalid-input case raises the same "Wrong args" error, so the
        // four checks fold into one short-circuiting condition. Evaluation
        // order is preserved: the size check guards the element accesses.
        if(args.size() != 2 || args[0].get_shape() != args[1].get_shape() ||
           args[0].get_shape().lens().size() != 1 || args[0].get_shape().lens().front() != 1)
            MIGRAPH_THROW("Wrong args");
        migraph::argument result;
        // Nested visits dispatch on the runtime element types of both inputs.
        args[0].visit_at([&](auto a) {
            args[1].visit_at([&](auto b) { result = migraph::literal{a - b}.get_argument(); });
        });
        return result;
    }
    migraph::shape compute_shape(std::vector<migraph::shape> inputs) const
    {
        if(inputs.size() != 2)
            MIGRAPH_THROW("Wrong inputs");
        return inputs.front();
    }
};
struct pass_op
{
    std::string name() const { return "pass"; }
    // Identity operation: forwards its first input unchanged, or an empty
    // argument/shape when there are no inputs.
    migraph::argument
    compute(migraph::context&, migraph::shape, std::vector<migraph::argument> args) const
    {
        return args.empty() ? migraph::argument{} : args.front();
    }
    migraph::shape compute_shape(std::vector<migraph::shape> inputs) const
    {
        return inputs.empty() ? migraph::shape{} : inputs.front();
    }
};
// No-op operation: always produces a default (empty) argument and shape,
// regardless of its inputs.
struct nop
{
    std::string name() const { return "nop"; }
    migraph::argument
    compute(migraph::context&, migraph::shape, std::vector<migraph::argument>) const
    {
        return {};
    }
    migraph::shape compute_shape(std::vector<migraph::shape>) const { return {}; }
};
......@@ -4,8 +4,8 @@
#include <cstdlib>
#include <iostream>
#ifndef RTG_GUARD_TEST_TEST_HPP
#define RTG_GUARD_TEST_TEST_HPP
#ifndef MIGRAPH_GUARD_TEST_TEST_HPP
#define MIGRAPH_GUARD_TEST_TEST_HPP
namespace test {
// NOLINTNEXTLINE
......@@ -48,9 +48,9 @@ struct expression
decltype(auto) value() const { return Operator::call(lhs, rhs); };
};
// TODO: Remove rvalue references
template <class T, class U, class Operator>
expression<typename std::decay<T>::type, typename std::decay<U>::type, Operator>
make_expression(T&& rhs, U&& lhs, Operator)
expression<T, U, Operator> make_expression(T&& rhs, U&& lhs, Operator)
{
return {std::forward<T>(rhs), std::forward<U>(lhs)};
}
......@@ -58,10 +58,11 @@ make_expression(T&& rhs, U&& lhs, Operator)
template <class T>
struct lhs_expression;
// TODO: Remove rvalue reference
template <class T>
lhs_expression<typename std::decay<T>::type> make_lhs_expression(T&& lhs)
lhs_expression<T> make_lhs_expression(T&& lhs)
{
return lhs_expression<typename std::decay<T>::type>{std::forward<T>(lhs)};
return lhs_expression<T>{std::forward<T>(lhs)};
}
template <class T>
......@@ -114,10 +115,11 @@ struct capture
};
template <class T, class F>
void failed(T x, const char* msg, const char* file, int line, F f)
void failed(T x, const char* msg, const char* func, const char* file, int line, F f)
{
if(!x.value())
{
std::cout << func << std::endl;
std::cout << file << ":" << line << ":" << std::endl;
std::cout << " FAILED: " << msg << " " << x << std::endl;
f();
......@@ -162,11 +164,18 @@ void run_test()
} // namespace test
// NOLINTNEXTLINE
#define CHECK(...) \
test::failed(test::capture{}->*__VA_ARGS__, #__VA_ARGS__, __FILE__, __LINE__, [] {})
#define CHECK(...) \
test::failed( \
test::capture{}->*__VA_ARGS__, #__VA_ARGS__, __PRETTY_FUNCTION__, __FILE__, __LINE__, [] { \
})
// NOLINTNEXTLINE
#define EXPECT(...) \
test::failed(test::capture{}->*__VA_ARGS__, #__VA_ARGS__, __FILE__, __LINE__, &std::abort)
#define EXPECT(...) \
test::failed(test::capture{}->*__VA_ARGS__, \
#__VA_ARGS__, \
__PRETTY_FUNCTION__, \
__FILE__, \
__LINE__, \
&std::abort)
// NOLINTNEXTLINE
#define STATUS(...) EXPECT((__VA_ARGS__) == 0)
......
#ifndef RTG_GUARD_VERIFY_HPP
#define RTG_GUARD_VERIFY_HPP
#ifndef MIGRAPH_GUARD_VERIFY_HPP
#define MIGRAPH_GUARD_VERIFY_HPP
#include <algorithm>
#include <cmath>
......
#include <rtg/literal.hpp>
#include <migraph/literal.hpp>
#include <sstream>
#include <string>
#include "test.hpp"
void literal_test()
{
EXPECT(rtg::literal{1} == rtg::literal{1});
EXPECT(rtg::literal{1} != rtg::literal{2});
EXPECT(rtg::literal{} == rtg::literal{});
EXPECT(rtg::literal{} != rtg::literal{2});
EXPECT(migraph::literal{1} == migraph::literal{1});
EXPECT(migraph::literal{1} != migraph::literal{2});
EXPECT(migraph::literal{} == migraph::literal{});
EXPECT(migraph::literal{} != migraph::literal{2});
rtg::literal l1{1};
rtg::literal l2 = l1; // NOLINT
migraph::literal l1{1};
migraph::literal l2 = l1; // NOLINT
EXPECT(l1 == l2);
EXPECT(l1.at<int>(0) == 1);
EXPECT(!l1.empty());
EXPECT(!l2.empty());
rtg::literal l3{};
rtg::literal l4{};
migraph::literal l3{};
migraph::literal l4{};
EXPECT(l3 == l4);
EXPECT(l3.empty());
EXPECT(l4.empty());
......@@ -27,7 +27,7 @@ void literal_test()
void literal_os1()
{
rtg::literal l{1};
migraph::literal l{1};
std::stringstream ss;
ss << l;
EXPECT(ss.str() == "1");
......@@ -35,7 +35,7 @@ void literal_os1()
void literal_os2()
{
rtg::literal l{};
migraph::literal l{};
std::stringstream ss;
ss << l;
EXPECT(ss.str().empty());
......@@ -43,8 +43,8 @@ void literal_os2()
void literal_os3()
{
rtg::shape s{rtg::shape::int64_type, {3}};
rtg::literal l{s, {1, 2, 3}};
migraph::shape s{migraph::shape::int64_type, {3}};
migraph::literal l{s, {1, 2, 3}};
std::stringstream ss;
ss << l;
EXPECT(ss.str() == "1, 2, 3");
......
#include <rtg/program.hpp>
#include <rtg/operators.hpp>
#include <rtg/cpu/cpu_target.hpp>
#include <rtg/miopen/miopen_target.hpp>
#include <rtg/manage_ptr.hpp>
#include <miopen/miopen.h>
#include <random>
#include "test.hpp"
#include "verify.hpp"
using hip_ptr = RTG_MANAGE_PTR(void, hipFree);
using miopen_handle = RTG_MANAGE_PTR(miopenHandle_t, miopenDestroy);
// Generic factory for MIOpen handle types: calls the C creation function `f`
// with an out-pointer plus any extra arguments, wraps the created handle in
// the RAII type `Result`, and throws when the call did not succeed.
template <class Result, class F, class... Ts>
Result make_obj(F f, Ts... xs)
{
    typename Result::pointer x = nullptr;
    auto status = f(&x, xs...);
    // Wrap before checking: if creation partially succeeded, the RAII owner
    // still releases the handle when the throw below unwinds.
    Result r{x};
    if(status != miopenStatusSuccess)
        RTG_THROW("MIOpen call failed");
    return r;
}
// Allocate `sz` bytes of device memory, owned by a hip_ptr (released via
// hipFree). Throws on allocation failure.
hip_ptr hip_allocate(std::size_t sz)
{
    void* result = nullptr;
    // Resolves the old "TODO: Check status": a failed hipMalloc previously
    // went unnoticed and the garbage pointer was handed to hip_ptr.
    if(hipMalloc(&result, sz) != hipSuccess)
        RTG_THROW("hipMalloc failed");
    return hip_ptr{result};
}
// Copy the contents of a contiguous host container `x` to a freshly
// allocated device buffer and return the owning pointer.
template <class T>
hip_ptr write(const T& x)
{
    using type = typename T::value_type;
    auto size = x.size() * sizeof(type);
    auto result = hip_allocate(size);
    // Resolves the old "TODO: Check status": silently-failed copies made the
    // GPU test compare against uninitialized device memory.
    if(hipMemcpy(result.get(), x.data(), size, hipMemcpyHostToDevice) != hipSuccess)
        RTG_THROW("hipMemcpy host-to-device failed");
    return result;
}
template <class T>
std::vector<T> read(const void* x, std::size_t sz)
{
std::vector<T> result(sz);
// TODO: Check status
hipMemcpy(result.data(), x, sz * sizeof(T), hipMemcpyDeviceToHost);
return result;
}
// Builds the network under test: conv(x, w) followed by a relu activation.
// Both backends (cpu/gpu) compile and run this same program.
rtg::program create_program()
{
    rtg::program p;
    auto input   = p.add_parameter("x", rtg::shape{rtg::shape::float_type, {4, 3, 3, 3}});
    auto weights = p.add_parameter("w", rtg::shape{rtg::shape::float_type, {4, 3, 3, 3}});
    auto conv    = p.add_instruction(rtg::convolution{}, input, weights);
    p.add_instruction(rtg::activation{"relu"}, conv);
    return p;
}
// Produce one float per element of `s`, filled from a fixed-seed generator so
// repeated calls (and hence the CPU and GPU runs) see identical data.
std::vector<float> get_tensor_data(rtg::shape s)
{
    std::vector<float> data(s.elements());
    std::mt19937 rng{0};
    std::uniform_real_distribution<> dist;
    for(auto& value : data)
        value = dist(rng);
    return data;
}
// Wrap freshly generated tensor data in an argument backed by host memory.
rtg::argument get_tensor_argument_cpu(rtg::shape s)
{
    auto v = get_tensor_data(s);
    // The mutable by-value capture makes the closure own the vector, keeping
    // the buffer alive for as long as the argument is used.
    return {s, [v]() mutable { return reinterpret_cast<char*>(v.data()); }};
}
// Same as above, but the data is first copied to device memory.
rtg::argument get_tensor_argument_gpu(rtg::shape s)
{
    auto v = get_tensor_data(s);
    // share() presumably converts the move-only hip_ptr into a copyable
    // shared owner so it can live in the (copyable) closure — TODO confirm.
    auto p = rtg::share(write(v));
    return {s, [p]() mutable { return reinterpret_cast<char*>(p.get()); }};
}
// Run the test program on the CPU backend and return the flattened output.
std::vector<float> cpu()
{
    std::vector<float> result;
    auto p = create_program();
    auto x = get_tensor_argument_cpu({rtg::shape::float_type, {4, 3, 3, 3}});
    auto w = get_tensor_argument_cpu({rtg::shape::float_type, {4, 3, 3, 3}});
    p.compile(rtg::cpu::cpu_target{});
    auto r = p.eval({{"x", x}, {"w", w}});
    // get<float>() appears to yield a view over the result buffer — copy the
    // values out so they outlive `r`.
    auto output = r.get<float>();
    result.assign(output.begin(), output.end());
    return result;
}
// Run the test program on the MIOpen (GPU) backend and return the output
// copied back to the host.
std::vector<float> gpu()
{
    std::vector<float> result;
    auto p = create_program();
    auto x = get_tensor_argument_gpu({rtg::shape::float_type, {4, 3, 3, 3}});
    auto w = get_tensor_argument_gpu({rtg::shape::float_type, {4, 3, 3, 3}});
    // Compile before querying "output": that parameter is looked up only
    // after compilation, presumably because the target introduces it then —
    // TODO confirm against the miopen_target implementation.
    p.compile(rtg::miopen::miopen_target{});
    auto y = get_tensor_argument_gpu(p.get_parameter_shape("output"));
    auto handle = make_obj<miopen_handle>(&miopenCreate);
    // The MIOpen handle is passed as an any_type argument alongside the data.
    auto r = p.eval(
        {{"x", x}, {"w", w}, {"output", y}, {"handle", {rtg::shape::any_type, handle.get()}}});
    // Copy the device-resident result back for comparison on the host.
    result = read<float>(r.data(), r.get_shape().elements());
    return result;
}
// End-to-end check: the CPU and GPU backends must produce matching results
// for the same program and identical (fixed-seed) input data.
void test1()
{
    auto x = cpu();
    auto y = gpu();
    EXPECT(test::verify_range(x, y));
}
int main() { test1(); }
#include <iostream>
#include <vector>
#include <migraph/literal.hpp>
#include <migraph/operators.hpp>
#include <migraph/program.hpp>
#include <migraph/onnx.hpp>
#include "test.hpp"
#include "verify.hpp"
// Build conv + bias-add by hand and check it equals the graph parsed from
// the PyTorch-exported conv.onnx file.
void pytorch_conv_bias_test()
{
    migraph::program p;
    auto input   = p.add_parameter("0", {migraph::shape::float_type, {1, 3, 32, 32}});
    auto weights = p.add_parameter("1", {migraph::shape::float_type, {1, 3, 5, 5}});
    auto bias    = p.add_parameter("2", {migraph::shape::float_type, {1}});
    uint64_t axis = 1;
    auto conv  = p.add_instruction(migraph::convolution{}, input, weights);
    auto bcast = p.add_instruction(migraph::broadcast{axis}, conv, bias);
    p.add_instruction(migraph::add{}, conv, bcast);
    auto parsed = migraph::parse_onnx("conv.onnx");
    EXPECT(p == parsed);
}
// conv -> bias broadcast-add -> relu -> maxpool, checked against the
// graph parsed from conv_relu_maxpool.onnx. Instruction insertion order must
// exactly mirror the exported graph for the equality check to hold.
void pytorch_conv_relu_maxpool()
{
    migraph::program p;
    auto l0 = p.add_parameter("0", {migraph::shape::float_type, {1, 3, 32, 32}});
    auto l1 = p.add_parameter("1", {migraph::shape::float_type, {1, 3, 5, 5}});
    auto l2 = p.add_parameter("2", {migraph::shape::float_type, {1}});
    uint64_t axis = 1;
    auto l3 = p.add_instruction(migraph::convolution{}, l0, l1);
    // Broadcast the single-element bias along channel axis 1 before adding.
    auto l4 = p.add_instruction(migraph::broadcast{axis}, l3, l2);
    auto l5 = p.add_instruction(migraph::add{}, l3, l4);
    auto l6 = p.add_instruction(migraph::activation{"relu"}, l5);
    p.add_instruction(migraph::pooling{"max", {{0, 0}}, {{2, 2}}, {{2, 2}}}, l6);
    auto prog = migraph::parse_onnx("conv_relu_maxpool.onnx");
    EXPECT(p == prog);
}
// conv -> bias add -> batch-norm -> relu -> maxpool, checked against the
// graph parsed from conv_bn_relu_maxpool.onnx.
void pytorch_conv_bn_relu_maxpool()
{
    migraph::program p;
    auto l0 = p.add_parameter("0", {migraph::shape::float_type, {1, 3, 32, 32}});
    auto l1 = p.add_parameter("1", {migraph::shape::float_type, {1, 3, 5, 5}});
    auto l2 = p.add_parameter("2", {migraph::shape::float_type, {1}});
    // Parameters 3-6 feed batch_norm_inference; presumably scale/bias/mean/
    // variance in that order — TODO confirm against the operator definition.
    auto p3 = p.add_parameter("3", {migraph::shape::float_type, {1}});
    auto p4 = p.add_parameter("4", {migraph::shape::float_type, {1}});
    auto p5 = p.add_parameter("5", {migraph::shape::float_type, {1}});
    auto p6 = p.add_parameter("6", {migraph::shape::float_type, {1}});
    uint64_t axis = 1;
    auto l3 = p.add_instruction(migraph::convolution{}, l0, l1);
    auto l4 = p.add_instruction(migraph::broadcast{axis}, l3, l2);
    auto l5 = p.add_instruction(migraph::add{}, l3, l4);
    auto l6 = p.add_instruction(migraph::batch_norm_inference{}, l5, p3, p4, p5, p6);
    auto l7 = p.add_instruction(migraph::activation{"relu"}, l6);
    p.add_instruction(migraph::pooling{"max", {{0, 0}}, {{2, 2}}, {{2, 2}}}, l7);
    auto prog = migraph::parse_onnx("conv_bn_relu_maxpool.onnx");
    EXPECT(p == prog);
}
// Two stacked conv -> bias add -> relu -> maxpool stages, checked against the
// graph parsed from conv_relu_maxpoolX2.onnx.
void pytorch_conv_relu_maxpoolX2()
{
    migraph::program p;
    // Stage 1.
    auto l0 = p.add_parameter("0", {migraph::shape::float_type, {1, 3, 32, 32}});
    auto l1 = p.add_parameter("1", {migraph::shape::float_type, {5, 3, 5, 5}});
    auto l2 = p.add_parameter("2", {migraph::shape::float_type, {5}});
    uint64_t axis = 1;
    auto l3 = p.add_instruction(migraph::convolution{}, l0, l1);
    auto l4 = p.add_instruction(migraph::broadcast{axis}, l3, l2);
    auto l5 = p.add_instruction(migraph::add{}, l3, l4);
    auto l6 = p.add_instruction(migraph::activation{"relu"}, l5);
    auto l7 = p.add_instruction(migraph::pooling{"max", {{0, 0}}, {{2, 2}}, {{2, 2}}}, l6);
    // Stage 2: parameters are added mid-stream to match the exported graph's
    // instruction order.
    auto l8 = p.add_parameter("3", {migraph::shape::float_type, {1, 5, 5, 5}});
    auto l9 = p.add_parameter("4", {migraph::shape::float_type, {1}});
    auto l10 = p.add_instruction(migraph::convolution{}, l7, l8);
    auto l11 = p.add_instruction(migraph::broadcast{axis}, l10, l9);
    auto l12 = p.add_instruction(migraph::add{}, l10, l11);
    auto l13 = p.add_instruction(migraph::activation{"relu"}, l12);
    p.add_instruction(migraph::pooling{"max", {{0, 0}}, {{2, 2}}, {{2, 2}}}, l13);
    auto prog = migraph::parse_onnx("conv_relu_maxpoolX2.onnx");
    EXPECT(p == prog);
}
// Run all ONNX-parsing equivalence tests.
int main()
{
    pytorch_conv_bias_test();
    pytorch_conv_relu_maxpool();
    pytorch_conv_bn_relu_maxpool();
    pytorch_conv_relu_maxpoolX2();
}
#include <rtg/operation.hpp>
#include <migraph/operation.hpp>
#include <sstream>
#include <string>
#include "test.hpp"
......@@ -8,10 +8,14 @@ struct simple_operation
{
int data = 1;
std::string name() const { return "simple"; }
rtg::shape compute_shape(std::vector<rtg::shape>) const { RTG_THROW("not computable"); }
rtg::argument compute(rtg::shape, std::vector<rtg::argument>) const
migraph::shape compute_shape(std::vector<migraph::shape>) const
{
RTG_THROW("not computable");
MIGRAPH_THROW("not computable");
}
migraph::argument
compute(migraph::context&, migraph::shape, std::vector<migraph::argument>) const
{
MIGRAPH_THROW("not computable");
}
friend std::ostream& operator<<(std::ostream& os, const simple_operation& op)
{
......@@ -23,18 +27,22 @@ struct simple_operation
struct simple_operation_no_print
{
std::string name() const { return "simple"; }
rtg::shape compute_shape(std::vector<rtg::shape>) const { RTG_THROW("not computable"); }
rtg::argument compute(rtg::shape, std::vector<rtg::argument>) const
migraph::shape compute_shape(std::vector<migraph::shape>) const
{
MIGRAPH_THROW("not computable");
}
migraph::argument
compute(migraph::context&, migraph::shape, std::vector<migraph::argument>) const
{
RTG_THROW("not computable");
MIGRAPH_THROW("not computable");
}
};
void operation_copy_test()
{
simple_operation s{};
rtg::operation op1 = s; // NOLINT
rtg::operation op2 = op1; // NOLINT
migraph::operation op1 = s; // NOLINT
migraph::operation op2 = op1; // NOLINT
EXPECT(s.name() == op1.name());
EXPECT(op2.name() == op1.name());
}
......@@ -45,18 +53,18 @@ struct not_operation
void operation_any_cast()
{
rtg::operation op1 = simple_operation{};
EXPECT(rtg::any_cast<simple_operation>(op1).data == 1);
EXPECT(rtg::any_cast<not_operation*>(&op1) == nullptr);
EXPECT(test::throws([&] { rtg::any_cast<not_operation&>(op1); }));
rtg::operation op2 = simple_operation{2};
EXPECT(rtg::any_cast<simple_operation>(op2).data == 2);
EXPECT(rtg::any_cast<not_operation*>(&op2) == nullptr);
migraph::operation op1 = simple_operation{};
EXPECT(migraph::any_cast<simple_operation>(op1).data == 1);
EXPECT(migraph::any_cast<not_operation*>(&op1) == nullptr);
EXPECT(test::throws([&] { migraph::any_cast<not_operation&>(op1); }));
migraph::operation op2 = simple_operation{2};
EXPECT(migraph::any_cast<simple_operation>(op2).data == 2);
EXPECT(migraph::any_cast<not_operation*>(&op2) == nullptr);
}
void operation_print()
{
rtg::operation op = simple_operation{};
migraph::operation op = simple_operation{};
std::stringstream ss;
ss << op;
std::string s = ss.str();
......@@ -65,7 +73,7 @@ void operation_print()
void operation_default_print()
{
rtg::operation op = simple_operation_no_print{};
migraph::operation op = simple_operation_no_print{};
std::stringstream ss;
ss << op;
std::string s = ss.str();
......
#include <migraph/program.hpp>
#include <migraph/iterator_for.hpp>
#include <migraph/instruction.hpp>
#include <sstream>
#include "test.hpp"
#include <basic_ops.hpp>
// Builds the reference program: (x + y) + 1 using the test sum_op.
migraph::program create_program()
{
    migraph::program prog;
    auto param_x = prog.add_parameter("x", {migraph::shape::int64_type});
    auto param_y = prog.add_parameter("y", {migraph::shape::int64_type});
    auto total   = prog.add_instruction(sum_op{}, param_x, param_y);
    auto one     = prog.add_literal(1);
    prog.add_instruction(sum_op{}, total, one);
    return prog;
}
void program_equality()
{
migraph::program x = create_program();
migraph::program y = create_program();
EXPECT(x == y);
}
int main() { program_equality(); }
#include <rtg/shape.hpp>
#include <migraph/shape.hpp>
#include <array>
#include <algorithm>
#include <numeric>
#include "test.hpp"
void test_shape_default()
{
migraph::shape s{};
EXPECT(s.elements() == 0);
EXPECT(s.bytes() == 0);
}
void test_shape_assign()
{
rtg::shape s1{rtg::shape::float_type, {100, 32, 8, 8}};
rtg::shape s2 = s1; // NOLINT
migraph::shape s1{migraph::shape::float_type, {100, 32, 8, 8}};
migraph::shape s2 = s1; // NOLINT
EXPECT(s1 == s2);
EXPECT(!(s1 != s2));
}
void test_shape_default()
void test_shape_packed_default()
{
migraph::shape s{migraph::shape::float_type, {2, 2}};
EXPECT(s.standard());
EXPECT(s.packed());
EXPECT(not s.transposed());
EXPECT(not s.broadcasted());
}
void test_shape_packed()
{
migraph::shape s{migraph::shape::float_type, {2, 2}, {2, 1}};
EXPECT(s.standard());
EXPECT(s.packed());
EXPECT(not s.transposed());
EXPECT(not s.broadcasted());
}
void test_shape_transposed()
{
migraph::shape s{migraph::shape::float_type, {2, 2}, {1, 2}};
EXPECT(not s.standard());
EXPECT(s.packed());
EXPECT(s.transposed());
EXPECT(not s.broadcasted());
}
void test_shape_broadcasted()
{
rtg::shape s1{};
rtg::shape s2{};
migraph::shape s{migraph::shape::float_type, {2, 2}, {1, 0}};
EXPECT(not s.standard());
EXPECT(not s.packed());
EXPECT(not s.transposed());
EXPECT(s.broadcasted());
}
void test_shape_default_copy()
{
migraph::shape s1{};
migraph::shape s2{};
EXPECT(s1 == s2);
EXPECT(!(s1 != s2));
}
void test_shape4()
{
rtg::shape s{rtg::shape::float_type, {100, 32, 8, 8}};
migraph::shape s{migraph::shape::float_type, {100, 32, 8, 8}};
EXPECT(s.standard());
EXPECT(s.packed());
EXPECT(s.type() == rtg::shape::float_type);
EXPECT(not s.transposed());
EXPECT(not s.broadcasted());
EXPECT(s.type() == migraph::shape::float_type);
EXPECT(s.lens()[0] == 100);
EXPECT(s.lens()[1] == 32);
EXPECT(s.lens()[2] == 8);
......@@ -67,9 +113,12 @@ void test_shape4_nonpacked()
strides.rbegin() + 1,
std::multiplies<std::size_t>());
rtg::shape s{rtg::shape::float_type, lens, strides};
EXPECT(!s.packed());
EXPECT(s.type() == rtg::shape::float_type);
migraph::shape s{migraph::shape::float_type, lens, strides};
EXPECT(not s.standard());
EXPECT(not s.packed());
EXPECT(not s.transposed());
EXPECT(not s.broadcasted());
EXPECT(s.type() == migraph::shape::float_type);
EXPECT(s.lens()[0] == 100);
EXPECT(s.lens()[1] == 32);
EXPECT(s.lens()[2] == 8);
......@@ -94,8 +143,13 @@ void test_shape4_nonpacked()
int main()
{
test_shape_assign();
test_shape_default();
test_shape_assign();
test_shape_packed_default();
test_shape_packed();
test_shape_transposed();
test_shape_broadcasted();
test_shape_default_copy();
test_shape4();
test_shape4_nonpacked();
}
#include <migraph/simplify_reshapes.hpp>
#include <migraph/dead_code_elimination.hpp>
#include <migraph/operators.hpp>
#include <basic_ops.hpp>
#include <test.hpp>
// Minimal compile target that runs only the simplify_reshapes and
// dead-code-elimination passes, so those passes can be tested in isolation.
struct simplify_reshapes_target
{
    std::string name() const { return "simplify_reshapes"; }
    std::vector<migraph::pass> get_passes(migraph::context&) const
    {
        return {migraph::simplify_reshapes{}, migraph::dead_code_elimination{}};
    }
    migraph::context get_context() const { return {}; }
};
// 2x2 literal {1,2,3,4} with default (row-major) strides.
migraph::literal get_2x2()
{
    return migraph::literal{{migraph::shape::float_type, {2, 2}}, {1, 2, 3, 4}};
}
// Same data with swapped strides {1, 2}: a transposed layout of the above.
migraph::literal get_2x2_transposed()
{
    return migraph::literal{{migraph::shape::float_type, {2, 2}, {1, 2}}, {1, 2, 3, 4}};
}
// 1-d literal {1, 2}.
migraph::literal get_2() { return migraph::literal{{migraph::shape::float_type, {2}}, {1, 2}}; }
// {1, 2} with a zero stride on the trailing axis: a broadcasted layout.
migraph::literal get_2_broadcasted()
{
    return migraph::literal{{migraph::shape::float_type, {2, 1}, {1, 0}}, {1, 2}};
}
// transpose -> contiguous -> contiguous: after the simplify passes only the
// literal and the pass_op should remain (2 instructions), and evaluation
// still yields the original 2x2 data.
void double_contig()
{
    migraph::program p;
    auto l  = p.add_literal(get_2x2());
    auto t1 = p.add_instruction(migraph::transpose{{1, 0}}, l);
    auto c1 = p.add_instruction(migraph::contiguous{}, t1);
    auto c2 = p.add_instruction(migraph::contiguous{}, c1);
    p.add_instruction(pass_op{}, c2);
    EXPECT(p.get_shape().standard());
    EXPECT(not p.get_shape().transposed());
    p.compile(simplify_reshapes_target{});
    EXPECT(p.get_shape().standard());
    EXPECT(not p.get_shape().transposed());
    EXPECT(std::distance(p.begin(), p.end()) == 2);
    auto result = p.eval({});
    EXPECT(result == get_2x2());
}
// Two transposes cancel out: the simplify passes should reduce the program
// to 2 instructions (literal + pass_op) without changing the result.
void double_transpose()
{
    migraph::program p;
    auto l  = p.add_literal(get_2x2());
    auto t1 = p.add_instruction(migraph::transpose{{1, 0}}, l);
    auto t2 = p.add_instruction(migraph::transpose{{1, 0}}, t1);
    p.add_instruction(pass_op{}, t2);
    EXPECT(p.get_shape().standard());
    EXPECT(not p.get_shape().transposed());
    p.compile(simplify_reshapes_target{});
    EXPECT(p.get_shape().standard());
    EXPECT(not p.get_shape().transposed());
    EXPECT(std::distance(p.begin(), p.end()) == 2);
    auto result = p.eval({});
    EXPECT(result == get_2x2());
}
// transpose -> contiguous -> transpose -> contiguous also collapses to the
// identity: 2 instructions after simplification, result unchanged.
void double_transpose_contig()
{
    migraph::program p;
    auto l  = p.add_literal(get_2x2());
    auto t1 = p.add_instruction(migraph::transpose{{1, 0}}, l);
    auto c1 = p.add_instruction(migraph::contiguous{}, t1);
    auto t2 = p.add_instruction(migraph::transpose{{1, 0}}, c1);
    auto c2 = p.add_instruction(migraph::contiguous{}, t2);
    p.add_instruction(pass_op{}, c2);
    EXPECT(p.get_shape().standard());
    EXPECT(not p.get_shape().transposed());
    p.compile(simplify_reshapes_target{});
    EXPECT(p.get_shape().standard());
    EXPECT(not p.get_shape().transposed());
    EXPECT(std::distance(p.begin(), p.end()) == 2);
    auto result = p.eval({});
    EXPECT(result == get_2x2());
}
// A lone transpose must NOT be removed: the program keeps all 3 instructions
// and evaluates to something other than the untransposed literal.
void single_transpose()
{
    migraph::program p;
    auto l  = p.add_literal(get_2x2());
    auto t1 = p.add_instruction(migraph::transpose{{1, 0}}, l);
    p.add_instruction(pass_op{}, t1);
    EXPECT(not p.get_shape().standard());
    EXPECT(p.get_shape().transposed());
    p.compile(simplify_reshapes_target{});
    EXPECT(not p.get_shape().standard());
    EXPECT(p.get_shape().transposed());
    EXPECT(std::distance(p.begin(), p.end()) == 3);
    auto result = p.eval({});
    EXPECT(result != get_2x2());
}
// Like double_transpose but without a trailing pass_op ("sin_pass" appears to
// mean "sans pass" — name kept as-is since main() calls it). The result must
// still equal the original literal; full instruction elimination is a known
// open issue (see TODO below).
void double_transpose_sin_pass()
{
    migraph::program p;
    auto l  = p.add_literal(get_2x2());
    auto t1 = p.add_instruction(migraph::transpose{{1, 0}}, l);
    p.add_instruction(migraph::transpose{{1, 0}}, t1);
    EXPECT(p.get_shape().standard());
    EXPECT(not p.get_shape().transposed());
    p.compile(simplify_reshapes_target{});
    EXPECT(p.get_shape().standard());
    EXPECT(not p.get_shape().transposed());
    // std::cout << p << std::endl;
    // TODO: Fix this
    // EXPECT(std::distance(p.begin(), p.end()) == 1);
    auto result = p.eval({});
    EXPECT(result == get_2x2());
}
// A lone transpose with no trailing pass_op: the transposed output must be
// preserved (2 instructions remain, result differs from the plain literal).
void single_transpose_sin_pass()
{
    migraph::program p;
    auto l = p.add_literal(get_2x2());
    p.add_instruction(migraph::transpose{{1, 0}}, l);
    EXPECT(not p.get_shape().standard());
    EXPECT(p.get_shape().transposed());
    p.compile(simplify_reshapes_target{});
    EXPECT(not p.get_shape().standard());
    EXPECT(p.get_shape().transposed());
    EXPECT(std::distance(p.begin(), p.end()) == 2);
    auto result = p.eval({});
    EXPECT(result != get_2x2());
}
// Run all simplify_reshapes pass tests.
int main()
{
    double_contig();
    double_transpose();
    double_transpose_contig();
    single_transpose();
    double_transpose_sin_pass();
    single_transpose_sin_pass();
}
#include <migraph/type_name.hpp>
#include "test.hpp"
// Fixture types for exercising get_type_name at global scope, nested scope,
// namespace scope, and namespace-nested scope.
struct global_class
{
    struct inner_class
    {
    };
};
namespace foo {
struct ns_class
{
    struct inner_class
    {
    };
};
} // namespace foo
// get_type_name must return the fully qualified name of each type.
int main()
{
    EXPECT(migraph::get_type_name<global_class>() == "global_class");
    EXPECT(migraph::get_type_name<global_class::inner_class>() == "global_class::inner_class");
    EXPECT(migraph::get_type_name<foo::ns_class>() == "foo::ns_class");
    EXPECT(migraph::get_type_name<foo::ns_class::inner_class>() == "foo::ns_class::inner_class");
}
#include <migraph/program.hpp>
#include <migraph/instruction.hpp>
#include <basic_ops.hpp>
#include <test.hpp>
void simple_test()
{
migraph::program p;
auto one = p.add_literal(1);
auto two = p.add_literal(2);
p.add_instruction(sum_op{}, one, two);
EXPECT(bool{p.validate() == p.end()});
auto result = p.eval({});
EXPECT(result == migraph::literal{3});
EXPECT(result != migraph::literal{4});
}
void out_of_order()
{
migraph::program p;
auto one = p.add_literal(1);
auto two = p.add_literal(2);
auto ins = p.add_instruction(sum_op{}, one, two);
p.move_instruction(two, p.end());
EXPECT(bool{p.validate() == ins});
}
void incomplete_args()
{
migraph::program p;
auto one = p.add_literal(1);
auto two = p.add_literal(2);
auto ins = p.add_instruction(sum_op{}, one, two);
ins->clear_arguments();
EXPECT(bool{p.validate() == ins});
}
void invalid_args()
{
migraph::program p;
auto one = p.add_literal(1);
auto two = p.add_literal(2);
auto ins = p.add_instruction(sum_op{}, one, two);
ins->arguments.clear();
EXPECT(bool{p.validate() == p.begin()});
}
// Run all program-validation tests.
int main()
{
    simple_test();
    out_of_order();
    incomplete_args();
    invalid_args();
}
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
ls -1 $DIR/include/ | xargs -n 1 -P $(nproc) -I{} -t bash -c "python $DIR/te.py $DIR/include/{} | clang-format-5.0 -style=file > $DIR/../src/include/rtg/{}"
ls -1 $DIR/include/ | xargs -n 1 -P $(nproc) -I{} -t bash -c "python3.6 $DIR/te.py $DIR/include/{} | clang-format-5.0 -style=file > $DIR/../src/include/migraph/{}"
#ifndef MIGRAPH_GUARD_CONTEXT_HPP
#define MIGRAPH_GUARD_CONTEXT_HPP
#include <cassert>
#include <string>
#include <functional>
#include <memory>
#include <type_traits>
#include <utility>
namespace migraph {
#ifdef DOXYGEN
/// A context is used to store internal data for a `target`. A context is
/// constructed by a target during compilation and passed to the operations
/// during `eval`.
struct context
{
};
#else
<%
interface('context')
%>
#endif
} // namespace migraph
#endif
#ifndef RTG_GUARD_RTGLIB_OPERAND_HPP
#define RTG_GUARD_RTGLIB_OPERAND_HPP
#ifndef MIGRAPH_GUARD_MIGRAPHLIB_OPERAND_HPP
#define MIGRAPH_GUARD_MIGRAPHLIB_OPERAND_HPP
#include <cassert>
#include <string>
#include <functional>
#include <memory>
#include <type_traits>
#include <utility>
#include <rtg/shape.hpp>
#include <rtg/argument.hpp>
#include <migraph/shape.hpp>
#include <migraph/argument.hpp>
#include <migraph/context.hpp>
#include <migraph/auto_any_cast.hpp>
namespace rtg {
namespace migraph {
#ifdef DOXYGEN
/// The operation interface represents an action an instruction will perform. All
/// operation classes must be CopyConstructible.
struct operation
{
/// A unique name identifying the operation
std::string name() const;
/// This is used to compute the resulting shape from an operation. If an
/// operation cannot be run with input shapes, then it should throw an
/// exception.
shape compute_shape(std::vector<shape> input) const;
/**
* @brief This performs the operation's computation
*
* @param ctx This is the context created by the `target` during compilation. Implementations
* can use the target's `context` class rather than the `context` interface class.
* @param output This is the output shape. It is equivalent to running `compute_shape` with each
* `shape` of the `argument`.
* @param input This is the `argument` result from the previous instuction's computation.
* @return Return an `argument` of the result computation. The `shape` of `argument` should be
* the same the `output` shape.
*/
argument compute(context& ctx, shape output, std::vector<argument> input) const;
/// An optional stream operator to print the operation. When this is not
/// implemented, it will just print the operation's name.
friend std::ostream& operator<<(std::ostream& os, const operation& op);
};
#else
namespace operation_stream {
......@@ -21,15 +55,23 @@ auto operator<<(std::ostream& os, const T& x) -> decltype(os << x.name())
} // namespace operation_stream
template <class T>
argument compute_op(const T& x, context& ctx, shape output_shape, std::vector<argument> input)
{
return x.compute(auto_any_cast(ctx), output_shape, input);
}
<%
interface('operation',
virtual('name', returns='std::string', const=True),
virtual('compute_shape', returns='shape', input='std::vector<shape>', const=True),
virtual('compute', returns='argument', output='shape', input='std::vector<argument>', const=True),
friend('operator<<', returns='std::ostream &', os='std::ostream &', op='const operation &', using='rtg::operation_stream::operator<<')
virtual('compute', returns='argument', ctx='context&', output='shape', input='std::vector<argument>', const=True, default='compute_op'),
friend('operator<<', returns='std::ostream &', os='std::ostream &', op='const operation &', using='migraph::operation_stream::operator<<')
)
%>
} // namespace rtg
#endif
} // namespace migraph
#endif
#ifndef MIGRAPH_GUARD_PASS_HPP
#define MIGRAPH_GUARD_PASS_HPP
#include <cassert>
#include <string>
#include <functional>
#include <memory>
#include <type_traits>
#include <utility>
namespace migraph {
struct program;
#ifdef DOXYGEN
/// An interface for applying a transformation to the instructions in a
/// `program`
struct pass
{
/// A unique name used to identify the pass
std::string name() const;
/// Run the pass on the program
void apply(program& p) const;
};
#else
<%
interface('pass',
virtual('name', returns='std::string', const=True),
virtual('apply', returns='void', p='program &', const=True)
)
%>
#endif
} // namespace migraph
#endif
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment