Commit e571d0a0 authored by Khalique

remove activation op, replacing it with a dedicated relu op

parent 9ca0fbf1
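
In short: the generic activation op, which picked its function from a runtime mode string, is removed, and relu becomes a dedicated op in its own right. At call sites the change is mechanical, as the test updates below show (`input` here is a placeholder for whichever instruction feeds the activation):

    // before: function selected by a runtime string
    p.add_instruction(migraph::op::activation{"relu"}, input);
    // after: function encoded in the op type itself
    p.add_instruction(migraph::op::relu{}, input);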
@@ -223,22 +223,6 @@ struct pooling
     }
 };
-struct activation
-{
-    std::string mode;
-    std::string name() const { return "activation"; }
-    shape compute_shape(std::vector<shape> inputs) const
-    {
-        check_shapes{inputs, *this}.has(1);
-        return inputs.front();
-    }
-    friend std::ostream& operator<<(std::ostream& os, const activation& op)
-    {
-        os << op.name() << ":" << op.mode;
-        return os;
-    }
-};
 struct leaky_relu
 {
     std::string name() const { return "leaky_relu"; }
@@ -673,6 +657,11 @@ struct neg : unary
     std::string name() const { return "neg"; }
 };
+struct relu : unary
+{
+    std::string name() const { return "relu"; }
+};
 struct softmax
 {
     std::string name() const { return "softmax"; }
@@ -52,7 +52,7 @@ struct onnx_parser
         add_generic_op("Div", op::div{});
         add_generic_op("MatMul", op::dot{});
         add_generic_op("Mul", op::mul{});
-        add_generic_op("Relu", op::activation{"relu"});
+        add_generic_op("Relu", op::relu{});
         add_generic_op("Sub", op::sub{});
         add_generic_op("Sum", op::add{});
@@ -606,6 +606,7 @@ struct cpu_apply
         apply_map["sin"] = simple_op<cpu_unary<sin_op>>();
         apply_map["cos"] = simple_op<cpu_unary<cos_op>>();
         apply_map["tan"] = simple_op<cpu_unary<tan_op>>();
+        apply_map["relu"] = simple_op<cpu_unary<relu_op>>();
         apply_map["add"] = simple_op<cpu_binary<add_op>>();
         apply_map["sub"] = simple_op<cpu_binary<sub_op>>();
         apply_map["mul"] = simple_op<cpu_binary<mul_op>>();
@@ -619,11 +620,7 @@ struct cpu_apply
         init();
         for(auto it : iterator_for(*prog))
         {
-            if(it->name() == "activation")
-            {
-                apply_activation(it);
-            }
-            else if(it->name() == "pooling")
+            if(it->name() == "pooling")
             {
                 apply_pooling(it);
             }
@@ -647,13 +644,6 @@ struct cpu_apply
         prog->replace_instruction(ins, T{op}, ins->inputs());
     }
-    void apply_activation(instruction_ref ins)
-    {
-        auto&& op = any_cast<op::activation>(ins->get_operator());
-        if(op.mode == "relu")
-            prog->replace_instruction(ins, cpu_unary<relu_op>{}, ins->inputs());
-    }
     void apply_pooling(instruction_ref ins)
     {
         auto&& op = any_cast<op::pooling>(ins->get_operator());
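The relu_op functor itself is outside this diff; only its registration as cpu_unary<relu_op> appears above. As a self-contained sketch of the computation it must perform, relu(x) = max(0, x), here is an illustrative standalone program (not the MIGraphX implementation):

    #include <algorithm>
    #include <iostream>
    #include <vector>

    // relu clamps negatives to zero and passes non-negatives through.
    float relu(float x) { return std::max(0.0f, x); }

    int main()
    {
        std::vector<float> xs{-1.0f, 0.0f, 1.0f}; // same inputs as relu_test below
        for(float& x : xs)
            x = relu(x);
        for(float x : xs)
            std::cout << x << ' '; // prints: 0 0 1
        std::cout << '\n';
    }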
@@ -50,9 +50,9 @@ struct miopen_apply
             {
                 check_shape(s, apply_convolution(it));
             }
-            else if(it->name() == "activation")
+            else if(it->name() == "relu")
             {
-                check_shape(s, apply_activation(it));
+                check_shape(s, apply_relu(it));
             }
             else if(it->name() == "leaky_relu")
             {
@@ -131,17 +131,14 @@ struct miopen_apply
             ins, miopen_pooling{op, std::move(pd)}, ins->inputs().at(0), output);
     }
-    instruction_ref apply_activation(instruction_ref ins)
+    instruction_ref apply_relu(instruction_ref ins)
     {
-        auto&& op = any_cast<op::activation>(ins->get_operator());
+        auto&& op = any_cast<op::relu>(ins->get_operator());
         auto ad = make_relu();
-        if(op.mode == "relu")
-        {
-            auto output = insert_allocation(ins, ins->get_shape());
-            return prog->replace_instruction(
-                ins, miopen_relu{std::move(ad)}, ins->inputs().at(0), output);
-        }
-        return ins;
+        auto output = insert_allocation(ins, ins->get_shape());
+        return prog->replace_instruction(
+            ins, miopen_relu{std::move(ad)}, ins->inputs().at(0), output);
     }
     instruction_ref apply_leaky_relu(instruction_ref ins)
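Note what the MIOpen hunk above removes besides the rename: apply_activation guarded its rewrite with op.mode == "relu" and fell through to return ins; for anything else, so an unrecognized mode string compiled fine and the instruction was quietly left unlowered. With a dedicated op type there is no mode to get wrong, and apply_relu rewrites unconditionally. An illustrative contrast (hypothetical snippet, not from this codebase):

    // old API: a typo in the mode compiles, matches no branch in
    // apply_activation, and the instruction is silently skipped
    p.add_instruction(migraph::op::activation{"reul"}, x);
    // new API: the relu op carries no string to misspell
    p.add_instruction(migraph::op::relu{}, x);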
@@ -556,7 +556,7 @@ void relu_test()
     migraph::program p;
     migraph::shape s{migraph::shape::float_type, {3}};
     auto l = p.add_literal(migraph::literal{s, {-1.f, 0.f, 1.f}});
-    p.add_instruction(migraph::op::activation{"relu"}, l);
+    p.add_instruction(migraph::op::relu{}, l);
     p.compile(migraph::cpu::target{});
     auto result = p.eval({});
     std::vector<float> results_vector(3);
@@ -158,7 +158,7 @@ struct test_literals
         auto weights = p.add_literal(
             generate_literal(migraph::shape{migraph::shape::float_type, {4, 3, 3, 3}}));
         auto conv = p.add_instruction(migraph::op::convolution{}, input, weights);
-        p.add_instruction(migraph::op::activation{"relu"}, conv);
+        p.add_instruction(migraph::op::relu{}, conv);
         return p;
     }
 };
};
@@ -392,7 +392,7 @@ struct test_conv_relu
         auto weights =
             p.add_parameter("w", migraph::shape{migraph::shape::float_type, {4, 3, 3, 3}});
         auto conv = p.add_instruction(migraph::op::convolution{}, input, weights);
-        p.add_instruction(migraph::op::activation{"relu"}, conv);
+        p.add_instruction(migraph::op::relu{}, conv);
         return p;
     }
 };
};
@@ -406,7 +406,7 @@ struct test_conv_relu_half
         auto weights =
             p.add_parameter("w", migraph::shape{migraph::shape::half_type, {4, 3, 3, 3}});
         auto conv = p.add_instruction(migraph::op::convolution{}, input, weights);
-        p.add_instruction(migraph::op::activation{"relu"}, conv);
+        p.add_instruction(migraph::op::relu{}, conv);
         return p;
     }
 };
};
@@ -419,7 +419,7 @@ struct test_add_relu
         auto x = p.add_parameter("x", migraph::shape{migraph::shape::float_type, {4, 3, 3, 3}});
         auto y = p.add_parameter("y", migraph::shape{migraph::shape::float_type, {4, 3, 3, 3}});
         auto add = p.add_instruction(migraph::op::add{}, x, y);
-        p.add_instruction(migraph::op::activation{"relu"}, add);
+        p.add_instruction(migraph::op::relu{}, add);
         return p;
     }
 };
};
@@ -446,7 +446,7 @@ struct test_conv_pooling
             p.add_parameter("w", migraph::shape{migraph::shape::float_type, {4, 3, 3, 3}});
         auto conv = p.add_instruction(migraph::op::convolution{}, input, weights);
         auto pooling = p.add_instruction(migraph::op::pooling{"max"}, conv);
-        p.add_instruction(migraph::op::activation{"relu"}, pooling);
+        p.add_instruction(migraph::op::relu{}, pooling);
         return p;
     }
 };
};
@@ -657,7 +657,7 @@ struct test_conv_bn_relu_pooling
         auto variance = p.add_literal(migraph::abs(migraph::generate_literal(vars, 4)));
         auto bn = p.add_instruction(
             migraph::op::batch_norm_inference{}, conv, scale, bias, mean, variance);
-        auto relu = p.add_instruction(migraph::op::activation{"relu"}, bn);
+        auto relu = p.add_instruction(migraph::op::relu{}, bn);
         p.add_instruction(migraph::op::pooling{"average", {1, 1}, {2, 2}, {3, 3}}, relu);
         return p;
     }
}
@@ -727,7 +727,7 @@ struct test_conv_bn_relu_pooling2
         auto conv2 = p.add_instruction(migraph::op::convolution{{0, 0}, {2, 2}, {1, 1}}, x2, w2);
         auto bn2 = add_bn(p, conv2, 2048);
         auto add = p.add_instruction(migraph::op::add{}, bn1, bn2);
-        auto relu = p.add_instruction(migraph::op::activation{"relu"}, add);
+        auto relu = p.add_instruction(migraph::op::relu{}, add);
         p.add_instruction(migraph::op::pooling{"average", {1, 1}, {2, 2}, {3, 3}}, relu);
         return p;
     }
@@ -32,7 +32,7 @@ void pytorch_conv_relu_maxpool()
     auto l3 = p.add_instruction(migraph::op::convolution{}, l0, l1);
     auto l4 = p.add_instruction(migraph::op::broadcast{axis, l3->get_shape()}, l2);
     auto l5 = p.add_instruction(migraph::op::add{}, l3, l4);
-    auto l6 = p.add_instruction(migraph::op::activation{"relu"}, l5);
+    auto l6 = p.add_instruction(migraph::op::relu{}, l5);
     p.add_instruction(migraph::op::pooling{"max", {{0, 0}}, {{2, 2}}, {{2, 2}}}, l6);
     auto prog = migraph::parse_onnx("conv_relu_maxpool.onnx");
@@ -55,7 +55,7 @@ void pytorch_conv_bn_relu_maxpool()
     auto l4 = p.add_instruction(migraph::op::broadcast{axis, l3->get_shape()}, l2);
     auto l5 = p.add_instruction(migraph::op::add{}, l3, l4);
     auto l6 = p.add_instruction(migraph::op::batch_norm_inference{1.0e-5f}, l5, p3, p4, p5, p6);
-    auto l7 = p.add_instruction(migraph::op::activation{"relu"}, l6);
+    auto l7 = p.add_instruction(migraph::op::relu{}, l6);
     p.add_instruction(migraph::op::pooling{"max", {{0, 0}}, {{2, 2}}, {{2, 2}}}, l7);
     auto prog = migraph::parse_onnx("conv_bn_relu_maxpool.onnx");
@@ -72,7 +72,7 @@ void pytorch_conv_relu_maxpool_x2()
     auto l3 = p.add_instruction(migraph::op::convolution{}, l0, l1);
     auto l4 = p.add_instruction(migraph::op::broadcast{axis, l3->get_shape()}, l2);
     auto l5 = p.add_instruction(migraph::op::add{}, l3, l4);
-    auto l6 = p.add_instruction(migraph::op::activation{"relu"}, l5);
+    auto l6 = p.add_instruction(migraph::op::relu{}, l5);
     auto l7 = p.add_instruction(migraph::op::pooling{"max", {{0, 0}}, {{2, 2}}, {{2, 2}}}, l6);
     auto l8 = p.add_parameter("3", {migraph::shape::float_type, {1, 5, 5, 5}});
@@ -80,7 +80,7 @@ void pytorch_conv_relu_maxpool_x2()
     auto l10 = p.add_instruction(migraph::op::convolution{}, l7, l8);
     auto l11 = p.add_instruction(migraph::op::broadcast{axis, l10->get_shape()}, l9);
     auto l12 = p.add_instruction(migraph::op::add{}, l10, l11);
-    auto l13 = p.add_instruction(migraph::op::activation{"relu"}, l12);
+    auto l13 = p.add_instruction(migraph::op::relu{}, l12);
     p.add_instruction(migraph::op::pooling{"max", {{0, 0}}, {{2, 2}}, {{2, 2}}}, l13);
     auto prog = migraph::parse_onnx("conv_relu_maxpoolX2.onnx");