"src/include/rtg/program.hpp" did not exist on "717744ce91f1153fe3eeb5ea79c4ac3ab407f894"
Commit 35b5b51f authored by Paul

Clean bn

parent 48f35aa0
@@ -37,9 +37,6 @@ struct miopen_batch_norm_inference
         float alpha = 1.0, beta = 0.0f;
-        // TODO: adityaatluri
-        // create bn-scale-bias-mean-variance descriptor for
-        // miopen call
         miopenBatchNormalizationForwardInference(ctx.handle.get(),
                                                  miopenBatchNormMode_t(op.bn_mode),
                                                  &alpha,
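Note: the removed TODO asked for a bn-scale-bias-mean-variance descriptor, but the miopenBatchNormalizationForwardInference call already covers the inference case. For reference, the math it applies per channel is the same one the removed test below checks. A minimal sketch of that computation over NCHW data follows; the function name bn_inference_ref, the flat indexing, and the epsilon default are illustrative assumptions here, not MIOpen's API.

    #include <cmath>
    #include <cstddef>
    #include <vector>

    // Reference per-channel batch-norm inference over NCHW data:
    // y = scale * (x - mean) / sqrt(variance + epsilon) + bias
    std::vector<float> bn_inference_ref(const std::vector<float>& x,
                                        const std::vector<float>& scale,
                                        const std::vector<float>& bias,
                                        const std::vector<float>& mean,
                                        const std::vector<float>& variance,
                                        std::size_t batches,
                                        std::size_t channels,
                                        std::size_t spatial, // height * width
                                        float epsilon = 1e-5f)
    {
        std::vector<float> y(x.size());
        for(std::size_t n = 0; n < batches; n++)
            for(std::size_t c = 0; c < channels; c++)
                for(std::size_t i = 0; i < spatial; i++)
                {
                    // Flat offset of element (n, c, i) in NCHW layout
                    std::size_t idx = (n * channels + c) * spatial + i;
                    y[idx] = scale[c] * (x[idx] - mean[c]) / std::sqrt(variance[c] + epsilon) +
                             bias[c];
                }
        return y;
    }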
@@ -328,8 +325,6 @@ struct miopen_apply
         {
             apply_contiguous(it);
         }
-        // TODO: adityaatluri
-        // tagging to easily find where code changed
         else if(it->op.name() == "batch_norm_inference")
         {
             apply_batch_norm_inference(it);
@@ -409,29 +404,24 @@ struct miopen_apply
         prog->replace_instruction(ins, miopen_contiguous{op}, ins->arguments.at(0), output);
     }

-    // TODO: adityaatluri
-    // Not sure how to write this. Review and fix required
     void apply_batch_norm_inference(instruction_ref ins)
     {
         auto&& op       = any_cast<batch_norm_inference>(ins->op);
         auto output     = insert_allocation(ins, ins->result);
         shape old_shape = ins->arguments.at(1)->get_shape();
         std::vector<int64_t> new_shape{1, static_cast<int64_t>(old_shape.elements()), 1, 1};
-        auto arg1 =
-            prog->insert_instruction(ins, migraph::reshape{new_shape}, ins->arguments.at(1));
-        auto arg2 =
-            prog->insert_instruction(ins, migraph::reshape{new_shape}, ins->arguments.at(2));
-        auto arg3 =
-            prog->insert_instruction(ins, migraph::reshape{new_shape}, ins->arguments.at(3));
-        auto arg4 =
-            prog->insert_instruction(ins, migraph::reshape{new_shape}, ins->arguments.at(4));
+        auto reshape_op = reshape{new_shape};
+        std::vector<instruction_ref> reshapes;
+        std::transform(ins->arguments.begin() + 1, ins->arguments.end(), std::back_inserter(reshapes), [&](auto i) {
+            return prog->insert_instruction(ins, reshape_op, i);
+        });
         prog->replace_instruction(ins,
                                   miopen_batch_norm_inference{op},
                                   ins->arguments.at(0),
-                                  arg1,
-                                  arg2,
-                                  arg3,
-                                  arg4,
+                                  reshapes[0],
+                                  reshapes[1],
+                                  reshapes[2],
+                                  reshapes[3],
                                   output);
     }
 };
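Note: this hunk replaces four copy-pasted insert_instruction calls with one std::transform over arguments 1 through 4, collecting the reshaped instructions through std::back_inserter. The same subrange-to-back_inserter pattern in isolation, with stand-in values instead of the migraph types:

    #include <algorithm>
    #include <iterator>
    #include <vector>

    int main()
    {
        std::vector<int> arguments = {0, 1, 2, 3, 4}; // stand-in for ins->arguments
        std::vector<int> reshapes;
        // Skip the first element (the input tensor) and transform the rest,
        // appending each result via back_inserter, as the new
        // apply_batch_norm_inference does with prog->insert_instruction.
        std::transform(arguments.begin() + 1,
                       arguments.end(),
                       std::back_inserter(reshapes),
                       [](int a) { return a * 10; }); // stand-in for the reshape insertion
        // reshapes now holds {10, 20, 30, 40}, used as reshapes[0]..reshapes[3].
        return 0;
    }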
@@ -281,49 +281,6 @@ struct test_batchnorm_inference
     }
 };

-void batch_norm_inference_test()
-{
-    migraph::program p;
-    const size_t width = 2, height = 2, channels = 4, batches = 2;
-    const float x_val = 8.0f, mean_val = 2.0f, variance_val = 4.0f, scale_val = 2.0f,
-                bias_val = 1.0f;
-    const float output_val = scale_val * (x_val - mean_val) / (std::sqrt(variance_val)) + bias_val;
-
-    migraph::shape s{migraph::shape::float_type, {batches, channels, height, width}};
-    migraph::shape vars{migraph::shape::float_type, {channels}};
-    std::vector<float> x_data(width * height * channels * batches);
-    std::vector<float> scale_data(channels);
-    std::vector<float> bias_data(channels);
-    std::vector<float> mean_data(channels);
-    std::vector<float> variance_data(channels);
-    std::fill(x_data.begin(), x_data.end(), x_val);
-    std::fill(mean_data.begin(), mean_data.end(), mean_val);
-    std::fill(variance_data.begin(), variance_data.end(), variance_val);
-    std::fill(scale_data.begin(), scale_data.end(), scale_val);
-    std::fill(bias_data.begin(), bias_data.end(), bias_val);
-
-    auto x        = p.add_literal(migraph::literal{s, x_data});
-    auto scale    = p.add_literal(migraph::literal{vars, scale_data});
-    auto bias     = p.add_literal(migraph::literal{vars, bias_data});
-    auto mean     = p.add_literal(migraph::literal{vars, mean_data});
-    auto variance = p.add_literal(migraph::literal{vars, variance_data});
-    p.add_instruction(migraph::batch_norm_inference{}, x, mean, variance, scale, bias);
-    p.compile(migraph::gpu::target{});
-
-    migraph::program::parameter_map m;
-    m["output"] =
-        migraph::gpu::to_gpu(migraph::generate_argument(p.get_parameter_shape("output")));
-    auto result = migraph::gpu::from_gpu(p.eval(m));
-    std::vector<float> result_vector(width * height * channels * batches);
-    std::vector<float> gold(width * height * channels * batches);
-    std::fill(gold.begin(), gold.end(), output_val);
-    result.visit([&](auto output) { result_vector.assign(output.begin(), output.end()); });
-    EXPECT(test::verify_range(result_vector, gold));
-}
-
 int main()
 {
     verify_program<test_add>();
@@ -338,5 +295,4 @@ int main()
     verify_program<test_contiguous>();
     verify_program<test_transpose>();
     verify_program<test_batchnorm_inference>();
-    batch_norm_inference_test();
 }
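Note: for the constants in the removed test, the expected value works out to scale_val * (x_val - mean_val) / std::sqrt(variance_val) + bias_val = 2 * (8 - 2) / 2 + 1 = 7 for every output element. The verify_program&lt;test_batchnorm_inference&gt;() case kept in main() still exercises the same operator, which presumably makes this standalone GPU test redundant and is why the cleanup drops it.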