Unverified Commit 19f65e7e authored by kahmed10, committed by GitHub

Update driver's perf report to account for batch size (#1000)

The driver already accepts a --batch flag to change the batch size when the model has a dynamic dimension. This change uses that value to adjust the rate shown in the perf report.
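The report derives a per-run rate from the timed iterations; multiplying that rate by the batch size turns it into samples per second rather than runs per second. Below is a minimal, self-contained sketch of that arithmetic (the variable names n, total_time_ms, and batch are illustrative, not the exact ones used in program::perf_report):

// Illustrative sketch of the rate adjustment; the names below are
// assumptions, not the exact variables used in program::perf_report.
#include <cstddef>
#include <iostream>

int main()
{
    std::size_t n        = 100;   // timed runs
    double total_time_ms = 250.0; // wall-clock time for those runs, in ms
    std::size_t batch    = 32;    // samples processed per run (from --batch)

    double rate = n / (total_time_ms / 1000.0); // runs per second
    std::cout << "Batch size: " << batch << std::endl;
    std::cout << "Rate: " << rate * batch << "/sec" << std::endl; // samples per second
}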
parent 157935ff
@@ -480,7 +480,7 @@ struct perf : command<perf>
         std::cout << "Allocating params ... " << std::endl;
         auto m = c.params(p);
         std::cout << "Running performance report ... " << std::endl;
-        p.perf_report(std::cout, n, m);
+        p.perf_report(std::cout, n, m, c.l.batch);
     }
 };
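With the driver change above, a run such as migraphx-driver perf resnet50.onnx --batch 64 (model name illustrative) passes the --batch value through to perf_report, so the printed rate is scaled by 64 and the report also states the batch size it used.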
@@ -67,7 +67,8 @@ struct program
     void finalize();
-    void perf_report(std::ostream& os, std::size_t n, parameter_map params) const;
+    void
+    perf_report(std::ostream& os, std::size_t n, parameter_map params, std::size_t batch = 1) const;
     void mark(const parameter_map& params, marker&& m);
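Because the new parameter defaults to 1, the header change is backward compatible: existing callers of perf_report keep compiling and keep reporting an unscaled rate. A small sketch of the two call forms (the wrapper function and the names prog, iters, params, and batch_size are illustrative, and the include path is assumed):

// Sketch only: the wrapper and its argument names are illustrative.
#include <cstddef>
#include <iostream>
#include <migraphx/program.hpp>

void report(const migraphx::program& prog,
            std::size_t iters,
            migraphx::parameter_map params,
            std::size_t batch_size)
{
    prog.perf_report(std::cout, iters, params);             // batch defaults to 1
    prog.perf_report(std::cout, iters, params, batch_size); // rate scaled by batch_size
}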
@@ -526,7 +526,10 @@ void program::mark(const parameter_map& params, marker&& m)
     m.mark_stop(*this);
 }

-void program::perf_report(std::ostream& os, std::size_t n, parameter_map params) const
+void program::perf_report(std::ostream& os,
+                          std::size_t n,
+                          parameter_map params,
+                          std::size_t batch) const
 {
     auto& ctx = this->impl->ctx;
     // Run once by itself
@@ -619,7 +622,8 @@ void program::perf_report(std::ostream& os, std::size_t n, parameter_map params)
     os << std::endl;
-    os << "Rate: " << rate << "/sec" << std::endl;
+    os << "Batch size: " << batch << std::endl;
+    os << "Rate: " << rate * batch << "/sec" << std::endl;
     os << "Total time: " << total_time << "ms" << std::endl;
     os << "Total instructions time: " << total_instruction_time << "ms" << std::endl;
     os << "Overhead time: " << overhead_time << "ms"
@@ -19,6 +19,7 @@ TEST_CASE(perf_report)
     std::string output = ss.str();
     EXPECT(migraphx::contains(output, "Summary:"));
+    EXPECT(migraphx::contains(output, "Batch size:"));
     EXPECT(migraphx::contains(output, "Rate:"));
     EXPECT(migraphx::contains(output, "Total time:"));
     EXPECT(migraphx::contains(output, "Total instructions time:"));