Commit 56762529 authored by lijian6

Initial commit


Signed-off-by: lijian <lijian6@sugon.com>
#include <Sample.h>
#include <SimpleLog.h>
#include <Filesystem.h>
#include <DetectorYOLOV5.h>
#include <sys/time.h>
#include <Decoder.h>
#include <Queuethread.h>
using namespace cv;
using namespace std;
using namespace cv::dnn;
using namespace migraphx;
using namespace migraphxSamples;
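// Producer thread: demuxes the input stream with FFmpeg, decodes it either in
// software (CPU) or on the NETINT Quadra hardware decoder (_HW / _HW_DMA),
// converts each frame to the layout the detector expects, and pushes it into
// the shared queue.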
static void DecoderThreadFunc(Queue* queue)
{
int ret, end = 0;
int frame_cnt = 0;
Queue* que = queue;
Decoder decoder(que->device);
InitializationParameterOfDecoder initParamOfDecoderYOLOV5;
#ifndef DMA
initParamOfDecoderYOLOV5.src_filename = "../Resource/Images/Mean.mp4";
if( que->device == _HW) {
initParamOfDecoderYOLOV5.str_devid[4] = 0; // presumably zero-terminates the device-ID buffer; the original braced form '= {0}' looked like leftover declaration syntax
initParamOfDecoderYOLOV5.xcoder_params = "out=hw";
initParamOfDecoderYOLOV5.dec_name = "h264_ni_quadra_dec";
initParamOfDecoderYOLOV5.filters_descr = "ni_quadra_scale=608:608:format=bgrp,hwdownload,format=bgrp";
} else if (que->device == _HW_DMA) {
LOG_ERROR(stdout, "Error program param or cmake param, not USE_P2P can`t set '--dma'!\n");
que->finish();
return;
}
#else
if( que->device == _HW_DMA) {
initParamOfDecoderYOLOV5.str_devid[4] = 0; // presumably zero-terminates the device-ID buffer (see above)
initParamOfDecoderYOLOV5.xcoder_params = "out=hw";
initParamOfDecoderYOLOV5.dec_name = "h264_ni_quadra_dec";
initParamOfDecoderYOLOV5.filters_descr = "ni_quadra_scale=608:608:format=rgba:is_p2p=1";
initParamOfDecoderYOLOV5.src_filename = "../Resource/Images/cr7_1920x1080.h264";
} else {
LOG_ERROR(stdout, "Error program param or cmake param, USE_P2P need set '--dma'!\n");
que->finish();
return;
}
#endif
ret = decoder.DecoderInit(initParamOfDecoderYOLOV5);
if (ret == -1)
{
que->finish();
return;
}
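// Demux/decode loop. 'end' is a small state machine: 0 = still reading
// packets, 1 = input exhausted and the decoder is being drained (flush packet
// sent), 2 = decoder fully drained, exit the loop.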
while(true)
{
if (av_read_frame(decoder.fmt_ctx, decoder.pkt) < 0)
{
if(end == 2)
{
que->DecodeEnd = true;
break;
}
end = 1;
}
if (decoder.pkt->stream_index == decoder.video_stream_idx) {
if(!end) {
ret = avcodec_send_packet(decoder.video_dec_ctx, decoder.pkt);
} else {
ret = avcodec_send_packet(decoder.video_dec_ctx, NULL);
}
if (ret < 0 && ret != AVERROR_EOF) {
fprintf(stderr, "Error submitting a packet for decoding\n");
que->DecodeEnd = true;
break;
}
while (ret >= 0 || end == 1)
{
ret = avcodec_receive_frame(decoder.video_dec_ctx, decoder.frame);
if (ret == AVERROR(EAGAIN)) {
break;
} else if (ret == AVERROR_EOF ) {
end = 2;
break;
} else if (ret < 0) {
av_log(NULL, AV_LOG_ERROR, "Error while receiving a frame from the decoder\n");
que->finish();
return;
}
decoder.frame->pts = decoder.frame->best_effort_timestamp;
frame_cnt++;
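// CPU path: pack the three YUV420P planes into one contiguous I420 buffer.
// This assumes linesize == width (no row padding) for all planes.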
if (que->device == CPU)
{
cv::Mat srcImage = cv::Mat::zeros(decoder.frame->height*3/2, decoder.frame->width, CV_8UC1);
memcpy(srcImage.data, (unsigned char*)decoder.frame->data[0], decoder.frame->width * decoder.frame->height);
memcpy(srcImage.data + decoder.frame->width * decoder.frame->height, (unsigned char*)decoder.frame->data[1], decoder.frame->width * decoder.frame->height/4);
memcpy(srcImage.data + decoder.frame->width * decoder.frame->height*5/4, (unsigned char*)decoder.frame->data[2], decoder.frame->width * decoder.frame->height/4);
cvtColor(srcImage, srcImage, COLOR_YUV2RGB_I420); // planes were packed in Y,U,V (I420) order; COLOR_YUV420p2RGB expects YV12 and would swap the chroma channels
que->enQueue(srcImage);
}
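// HW path: feed the decoded (hardware) frame into the filtergraph, which
// scales it to the network input size, then drain converted frames from the sink.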
if (que->device == _HW || que->device == _HW_DMA)
{
if (av_buffersrc_add_frame_flags(decoder.buffersrc_ctx, decoder.frame, AV_BUFFERSRC_FLAG_KEEP_REF) < 0) {
av_log(NULL, AV_LOG_ERROR, "Error while feeding the filtergraph\n");
break;
}
while (1)
{
ret = av_buffersink_get_frame(decoder.buffersink_ctx, decoder.filt_frame);
if (ret == AVERROR(EAGAIN))
{
break;
}
else if(ret == AVERROR_EOF)
{
end = 2;
break;
}
if (ret < 0)
{
que->finish();
return;
}
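// Non-DMA HW path: the filtergraph used hwdownload, so filt_frame is in host
// memory; repack it into an RGB cv::Mat according to its pixel format.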
#ifndef DMA
if (que->device == _HW)
{
cv::Mat srcImage;
switch (decoder.filt_frame->format)
{
case AV_PIX_FMT_BGRP:
{
srcImage = cv::Mat::zeros(decoder.filt_frame->height, decoder.filt_frame->width, CV_8UC3);
// AV_PIX_FMT_BGRP is planar BGR: plane 0 = B, plane 1 = G, plane 2 = R (assumes linesize == width)
cv::Mat mat_b = cv::Mat(decoder.filt_frame->height, decoder.filt_frame->width, CV_8UC1, (unsigned char*)decoder.filt_frame->data[0]);
cv::Mat mat_g = cv::Mat(decoder.filt_frame->height, decoder.filt_frame->width, CV_8UC1, (unsigned char*)decoder.filt_frame->data[1]);
cv::Mat mat_r = cv::Mat(decoder.filt_frame->height, decoder.filt_frame->width, CV_8UC1, (unsigned char*)decoder.filt_frame->data[2]);
cv::Mat Channels[3]{mat_r, mat_g, mat_b}; // merge as R,G,B to match the other code paths
cv::merge(Channels, 3, srcImage);
break;
}
case AV_PIX_FMT_YUV420P:
{
srcImage = cv::Mat::zeros(decoder.filt_frame->height*3/2, decoder.filt_frame->width, CV_8UC1);
memcpy(srcImage.data, (unsigned char*)decoder.filt_frame->data[0], decoder.filt_frame->width * decoder.filt_frame->height);
memcpy(srcImage.data + decoder.filt_frame->width * decoder.filt_frame->height, (unsigned char*)decoder.filt_frame->data[1], decoder.filt_frame->width * decoder.filt_frame->height/4);
memcpy(srcImage.data + decoder.filt_frame->width * decoder.filt_frame->height*5/4, (unsigned char*)decoder.filt_frame->data[2], decoder.filt_frame->width * decoder.filt_frame->height/4);
cvtColor(srcImage, srcImage, COLOR_YUV2RGB_I420); // I420 plane order, see the CPU path above
break;
}
case AV_PIX_FMT_RGBA:
{
srcImage = cv::Mat::zeros(decoder.filt_frame->height, decoder.filt_frame->width, CV_8UC4);
memcpy(srcImage.data, (unsigned char*)decoder.filt_frame->data[0], decoder.filt_frame->width * decoder.filt_frame->height * 4);
cvtColor(srcImage, srcImage, COLOR_RGBA2RGB); // the filter emits RGBA, so COLOR_BGRA2RGB would swap R and B
break;
}
default:
break;
}
que->enQueue(srcImage);
av_frame_unref(decoder.filt_frame);
}
#else
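// DMA (P2P) path: the frame stays on the Quadra card; retrieve_filter_frame
// presumably copies it over PCIe directly into the hipMalloc'd DCU buffer.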
if (que->device == _HW_DMA)
{
DCU_Frame dcu_frame;
AVHWFramesContext *hwfc = (AVHWFramesContext *)decoder.filt_frame->hw_frames_ctx->data;
switch (hwfc->sw_format)
{
case AV_PIX_FMT_BGRP:
{
dcu_frame.format = AV_PIX_FMT_BGRP;
dcu_frame.data_len = decoder.filt_frame->width * decoder.filt_frame->height * 3;
dcu_frame.srcImage = cv::Mat::zeros(decoder.filt_frame->height, decoder.filt_frame->width, CV_8UC3);
break;
}
case AV_PIX_FMT_YUV420P:
{
dcu_frame.format = AV_PIX_FMT_YUV420P;
dcu_frame.data_len = decoder.filt_frame->width * decoder.filt_frame->height * 3 / 2;
dcu_frame.srcImage = cv::Mat::zeros(decoder.filt_frame->height*3/2, decoder.filt_frame->width, CV_8UC1);
break;
}
case AV_PIX_FMT_RGBA:
{
dcu_frame.format = AV_PIX_FMT_RGBA;
dcu_frame.data_len = decoder.filt_frame->width * decoder.filt_frame->height * 4;
dcu_frame.srcImage = cv::Mat::zeros(decoder.filt_frame->height, decoder.filt_frame->width, CV_8UC4);
break;
}
default:
break;
}
dcu_frame.width = decoder.filt_frame->width;
dcu_frame.height = decoder.filt_frame->height;
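// Allocate a per-frame DCU (device) buffer; the detector thread releases it
// with hipFree after inference.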
hipMalloc((void**)&(dcu_frame.dcu_data), dcu_frame.data_len * sizeof(unsigned char));
ret = decoder.retrieve_filter_frame(dcu_frame, decoder.filt_frame);
if (ret)
av_log(NULL, AV_LOG_ERROR, "Error while retrieve_filter_frame with p2p.\n");
if(dcu_frame.format == AV_PIX_FMT_BGRP)
{
cv::Mat mat_b = cv::Mat(decoder.filt_frame->height, decoder.filt_frame->width, CV_8UC1, (unsigned char*)dcu_frame.srcImage.data);
cv::Mat mat_g = cv::Mat(decoder.filt_frame->height, decoder.filt_frame->width, CV_8UC1, (unsigned char*)(dcu_frame.srcImage.data + decoder.filt_frame->height * decoder.filt_frame->width));
cv::Mat mat_r = cv::Mat(decoder.filt_frame->height, decoder.filt_frame->width, CV_8UC1, (unsigned char*)(dcu_frame.srcImage.data + decoder.filt_frame->height * decoder.filt_frame->width * 2));
cv::Mat Channels[3]{mat_r, mat_g, mat_b};
cv::Mat packed;
cv::merge(Channels, 3, packed); // merge into a fresh Mat: the planes alias srcImage.data, so merging in place would overwrite them while they are still being read
dcu_frame.srcImage = packed;
}
if(dcu_frame.format == AV_PIX_FMT_YUV420P)
cvtColor(dcu_frame.srcImage, dcu_frame.srcImage, COLOR_YUV2RGB_I420); // assuming retrieve_filter_frame keeps FFmpeg's Y,U,V (I420) plane order; COLOR_YUV420p2RGB expects YV12
if(dcu_frame.format == AV_PIX_FMT_RGBA)
cvtColor(dcu_frame.srcImage, dcu_frame.srcImage, COLOR_RGBA2RGB); // the filter emits RGBA, so COLOR_BGRA2RGB would swap R and B
queue->enQueue(dcu_frame);
av_frame_unref(decoder.filt_frame);
}
#endif
}
}
av_frame_unref(decoder.frame);
}
}
av_packet_unref(decoder.pkt);
}
LOG_INFO(stdout, "Decoder: ####### frame count: %d\n", frame_cnt);
que->finish();
}
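// Consumer thread: initializes the YOLOV5 detector, then pops frames from the
// shared queue, runs inference, and logs/draws the detections until decoding ends.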
static void DetectorThreadFunc(Queue* que)
{
Queue* queue = que;
// DetectorYOLOV5 Init
DetectorYOLOV5 detector;
InitializationParameterOfDetector initParamOfDetectorYOLOV5;
initParamOfDetectorYOLOV5.parentPath = "";
initParamOfDetectorYOLOV5.configFilePath = CONFIG_FILE;
initParamOfDetectorYOLOV5.logName = "";
ErrorCode errorCode = detector.Initialize(initParamOfDetectorYOLOV5);
if (errorCode != SUCCESS)
{
LOG_ERROR(stdout, "failed to initialize the detector!\n");
exit(-1);
}
LOG_INFO(stdout, "detector initialized successfully\n");
int frame_cnt = 0;
double start_time = getTickCount();
while (!queue->DecodeEnd) {
#ifdef DMA
DCU_Frame dcu_frame;
queue->deQueue(&dcu_frame);
if(dcu_frame.srcImage.empty()) {
continue;
}
#else
cv::Mat InferImage;
queue->deQueue(&InferImage);
if (InferImage.empty()) {
continue;
}
#endif
// detect
std::vector<ResultOfDetection> predictions;
double time1 = getTickCount();
#ifdef DMA
detector.Detect(dcu_frame, predictions);
#else
detector.Detect(InferImage, predictions);
#endif
double time2 = getTickCount();
double elapsedTime = (time2 - time1)*1000 / getTickFrequency();
LOG_INFO(stdout, "inference time:%f ms\n", elapsedTime);
frame_cnt++;
#ifdef DMA
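// Release the per-frame device buffer allocated by the decoder thread.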
hipFree(dcu_frame.dcu_data);
#endif
// process result
LOG_INFO(stdout,"////////////////Detection Results////////////////\n");
for (size_t i = 0; i < predictions.size(); ++i)
{
ResultOfDetection result = predictions[i];
#ifdef DMA
cv::rectangle(dcu_frame.srcImage, result.boundingBox, Scalar(0,255,255),2);
cv::putText(dcu_frame.srcImage, result.className, cv::Point(result.boundingBox.x, result.boundingBox.y-20), cv::FONT_HERSHEY_PLAIN, 2.0, Scalar(0, 0, 255), 2);
#else
cv::rectangle(InferImage, result.boundingBox, Scalar(0,255,255),2);
cv::putText(InferImage, result.className, cv::Point(result.boundingBox.x, result.boundingBox.y-20), cv::FONT_HERSHEY_PLAIN, 2.0, Scalar(0, 0, 255), 2);
#endif
LOG_INFO(stdout,"box:%d %d %d %d,label:%d,confidence:%f\n",result.boundingBox.x,
result.boundingBox.y,result.boundingBox.width,result.boundingBox.height,result.classID,result.confidence);
}
// X11 display is not supported inside Docker, so the on-screen preview below is disabled.
/*namedWindow("video", WINDOW_NORMAL | WINDOW_KEEPRATIO);
#ifdef DMA
imshow("video", dcu_frame.srcImage);
#else
imshow("video", InferImage);
#endif
if (waitKey(10) == 'q') {
break;
}*/
}
#ifdef DMA
hipFree(detector.preprocess_Image);
#endif
double end_time = getTickCount();
fprintf(stdout, "Finish ####### frame_cnt: %d, Inference fps: %.2f, all time: %.2f ms\n", frame_cnt, float(frame_cnt/((end_time - start_time)/getTickFrequency())), (end_time - start_time)/getTickFrequency()*1000);
queue->finish();
}
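// Wires the pipeline together: one decoder (producer) thread and one detector
// (consumer) thread communicate through a shared Queue.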
void Sample_DetectorYOLOV5(int device)
{
Queue* queue = new Queue(1);
queue->device = device;
std::thread ThreadDecoder(DecoderThreadFunc, queue);
std::thread ThreadDetector(DetectorThreadFunc, queue);
ThreadDecoder.join();
ThreadDetector.join();
delete queue;
queue = NULL;
return;
}
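// ===========================================================================
// Next source file in this commit: the YOLOV7 sample. It mirrors the YOLOV5
// sample above; only the detector class and the 640x640 input scale differ.
// ===========================================================================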
#include <Sample.h>
#include <SimpleLog.h>
#include <Filesystem.h>
#include <DetectorYOLOV7.h>
#include <sys/time.h>
#include <Decoder.h>
#include <Queuethread.h>
using namespace cv;
using namespace std;
using namespace cv::dnn;
using namespace migraphx;
using namespace migraphxSamples;
static void DecoderThreadFunc(Queue* queue)
{
int ret, end = 0;
int frame_cnt = 0;
Queue* que = queue;
Decoder decoder(que->device);
InitializationParameterOfDecoder initParamOfDecoderYOLOV7;
#ifndef DMA
initParamOfDecoderYOLOV7.src_filename = "../Resource/Images/Mean.mp4";
if( que->device == _HW)
{
initParamOfDecoderYOLOV7.str_devid[4] = 0; // presumably zero-terminates the device-ID buffer (see the YOLOV5 sample)
initParamOfDecoderYOLOV7.xcoder_params = "out=hw";
initParamOfDecoderYOLOV7.dec_name = "h264_ni_quadra_dec";
initParamOfDecoderYOLOV7.filters_descr = "ni_quadra_scale=640:640:format=bgrp,hwdownload,format=bgrp";
} else if (que->device == _HW_DMA) {
LOG_ERROR(stdout, "Error program param or cmake param, not USE_P2P can`t set '--dma'!\n");
que->finish();
return;
}
#else
if( que->device == _HW_DMA) {
initParamOfDecoderYOLOV7.str_devid[4] = 0; // presumably zero-terminates the device-ID buffer (see the YOLOV5 sample)
initParamOfDecoderYOLOV7.xcoder_params = "out=hw";
initParamOfDecoderYOLOV7.dec_name = "h264_ni_quadra_dec";
initParamOfDecoderYOLOV7.filters_descr = "ni_quadra_scale=640:640:format=rgba:is_p2p=1";
initParamOfDecoderYOLOV7.src_filename = "../Resource/Images/cr7_1920x1080.h264";
} else {
LOG_ERROR(stdout, "Error program param or cmake param, USE_P2P need set '--dma'!\n");
que->finish();
return;
}
#endif
ret = decoder.DecoderInit(initParamOfDecoderYOLOV7);
if (ret == -1)
{
que->finish();
return;
}
while(true)
{
if (av_read_frame(decoder.fmt_ctx, decoder.pkt) < 0)
{
if(end == 2)
{
que->DecodeEnd = true;
break;
}
end = 1;
}
if (decoder.pkt->stream_index == decoder.video_stream_idx) {
if(!end) {
ret = avcodec_send_packet(decoder.video_dec_ctx, decoder.pkt);
} else {
ret = avcodec_send_packet(decoder.video_dec_ctx, NULL);
}
if (ret < 0 && ret != AVERROR_EOF) {
fprintf(stderr, "Error submitting a packet for decoding\n");
que->DecodeEnd = true;
break;
}
while (ret >= 0 || end == 1)
{
ret = avcodec_receive_frame(decoder.video_dec_ctx, decoder.frame);
if (ret == AVERROR(EAGAIN)) {
break;
} else if (ret == AVERROR_EOF ) {
end = 2;
break;
} else if (ret < 0) {
av_log(NULL, AV_LOG_ERROR, "Error while receiving a frame from the decoder\n");
que->finish();
return;
}
decoder.frame->pts = decoder.frame->best_effort_timestamp;
frame_cnt++;
if (que->device == CPU)
{
cv::Mat srcImage = cv::Mat::zeros(decoder.frame->height*3/2, decoder.frame->width, CV_8UC1);
memcpy(srcImage.data, (unsigned char*)decoder.frame->data[0], decoder.frame->width * decoder.frame->height);
memcpy(srcImage.data + decoder.frame->width * decoder.frame->height, (unsigned char*)decoder.frame->data[1], decoder.frame->width * decoder.frame->height/4);
memcpy(srcImage.data + decoder.frame->width * decoder.frame->height*5/4, (unsigned char*)decoder.frame->data[2], decoder.frame->width * decoder.frame->height/4);
cvtColor(srcImage, srcImage, COLOR_YUV2RGB_I420); // planes were packed in Y,U,V (I420) order; COLOR_YUV420p2RGB expects YV12 and would swap the chroma channels
que->enQueue(srcImage);
}
if (que->device == _HW || que->device == _HW_DMA)
{
if (av_buffersrc_add_frame_flags(decoder.buffersrc_ctx, decoder.frame, AV_BUFFERSRC_FLAG_KEEP_REF) < 0) {
av_log(NULL, AV_LOG_ERROR, "Error while feeding the filtergraph\n");
break;
}
while (1)
{
ret = av_buffersink_get_frame(decoder.buffersink_ctx, decoder.filt_frame);
if (ret == AVERROR(EAGAIN))
{
break;
}
else if(ret == AVERROR_EOF)
{
end = 2;
break;
}
if (ret < 0)
{
que->finish();
return;
}
#ifndef DMA
if (que->device == _HW)
{
cv::Mat srcImage;
switch (decoder.filt_frame->format)
{
case AV_PIX_FMT_BGRP:
{
srcImage = cv::Mat::zeros(decoder.filt_frame->height, decoder.filt_frame->width, CV_8UC3);
// AV_PIX_FMT_BGRP is planar BGR: plane 0 = B, plane 1 = G, plane 2 = R (assumes linesize == width)
cv::Mat mat_b = cv::Mat(decoder.filt_frame->height, decoder.filt_frame->width, CV_8UC1, (unsigned char*)decoder.filt_frame->data[0]);
cv::Mat mat_g = cv::Mat(decoder.filt_frame->height, decoder.filt_frame->width, CV_8UC1, (unsigned char*)decoder.filt_frame->data[1]);
cv::Mat mat_r = cv::Mat(decoder.filt_frame->height, decoder.filt_frame->width, CV_8UC1, (unsigned char*)decoder.filt_frame->data[2]);
cv::Mat Channels[3]{mat_r, mat_g, mat_b}; // merge as R,G,B to match the other code paths
cv::merge(Channels, 3, srcImage);
break;
}
case AV_PIX_FMT_YUV420P:
{
srcImage = cv::Mat::zeros(decoder.filt_frame->height*3/2, decoder.filt_frame->width, CV_8UC1);
memcpy(srcImage.data, (unsigned char*)decoder.filt_frame->data[0], decoder.filt_frame->width * decoder.filt_frame->height);
memcpy(srcImage.data + decoder.filt_frame->width * decoder.filt_frame->height, (unsigned char*)decoder.filt_frame->data[1], decoder.filt_frame->width * decoder.filt_frame->height/4);
memcpy(srcImage.data + decoder.filt_frame->width * decoder.filt_frame->height*5/4, (unsigned char*)decoder.filt_frame->data[2], decoder.filt_frame->width * decoder.filt_frame->height/4);
cvtColor(srcImage, srcImage, COLOR_YUV2RGB_I420); // I420 plane order, see the CPU path above
break;
}
case AV_PIX_FMT_RGBA:
{
srcImage = cv::Mat::zeros(decoder.filt_frame->height, decoder.filt_frame->width, CV_8UC4);
memcpy(srcImage.data, (unsigned char*)decoder.filt_frame->data[0], decoder.filt_frame->width * decoder.filt_frame->height * 4);
cvtColor(srcImage, srcImage, COLOR_RGBA2RGB); // the filter emits RGBA, so COLOR_BGRA2RGB would swap R and B
break;
}
default:
break;
}
que->enQueue(srcImage);
av_frame_unref(decoder.filt_frame);
}
#else
if (que->device == _HW_DMA)
{
DCU_Frame dcu_frame;
AVHWFramesContext *hwfc = (AVHWFramesContext *)decoder.filt_frame->hw_frames_ctx->data;
switch (hwfc->sw_format)
{
case AV_PIX_FMT_BGRP:
{
dcu_frame.format = AV_PIX_FMT_BGRP;
dcu_frame.data_len = decoder.filt_frame->width * decoder.filt_frame->height * 3;
dcu_frame.srcImage = cv::Mat::zeros(decoder.filt_frame->height, decoder.filt_frame->width, CV_8UC3);
break;
}
case AV_PIX_FMT_YUV420P:
{
dcu_frame.format = AV_PIX_FMT_YUV420P;
dcu_frame.data_len = decoder.filt_frame->width * decoder.filt_frame->height * 3 / 2;
dcu_frame.srcImage = cv::Mat::zeros(decoder.filt_frame->height*3/2, decoder.filt_frame->width, CV_8UC1);
break;
}
case AV_PIX_FMT_RGBA:
{
dcu_frame.format = AV_PIX_FMT_RGBA;
dcu_frame.data_len = decoder.filt_frame->width * decoder.filt_frame->height * 4;
dcu_frame.srcImage = cv::Mat::zeros(decoder.filt_frame->height, decoder.filt_frame->width, CV_8UC4);
break;
}
default:
break;
}
dcu_frame.width = decoder.filt_frame->width;
dcu_frame.height = decoder.filt_frame->height;
hipMalloc((void**)&(dcu_frame.dcu_data), dcu_frame.data_len * sizeof(unsigned char));
ret = decoder.retrieve_filter_frame(dcu_frame, decoder.filt_frame);
if (ret)
av_log(NULL, AV_LOG_ERROR, "Error while retrieve_filter_frame with p2p.\n");
if(dcu_frame.format == AV_PIX_FMT_BGRP)
{
cv::Mat mat_b = cv::Mat(decoder.filt_frame->height, decoder.filt_frame->width, CV_8UC1, (unsigned char*)dcu_frame.srcImage.data);
cv::Mat mat_g = cv::Mat(decoder.filt_frame->height, decoder.filt_frame->width, CV_8UC1, (unsigned char*)(dcu_frame.srcImage.data + decoder.filt_frame->height * decoder.filt_frame->width));
cv::Mat mat_r = cv::Mat(decoder.filt_frame->height, decoder.filt_frame->width, CV_8UC1, (unsigned char*)(dcu_frame.srcImage.data + decoder.filt_frame->height * decoder.filt_frame->width * 2));
cv::Mat Channels[3]{mat_r, mat_g, mat_b};
cv::Mat packed;
cv::merge(Channels, 3, packed); // merge into a fresh Mat: the planes alias srcImage.data, so merging in place would overwrite them while they are still being read
dcu_frame.srcImage = packed;
}
if(dcu_frame.format == AV_PIX_FMT_YUV420P)
cvtColor(dcu_frame.srcImage, dcu_frame.srcImage, COLOR_YUV2RGB_I420); // assuming retrieve_filter_frame keeps FFmpeg's Y,U,V (I420) plane order; COLOR_YUV420p2RGB expects YV12
if(dcu_frame.format == AV_PIX_FMT_RGBA)
cvtColor(dcu_frame.srcImage, dcu_frame.srcImage, COLOR_RGBA2RGB); // the filter emits RGBA, so COLOR_BGRA2RGB would swap R and B
queue->enQueue(dcu_frame);
av_frame_unref(decoder.filt_frame);
}
#endif
}
}
av_frame_unref(decoder.frame);
}
}
av_packet_unref(decoder.pkt);
}
LOG_INFO(stdout, "Decoder: ####### frame count: %d\n", frame_cnt);
que->finish();
}
static void DetectorThreadFunc(Queue* que)
{
Queue* queue = que;
// DetectorYOLOV7 Init
DetectorYOLOV7 detector;
InitializationParameterOfDetector initParamOfDetectorYOLOV7;
initParamOfDetectorYOLOV7.parentPath = "";
initParamOfDetectorYOLOV7.configFilePath = CONFIG_FILE;
initParamOfDetectorYOLOV7.logName = "";
ErrorCode errorCode = detector.Initialize(initParamOfDetectorYOLOV7);
if (errorCode != SUCCESS)
{
LOG_ERROR(stdout, "failed to initialize the detector!\n");
exit(-1);
}
LOG_INFO(stdout, "detector initialized successfully\n");
int frame_cnt = 0;
double start_time = getTickCount();
while (!queue->DecodeEnd) {
#ifdef DMA
DCU_Frame dcu_frame;
queue->deQueue(&dcu_frame);
if(dcu_frame.srcImage.empty()) {
continue;
}
#else
cv::Mat InferImage;
queue->deQueue(&InferImage);
if (InferImage.empty()) {
continue;
}
#endif
// detect
std::vector<ResultOfDetection> predictions;
double time1 = getTickCount();
#ifdef DMA
detector.Detect(dcu_frame, predictions);
#else
detector.Detect(InferImage, predictions);
#endif
double time2 = getTickCount();
double elapsedTime = (time2 - time1)*1000 / getTickFrequency();
LOG_INFO(stdout, "inference time:%f ms\n", elapsedTime);
frame_cnt++;
#ifdef DMA
hipFree(dcu_frame.dcu_data);
#endif
// process result
LOG_INFO(stdout,"////////////////Detection Results////////////////\n");
for (size_t i = 0; i < predictions.size(); ++i)
{
ResultOfDetection result = predictions[i];
#ifdef DMA
cv::rectangle(dcu_frame.srcImage, result.boundingBox, Scalar(0,255,255),2);
cv::putText(dcu_frame.srcImage, result.className, cv::Point(result.boundingBox.x, result.boundingBox.y-20), cv::FONT_HERSHEY_PLAIN, 2.0, Scalar(0, 0, 255), 2);
#else
cv::rectangle(InferImage, result.boundingBox, Scalar(0,255,255),2);
cv::putText(InferImage, result.className, cv::Point(result.boundingBox.x, result.boundingBox.y-20), cv::FONT_HERSHEY_PLAIN, 2.0, Scalar(0, 0, 255), 2);
#endif
LOG_INFO(stdout,"box:%d %d %d %d,label:%d,confidence:%f\n",result.boundingBox.x,
result.boundingBox.y,result.boundingBox.width,result.boundingBox.height,result.classID,result.confidence);
}
// X11 display is not supported inside Docker, so the on-screen preview below is disabled.
/*namedWindow("video", WINDOW_NORMAL | WINDOW_KEEPRATIO);
imshow("video", InferImage);
if (waitKey(10) == 'q') {
break;
}*/
}
#ifdef DMA
hipFree(detector.preprocess_Image);
#endif
double end_time = getTickCount();
fprintf(stdout, "Finish ####### frame_cnt: %d, Inference fps: %.2f, all time: %.2f ms\n", frame_cnt, float(frame_cnt/((end_time - start_time)/getTickFrequency())), (end_time - start_time)/getTickFrequency()*1000);
queue->finish();
}
void Sample_DetectorYOLOV7(int device)
{
Queue* queue = new Queue(1);
queue->device = device;
std::thread ThreadDecoder(DecoderThreadFunc, queue);
std::thread ThreadDetector(DetectorThreadFunc, queue);
ThreadDecoder.join();
ThreadDetector.join();
delete queue;
queue = NULL;
return;
}
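// ===========================================================================
// Final source file in this commit: the command-line entry point.
// ===========================================================================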
#include <cstring>
#include <stdio.h>
#include <stdlib.h>
#include <string>
#include <Sample.h>
#include <getopt.h>
#include <SimpleLog.h>
void MIGraphXSamplesUsage()
{
printf("Two args are required: ./a --cpu --net=0\n");
printf("Usage : argc[1] --device_type argc[2] --net=index \n");
printf("device_type: cpu / hw / dma\n");
printf("index:\n");
printf("\t 0) YOLOV3 sample.\n");
printf("\t 1) YOLOV5 sample.\n");
printf("\t 2) YOLOV7 sample.\n");
printf("\t 3) SSD sample.\n");
printf("\t 4) RetinaFace sample.\n");
}
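// Long options accepted by getopt_long; each maps to the short code in the
// fourth field ('c', 'h', 'd', 'n').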
static struct option long_options[] = {
{"cpu", no_argument, NULL, 'c'},
{"hw", no_argument, NULL, 'h'},
{"dma", no_argument, NULL, 'd'},
{"net", required_argument, NULL, 'n'},
{NULL, 0, NULL, 0}
};
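// Entry point: parse the device type (--cpu / --hw / --dma) and the network
// index (--net=N), then dispatch to the matching sample.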
int main(int argc, char *argv[])
{
if (argc != 3)
{
MIGraphXSamplesUsage();
return -1;
}
int opt, index = -1;
int device = -1;
const char* Nets[] = {"YOLOV3", "YOLOV5", "YOLOV7", "SSD", "RetinaFace"};
while ((opt = getopt_long(argc, argv, "chdn:", long_options, NULL)) != -1)
{
switch (opt)
{
case 'c':
LOG_INFO(stdout, "Run SW Decode.\n");
device = CPU;
break;
case 'h':
LOG_INFO(stdout, "Run HW Decode and PCI to DCU.\n");
device = _HW;
break;
case 'd':
LOG_INFO(stdout, "Run HW Decode and DMA to DCU.\n");
device = _HW_DMA;
break;
case 'n':
index = atoi(optarg);
if (index < 0 || index > 4) { // guard the Nets[] lookup
MIGraphXSamplesUsage();
return -1;
}
LOG_INFO(stdout, "Run %s detector.\n", Nets[index]);
break;
case '?':
default:
MIGraphXSamplesUsage();
return -1;
}
}
if (device == -1 || index == -1) // both a device type and a net index must be given
{
MIGraphXSamplesUsage();
return -1;
}
switch (index)
{
case 0:
{
Sample_DetectorYOLOV3(device);
}
break;
case 1:
{
Sample_DetectorYOLOV5(device);
}
break;
case 2:
{
Sample_DetectorYOLOV7(device);
}
break;
case 3:
{
Sample_DetectorSSD(device);
}
break;
case 4:
{
Sample_DetectorRetinaFace(device);
}
break;
}
return 0;
}