Commit abaad570 authored by limm's avatar limm
Browse files

add demo module

parent a64900f3
Pipeline #2814 canceled with stages
## Demo
We provide a demo showing what our mmdeploy can do for general model deployment.
In `demo_rewrite.py`, a resnet18 model from `torchvision` is rewritten through mmdeploy tool. In our rewritten model, the forward function of resnet gets modified to only down sample the original input to 4x. Original onnx model of resnet18 and its rewritten are visualized through [netron](https://netron.app/).
### Prerequisite
Before we run `demo_rewrite.py`, we need to install `pyppeteer` through:
```
pip install pyppeteer
```
### Demo results
The original resnet18 model and its modified one are visualized as follows. The left model is the original resnet18, while the right model is the one exported after rewriting.
| Original resnet18 | Rewritten model |
| :-------------------------: | :--------------------------: |
| ![](resources/original.png) | ![](resources/rewritten.png) |

Microsoft Visual Studio Solution File, Format Version 12.00
# Visual Studio Version 16
VisualStudioVersion = 16.0.31729.503
MinimumVisualStudioVersion = 10.0.40219.1
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "image_classification", "image_classification\image_classification.csproj", "{77D130AF-1B08-496F-8B6C-F38FCDFEB1BE}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "image_restorer", "image_restorer\image_restorer.csproj", "{2FB1A872-C361-4799-AE26-1F28454EA56D}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "image_segmentation", "image_segmentation\image_segmentation.csproj", "{3568F017-112A-4BF3-AE90-BD047E2C389D}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "object_detection", "object_detection\object_detection.csproj", "{140B7A2A-0492-4845-83DF-3E812E97CDC5}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ocr_detection", "ocr_detection\ocr_detection.csproj", "{3C4E4F6C-6E75-4110-BC68-DD36DD7B0546}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "ocr_recognition", "ocr_recognition\ocr_recognition.csproj", "{9B68260C-0C7C-473C-8D6F-284283F91F84}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "pose_detection", "pose_detection\pose_detection.csproj", "{10E3B87C-7544-4F4D-90A3-65D5654CBF94}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "pose_tracker", "pose_tracker\pose_tracker.csproj", "{42FC54A1-73D5-429D-AF5E-09BAEC4F0D9E}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "rotated_detection", "rotated_detection\rotated_detection.csproj", "{1957C2D2-F6D1-4E28-920C-2B7DE98EAF50}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
Release|Any CPU = Release|Any CPU
EndGlobalSection
GlobalSection(ProjectConfigurationPlatforms) = postSolution
{77D130AF-1B08-496F-8B6C-F38FCDFEB1BE}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{77D130AF-1B08-496F-8B6C-F38FCDFEB1BE}.Debug|Any CPU.Build.0 = Debug|Any CPU
{77D130AF-1B08-496F-8B6C-F38FCDFEB1BE}.Release|Any CPU.ActiveCfg = Release|Any CPU
{77D130AF-1B08-496F-8B6C-F38FCDFEB1BE}.Release|Any CPU.Build.0 = Release|Any CPU
{2FB1A872-C361-4799-AE26-1F28454EA56D}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{2FB1A872-C361-4799-AE26-1F28454EA56D}.Debug|Any CPU.Build.0 = Debug|Any CPU
{2FB1A872-C361-4799-AE26-1F28454EA56D}.Release|Any CPU.ActiveCfg = Release|Any CPU
{2FB1A872-C361-4799-AE26-1F28454EA56D}.Release|Any CPU.Build.0 = Release|Any CPU
{3568F017-112A-4BF3-AE90-BD047E2C389D}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{3568F017-112A-4BF3-AE90-BD047E2C389D}.Debug|Any CPU.Build.0 = Debug|Any CPU
{3568F017-112A-4BF3-AE90-BD047E2C389D}.Release|Any CPU.ActiveCfg = Release|Any CPU
{3568F017-112A-4BF3-AE90-BD047E2C389D}.Release|Any CPU.Build.0 = Release|Any CPU
{140B7A2A-0492-4845-83DF-3E812E97CDC5}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{140B7A2A-0492-4845-83DF-3E812E97CDC5}.Debug|Any CPU.Build.0 = Debug|Any CPU
{140B7A2A-0492-4845-83DF-3E812E97CDC5}.Release|Any CPU.ActiveCfg = Release|Any CPU
{140B7A2A-0492-4845-83DF-3E812E97CDC5}.Release|Any CPU.Build.0 = Release|Any CPU
{3C4E4F6C-6E75-4110-BC68-DD36DD7B0546}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{3C4E4F6C-6E75-4110-BC68-DD36DD7B0546}.Debug|Any CPU.Build.0 = Debug|Any CPU
{3C4E4F6C-6E75-4110-BC68-DD36DD7B0546}.Release|Any CPU.ActiveCfg = Release|Any CPU
{3C4E4F6C-6E75-4110-BC68-DD36DD7B0546}.Release|Any CPU.Build.0 = Release|Any CPU
{9B68260C-0C7C-473C-8D6F-284283F91F84}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{9B68260C-0C7C-473C-8D6F-284283F91F84}.Debug|Any CPU.Build.0 = Debug|Any CPU
{9B68260C-0C7C-473C-8D6F-284283F91F84}.Release|Any CPU.ActiveCfg = Release|Any CPU
{9B68260C-0C7C-473C-8D6F-284283F91F84}.Release|Any CPU.Build.0 = Release|Any CPU
{10E3B87C-7544-4F4D-90A3-65D5654CBF94}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{10E3B87C-7544-4F4D-90A3-65D5654CBF94}.Debug|Any CPU.Build.0 = Debug|Any CPU
{10E3B87C-7544-4F4D-90A3-65D5654CBF94}.Release|Any CPU.ActiveCfg = Release|Any CPU
{10E3B87C-7544-4F4D-90A3-65D5654CBF94}.Release|Any CPU.Build.0 = Release|Any CPU
{42FC54A1-73D5-429D-AF5E-09BAEC4F0D9E}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{42FC54A1-73D5-429D-AF5E-09BAEC4F0D9E}.Debug|Any CPU.Build.0 = Debug|Any CPU
{42FC54A1-73D5-429D-AF5E-09BAEC4F0D9E}.Release|Any CPU.ActiveCfg = Release|Any CPU
{42FC54A1-73D5-429D-AF5E-09BAEC4F0D9E}.Release|Any CPU.Build.0 = Release|Any CPU
{1957C2D2-F6D1-4E28-920C-2B7DE98EAF50}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{1957C2D2-F6D1-4E28-920C-2B7DE98EAF50}.Debug|Any CPU.Build.0 = Debug|Any CPU
{1957C2D2-F6D1-4E28-920C-2B7DE98EAF50}.Release|Any CPU.ActiveCfg = Release|Any CPU
{1957C2D2-F6D1-4E28-920C-2B7DE98EAF50}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
EndGlobalSection
GlobalSection(ExtensibilityGlobals) = postSolution
SolutionGuid = {85E68B94-6CED-4912-BDB6-BAB2367C4212}
EndGlobalSection
EndGlobal
# Usage
**step 0.** install the local nuget package
You should build the C# API first, which will generate a NuGet package, or you can download our prebuilt package. You may refer to [this](https://stackoverflow.com/a/55167481) on how to install a local NuGet package.
**step 1.** Add runtime dll to the system path
If you built the C# API from source and didn't build a static lib, you should add the built dll to your system path. The same applies to OpenCV, etc.
And don't forget to install the backend dependencies. Taking the TensorRT backend as an example, you have to install cudatoolkit, cudnn and tensorrt. The versions of the backend dependencies that our prebuilt NuGet package uses are given in the release note.
| backend | dependencies |
| ----------- | ----------------------------- |
| tensorrt | cudatoolkit, cudnn, tensorrt |
| onnxruntime | onnxruntime / onnxruntime-gpu |
**step 2.** Open Demo.sln and build solution.
**step 3.** Prepare the model.
You can either convert your model according to this [tutorial](../../docs/en/tutorials/how_to_convert_model.md) or download the test models from [OneDrive](https://1drv.ms/u/s!Aqis6w3rjKXSh2dXZ5OqbZIZSu9P?e=nefSdY) or [BaiduYun](https://pan.baidu.com/s/1VJkLo2oqHos6ZWDT7xamFg?pwd=STAR). The web drive contains onnx and tensorrt models and the test models are converted under environment of cuda11.1 + cudnn8.2.1 + tensorrt 8.2.3.0 + GTX2070s.
*Note*:
- a) If you want to use the tensorrt model from the link, make sure your environment and your GPU architecture are the same as above.
- b) When you use the downloaded onnx model, you have to edit `deploy.json`, edit `end2end.engine` to `end2end.onnx` and `tensorrt` to `onnxruntime`.
**step 4.** Set one project as startup project and run it.

namespace image_classification
{
partial class FormDemo
{
// Designer half of FormDemo: holds the generated layout code and the
// control fields. Hand-written event handlers live in the other part
// of this partial class.
/// <summary>
/// Required designer variable.
/// </summary>
private System.ComponentModel.IContainer components = null;
/// <summary>
/// Clean up any resources being used.
/// </summary>
/// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param>
protected override void Dispose(bool disposing)
{
if (disposing && (components != null))
{
components.Dispose();
}
base.Dispose(disposing);
}
#region Windows Form Designer generated code
/// <summary>
/// Required method for Designer support - do not modify
/// the contents of this method with the code editor.
/// </summary>
private void InitializeComponent()
{
// Create all controls first, then configure them section by section below.
this.buttonSelectModelPath = new System.Windows.Forms.Button();
this.buttonInitModel = new System.Windows.Forms.Button();
this.buttonLoadImage = new System.Windows.Forms.Button();
this.pictureBoxPicture = new System.Windows.Forms.PictureBox();
this.radioButtonDeviceCpu = new System.Windows.Forms.RadioButton();
this.radioButtonDeviceCuda = new System.Windows.Forms.RadioButton();
this.buttonInference = new System.Windows.Forms.Button();
this.textBoxModelPath = new System.Windows.Forms.TextBox();
this.labelModelPath = new System.Windows.Forms.Label();
this.labelDevice = new System.Windows.Forms.Label();
this.panelModelOptions = new System.Windows.Forms.Panel();
this.textBoxStatus = new System.Windows.Forms.TextBox();
this.labelStatus = new System.Windows.Forms.Label();
this.labelModelOptions = new System.Windows.Forms.Label();
this.panelPicture = new System.Windows.Forms.Panel();
this.labelPicture = new System.Windows.Forms.Label();
this.textBoxResult = new System.Windows.Forms.TextBox();
this.labelResult = new System.Windows.Forms.Label();
this.labelUsage = new System.Windows.Forms.Label();
this.textBoxUsage = new System.Windows.Forms.TextBox();
// Suspend layout while properties are assigned to avoid intermediate relayouts.
((System.ComponentModel.ISupportInitialize)(this.pictureBoxPicture)).BeginInit();
this.panelModelOptions.SuspendLayout();
this.panelPicture.SuspendLayout();
this.SuspendLayout();
//
// buttonSelectModelPath
//
this.buttonSelectModelPath.Location = new System.Drawing.Point(959, 76);
this.buttonSelectModelPath.Name = "buttonSelectModelPath";
this.buttonSelectModelPath.Size = new System.Drawing.Size(143, 41);
this.buttonSelectModelPath.TabIndex = 1;
this.buttonSelectModelPath.Text = "select";
this.buttonSelectModelPath.UseVisualStyleBackColor = true;
this.buttonSelectModelPath.Click += new System.EventHandler(this.buttonSelectModelPath_Click);
//
// buttonInitModel
//
this.buttonInitModel.ForeColor = System.Drawing.SystemColors.ControlText;
this.buttonInitModel.Location = new System.Drawing.Point(920, 76);
this.buttonInitModel.Name = "buttonInitModel";
this.buttonInitModel.Size = new System.Drawing.Size(143, 38);
this.buttonInitModel.TabIndex = 1;
this.buttonInitModel.Text = "init model";
this.buttonInitModel.UseVisualStyleBackColor = true;
this.buttonInitModel.Click += new System.EventHandler(this.buttonInitModel_Click);
//
// buttonLoadImage
//
this.buttonLoadImage.Location = new System.Drawing.Point(461, 752);
this.buttonLoadImage.Name = "buttonLoadImage";
this.buttonLoadImage.Size = new System.Drawing.Size(185, 50);
this.buttonLoadImage.TabIndex = 2;
this.buttonLoadImage.Text = "load image";
this.buttonLoadImage.UseVisualStyleBackColor = true;
this.buttonLoadImage.Click += new System.EventHandler(this.buttonLoadImage_Click);
//
// pictureBoxPicture
//
this.pictureBoxPicture.Location = new System.Drawing.Point(4, 3);
this.pictureBoxPicture.Name = "pictureBoxPicture";
this.pictureBoxPicture.Size = new System.Drawing.Size(392, 456);
this.pictureBoxPicture.SizeMode = System.Windows.Forms.PictureBoxSizeMode.StretchImage;
this.pictureBoxPicture.TabIndex = 3;
this.pictureBoxPicture.TabStop = false;
//
// radioButtonDeviceCpu
//
this.radioButtonDeviceCpu.AutoSize = true;
this.radioButtonDeviceCpu.Location = new System.Drawing.Point(244, 134);
this.radioButtonDeviceCpu.Name = "radioButtonDeviceCpu";
this.radioButtonDeviceCpu.Size = new System.Drawing.Size(87, 35);
this.radioButtonDeviceCpu.TabIndex = 4;
this.radioButtonDeviceCpu.Text = "cpu";
this.radioButtonDeviceCpu.UseVisualStyleBackColor = true;
this.radioButtonDeviceCpu.CheckedChanged += new System.EventHandler(this.radioButtonDeviceCpu_CheckedChanged);
//
// radioButtonDeviceCuda
//
// "cuda" is the device checked by default at startup.
this.radioButtonDeviceCuda.AutoSize = true;
this.radioButtonDeviceCuda.Checked = true;
this.radioButtonDeviceCuda.Location = new System.Drawing.Point(343, 134);
this.radioButtonDeviceCuda.Name = "radioButtonDeviceCuda";
this.radioButtonDeviceCuda.Size = new System.Drawing.Size(100, 35);
this.radioButtonDeviceCuda.TabIndex = 5;
this.radioButtonDeviceCuda.TabStop = true;
this.radioButtonDeviceCuda.Text = "cuda";
this.radioButtonDeviceCuda.UseVisualStyleBackColor = true;
this.radioButtonDeviceCuda.CheckedChanged += new System.EventHandler(this.radioButtonDeviceCuda_CheckedChanged);
//
// buttonInference
//
this.buttonInference.Location = new System.Drawing.Point(849, 752);
this.buttonInference.Name = "buttonInference";
this.buttonInference.Size = new System.Drawing.Size(185, 50);
this.buttonInference.TabIndex = 6;
this.buttonInference.Text = "inference";
this.buttonInference.UseVisualStyleBackColor = true;
this.buttonInference.Click += new System.EventHandler(this.buttonInference_Click);
//
// textBoxModelPath
//
this.textBoxModelPath.Location = new System.Drawing.Point(243, 76);
this.textBoxModelPath.Name = "textBoxModelPath";
this.textBoxModelPath.Size = new System.Drawing.Size(700, 38);
this.textBoxModelPath.TabIndex = 7;
//
// labelModelPath
//
this.labelModelPath.AutoSize = true;
this.labelModelPath.Location = new System.Drawing.Point(82, 76);
this.labelModelPath.Name = "labelModelPath";
this.labelModelPath.Size = new System.Drawing.Size(145, 31);
this.labelModelPath.TabIndex = 8;
this.labelModelPath.Text = "model path";
this.labelModelPath.TextAlign = System.Drawing.ContentAlignment.TopRight;
//
// labelDevice
//
this.labelDevice.AutoSize = true;
this.labelDevice.Location = new System.Drawing.Point(139, 134);
this.labelDevice.Name = "labelDevice";
this.labelDevice.Size = new System.Drawing.Size(88, 31);
this.labelDevice.TabIndex = 9;
this.labelDevice.Text = "device";
this.labelDevice.TextAlign = System.Drawing.ContentAlignment.TopRight;
//
// panelModelOptions
//
// Container panel hosting the status text/label and the "init model" button.
this.panelModelOptions.BackColor = System.Drawing.SystemColors.ButtonFace;
this.panelModelOptions.BorderStyle = System.Windows.Forms.BorderStyle.FixedSingle;
this.panelModelOptions.Controls.Add(this.textBoxStatus);
this.panelModelOptions.Controls.Add(this.labelStatus);
this.panelModelOptions.Controls.Add(this.buttonInitModel);
this.panelModelOptions.ForeColor = System.Drawing.SystemColors.ButtonShadow;
this.panelModelOptions.Location = new System.Drawing.Point(38, 57);
this.panelModelOptions.Name = "panelModelOptions";
this.panelModelOptions.Size = new System.Drawing.Size(1090, 139);
this.panelModelOptions.TabIndex = 10;
//
// textBoxStatus
//
this.textBoxStatus.BackColor = System.Drawing.SystemColors.Control;
this.textBoxStatus.BorderStyle = System.Windows.Forms.BorderStyle.None;
this.textBoxStatus.ForeColor = System.Drawing.SystemColors.WindowText;
this.textBoxStatus.Location = new System.Drawing.Point(657, 80);
this.textBoxStatus.Name = "textBoxStatus";
this.textBoxStatus.ReadOnly = true;
this.textBoxStatus.Size = new System.Drawing.Size(247, 31);
this.textBoxStatus.TabIndex = 3;
//
// labelStatus
//
this.labelStatus.AutoSize = true;
this.labelStatus.ForeColor = System.Drawing.SystemColors.ActiveCaptionText;
this.labelStatus.Location = new System.Drawing.Point(556, 76);
this.labelStatus.Name = "labelStatus";
this.labelStatus.Size = new System.Drawing.Size(95, 31);
this.labelStatus.TabIndex = 2;
this.labelStatus.Text = "status: ";
//
// labelModelOptions
//
this.labelModelOptions.AutoSize = true;
this.labelModelOptions.ForeColor = System.Drawing.SystemColors.ControlText;
this.labelModelOptions.Location = new System.Drawing.Point(21, 35);
this.labelModelOptions.Name = "labelModelOptions";
this.labelModelOptions.Size = new System.Drawing.Size(185, 31);
this.labelModelOptions.TabIndex = 11;
this.labelModelOptions.Text = "Model Options";
//
// panelPicture
//
this.panelPicture.BorderStyle = System.Windows.Forms.BorderStyle.FixedSingle;
this.panelPicture.Controls.Add(this.pictureBoxPicture);
this.panelPicture.Location = new System.Drawing.Point(354, 259);
this.panelPicture.Name = "panelPicture";
this.panelPicture.Size = new System.Drawing.Size(397, 464);
this.panelPicture.TabIndex = 12;
//
// labelPicture
//
this.labelPicture.AutoSize = true;
this.labelPicture.Location = new System.Drawing.Point(343, 239);
this.labelPicture.Name = "labelPicture";
this.labelPicture.Size = new System.Drawing.Size(94, 31);
this.labelPicture.TabIndex = 13;
this.labelPicture.Text = "Picture";
//
// textBoxResult
//
this.textBoxResult.BackColor = System.Drawing.SystemColors.Control;
this.textBoxResult.Location = new System.Drawing.Point(770, 267);
this.textBoxResult.Multiline = true;
this.textBoxResult.Name = "textBoxResult";
this.textBoxResult.ReadOnly = true;
this.textBoxResult.Size = new System.Drawing.Size(358, 456);
this.textBoxResult.TabIndex = 14;
//
// labelResult
//
this.labelResult.AutoSize = true;
this.labelResult.Location = new System.Drawing.Point(757, 233);
this.labelResult.Name = "labelResult";
this.labelResult.Size = new System.Drawing.Size(85, 31);
this.labelResult.TabIndex = 15;
this.labelResult.Text = "Result";
//
// labelUsage
//
this.labelUsage.AutoSize = true;
this.labelUsage.Location = new System.Drawing.Point(21, 233);
this.labelUsage.Name = "labelUsage";
this.labelUsage.Size = new System.Drawing.Size(85, 31);
this.labelUsage.TabIndex = 16;
this.labelUsage.Text = "Usage";
//
// textBoxUsage
//
this.textBoxUsage.Location = new System.Drawing.Point(37, 267);
this.textBoxUsage.Multiline = true;
this.textBoxUsage.Name = "textBoxUsage";
this.textBoxUsage.ReadOnly = true;
this.textBoxUsage.Size = new System.Drawing.Size(294, 454);
this.textBoxUsage.TabIndex = 17;
//
// FormDemo
//
this.AutoScaleDimensions = new System.Drawing.SizeF(14F, 31F);
this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font;
this.ClientSize = new System.Drawing.Size(1157, 829);
this.Controls.Add(this.textBoxUsage);
this.Controls.Add(this.labelUsage);
this.Controls.Add(this.labelResult);
this.Controls.Add(this.textBoxResult);
this.Controls.Add(this.labelPicture);
this.Controls.Add(this.panelPicture);
this.Controls.Add(this.labelModelOptions);
this.Controls.Add(this.labelDevice);
this.Controls.Add(this.labelModelPath);
this.Controls.Add(this.textBoxModelPath);
this.Controls.Add(this.buttonInference);
this.Controls.Add(this.radioButtonDeviceCuda);
this.Controls.Add(this.radioButtonDeviceCpu);
this.Controls.Add(this.buttonLoadImage);
this.Controls.Add(this.buttonSelectModelPath);
this.Controls.Add(this.panelModelOptions);
this.FormBorderStyle = System.Windows.Forms.FormBorderStyle.Fixed3D;
this.Name = "FormDemo";
this.Text = "Image_classification";
this.Load += new System.EventHandler(this.FormDemo_Load);
this.Resize += new System.EventHandler(this.FormDemo_Resize);
// Resume layout now that all properties are set.
((System.ComponentModel.ISupportInitialize)(this.pictureBoxPicture)).EndInit();
this.panelModelOptions.ResumeLayout(false);
this.panelModelOptions.PerformLayout();
this.panelPicture.ResumeLayout(false);
this.ResumeLayout(false);
this.PerformLayout();
}
#endregion
// Control fields wired up by InitializeComponent.
private System.Windows.Forms.Button buttonSelectModelPath;
private System.Windows.Forms.Button buttonInitModel;
private System.Windows.Forms.Button buttonLoadImage;
private System.Windows.Forms.Button buttonInference;
private System.Windows.Forms.PictureBox pictureBoxPicture;
private System.Windows.Forms.RadioButton radioButtonDeviceCpu;
private System.Windows.Forms.RadioButton radioButtonDeviceCuda;
private System.Windows.Forms.TextBox textBoxModelPath;
private System.Windows.Forms.Label labelModelPath;
private System.Windows.Forms.Label labelDevice;
private System.Windows.Forms.Panel panelModelOptions;
private System.Windows.Forms.Label labelModelOptions;
private System.Windows.Forms.Panel panelPicture;
private System.Windows.Forms.Label labelPicture;
private System.Windows.Forms.Label labelStatus;
private System.Windows.Forms.TextBox textBoxResult;
private System.Windows.Forms.Label labelResult;
private System.Windows.Forms.Label labelUsage;
private System.Windows.Forms.TextBox textBoxUsage;
private System.Windows.Forms.TextBox textBoxStatus;
}
}
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Windows.Forms;
using MMDeploy;
using OpenCvSharp.Extensions;
using ImreadModes = OpenCvSharp.ImreadModes;
using Cv2 = OpenCvSharp.Cv2;
using CvMat = OpenCvSharp.Mat;
#pragma warning disable IDE1006
#pragma warning disable IDE0044
namespace image_classification
{
public partial class FormDemo : Form
{
    // MMDeploy classifier handle; null until "init model" succeeds.
    Classifier classifier;
    string modelPath = "";
    string device = "cuda";   // kept in sync with the device radio buttons
    int deviceId = 0;
    string imgPath = "";      // image chosen via "load image"; empty until then

    #region auto resize
    // Baseline form size captured in the constructor; used to compute the
    // proportional scale factors when the form is resized.
    private float x;
    private float y;

    /// <summary>
    /// Stores every control's original geometry ("w;h;left;top;fontSize")
    /// in its Tag, recursing into child containers, so that setControls
    /// can later scale the layout proportionally.
    /// </summary>
    private void setTag(Control cons)
    {
        foreach (Control con in cons.Controls)
        {
            con.Tag = con.Width + ";" + con.Height + ";" + con.Left + ";" + con.Top + ";" + con.Font.Size;
            if (con.Controls.Count > 0)
            {
                setTag(con);
            }
        }
    }

    /// <summary>
    /// Rescales each tagged control (size, position and font) by the given
    /// horizontal/vertical factors relative to its original geometry.
    /// </summary>
    private void setControls(float newx, float newy, Control cons)
    {
        foreach (Control con in cons.Controls)
        {
            if (con.Tag != null)
            {
                string[] mytag = con.Tag.ToString().Split(new char[] { ';' });
                con.Width = Convert.ToInt32(Convert.ToSingle(mytag[0]) * newx);
                con.Height = Convert.ToInt32(Convert.ToSingle(mytag[1]) * newy);
                con.Left = Convert.ToInt32(Convert.ToSingle(mytag[2]) * newx);
                con.Top = Convert.ToInt32(Convert.ToSingle(mytag[3]) * newy);
                // Fonts scale with the vertical factor only.
                Single currentSize = Convert.ToSingle(mytag[4]) * newy;
                con.Font = new Font(con.Font.Name, currentSize, con.Font.Style, con.Font.Unit);
                if (con.Controls.Count > 0)
                {
                    setControls(newx, newy, con);
                }
            }
        }
    }

    private void FormDemo_Resize(object sender, EventArgs e)
    {
        // Scale all controls relative to the original design-time size.
        float newx = this.Width / x;
        float newy = this.Height / y;
        setControls(newx, newy, this);
    }
    #endregion

    /// <summary>
    /// Wraps OpenCV mats as MMDeploy mats without copying pixel data.
    /// The source mats must stay alive (undisposed) while the wrappers
    /// are in use, because only the data pointer is shared.
    /// </summary>
    static void CvMatToMat(CvMat[] cvMats, out Mat[] mats)
    {
        mats = new Mat[cvMats.Length];
        unsafe
        {
            for (int i = 0; i < cvMats.Length; i++)
            {
                mats[i].Data = cvMats[i].DataPointer;
                mats[i].Height = cvMats[i].Height;
                mats[i].Width = cvMats[i].Width;
                // Bug fix: Mat.Dims is the matrix dimensionality (2 for an
                // image), not the number of color channels; a BGR image has
                // Channels() == 3.
                mats[i].Channel = cvMats[i].Channels();
                mats[i].Format = PixelFormat.BGR;
                mats[i].Type = DataType.Int8;
                mats[i].Device = null;
            }
        }
    }

    /// <summary>
    /// Initializes the form and records the design-time layout so it can
    /// auto-scale on resize.
    /// </summary>
    public FormDemo()
    {
        InitializeComponent();
        x = this.Width;
        y = this.Height;
        setTag(this);
    }

    private void radioButtonDeviceCpu_CheckedChanged(object sender, EventArgs e)
    {
        device = "cpu";
    }

    private void radioButtonDeviceCuda_CheckedChanged(object sender, EventArgs e)
    {
        device = "cuda";
    }

    /// <summary>Lets the user pick the model directory.</summary>
    private void buttonSelectModelPath_Click(object sender, EventArgs e)
    {
        // Dialogs are IDisposable; dispose deterministically.
        using (FolderBrowserDialog dilog = new FolderBrowserDialog())
        {
            if (dilog.ShowDialog() == DialogResult.OK)
            {
                textBoxModelPath.Text = dilog.SelectedPath;
            }
        }
    }

    /// <summary>
    /// (Re)creates the classifier from the selected model path and device,
    /// reporting success or failure in the status box.
    /// </summary>
    private void buttonInitModel_Click(object sender, EventArgs e)
    {
        // Release any previously created handle before re-initializing.
        if (classifier != null)
        {
            classifier.Close();
        }
        classifier = null;
        textBoxStatus.Text = "init model ...";
        try
        {
            modelPath = textBoxModelPath.Text;
            classifier = new Classifier(modelPath, device, deviceId);
            textBoxStatus.ForeColor = Color.Green;
            textBoxStatus.Text = "init model success.";
        }
        catch
        {
            textBoxStatus.ForeColor = Color.Red;
            textBoxStatus.Text = "init model failed.";
        }
    }

    /// <summary>Lets the user pick an image and previews it.</summary>
    private void buttonLoadImage_Click(object sender, EventArgs e)
    {
        using (OpenFileDialog dilog = new OpenFileDialog
        {
            Filter = "(*.jpg;*.bmp;*.png;*.JPEG)|*.jpg;*.bmp;*.png;*.JPEG"
        })
        {
            if (dilog.ShowDialog() == DialogResult.OK)
            {
                imgPath = dilog.FileName;
                CvMat img = Cv2.ImRead(dilog.FileName);
                Bitmap bitmap = BitmapConverter.ToBitmap(img);
                pictureBoxPicture.Image = bitmap;
            }
        }
    }

    /// <summary>
    /// Runs classification on the loaded image and prints the top labels
    /// (one per line) into the result box.
    /// </summary>
    private void buttonInference_Click(object sender, EventArgs e)
    {
        textBoxResult.Clear();
        if (classifier == null)
        {
            MessageBox.Show("init model first");
            return;
        }
        // Robustness fix: ImRead on an empty path would throw; ask the
        // user to pick an image instead.
        if (string.IsNullOrEmpty(imgPath))
        {
            MessageBox.Show("load image first");
            return;
        }
        CvMat[] imgs = new CvMat[1] { Cv2.ImRead(imgPath, ImreadModes.Color) };
        CvMatToMat(imgs, out var mats);
        try
        {
            List<ClassifierOutput> output = classifier.Apply(mats);
            int idx = 1;
            foreach (var obj in output[0].Results)
            {
                // Stop once scores become negligible.
                if (obj.Score < 1e-7)
                {
                    break;
                }
                string res = string.Format("Top-{0}-label: {1}, score: {2:f3}", idx, obj.Id, obj.Score);
                if (idx == 1)
                {
                    textBoxResult.Text = res;
                }
                else
                {
                    textBoxResult.AppendText("\r\n" + res);
                }
                idx++;
            }
        }
        catch
        {
            MessageBox.Show("inference error");
        }
    }

    /// <summary>Shows usage hints and the initial model status.</summary>
    private void FormDemo_Load(object sender, EventArgs e)
    {
        textBoxUsage.Text = "1) select model dir" +
            "\r\n" + "2) choose device" +
            "\r\n" + "3) init model" +
            "\r\n" + "4) select image" +
            "\r\n" + "5) do inference";
        textBoxStatus.ForeColor = Color.Gray;
        textBoxStatus.Text = "model not init";
    }
}
}
<root>
<xsd:schema id="root" xmlns="" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:msdata="urn:schemas-microsoft-com:xml-msdata">
<xsd:import namespace="http://www.w3.org/XML/1998/namespace" />
<xsd:element name="root" msdata:IsDataSet="true">
<xsd:complexType>
<xsd:choice maxOccurs="unbounded">
<xsd:element name="metadata">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="value" type="xsd:string" minOccurs="0" />
</xsd:sequence>
<xsd:attribute name="name" use="required" type="xsd:string" />
<xsd:attribute name="type" type="xsd:string" />
<xsd:attribute name="mimetype" type="xsd:string" />
<xsd:attribute ref="xml:space" />
</xsd:complexType>
</xsd:element>
<xsd:element name="assembly">
<xsd:complexType>
<xsd:attribute name="alias" type="xsd:string" />
<xsd:attribute name="name" type="xsd:string" />
</xsd:complexType>
</xsd:element>
<xsd:element name="data">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="value" type="xsd:string" minOccurs="0" msdata:Ordinal="1" />
<xsd:element name="comment" type="xsd:string" minOccurs="0" msdata:Ordinal="2" />
</xsd:sequence>
<xsd:attribute name="name" type="xsd:string" use="required" msdata:Ordinal="1" />
<xsd:attribute name="type" type="xsd:string" msdata:Ordinal="3" />
<xsd:attribute name="mimetype" type="xsd:string" msdata:Ordinal="4" />
<xsd:attribute ref="xml:space" />
</xsd:complexType>
</xsd:element>
<xsd:element name="resheader">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="value" type="xsd:string" minOccurs="0" msdata:Ordinal="1" />
</xsd:sequence>
<xsd:attribute name="name" type="xsd:string" use="required" />
</xsd:complexType>
</xsd:element>
</xsd:choice>
</xsd:complexType>
</xsd:element>
</xsd:schema>
<resheader name="resmimetype">
<value>text/microsoft-resx</value>
</resheader>
<resheader name="version">
<value>2.0</value>
</resheader>
<resheader name="reader">
<value>System.Resources.ResXResourceReader, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089</value>
</resheader>
<resheader name="writer">
<value>System.Resources.ResXResourceWriter, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089</value>
</resheader>
</root>
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using System.Windows.Forms;
namespace image_classification
{
static class Program
{
/// <summary>
/// The main entry point for the application.
/// </summary>
[STAThread]
static void Main()
{
// Configure DPI awareness and rendering defaults before any window is
// created, then start the message loop with the demo form.
Application.SetHighDpiMode(HighDpiMode.SystemAware);
Application.EnableVisualStyles();
Application.SetCompatibleTextRenderingDefault(false);
Application.Run(new FormDemo());
}
}
}
<Project Sdk="Microsoft.NET.Sdk.WindowsDesktop">
<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFramework>netcoreapp3.1</TargetFramework>
<UseWindowsForms>true</UseWindowsForms>
<AllowUnsafeBlocks>true</AllowUnsafeBlocks>
<ApplicationIcon />
<StartupObject />
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|AnyCPU'">
<Optimize>false</Optimize>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="MMDeployCSharp" Version="1.3.1" />
<PackageReference Include="OpenCvSharp4" Version="4.5.5.20211231" />
<PackageReference Include="OpenCvSharp4.Extensions" Version="4.5.5.20211231" />
<PackageReference Include="OpenCvSharp4.runtime.win" Version="4.5.5.20211231" />
</ItemGroup>
</Project>
<?xml version="1.0" encoding="utf-8"?>
<Project ToolsVersion="Current" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<PropertyGroup />
<ItemGroup>
<Compile Update="FormDemo.cs">
<SubType>Form</SubType>
</Compile>
</ItemGroup>
</Project>
using System;
using System.Collections.Generic;
using MMDeploy;
using ImreadModes = OpenCvSharp.ImreadModes;
using Cv2 = OpenCvSharp.Cv2;
using CvMat = OpenCvSharp.Mat;
using MatType = OpenCvSharp.MatType;
using ColorConversionCodes = OpenCvSharp.ColorConversionCodes;
using WindowFlags = OpenCvSharp.WindowFlags;
namespace image_restorer
{
class Program
{
    /// <summary>
    /// Wraps OpenCV mats as MMDeploy mats without copying pixel data.
    /// The OpenCV mats must outlive the wrappers, which share the
    /// underlying pixel buffer.
    /// </summary>
    static void CvMatToMat(CvMat[] cvMats, out Mat[] mats)
    {
        mats = new Mat[cvMats.Length];
        unsafe
        {
            for (int i = 0; i < cvMats.Length; i++)
            {
                mats[i].Data = cvMats[i].DataPointer;
                mats[i].Height = cvMats[i].Height;
                mats[i].Width = cvMats[i].Width;
                // Bug fix: Mat.Dims is the matrix dimensionality (2 for an
                // image), not the channel count; use Channels() instead.
                mats[i].Channel = cvMats[i].Channels();
                mats[i].Format = PixelFormat.BGR;
                mats[i].Type = DataType.Int8;
                mats[i].Device = null;
            }
        }
    }

    /// <summary>Blocks until a key is pressed in an OpenCV window.</summary>
    static void CvWaitKey()
    {
        Cv2.WaitKey();
    }

    static void Main(string[] args)
    {
        if (args.Length != 3)
        {
            Console.WriteLine("usage:\n image_restorer deviceName modelPath imagePath\n");
            Environment.Exit(1);
        }
        string deviceName = args[0];
        string modelPath = args[1];
        string imagePath = args[2];
        // 1. create handle
        Restorer handle = new Restorer(modelPath, deviceName, 0);
        // 2. prepare input
        CvMat[] imgs = new CvMat[1] { Cv2.ImRead(imagePath, ImreadModes.Color) };
        CvMatToMat(imgs, out var mats);
        // 3. process
        List<RestorerOutput> output = handle.Apply(mats);
        // 4. show result (the restorer output is RGB; OpenCV displays BGR)
        CvMat sr_img = new CvMat(output[0].Height, output[0].Width, MatType.CV_8UC3, output[0].Data);
        Cv2.CvtColor(sr_img, sr_img, ColorConversionCodes.RGB2BGR);
        Cv2.NamedWindow("sr", WindowFlags.GuiExpanded);
        Cv2.ImShow("sr", sr_img);
        CvWaitKey();
        handle.Close();
    }
}
}
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFramework>netcoreapp3.1</TargetFramework>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|AnyCPU'">
<AllowUnsafeBlocks>true</AllowUnsafeBlocks>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|AnyCPU'">
<AllowUnsafeBlocks>true</AllowUnsafeBlocks>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="MMDeployCSharp" Version="1.3.1" />
<PackageReference Include="OpenCvSharp4" Version="4.5.5.20211231" />
<PackageReference Include="OpenCvSharp4.runtime.win" Version="4.5.5.20211231" />
</ItemGroup>
</Project>
using System;
using System.Collections.Generic;
using OpenCvSharp;
using MMDeploy;
namespace image_segmentation
{
class Program
{
    /// <summary>
    /// Wraps OpenCV mats as MMDeploy mats without copying pixel data.
    /// The OpenCV mats must outlive the wrappers, which share the
    /// underlying pixel buffer.
    /// </summary>
    static void CvMatToMat(OpenCvSharp.Mat[] cvMats, out MMDeploy.Mat[] mats)
    {
        mats = new MMDeploy.Mat[cvMats.Length];
        unsafe
        {
            for (int i = 0; i < cvMats.Length; i++)
            {
                mats[i].Data = cvMats[i].DataPointer;
                mats[i].Height = cvMats[i].Height;
                mats[i].Width = cvMats[i].Width;
                // Bug fix: Mat.Dims is the matrix dimensionality (2 for an
                // image), not the channel count; use Channels() instead.
                mats[i].Channel = cvMats[i].Channels();
                mats[i].Format = PixelFormat.BGR;
                mats[i].Type = DataType.Int8;
                mats[i].Device = null;
            }
        }
    }

    /// <summary>Blocks until a key is pressed in an OpenCV window.</summary>
    static void CvWaitKey()
    {
        Cv2.WaitKey();
    }

    /// <summary>
    /// Generates a deterministic (fixed seed) pseudo-random color per class
    /// for visualizing the segmentation mask.
    /// </summary>
    static Vec3b[] GenPalette(int classes)
    {
        Random rnd = new Random(0);
        Vec3b[] palette = new Vec3b[classes];
        for (int i = 0; i < classes; i++)
        {
            // Off-by-one fix: Random.Next's upper bound is exclusive, so use
            // 256 to cover the full byte range 0..255.
            byte v1 = (byte)rnd.Next(0, 256);
            byte v2 = (byte)rnd.Next(0, 256);
            byte v3 = (byte)rnd.Next(0, 256);
            palette[i] = new Vec3b(v1, v2, v3);
        }
        return palette;
    }

    static void Main(string[] args)
    {
        if (args.Length != 3)
        {
            Console.WriteLine("usage:\n image_segmentation deviceName modelPath imagePath\n");
            Environment.Exit(1);
        }
        string deviceName = args[0];
        string modelPath = args[1];
        string imagePath = args[2];
        // 1. create handle
        Segmentor handle = new Segmentor(modelPath, deviceName, 0);
        // 2. prepare input
        OpenCvSharp.Mat[] imgs = new OpenCvSharp.Mat[1] { Cv2.ImRead(imagePath, ImreadModes.Color) };
        CvMatToMat(imgs, out var mats);
        // 3. process
        List<SegmentorOutput> output = handle.Apply(mats);
        // 4. render the per-pixel class colors into a BGR mask
        OpenCvSharp.Mat colorMask = new OpenCvSharp.Mat(output[0].Height, output[0].Width, MatType.CV_8UC3, new Scalar());
        Vec3b[] palette = GenPalette(output[0].Classes);
        unsafe
        {
            byte* data = colorMask.DataPointer;
            if (output[0].Mask.Length > 0)
            {
                // The backend returned a label map: one class id per pixel.
                fixed (int* _label = output[0].Mask)
                {
                    int* label = _label;
                    for (int i = 0; i < output[0].Height; i++)
                    {
                        for (int j = 0; j < output[0].Width; j++)
                        {
                            data[0] = palette[*label][0];
                            data[1] = palette[*label][1];
                            data[2] = palette[*label][2];
                            data += 3;
                            label++;
                        }
                    }
                }
            }
            else
            {
                // No label map: pick the argmax class from the per-class
                // score planes (Score is laid out class-major, HxW per class).
                fixed (float* _score = output[0].Score)
                {
                    float* score = _score;
                    int total = output[0].Height * output[0].Width;
                    for (int i = 0; i < output[0].Height; i++)
                    {
                        for (int j = 0; j < output[0].Width; j++)
                        {
                            List<Tuple<float, int>> scores = new List<Tuple<float, int>>();
                            for (int k = 0; k < output[0].Classes; k++)
                            {
                                scores.Add(new Tuple<float, int>(score[k * total + i * output[0].Width + j], k));
                            }
                            // Ascending sort; the last element is the best class.
                            scores.Sort();
                            data[0] = palette[scores[^1].Item2][0];
                            data[1] = palette[scores[^1].Item2][1];
                            data[2] = palette[scores[^1].Item2][2];
                            data += 3;
                        }
                    }
                }
            }
        }
        // Blend the mask over the input image for display.
        colorMask = imgs[0] * 0.5 + colorMask * 0.5;
        Cv2.NamedWindow("mmseg", WindowFlags.GuiExpanded);
        Cv2.ImShow("mmseg", colorMask);
        CvWaitKey();
        handle.Close();
    }
}
}
<Project Sdk="Microsoft.NET.Sdk">
  <!-- Console demo project targeting .NET Core 3.1. -->
  <PropertyGroup>
    <OutputType>Exe</OutputType>
    <TargetFramework>netcoreapp3.1</TargetFramework>
  </PropertyGroup>
  <!-- Unsafe blocks are required: the demo wraps raw OpenCV pixel buffers
       (Mat.DataPointer) into MMDeploy mats. Enabled for both configurations. -->
  <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|AnyCPU'">
    <AllowUnsafeBlocks>true</AllowUnsafeBlocks>
  </PropertyGroup>
  <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|AnyCPU'">
    <AllowUnsafeBlocks>true</AllowUnsafeBlocks>
  </PropertyGroup>
  <!-- MMDeploy C# SDK plus OpenCvSharp with its Windows native runtime. -->
  <ItemGroup>
    <PackageReference Include="MMDeployCSharp" Version="1.3.1" />
    <PackageReference Include="OpenCvSharp4" Version="4.5.5.20211231" />
    <PackageReference Include="OpenCvSharp4.runtime.win" Version="4.5.5.20211231" />
  </ItemGroup>
</Project>
using System;
using System.Collections.Generic;
using OpenCvSharp;
using MMDeploy;
namespace object_detection
{
    class Program
    {
        /// <summary>
        /// Wraps OpenCV images as MMDeploy mats without copying pixel data.
        /// The source mats must stay alive while the wrappers are in use.
        /// </summary>
        /// <param name="cvMats">Source BGR images.</param>
        /// <param name="mats">Receives one MMDeploy.Mat per input image.</param>
        static void CvMatToMat(OpenCvSharp.Mat[] cvMats, out MMDeploy.Mat[] mats)
        {
            mats = new MMDeploy.Mat[cvMats.Length];
            unsafe
            {
                for (int i = 0; i < cvMats.Length; i++)
                {
                    mats[i].Data = cvMats[i].DataPointer;
                    mats[i].Height = cvMats[i].Height;
                    mats[i].Width = cvMats[i].Width;
                    // Bug fix: Mat.Dims is the number of matrix dimensions (2 for
                    // any 2-D image), not the channel count. A BGR image has 3
                    // channels, reported by Channels().
                    mats[i].Channel = cvMats[i].Channels();
                    mats[i].Format = PixelFormat.BGR;
                    mats[i].Type = DataType.Int8;
                    mats[i].Device = null;
                }
            }
        }

        /// <summary>Blocks until a key is pressed in the OpenCV window.</summary>
        static void CvWaitKey()
        {
            Cv2.WaitKey();
        }

        /// <summary>
        /// Entry point: runs object detection on one image, overlays instance
        /// masks (when present) and draws green bounding boxes.
        /// Usage: object_detection deviceName modelPath imagePath
        /// </summary>
        static void Main(string[] args)
        {
            if (args.Length != 3)
            {
                Console.WriteLine("usage:\n  object_detection deviceName modelPath imagePath\n");
                Environment.Exit(1);
            }

            string deviceName = args[0];
            string modelPath = args[1];
            string imagePath = args[2];

            // 1. create handle
            Detector handle = new Detector(modelPath, deviceName, 0);

            // 2. prepare input
            OpenCvSharp.Mat[] imgs = new OpenCvSharp.Mat[1] { Cv2.ImRead(imagePath, ImreadModes.Color) };
            CvMatToMat(imgs, out var mats);

            // 3. process
            List<DetectorOutput> output = handle.Apply(mats);

            // 4. show result
            foreach (var obj in output[0].Results)
            {
                // Skip low-confidence detections.
                if (obj.Score > 0.3)
                {
                    if (obj.HasMask)
                    {
                        // The instance mask is bbox-relative; paste it into the
                        // blue channel (col = 0) of the source image at the box
                        // position.
                        OpenCvSharp.Mat imgMask = new OpenCvSharp.Mat(obj.Mask.Height, obj.Mask.Width, MatType.CV_8UC1, obj.Mask.Data);
                        float x0 = Math.Max((float)Math.Floor(obj.BBox.Left) - 1, 0f);
                        float y0 = Math.Max((float)Math.Floor(obj.BBox.Top) - 1, 0f);
                        OpenCvSharp.Rect roi = new OpenCvSharp.Rect((int)x0, (int)y0, obj.Mask.Width, obj.Mask.Height);
                        Cv2.Split(imgs[0], out OpenCvSharp.Mat[] ch);
                        int col = 0;
                        Cv2.BitwiseOr(imgMask, ch[col][roi], ch[col][roi]);
                        Cv2.Merge(ch, imgs[0]);
                    }
                    // Consistency fix: cast Bottom like the other three coordinates
                    // so the integer Point overload is selected for every corner.
                    Cv2.Rectangle(imgs[0], new Point((int)obj.BBox.Left, (int)obj.BBox.Top),
                        new Point((int)obj.BBox.Right, (int)obj.BBox.Bottom), new Scalar(0, 255, 0));
                }
            }
            Cv2.NamedWindow("mmdet", WindowFlags.GuiExpanded);
            Cv2.ImShow("mmdet", imgs[0]);
            CvWaitKey();

            handle.Close();
        }
    }
}
<Project Sdk="Microsoft.NET.Sdk">
  <!-- Console demo project targeting .NET Core 3.1. -->
  <PropertyGroup>
    <OutputType>Exe</OutputType>
    <TargetFramework>netcoreapp3.1</TargetFramework>
  </PropertyGroup>
  <!-- Unsafe blocks are required: the demo wraps raw OpenCV pixel buffers
       (Mat.DataPointer) into MMDeploy mats. Enabled for both configurations. -->
  <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|AnyCPU'">
    <AllowUnsafeBlocks>true</AllowUnsafeBlocks>
  </PropertyGroup>
  <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|AnyCPU'">
    <AllowUnsafeBlocks>true</AllowUnsafeBlocks>
  </PropertyGroup>
  <!-- MMDeploy C# SDK plus OpenCvSharp with its Windows native runtime. -->
  <ItemGroup>
    <PackageReference Include="MMDeployCSharp" Version="1.3.1" />
    <PackageReference Include="OpenCvSharp4" Version="4.5.5.20211231" />
    <PackageReference Include="OpenCvSharp4.runtime.win" Version="4.5.5.20211231" />
  </ItemGroup>
</Project>
using System;
using System.Collections.Generic;
using OpenCvSharp;
using MMDeploy;
namespace ocr_detection
{
    class Program
    {
        /// <summary>
        /// Wraps OpenCV images as MMDeploy mats without copying pixel data.
        /// The source mats must stay alive while the wrappers are in use.
        /// </summary>
        /// <param name="cvMats">Source BGR images.</param>
        /// <param name="mats">Receives one MMDeploy.Mat per input image.</param>
        static void CvMatToMat(OpenCvSharp.Mat[] cvMats, out MMDeploy.Mat[] mats)
        {
            mats = new MMDeploy.Mat[cvMats.Length];
            unsafe
            {
                for (int i = 0; i < cvMats.Length; i++)
                {
                    mats[i].Data = cvMats[i].DataPointer;
                    mats[i].Height = cvMats[i].Height;
                    mats[i].Width = cvMats[i].Width;
                    // Bug fix: Mat.Dims is the number of matrix dimensions (2 for
                    // any 2-D image), not the channel count. A BGR image has 3
                    // channels, reported by Channels().
                    mats[i].Channel = cvMats[i].Channels();
                    mats[i].Format = PixelFormat.BGR;
                    mats[i].Type = DataType.Int8;
                    mats[i].Device = null;
                }
            }
        }

        /// <summary>Blocks until a key is pressed in the OpenCV window.</summary>
        static void CvWaitKey()
        {
            Cv2.WaitKey();
        }

        /// <summary>
        /// Entry point: detects text regions in one image and outlines each
        /// detected quadrilateral in green.
        /// Usage: ocr_detection deviceName modelPath imagePath
        /// </summary>
        static void Main(string[] args)
        {
            if (args.Length != 3)
            {
                Console.WriteLine("usage:\n  ocr_detection deviceName modelPath imagePath\n");
                Environment.Exit(1);
            }

            string deviceName = args[0];
            string modelPath = args[1];
            string imagePath = args[2];

            // 1. create handle
            MMDeploy.TextDetector handle = new MMDeploy.TextDetector(modelPath, deviceName, 0);

            // 2. prepare input
            OpenCvSharp.Mat[] imgs = new OpenCvSharp.Mat[1] { Cv2.ImRead(imagePath, ImreadModes.Color) };
            CvMatToMat(imgs, out var mats);

            // 3. process
            List<TextDetectorOutput> output = handle.Apply(mats);

            // 4. show result: connect the four corners of each detection box.
            foreach (var detect in output[0].Results)
            {
                for (int i = 0; i < 4; i++)
                {
                    int sp = i;
                    int ep = (i + 1) % 4; // wrap around to close the polygon
                    Cv2.Line(imgs[0], new Point((int)detect.BBox[sp].X, (int)detect.BBox[sp].Y),
                        new Point((int)detect.BBox[ep].X, (int)detect.BBox[ep].Y), new Scalar(0, 255, 0));
                }
            }
            Cv2.NamedWindow("ocr-det", WindowFlags.GuiExpanded);
            Cv2.ImShow("ocr-det", imgs[0]);
            CvWaitKey();

            handle.Close();
        }
    }
}
<Project Sdk="Microsoft.NET.Sdk">
  <!-- Console demo project targeting .NET Core 3.1. -->
  <PropertyGroup>
    <OutputType>Exe</OutputType>
    <TargetFramework>netcoreapp3.1</TargetFramework>
  </PropertyGroup>
  <!-- Unsafe blocks are required: the demo wraps raw OpenCV pixel buffers
       (Mat.DataPointer) into MMDeploy mats. Enabled for both configurations. -->
  <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|AnyCPU'">
    <AllowUnsafeBlocks>true</AllowUnsafeBlocks>
  </PropertyGroup>
  <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|AnyCPU'">
    <AllowUnsafeBlocks>true</AllowUnsafeBlocks>
  </PropertyGroup>
  <!-- MMDeploy C# SDK plus OpenCvSharp with its Windows native runtime. -->
  <ItemGroup>
    <PackageReference Include="MMDeployCSharp" Version="1.3.1" />
    <PackageReference Include="OpenCvSharp4" Version="4.5.5.20211231" />
    <PackageReference Include="OpenCvSharp4.runtime.win" Version="4.5.5.20211231" />
  </ItemGroup>
</Project>
using System;
using System.Collections.Generic;
using OpenCvSharp;
using MMDeploy;
namespace ocr_recognition
{
    class Program
    {
        /// <summary>
        /// Wraps OpenCV images as MMDeploy mats without copying pixel data.
        /// The source mats must stay alive while the wrappers are in use.
        /// </summary>
        /// <param name="cvMats">Source BGR images.</param>
        /// <param name="mats">Receives one MMDeploy.Mat per input image.</param>
        static void CvMatToMat(OpenCvSharp.Mat[] cvMats, out MMDeploy.Mat[] mats)
        {
            mats = new MMDeploy.Mat[cvMats.Length];
            unsafe
            {
                for (int i = 0; i < cvMats.Length; i++)
                {
                    mats[i].Data = cvMats[i].DataPointer;
                    mats[i].Height = cvMats[i].Height;
                    mats[i].Width = cvMats[i].Width;
                    // Bug fix: Mat.Dims is the number of matrix dimensions (2 for
                    // any 2-D image), not the channel count. A BGR image has 3
                    // channels, reported by Channels().
                    mats[i].Channel = cvMats[i].Channels();
                    mats[i].Format = PixelFormat.BGR;
                    mats[i].Type = DataType.Int8;
                    mats[i].Device = null;
                }
            }
        }

        /// <summary>Blocks until a key is pressed in the OpenCV window.</summary>
        static void CvWaitKey()
        {
            Cv2.WaitKey();
        }

        /// <summary>
        /// Entry point: runs text recognition on one image and renders the
        /// recognized text onto it.
        /// Usage: ocr_recognition deviceName modelPath imagePath
        /// </summary>
        static void Main(string[] args)
        {
            if (args.Length != 3)
            {
                Console.WriteLine("usage:\n  ocr_recognition deviceName modelPath imagePath\n");
                Environment.Exit(1);
            }

            string deviceName = args[0];
            string modelPath = args[1];
            string imagePath = args[2];

            // 1. create handle
            TextRecognizer handle = new TextRecognizer(modelPath, deviceName, 0);

            // 2. prepare input
            OpenCvSharp.Mat[] imgs = new OpenCvSharp.Mat[1] { Cv2.ImRead(imagePath, ImreadModes.Color) };
            CvMatToMat(imgs, out var mats);

            // 3. process
            List<TextRecognizerOutput> output = handle.Apply(mats);

            // 4. show result: the recognized text is returned as UTF-8 bytes.
            foreach (var box in output[0].Results)
            {
                string text = System.Text.Encoding.UTF8.GetString(box.Text);
                Cv2.PutText(imgs[0], text, new Point(20, 20), HersheyFonts.HersheySimplex, 0.7, new Scalar(0, 255, 0), 1);
            }
            Cv2.NamedWindow("ocr-reg", WindowFlags.GuiExpanded);
            Cv2.ImShow("ocr-reg", imgs[0]);
            CvWaitKey();

            handle.Close();
        }
    }
}
<Project Sdk="Microsoft.NET.Sdk">
  <!-- Console demo project targeting .NET Core 3.1. -->
  <PropertyGroup>
    <OutputType>Exe</OutputType>
    <TargetFramework>netcoreapp3.1</TargetFramework>
  </PropertyGroup>
  <!-- Unsafe blocks are required: the demo wraps raw OpenCV pixel buffers
       (Mat.DataPointer) into MMDeploy mats. Enabled for both configurations. -->
  <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|AnyCPU'">
    <AllowUnsafeBlocks>true</AllowUnsafeBlocks>
  </PropertyGroup>
  <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|AnyCPU'">
    <AllowUnsafeBlocks>true</AllowUnsafeBlocks>
  </PropertyGroup>
  <!-- MMDeploy C# SDK plus OpenCvSharp with its Windows native runtime. -->
  <ItemGroup>
    <PackageReference Include="MMDeployCSharp" Version="1.3.1" />
    <PackageReference Include="OpenCvSharp4" Version="4.5.5.20211231" />
    <PackageReference Include="OpenCvSharp4.runtime.win" Version="4.5.5.20211231" />
  </ItemGroup>
</Project>
using System;
using System.Collections.Generic;
using OpenCvSharp;
using MMDeploy;
namespace pose_detection
{
    class Program
    {
        /// <summary>
        /// Wraps OpenCV images as MMDeploy mats without copying pixel data.
        /// The source mats must stay alive while the wrappers are in use.
        /// </summary>
        /// <param name="cvMats">Source BGR images.</param>
        /// <param name="mats">Receives one MMDeploy.Mat per input image.</param>
        static void CvMatToMat(OpenCvSharp.Mat[] cvMats, out MMDeploy.Mat[] mats)
        {
            mats = new MMDeploy.Mat[cvMats.Length];
            unsafe
            {
                for (int i = 0; i < cvMats.Length; i++)
                {
                    mats[i].Data = cvMats[i].DataPointer;
                    mats[i].Height = cvMats[i].Height;
                    mats[i].Width = cvMats[i].Width;
                    // Bug fix: Mat.Dims is the number of matrix dimensions (2 for
                    // any 2-D image), not the channel count. A BGR image has 3
                    // channels, reported by Channels().
                    mats[i].Channel = cvMats[i].Channels();
                    mats[i].Format = PixelFormat.BGR;
                    mats[i].Type = DataType.Int8;
                    mats[i].Device = null;
                }
            }
        }

        /// <summary>Blocks until a key is pressed in the OpenCV window.</summary>
        static void CvWaitKey()
        {
            Cv2.WaitKey();
        }

        /// <summary>
        /// Generates a deterministic pseudo-random BGR color per detection.
        /// </summary>
        /// <param name="n">Number of colors to generate.</param>
        /// <returns>One color per index.</returns>
        static Vec3b[] GenPalette(int n)
        {
            // Fixed seed so colors are stable across runs.
            Random rnd = new Random(2);
            Vec3b[] palette = new Vec3b[n];
            for (int i = 0; i < n; i++)
            {
                byte v1 = (byte)rnd.Next(0, 255);
                byte v2 = (byte)rnd.Next(0, 255);
                byte v3 = (byte)rnd.Next(0, 255);
                palette[i] = new Vec3b(v1, v2, v3);
            }
            return palette;
        }

        /// <summary>
        /// Entry point: runs pose estimation on one image and draws each
        /// detection's keypoints in its own color.
        /// Usage: pose_detection deviceName modelPath imagePath
        /// </summary>
        static void Main(string[] args)
        {
            if (args.Length != 3)
            {
                Console.WriteLine("usage:\n  pose_detection deviceName modelPath imagePath\n");
                Environment.Exit(1);
            }

            string deviceName = args[0];
            string modelPath = args[1];
            string imagePath = args[2];

            // 1. create handle
            PoseDetector handle = new PoseDetector(modelPath, deviceName, 0);

            // 2. prepare input
            OpenCvSharp.Mat[] imgs = new OpenCvSharp.Mat[1] { Cv2.ImRead(imagePath, ImreadModes.Color) };
            CvMatToMat(imgs, out var mats);

            // 3. process
            List<PoseDetectorOutput> output = handle.Apply(mats);

            // 4. show result: one palette color per detected pose.
            Vec3b[] palette = GenPalette(output[0].Count);
            int index = 0;
            foreach (var box in output[0].Results)
            {
                for (int i = 0; i < box.Points.Count; i++)
                {
                    Cv2.Circle(imgs[0], (int)box.Points[i].X, (int)box.Points[i].Y, 1,
                        new Scalar(palette[index][0], palette[index][1], palette[index][2]), 2);
                }
                index++;
            }
            Cv2.NamedWindow("pose", WindowFlags.GuiExpanded);
            Cv2.ImShow("pose", imgs[0]);
            CvWaitKey();

            handle.Close();
        }
    }
}
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment