Commit a144865d authored by gaoqiong's avatar gaoqiong
Browse files

update v1.14.0

parent cf1acfd2
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
using System;
using Microsoft.ML.OnnxRuntime.InferenceSample;
namespace CSharpUsage
{
class Program
{
    /// <summary>
    /// Entry point: creates the inference sample API wrapper, runs one
    /// inference pass, and disposes the wrapper (and its native session).
    /// </summary>
    public static void Main(string[] args)
    {
        Console.WriteLine("Using API");

        using (var api = new InferenceSampleApi())
        {
            api.Execute();
        }

        Console.WriteLine("Done");
    }
}
\ No newline at end of file
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using Microsoft.ML.OnnxRuntime.Tensors;
namespace Microsoft.ML.OnnxRuntime.InferenceSample
{
public class InferenceSampleApi : IDisposable
{
    /// <summary>
    /// Loads the embedded squeezenet test model, creates a default inference session,
    /// and prepares the sample input data for <see cref="Execute"/>.
    /// </summary>
    public InferenceSampleApi()
    {
        model = LoadModelFromEmbeddedResource("TestData.squeezenet.onnx");

        // this is the data for only one input tensor for this model
        var inputTensor = LoadTensorFromEmbeddedResource("TestData.bench.in");

        // create default session with default session options
        // Creating an InferenceSession and loading the model is an expensive operation, so generally you would
        // do this once. InferenceSession.Run can be called multiple times, and concurrently.
        CreateInferenceSession();

        // setup sample input data
        inputData = new List<NamedOnnxValue>();
        var inputMeta = inferenceSession.InputMetadata;
        foreach (var name in inputMeta.Keys)
        {
            // note: DenseTensor takes a copy of the provided data
            var tensor = new DenseTensor<float>(inputTensor, inputMeta[name].Dimensions);
            inputData.Add(NamedOnnxValue.CreateFromTensor<float>(name, tensor));
        }
    }

    /// <summary>
    /// Creates (or re-creates) the inference session for the loaded model.
    /// </summary>
    /// <param name="options">Optional session options to use. When null a default
    /// SessionOptions with LogId "Sample" is created.</param>
    public void CreateInferenceSession(SessionOptions options = null)
    {
        // Optional : Create session options and set any relevant values.
        // If an additional execution provider is needed it should be added to the SessionOptions prior to
        // creating the InferenceSession. The CPU Execution Provider is always added by default.
        if (options == null)
        {
            options = new SessionOptions { LogId = "Sample" };
        }

        // dispose of any previously created session so repeated calls do not leak
        // the native resources held by the old InferenceSession
        inferenceSession?.Dispose();
        inferenceSession = new InferenceSession(model, options);
    }

    /// <summary>
    /// Runs inference with the sample input data and dumps the output to the console.
    /// </summary>
    public void Execute()
    {
        // Run the inference
        // 'results' is an IDisposableReadOnlyCollection<DisposableNamedOnnxValue> container
        using (var results = inferenceSession.Run(inputData))
        {
            // dump the results
            foreach (var r in results)
            {
                Console.WriteLine("Output for {0}", r.Name);
                Console.WriteLine(r.AsTensor<float>().GetArrayString());
            }
        }
    }

    /// <summary>
    /// Disposes the native inference session. Safe to call multiple times.
    /// </summary>
    protected virtual void Dispose(bool disposing)
    {
        if (disposing && inferenceSession != null)
        {
            inferenceSession.Dispose();
            inferenceSession = null;
        }
    }

    public void Dispose()
    {
        Dispose(true);
        GC.SuppressFinalize(this);
    }

    // Reads a float tensor from an embedded resource in the simple text format used by
    // the ONNX Runtime test data (first line is the input name, second line the values).
    static float[] LoadTensorFromEmbeddedResource(string path)
    {
        var tensorData = new List<float>();
        var assembly = typeof(InferenceSampleApi).Assembly;
        var resourceName = $"{assembly.GetName().Name}.{path}";
        var resourceStream = assembly.GetManifestResourceStream(resourceName);
        if (resourceStream == null)
        {
            // fail with a clear message instead of a NullReferenceException
            throw new InvalidOperationException($"Embedded resource '{resourceName}' was not found.");
        }

        using (var inputFile = new StreamReader(resourceStream))
        {
            inputFile.ReadLine(); // skip the input name
            string[] dataStr = inputFile.ReadLine().Split(new char[] { ',', '[', ']' },
                                                          StringSplitOptions.RemoveEmptyEntries);
            for (int i = 0; i < dataStr.Length; i++)
            {
                // the data is machine generated; parse with the invariant culture so a
                // comma-decimal user locale cannot break the parse
                tensorData.Add(Single.Parse(dataStr[i], CultureInfo.InvariantCulture));
            }
        }

        return tensorData.ToArray();
    }

    // Reads the raw bytes of an embedded ONNX model resource.
    static byte[] LoadModelFromEmbeddedResource(string path)
    {
        var assembly = typeof(InferenceSampleApi).Assembly;
        var resourceName = $"{assembly.GetName().Name}.{path}";
        byte[] model = null;

        using (Stream stream = assembly.GetManifestResourceStream(resourceName))
        {
            if (stream == null)
            {
                // fail with a clear message instead of a NullReferenceException
                throw new InvalidOperationException($"Embedded resource '{resourceName}' was not found.");
            }

            using (MemoryStream memoryStream = new MemoryStream())
            {
                stream.CopyTo(memoryStream);
                model = memoryStream.ToArray();
            }
        }

        return model;
    }

    private readonly byte[] model;
    private readonly List<NamedOnnxValue> inputData;
    private InferenceSession inferenceSession;
}
}
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<TargetFramework>netstandard2.0</TargetFramework>
</PropertyGroup>
<PropertyGroup>
<AutoGenerateBindingRedirects>True</AutoGenerateBindingRedirects>
</PropertyGroup>
<ItemGroup>
<ProjectReference Include="..\..\..\src\Microsoft.ML.OnnxRuntime\Microsoft.ML.OnnxRuntime.csproj" />
</ItemGroup>
<ItemGroup>
<EmbeddedResource Include="..\..\..\testdata\bench.in">
<Link>TestData\bench.in</Link>
</EmbeddedResource>
<EmbeddedResource Include="..\..\..\testdata\squeezenet.onnx">
<Link>TestData\squeezenet.onnx</Link>
</EmbeddedResource>
</ItemGroup>
</Project>
To test the iOS or Android samples the native build of ONNX Runtime is required and must be in a specific location.
Only the native build for the platform you are testing on is required.
e.g. if you're testing using an Android device that is arm64, you only need the libonnxruntime.so for arm64-v8a.
The version of the native build should match the checked-out version of the ONNX Runtime repository you're currently using as closely as possible.
Otherwise mismatches with the native entry points are possible and could cause crashes.
To acquire the native build you can:
- build it yourself
- [Android](https://onnxruntime.ai/docs/build/android.html) build instructions
- [iOS](https://onnxruntime.ai/docs/build/ios.html) build instructions
- extract it from the Microsoft.ML.OnnxRuntime nuget package using [NuGetPackageExplorer](https://github.com/NuGetPackageExplorer/NuGetPackageExplorer)
- release version is [here](https://www.nuget.org/packages/Microsoft.ML.OnnxRuntime/)
- integration test version is [here](https://int.nugettest.org/packages/Microsoft.ML.OnnxRuntime/)
- this is frequently updated and should work if you're currently using the `master` branch of ONNX Runtime
- or if you have access to the internal packaging pipelines
- the Zip-Nuget-Java-Nodejs Packaging Pipeline produces the native package as an artifact under `drop-signed-nuget-CPU`
- run a build for your current branch in the pipeline to ensure the native build matches exactly
For iOS the native build should be at one or more of:
- <ORT repo root>\build\iOS\iphoneos\Release\Release-iphoneos\onnxruntime.framework for an iOS device
- <ORT repo root>\build\iOS\iphonesimulator\Release\Release-iphonesimulator\onnxruntime.framework for an iOS simulator
For Android the native build should be at one or more of:
- <ORT repo root>\build\Android\arm64-v8a\Release\libonnxruntime.so for a 64-bit arm device
- <ORT repo root>\build\Android\armeabi-v7a\Release\libonnxruntime.so for a 32-bit arm device
- <ORT repo root>\build\Android\x86\Release\libonnxruntime.so for an x86 Android emulator
- <ORT repo root>\build\Android\x86_64\Release\libonnxruntime.so for an x86_64 Android emulator
\ No newline at end of file
namespace Microsoft.ML.OnnxRuntime.FasterRcnnSample
{
/// <summary>
/// Class labels for the COCO dataset used by the Faster R-CNN sample.
/// The label indices produced by the model map directly into this array;
/// index 0 is the "__background" placeholder class.
/// </summary>
public class LabelMap
{
// Order matters: position in the array is the class id emitted by the model.
public static readonly string[] Labels = new[] {"__background",
"person",
"bicycle",
"car",
"motorcycle",
"airplane",
"bus",
"train",
"truck",
"boat",
"traffic light",
"fire hydrant",
"stop sign",
"parking meter",
"bench",
"bird",
"cat",
"dog",
"horse",
"sheep",
"cow",
"elephant",
"bear",
"zebra",
"giraffe",
"backpack",
"umbrella",
"handbag",
"tie",
"suitcase",
"frisbee",
"skis",
"snowboard",
"sports ball",
"kite",
"baseball bat",
"baseball glove",
"skateboard",
"surfboard",
"tennis racket",
"bottle",
"wine glass",
"cup",
"fork",
"knife",
"spoon",
"bowl",
"banana",
"apple",
"sandwich",
"orange",
"broccoli",
"carrot",
"hot dog",
"pizza",
"donut",
"cake",
"chair",
"couch",
"potted plant",
"bed",
"dining table",
"toilet",
"tv",
"laptop",
"mouse",
"remote",
"keyboard",
"cell phone",
"microwave",
"oven",
"toaster",
"sink",
"refrigerator",
"book",
"clock",
"vase",
"scissors",
"teddy bear",
"hair drier",
"toothbrush"};
}
}
\ No newline at end of file
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFramework>netcoreapp3.1</TargetFramework>
<LangVersion>8.0</LangVersion>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.ML.OnnxRuntime" Version="1.11.0" />
<PackageReference Include="Sixlabors.ImageSharp" Version="2.1.1" />
<PackageReference Include="SixLabors.ImageSharp.Drawing" Version="1.0.0-beta14" />
</ItemGroup>
</Project>
\ No newline at end of file

Microsoft Visual Studio Solution File, Format Version 12.00
# Visual Studio Version 16
VisualStudioVersion = 16.0.810.9
MinimumVisualStudioVersion = 10.0.40219.1
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Microsoft.ML.OnnxRuntime.FasterRcnnSample", "Microsoft.ML.OnnxRuntime.FasterRcnnSample.csproj", "{FE61EF04-8640-4BBC-B4F4-C2F1B83DC00A}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
Release|Any CPU = Release|Any CPU
EndGlobalSection
GlobalSection(ProjectConfigurationPlatforms) = postSolution
{FE61EF04-8640-4BBC-B4F4-C2F1B83DC00A}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{FE61EF04-8640-4BBC-B4F4-C2F1B83DC00A}.Debug|Any CPU.Build.0 = Debug|Any CPU
{FE61EF04-8640-4BBC-B4F4-C2F1B83DC00A}.Release|Any CPU.ActiveCfg = Release|Any CPU
{FE61EF04-8640-4BBC-B4F4-C2F1B83DC00A}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
EndGlobalSection
GlobalSection(ExtensibilityGlobals) = postSolution
SolutionGuid = {17ED443E-EA35-4B00-82C8-9950BC5F653F}
EndGlobalSection
EndGlobal
namespace Microsoft.ML.OnnxRuntime.FasterRcnnSample
{
/// <summary>
/// A single detection produced by the Faster R-CNN model: a bounding box,
/// the class label text and the model's confidence score for that box.
/// </summary>
public class Prediction
{
public Box Box { get; set; }
public string Label { get; set; }
public float Confidence { get; set; }
}
/// <summary>
/// Axis-aligned bounding box in image pixel coordinates.
/// (Xmin, Ymin) is the top-left corner and (Xmax, Ymax) the bottom-right corner.
/// </summary>
public class Box
{
public float Xmin { get; set; }
public float Ymin { get; set; }
public float Xmax { get; set; }
public float Ymax { get; set; }
public Box(float xmin, float ymin, float xmax, float ymax)
{
Xmin = xmin;
Ymin = ymin;
Xmax = xmax;
Ymax = ymax;
}
}
}
\ No newline at end of file
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using Microsoft.ML.OnnxRuntime.Tensors;
using SixLabors.ImageSharp;
using SixLabors.ImageSharp.PixelFormats;
using SixLabors.ImageSharp.Processing;
using SixLabors.ImageSharp.Drawing.Processing;
using SixLabors.Fonts;
namespace Microsoft.ML.OnnxRuntime.FasterRcnnSample
{
class Program
{
    /// <summary>
    /// Faster R-CNN sample: loads an image, preprocesses it for the model,
    /// runs inference, and writes a copy of the image annotated with the
    /// detected boxes/labels/confidences.
    /// Args: [0] model path, [1] input image path, [2] output image path.
    /// </summary>
    public static void Main(string[] args)
    {
        // Read paths
        string modelFilePath = args[0];
        string imageFilePath = args[1];
        string outImageFilePath = args[2];

        // Read image
        using Image<Rgb24> image = Image.Load<Rgb24>(imageFilePath);

        // Resize image so the shorter side becomes 800 pixels
        float ratio = 800f / Math.Min(image.Width, image.Height);
        image.Mutate(x => x.Resize((int)(ratio * image.Width), (int)(ratio * image.Height)));

        // Preprocess image: pad each dimension up to a multiple of 32 and build a
        // CHW float tensor in BGR channel order with the per-channel means subtracted
        var paddedHeight = (int)(Math.Ceiling(image.Height / 32f) * 32f);
        var paddedWidth = (int)(Math.Ceiling(image.Width / 32f) * 32f);
        Tensor<float> input = new DenseTensor<float>(new[] { 3, paddedHeight, paddedWidth });
        var mean = new[] { 102.9801f, 115.9465f, 122.7717f };
        image.ProcessPixelRows(accessor =>
        {
            // Copy every source pixel; the padded region (right/bottom) stays zero.
            // BUG FIX: the original loops started at paddedHeight - Height and
            // paddedWidth - Width, which skipped the top rows / left columns of the
            // image whenever padding was required.
            for (int y = 0; y < accessor.Height; y++)
            {
                Span<Rgb24> pixelSpan = accessor.GetRowSpan(y);
                for (int x = 0; x < accessor.Width; x++)
                {
                    input[0, y, x] = pixelSpan[x].B - mean[0];
                    input[1, y, x] = pixelSpan[x].G - mean[1];
                    input[2, y, x] = pixelSpan[x].R - mean[2];
                }
            }
        });

        // Setup inputs and outputs
        var inputs = new List<NamedOnnxValue>
        {
            NamedOnnxValue.CreateFromTensor("image", input)
        };

        // Run inference
        using var session = new InferenceSession(modelFilePath);
        using IDisposableReadOnlyCollection<DisposableNamedOnnxValue> results = session.Run(inputs);

        // Postprocess to get predictions
        var resultsArray = results.ToArray();
        float[] boxes = resultsArray[0].AsEnumerable<float>().ToArray();
        long[] labels = resultsArray[1].AsEnumerable<long>().ToArray();
        float[] confidences = resultsArray[2].AsEnumerable<float>().ToArray();
        var predictions = new List<Prediction>();
        var minConfidence = 0.7f;
        // Each box is 4 consecutive floats (xmin, ymin, xmax, ymax).
        // BUG FIX: the original condition `i < boxes.Length - 4` dropped the last box.
        for (int i = 0; i + 4 <= boxes.Length; i += 4)
        {
            var index = i / 4;
            if (confidences[index] >= minConfidence)
            {
                predictions.Add(new Prediction
                {
                    Box = new Box(boxes[i], boxes[i + 1], boxes[i + 2], boxes[i + 3]),
                    Label = LabelMap.Labels[labels[index]],
                    Confidence = confidences[index]
                });
            }
        }

        // Put boxes, labels and confidence on image and save for viewing
        using var outputImage = File.OpenWrite(outImageFilePath);
        Font font = SystemFonts.CreateFont("Arial", 16);
        foreach (var p in predictions)
        {
            image.Mutate(x =>
            {
                // draw the four edges of the box as line segments
                x.DrawLines(Color.Red, 2f, new PointF[] {
                    new PointF(p.Box.Xmin, p.Box.Ymin),
                    new PointF(p.Box.Xmax, p.Box.Ymin),
                    new PointF(p.Box.Xmax, p.Box.Ymin),
                    new PointF(p.Box.Xmax, p.Box.Ymax),
                    new PointF(p.Box.Xmax, p.Box.Ymax),
                    new PointF(p.Box.Xmin, p.Box.Ymax),
                    new PointF(p.Box.Xmin, p.Box.Ymax),
                    new PointF(p.Box.Xmin, p.Box.Ymin)
                });
                x.DrawText($"{p.Label}, {p.Confidence:0.00}", font, Color.White, new PointF(p.Box.Xmin, p.Box.Ymin));
            });
        }
        image.SaveAsJpeg(outputImage);
    }
}
\ No newline at end of file
namespace Microsoft.ML.OnnxRuntime.ResNet50v2Sample
{
public class LabelMap
{
public static readonly string[] Labels = new[] {"tench",
"goldfish",
"great white shark",
"tiger shark",
"hammerhead shark",
"electric ray",
"stingray",
"cock",
"hen",
"ostrich",
"brambling",
"goldfinch",
"house finch",
"junco",
"indigo bunting",
"American robin",
"bulbul",
"jay",
"magpie",
"chickadee",
"American dipper",
"kite",
"bald eagle",
"vulture",
"great grey owl",
"fire salamander",
"smooth newt",
"newt",
"spotted salamander",
"axolotl",
"American bullfrog",
"tree frog",
"tailed frog",
"loggerhead sea turtle",
"leatherback sea turtle",
"mud turtle",
"terrapin",
"box turtle",
"banded gecko",
"green iguana",
"Carolina anole",
"desert grassland whiptail lizard",
"agama",
"frilled-necked lizard",
"alligator lizard",
"Gila monster",
"European green lizard",
"chameleon",
"Komodo dragon",
"Nile crocodile",
"American alligator",
"triceratops",
"worm snake",
"ring-necked snake",
"eastern hog-nosed snake",
"smooth green snake",
"kingsnake",
"garter snake",
"water snake",
"vine snake",
"night snake",
"boa constrictor",
"African rock python",
"Indian cobra",
"green mamba",
"sea snake",
"Saharan horned viper",
"eastern diamondback rattlesnake",
"sidewinder",
"trilobite",
"harvestman",
"scorpion",
"yellow garden spider",
"barn spider",
"European garden spider",
"southern black widow",
"tarantula",
"wolf spider",
"tick",
"centipede",
"black grouse",
"ptarmigan",
"ruffed grouse",
"prairie grouse",
"peacock",
"quail",
"partridge",
"grey parrot",
"macaw",
"sulphur-crested cockatoo",
"lorikeet",
"coucal",
"bee eater",
"hornbill",
"hummingbird",
"jacamar",
"toucan",
"duck",
"red-breasted merganser",
"goose",
"black swan",
"tusker",
"echidna",
"platypus",
"wallaby",
"koala",
"wombat",
"jellyfish",
"sea anemone",
"brain coral",
"flatworm",
"nematode",
"conch",
"snail",
"slug",
"sea slug",
"chiton",
"chambered nautilus",
"Dungeness crab",
"rock crab",
"fiddler crab",
"red king crab",
"American lobster",
"spiny lobster",
"crayfish",
"hermit crab",
"isopod",
"white stork",
"black stork",
"spoonbill",
"flamingo",
"little blue heron",
"great egret",
"bittern",
"crane (bird)",
"limpkin",
"common gallinule",
"American coot",
"bustard",
"ruddy turnstone",
"dunlin",
"common redshank",
"dowitcher",
"oystercatcher",
"pelican",
"king penguin",
"albatross",
"grey whale",
"killer whale",
"dugong",
"sea lion",
"Chihuahua",
"Japanese Chin",
"Maltese",
"Pekingese",
"Shih Tzu",
"King Charles Spaniel",
"Papillon",
"toy terrier",
"Rhodesian Ridgeback",
"Afghan Hound",
"Basset Hound",
"Beagle",
"Bloodhound",
"Bluetick Coonhound",
"Black and Tan Coonhound",
"Treeing Walker Coonhound",
"English foxhound",
"Redbone Coonhound",
"borzoi",
"Irish Wolfhound",
"Italian Greyhound",
"Whippet",
"Ibizan Hound",
"Norwegian Elkhound",
"Otterhound",
"Saluki",
"Scottish Deerhound",
"Weimaraner",
"Staffordshire Bull Terrier",
"American Staffordshire Terrier",
"Bedlington Terrier",
"Border Terrier",
"Kerry Blue Terrier",
"Irish Terrier",
"Norfolk Terrier",
"Norwich Terrier",
"Yorkshire Terrier",
"Wire Fox Terrier",
"Lakeland Terrier",
"Sealyham Terrier",
"Airedale Terrier",
"Cairn Terrier",
"Australian Terrier",
"Dandie Dinmont Terrier",
"Boston Terrier",
"Miniature Schnauzer",
"Giant Schnauzer",
"Standard Schnauzer",
"Scottish Terrier",
"Tibetan Terrier",
"Australian Silky Terrier",
"Soft-coated Wheaten Terrier",
"West Highland White Terrier",
"Lhasa Apso",
"Flat-Coated Retriever",
"Curly-coated Retriever",
"Golden Retriever",
"Labrador Retriever",
"Chesapeake Bay Retriever",
"German Shorthaired Pointer",
"Vizsla",
"English Setter",
"Irish Setter",
"Gordon Setter",
"Brittany",
"Clumber Spaniel",
"English Springer Spaniel",
"Welsh Springer Spaniel",
"Cocker Spaniels",
"Sussex Spaniel",
"Irish Water Spaniel",
"Kuvasz",
"Schipperke",
"Groenendael",
"Malinois",
"Briard",
"Australian Kelpie",
"Komondor",
"Old English Sheepdog",
"Shetland Sheepdog",
"collie",
"Border Collie",
"Bouvier des Flandres",
"Rottweiler",
"German Shepherd Dog",
"Dobermann",
"Miniature Pinscher",
"Greater Swiss Mountain Dog",
"Bernese Mountain Dog",
"Appenzeller Sennenhund",
"Entlebucher Sennenhund",
"Boxer",
"Bullmastiff",
"Tibetan Mastiff",
"French Bulldog",
"Great Dane",
"St. Bernard",
"husky",
"Alaskan Malamute",
"Siberian Husky",
"Dalmatian",
"Affenpinscher",
"Basenji",
"pug",
"Leonberger",
"Newfoundland",
"Pyrenean Mountain Dog",
"Samoyed",
"Pomeranian",
"Chow Chow",
"Keeshond",
"Griffon Bruxellois",
"Pembroke Welsh Corgi",
"Cardigan Welsh Corgi",
"Toy Poodle",
"Miniature Poodle",
"Standard Poodle",
"Mexican hairless dog",
"grey wolf",
"Alaskan tundra wolf",
"red wolf",
"coyote",
"dingo",
"dhole",
"African wild dog",
"hyena",
"red fox",
"kit fox",
"Arctic fox",
"grey fox",
"tabby cat",
"tiger cat",
"Persian cat",
"Siamese cat",
"Egyptian Mau",
"cougar",
"lynx",
"leopard",
"snow leopard",
"jaguar",
"lion",
"tiger",
"cheetah",
"brown bear",
"American black bear",
"polar bear",
"sloth bear",
"mongoose",
"meerkat",
"tiger beetle",
"ladybug",
"ground beetle",
"longhorn beetle",
"leaf beetle",
"dung beetle",
"rhinoceros beetle",
"weevil",
"fly",
"bee",
"ant",
"grasshopper",
"cricket",
"stick insect",
"cockroach",
"mantis",
"cicada",
"leafhopper",
"lacewing",
"dragonfly",
"damselfly",
"red admiral",
"ringlet",
"monarch butterfly",
"small white",
"sulphur butterfly",
"gossamer-winged butterfly",
"starfish",
"sea urchin",
"sea cucumber",
"cottontail rabbit",
"hare",
"Angora rabbit",
"hamster",
"porcupine",
"fox squirrel",
"marmot",
"beaver",
"guinea pig",
"common sorrel",
"zebra",
"pig",
"wild boar",
"warthog",
"hippopotamus",
"ox",
"water buffalo",
"bison",
"ram",
"bighorn sheep",
"Alpine ibex",
"hartebeest",
"impala",
"gazelle",
"dromedary",
"llama",
"weasel",
"mink",
"European polecat",
"black-footed ferret",
"otter",
"skunk",
"badger",
"armadillo",
"three-toed sloth",
"orangutan",
"gorilla",
"chimpanzee",
"gibbon",
"siamang",
"guenon",
"patas monkey",
"baboon",
"macaque",
"langur",
"black-and-white colobus",
"proboscis monkey",
"marmoset",
"white-headed capuchin",
"howler monkey",
"titi",
"Geoffroy's spider monkey",
"common squirrel monkey",
"ring-tailed lemur",
"indri",
"Asian elephant",
"African bush elephant",
"red panda",
"giant panda",
"snoek",
"eel",
"coho salmon",
"rock beauty",
"clownfish",
"sturgeon",
"garfish",
"lionfish",
"pufferfish",
"abacus",
"abaya",
"academic gown",
"accordion",
"acoustic guitar",
"aircraft carrier",
"airliner",
"airship",
"altar",
"ambulance",
"amphibious vehicle",
"analog clock",
"apiary",
"apron",
"waste container",
"assault rifle",
"backpack",
"bakery",
"balance beam",
"balloon",
"ballpoint pen",
"Band-Aid",
"banjo",
"baluster",
"barbell",
"barber chair",
"barbershop",
"barn",
"barometer",
"barrel",
"wheelbarrow",
"baseball",
"basketball",
"bassinet",
"bassoon",
"swimming cap",
"bath towel",
"bathtub",
"station wagon",
"lighthouse",
"beaker",
"military cap",
"beer bottle",
"beer glass",
"bell-cot",
"bib",
"tandem bicycle",
"bikini",
"ring binder",
"binoculars",
"birdhouse",
"boathouse",
"bobsleigh",
"bolo tie",
"poke bonnet",
"bookcase",
"bookstore",
"bottle cap",
"bow",
"bow tie",
"brass",
"bra",
"breakwater",
"breastplate",
"broom",
"bucket",
"buckle",
"bulletproof vest",
"high-speed train",
"butcher shop",
"taxicab",
"cauldron",
"candle",
"cannon",
"canoe",
"can opener",
"cardigan",
"car mirror",
"carousel",
"tool kit",
"carton",
"car wheel",
"automated teller machine",
"cassette",
"cassette player",
"castle",
"catamaran",
"CD player",
"cello",
"mobile phone",
"chain",
"chain-link fence",
"chain mail",
"chainsaw",
"chest",
"chiffonier",
"chime",
"china cabinet",
"Christmas stocking",
"church",
"movie theater",
"cleaver",
"cliff dwelling",
"cloak",
"clogs",
"cocktail shaker",
"coffee mug",
"coffeemaker",
"coil",
"combination lock",
"computer keyboard",
"confectionery store",
"container ship",
"convertible",
"corkscrew",
"cornet",
"cowboy boot",
"cowboy hat",
"cradle",
"crane (machine)",
"crash helmet",
"crate",
"infant bed",
"Crock Pot",
"croquet ball",
"crutch",
"cuirass",
"dam",
"desk",
"desktop computer",
"rotary dial telephone",
"diaper",
"digital clock",
"digital watch",
"dining table",
"dishcloth",
"dishwasher",
"disc brake",
"dock",
"dog sled",
"dome",
"doormat",
"drilling rig",
"drum",
"drumstick",
"dumbbell",
"Dutch oven",
"electric fan",
"electric guitar",
"electric locomotive",
"entertainment center",
"envelope",
"espresso machine",
"face powder",
"feather boa",
"filing cabinet",
"fireboat",
"fire engine",
"fire screen sheet",
"flagpole",
"flute",
"folding chair",
"football helmet",
"forklift",
"fountain",
"fountain pen",
"four-poster bed",
"freight car",
"French horn",
"frying pan",
"fur coat",
"garbage truck",
"gas mask",
"gas pump",
"goblet",
"go-kart",
"golf ball",
"golf cart",
"gondola",
"gong",
"gown",
"grand piano",
"greenhouse",
"grille",
"grocery store",
"guillotine",
"barrette",
"hair spray",
"half-track",
"hammer",
"hamper",
"hair dryer",
"hand-held computer",
"handkerchief",
"hard disk drive",
"harmonica",
"harp",
"harvester",
"hatchet",
"holster",
"home theater",
"honeycomb",
"hook",
"hoop skirt",
"horizontal bar",
"horse-drawn vehicle",
"hourglass",
"iPod",
"clothes iron",
"jack-o'-lantern",
"jeans",
"jeep",
"T-shirt",
"jigsaw puzzle",
"pulled rickshaw",
"joystick",
"kimono",
"knee pad",
"knot",
"lab coat",
"ladle",
"lampshade",
"laptop computer",
"lawn mower",
"lens cap",
"paper knife",
"library",
"lifeboat",
"lighter",
"limousine",
"ocean liner",
"lipstick",
"slip-on shoe",
"lotion",
"speaker",
"loupe",
"sawmill",
"magnetic compass",
"mail bag",
"mailbox",
"tights",
"tank suit",
"manhole cover",
"maraca",
"marimba",
"mask",
"match",
"maypole",
"maze",
"measuring cup",
"medicine chest",
"megalith",
"microphone",
"microwave oven",
"military uniform",
"milk can",
"minibus",
"miniskirt",
"minivan",
"missile",
"mitten",
"mixing bowl",
"mobile home",
"Model T",
"modem",
"monastery",
"monitor",
"moped",
"mortar",
"square academic cap",
"mosque",
"mosquito net",
"scooter",
"mountain bike",
"tent",
"computer mouse",
"mousetrap",
"moving van",
"muzzle",
"nail",
"neck brace",
"necklace",
"nipple",
"notebook computer",
"obelisk",
"oboe",
"ocarina",
"odometer",
"oil filter",
"organ",
"oscilloscope",
"overskirt",
"bullock cart",
"oxygen mask",
"packet",
"paddle",
"paddle wheel",
"padlock",
"paintbrush",
"pajamas",
"palace",
"pan flute",
"paper towel",
"parachute",
"parallel bars",
"park bench",
"parking meter",
"passenger car",
"patio",
"payphone",
"pedestal",
"pencil case",
"pencil sharpener",
"perfume",
"Petri dish",
"photocopier",
"plectrum",
"Pickelhaube",
"picket fence",
"pickup truck",
"pier",
"piggy bank",
"pill bottle",
"pillow",
"ping-pong ball",
"pinwheel",
"pirate ship",
"pitcher",
"hand plane",
"planetarium",
"plastic bag",
"plate rack",
"plow",
"plunger",
"Polaroid camera",
"pole",
"police van",
"poncho",
"billiard table",
"soda bottle",
"pot",
"potter's wheel",
"power drill",
"prayer rug",
"printer",
"prison",
"projectile",
"projector",
"hockey puck",
"punching bag",
"purse",
"quill",
"quilt",
"race car",
"racket",
"radiator",
"radio",
"radio telescope",
"rain barrel",
"recreational vehicle",
"reel",
"reflex camera",
"refrigerator",
"remote control",
"restaurant",
"revolver",
"rifle",
"rocking chair",
"rotisserie",
"eraser",
"rugby ball",
"ruler",
"running shoe",
"safe",
"safety pin",
"salt shaker",
"sandal",
"sarong",
"saxophone",
"scabbard",
"weighing scale",
"school bus",
"schooner",
"scoreboard",
"CRT screen",
"screw",
"screwdriver",
"seat belt",
"sewing machine",
"shield",
"shoe store",
"shoji",
"shopping basket",
"shopping cart",
"shovel",
"shower cap",
"shower curtain",
"ski",
"ski mask",
"sleeping bag",
"slide rule",
"sliding door",
"slot machine",
"snorkel",
"snowmobile",
"snowplow",
"soap dispenser",
"soccer ball",
"sock",
"solar thermal collector",
"sombrero",
"soup bowl",
"space bar",
"space heater",
"space shuttle",
"spatula",
"motorboat",
"spider web",
"spindle",
"sports car",
"spotlight",
"stage",
"steam locomotive",
"through arch bridge",
"steel drum",
"stethoscope",
"scarf",
"stone wall",
"stopwatch",
"stove",
"strainer",
"tram",
"stretcher",
"couch",
"stupa",
"submarine",
"suit",
"sundial",
"sunglass",
"sunglasses",
"sunscreen",
"suspension bridge",
"mop",
"sweatshirt",
"swimsuit",
"swing",
"switch",
"syringe",
"table lamp",
"tank",
"tape player",
"teapot",
"teddy bear",
"television",
"tennis ball",
"thatched roof",
"front curtain",
"thimble",
"threshing machine",
"throne",
"tile roof",
"toaster",
"tobacco shop",
"toilet seat",
"torch",
"totem pole",
"tow truck",
"toy store",
"tractor",
"semi-trailer truck",
"tray",
"trench coat",
"tricycle",
"trimaran",
"tripod",
"triumphal arch",
"trolleybus",
"trombone",
"tub",
"turnstile",
"typewriter keyboard",
"umbrella",
"unicycle",
"upright piano",
"vacuum cleaner",
"vase",
"vault",
"velvet",
"vending machine",
"vestment",
"viaduct",
"violin",
"volleyball",
"waffle iron",
"wall clock",
"wallet",
"wardrobe",
"military aircraft",
"sink",
"washing machine",
"water bottle",
"water jug",
"water tower",
"whiskey jug",
"whistle",
"wig",
"window screen",
"window shade",
"Windsor tie",
"wine bottle",
"wing",
"wok",
"wooden spoon",
"wool",
"split-rail fence",
"shipwreck",
"yawl",
"yurt",
"website",
"comic book",
"crossword",
"traffic sign",
"traffic light",
"dust jacket",
"menu",
"plate",
"guacamole",
"consomme",
"hot pot",
"trifle",
"ice cream",
"ice pop",
"baguette",
"bagel",
"pretzel",
"cheeseburger",
"hot dog",
"mashed potato",
"cabbage",
"broccoli",
"cauliflower",
"zucchini",
"spaghetti squash",
"acorn squash",
"butternut squash",
"cucumber",
"artichoke",
"bell pepper",
"cardoon",
"mushroom",
"Granny Smith",
"strawberry",
"orange",
"lemon",
"fig",
"pineapple",
"banana",
"jackfruit",
"custard apple",
"pomegranate",
"hay",
"carbonara",
"chocolate syrup",
"dough",
"meatloaf",
"pizza",
"pot pie",
"burrito",
"red wine",
"espresso",
"cup",
"eggnog",
"alp",
"bubble",
"cliff",
"coral reef",
"geyser",
"lakeshore",
"promontory",
"shoal",
"seashore",
"valley",
"volcano",
"baseball player",
"bridegroom",
"scuba diver",
"rapeseed",
"daisy",
"yellow lady's slipper",
"corn",
"acorn",
"rose hip",
"horse chestnut seed",
"coral fungus",
"agaric",
"gyromitra",
"stinkhorn mushroom",
"earth star",
"hen-of-the-woods",
"bolete",
"ear",
"toilet paper"};
}
}
\ No newline at end of file
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<OutputType>Exe</OutputType>
<TargetFramework>netcoreapp3.1</TargetFramework>
<LangVersion>8.0</LangVersion>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.ML.OnnxRuntime" Version="1.11.0" />
<PackageReference Include="Sixlabors.ImageSharp" Version="2.1.1" />
</ItemGroup>
</Project>

Microsoft Visual Studio Solution File, Format Version 12.00
# Visual Studio Version 16
VisualStudioVersion = 16.0.810.9
MinimumVisualStudioVersion = 10.0.40219.1
Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "Microsoft.ML.OnnxRuntime.ResNet50v2Sample", "Microsoft.ML.OnnxRuntime.ResNet50v2Sample.csproj", "{6D2E6920-691F-4CDB-970F-324D8743BF8A}"
EndProject
Global
GlobalSection(SolutionConfigurationPlatforms) = preSolution
Debug|Any CPU = Debug|Any CPU
Release|Any CPU = Release|Any CPU
EndGlobalSection
GlobalSection(ProjectConfigurationPlatforms) = postSolution
{6D2E6920-691F-4CDB-970F-324D8743BF8A}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
{6D2E6920-691F-4CDB-970F-324D8743BF8A}.Debug|Any CPU.Build.0 = Debug|Any CPU
{6D2E6920-691F-4CDB-970F-324D8743BF8A}.Release|Any CPU.ActiveCfg = Release|Any CPU
{6D2E6920-691F-4CDB-970F-324D8743BF8A}.Release|Any CPU.Build.0 = Release|Any CPU
EndGlobalSection
GlobalSection(SolutionProperties) = preSolution
HideSolutionNode = FALSE
EndGlobalSection
GlobalSection(ExtensibilityGlobals) = postSolution
SolutionGuid = {B0A0513C-D336-4B4A-B22B-13429F23CA44}
EndGlobalSection
EndGlobal
namespace Microsoft.ML.OnnxRuntime.ResNet50v2Sample
{
/// <summary>
/// A single classification result: the class label text and the
/// softmax confidence the model assigned to that class.
/// </summary>
internal class Prediction
{
public string Label { get; set; }
public float Confidence { get; set; }
}
}
\ No newline at end of file
using System;
using System.Collections.Generic;
using System.Linq;
using Microsoft.ML.OnnxRuntime.Tensors;
using SixLabors.ImageSharp;
using SixLabors.ImageSharp.PixelFormats;
using SixLabors.ImageSharp.Processing;
namespace Microsoft.ML.OnnxRuntime.ResNet50v2Sample
{
class Program
{
    /// <summary>
    /// ResNet50 v2 sample: loads an image, preprocesses it to the model's
    /// 224x224 normalized input, runs inference, and prints the top 10
    /// predicted classes with their softmax confidences.
    /// Args: [0] model path, [1] input image path.
    /// </summary>
    public static void Main(string[] args)
    {
        // Read paths
        string modelFilePath = args[0];
        string imageFilePath = args[1];

        // Read image
        using Image<Rgb24> image = Image.Load<Rgb24>(imageFilePath);

        // Resize image (center-crop to 224x224, the model's expected input size)
        image.Mutate(x =>
        {
            x.Resize(new ResizeOptions
            {
                Size = new Size(224, 224),
                Mode = ResizeMode.Crop
            });
        });

        // Preprocess image: NCHW float tensor normalized with the ImageNet mean/stddev
        Tensor<float> input = new DenseTensor<float>(new[] { 1, 3, 224, 224 });
        var mean = new[] { 0.485f, 0.456f, 0.406f };
        var stddev = new[] { 0.229f, 0.224f, 0.225f };
        image.ProcessPixelRows(accessor =>
        {
            for (int y = 0; y < accessor.Height; y++)
            {
                Span<Rgb24> pixelSpan = accessor.GetRowSpan(y);
                for (int x = 0; x < accessor.Width; x++)
                {
                    input[0, 0, y, x] = ((pixelSpan[x].R / 255f) - mean[0]) / stddev[0];
                    input[0, 1, y, x] = ((pixelSpan[x].G / 255f) - mean[1]) / stddev[1];
                    input[0, 2, y, x] = ((pixelSpan[x].B / 255f) - mean[2]) / stddev[2];
                }
            }
        });

        // Setup inputs
        var inputs = new List<NamedOnnxValue>
        {
            NamedOnnxValue.CreateFromTensor("data", input)
        };

        // Run inference
        using var session = new InferenceSession(modelFilePath);
        using IDisposableReadOnlyCollection<DisposableNamedOnnxValue> results = session.Run(inputs);

        // Postprocess to get the softmax vector.
        // PERF FIX: the original chained deferred LINQ queries over the session output,
        // re-enumerating it and recomputing Math.Exp per element on every pass
        // (once for the sum, and again per element for the softmax). Materialize the
        // exponentials once; the float arithmetic is unchanged so results are identical.
        float[] output = results.First().AsEnumerable<float>().ToArray();
        float[] expScores = output.Select(x => (float)Math.Exp(x)).ToArray();
        float sum = expScores.Sum();
        IEnumerable<float> softmax = expScores.Select(x => x / sum);

        // Extract top 10 predicted classes
        IEnumerable<Prediction> top10 = softmax.Select((x, i) => new Prediction { Label = LabelMap.Labels[i], Confidence = x })
                                               .OrderByDescending(x => x.Confidence)
                                               .Take(10);

        // Print results to console
        Console.WriteLine("Top 10 predictions for ResNet50 v2...");
        Console.WriteLine("--------------------------------------------------------------");
        foreach (var t in top10)
        {
            Console.WriteLine($"Label: {t.Label}, Confidence: {t.Confidence}");
        }
    }
}
}
Generated Files
\ No newline at end of file
<!-- C#/WinRT projection project: generates C# interop sources for the
     Microsoft.AI.MachineLearning WinRT components via cswinrt.exe. -->
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<ProjectName>Microsoft.AI.MachineLearning.Interop</ProjectName>
<TargetFramework>net5.0-windows10.0.17763.0</TargetFramework>
<AllowUnsafeBlocks>true</AllowUnsafeBlocks>
<Platform>Any CPU</Platform>
<!-- Default to Debug when no configuration is supplied on the command line. -->
<Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration>
<!-- Location of the native onnxruntime build output; overridable from the CLI. -->
<OnnxRuntimeBuildDirectory Condition="'$(OnnxRuntimeBuildDirectory)'==''">..\..\..\build\Windows</OnnxRuntimeBuildDirectory>
<BuildOutputDir>$(OnnxRuntimeBuildDirectory)\$(Configuration)\$(Configuration)</BuildOutputDir>
<WindowsAIInteropOutputDir>$(BuildOutputDir)\Microsoft.AI.MachineLearning.Interop</WindowsAIInteropOutputDir>
<OutputPath>$(WindowsAIInteropOutputDir)</OutputPath>
</PropertyGroup>
<ItemGroup>
<!-- NOTE(review): "targetFramework" (lowercase) is packages.config-style metadata,
     not standard PackageReference metadata — presumably a migration leftover and
     ignored by MSBuild; confirm before removing. -->
<PackageReference Include="Microsoft.Windows.CsWinRT" Version="1.1.0" targetFramework="net5.0-windows10.0.17763.0" />
<PackageReference Include="Microsoft.Windows.SDK.Contracts" Version="10.0.17763.1000" targetFramework="net5.0-windows10.0.17763.0" />
</ItemGroup>
<ItemGroup>
<!-- Output folder for the cswinrt-generated projection sources. -->
<Folder Include="Generated Files\" />
</ItemGroup>
<!-- Runs cswinrt.exe over the WinML .winmd files to (re)generate the C# projection
     before compilation. -->
<Target Name="GenerateProjection" BeforeTargets="DispatchToInnerBuilds;Build;CoreCompile">
<ConvertToAbsolutePath Paths="$(OnnxRuntimeBuildDirectory)">
<Output TaskParameter="AbsolutePaths" PropertyName="OnnxRuntimeBuildDirectoryAbs" />
</ConvertToAbsolutePath>
<ItemGroup>
<WindowsAIsWinMDs Include="$(OnnxRuntimeBuildDirectoryAbs)\$(Configuration)\Microsoft.AI.MachineLearning.winmd" />
<WindowsAIsWinMDs Include="$(OnnxRuntimeBuildDirectoryAbs)\$(Configuration)\Microsoft.AI.MachineLearning.Experimental.winmd" />
</ItemGroup>
<PropertyGroup>
<CsWinRTCommand>$(CsWinRTPath)cswinrt.exe -verbose -in local -in @(WindowsAIsWinMDs->'"%(FullPath)"', ' ') -out "$(ProjectDir)Generated Files" -include Microsoft.AI.MachineLearning</CsWinRTCommand>
</PropertyGroup>
<Message Text="Generating $(ProjectName) CS projection sources with command:" />
<Message Text="$(CsWinRTCommand)" />
<Exec Command="$(CsWinRTCommand)" />
<ItemGroup>
<!-- Pull the freshly generated sources into the compilation, skipping any
     already-included files. -->
<Compile Include="$(ProjectDir)Generated Files/*.cs" Exclude="@(Compile)" />
</ItemGroup>
</Target>
</Project>
\ No newline at end of file
<?xml version="1.0" encoding="utf-8"?>
<!-- NuGet build props for the Microsoft.AI.MachineLearning package: wires up include
     directories, resolves the platform-specific native binaries, and locates the
     package's .winmd metadata files for consuming projects. -->
<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<ItemDefinitionGroup>
<ClCompile>
<!-- Expose the package's native headers to C++ compilation. -->
<AdditionalIncludeDirectories>$(MSBuildThisFileDirectory)../../build/native/include/;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
</ClCompile>
<ResourceCompile>
<AdditionalIncludeDirectories>$(MSBuildThisFileDirectory)../../build/native/include/;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
</ResourceCompile>
</ItemDefinitionGroup>
<ItemGroup>
<PropertyPageSchema Include="$(MSBuildThisFileDirectory)\Microsoft.AI.MachineLearning.Rules.Project.xml"/>
</ItemGroup>
<!-- Map the consuming project's PlatformTarget (including AnyCPU/Prefer32Bit
     combinations) onto the package's x86/x64 runtime folder name. -->
<PropertyGroup>
<WindowsAI-Platform Condition="'$(PlatformTarget)' == 'x64' OR ('$(PlatformTarget)' == 'AnyCPU' AND '$(Prefer32Bit)' != 'true')">x64</WindowsAI-Platform>
<WindowsAI-Platform Condition="'$(PlatformTarget)' == 'x86' OR ('$(PlatformTarget)' == 'AnyCPU' AND '$(Prefer32Bit)' == 'true')">x86</WindowsAI-Platform>
<WindowsAI-Platform Condition="'$(PlatformTarget)' == '' AND '$(Prefer32Bit)' != 'true'">x64</WindowsAI-Platform>
<WindowsAI-Platform Condition="'$(PlatformTarget)' == '' AND '$(Prefer32Bit)' == 'true'">x86</WindowsAI-Platform>
<!-- Fallback: pass any other PlatformTarget value (e.g. arm64) straight through. -->
<WindowsAI-Platform Condition="'$(WindowsAI-Platform)' == ''">$(PlatformTarget)</WindowsAI-Platform>
</PropertyGroup>
<PropertyGroup>
<RuntimesDirectory>$(MSBuildThisFileDirectory)..\..\runtimes\win-$(WindowsAI-Platform)\_native</RuntimesDirectory>
<!-- When the static-runtime flavor is requested, binaries live in a "static" subfolder. -->
<WindowsAIBinarySubfolder Condition="'$(UseWindowsMLStaticRuntime)' == 'true'">static\</WindowsAIBinarySubfolder>
<WindowsAIBinary>$(RuntimesDirectory)\$(WindowsAIBinarySubfolder)microsoft.ai.machinelearning.dll</WindowsAIBinary>
<!-- Normalize the relative paths to absolute full paths. -->
<WindowsAIBinary>$([System.IO.Path]::GetFullPath($(WindowsAIBinary)))</WindowsAIBinary>
<OnnxRuntimeBinary>$(RuntimesDirectory)\$(WindowsAIBinarySubfolder)onnxruntime.dll</OnnxRuntimeBinary>
<OnnxRuntimeBinary>$([System.IO.Path]::GetFullPath($(OnnxRuntimeBinary)))</OnnxRuntimeBinary>
<WindowsAIWinMD>$(MSBuildThisFileDirectory)..\..\winmds\Microsoft.AI.MachineLearning.winmd</WindowsAIWinMD>
<WindowsAIExperimentalWinMD>$(MSBuildThisFileDirectory)..\..\winmds\Microsoft.AI.MachineLearning.Experimental.winmd</WindowsAIExperimentalWinMD>
<WindowsAIWinMD>$([System.IO.Path]::GetFullPath($(WindowsAIWinMD)))</WindowsAIWinMD>
<WindowsAIExperimentalWinMD>$([System.IO.Path]::GetFullPath($(WindowsAIExperimentalWinMD)))</WindowsAIExperimentalWinMD>
</PropertyGroup>
<PropertyGroup Label="Globals">
<!-- Assume apps using the WinML package only want the DirectML binaries (no need for a build dependency). -->
<Microsoft_AI_DirectML_SkipDebugLayerCopy>true</Microsoft_AI_DirectML_SkipDebugLayerCopy>
<Microsoft_AI_DirectML_SkipLink>true</Microsoft_AI_DirectML_SkipLink>
<Microsoft_AI_DirectML_SkipIncludeDir>true</Microsoft_AI_DirectML_SkipIncludeDir>
</PropertyGroup>
</Project>
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment