MainPage.xaml.cs 3.33 KB
Newer Older
gaoqiong's avatar
gaoqiong committed
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
namespace Microsoft.ML.OnnxRuntime.InferenceSample.Maui;
//using Microsoft.Maui.Controls;

public partial class MainPage : ContentPage
{
	public MainPage()
	{
		InitializeComponent();

        // Best Practice: create the inference session (which loads and optimizes the model) once and not per inference
        // as it can be expensive and time consuming.
        inferenceSampleApi = new InferenceSampleApi();
    }

    protected override void OnAppearing()
    {
        base.OnAppearing();

        // Reset the output each time the page becomes visible.
        OutputLabel.Text = "Press 'Run Tests'.\n";
    }

    private readonly InferenceSampleApi inferenceSampleApi;

    /// <summary>
    /// Runs the inference demo scenarios on a background thread, appending progress
    /// to <c>OutputLabel</c> (via the UI dispatcher) and to the console.
    /// </summary>
    private async Task ExecuteTests()
    {
        // Appends text to the given label on the UI thread and mirrors it to the console.
        Action<Label, string> addOutput = (label, text) =>
        {
            Application.Current.Dispatcher.Dispatch(() => { label.Text += text; });
            Console.Write(text);
        };

        OutputLabel.Text = "Testing execution\nComplete output is written to Console in this trivial example.\n\n";

        // run the testing in a background thread so updates to the UI aren't blocked.
        // The async lambda lets us use Task.Delay instead of Thread.Sleep, so the
        // artificial delays don't block a thread-pool thread.
        await Task.Run(async () =>
        {
            addOutput(OutputLabel, "Testing using default platform-specific session options... ");
            inferenceSampleApi.Execute();
            addOutput(OutputLabel, "done.\n");
            await Task.Delay(1000); // artificial delay so the UI updates gradually

            // demonstrate a range of usages by recreating the inference session with different session options.
            addOutput(OutputLabel, "Testing using default platform-specific session options... ");
            inferenceSampleApi.CreateInferenceSession(SessionOptionsContainer.Create());
            inferenceSampleApi.Execute();
            addOutput(OutputLabel, "done.\n");
            await Task.Delay(1000);

            addOutput(OutputLabel, "Testing using named platform-specific session options... ");
            inferenceSampleApi.CreateInferenceSession(SessionOptionsContainer.Create("ort_with_npu"));
            inferenceSampleApi.Execute();
            addOutput(OutputLabel, "done.\n");
            await Task.Delay(1000);

            addOutput(OutputLabel, "Testing using default platform-specific session options via ApplyConfiguration extension... ");
            inferenceSampleApi.CreateInferenceSession(new SessionOptions().ApplyConfiguration());
            inferenceSampleApi.Execute();
            addOutput(OutputLabel, "done.\n");
            await Task.Delay(1000);

            addOutput(OutputLabel, "Testing using named platform-specific session options via ApplyConfiguration extension... ");
            inferenceSampleApi.CreateInferenceSession(new SessionOptions().ApplyConfiguration("ort_with_npu"));
            inferenceSampleApi.Execute();
            addOutput(OutputLabel, "done.\n\n");
            await Task.Delay(1000);
        });

        addOutput(OutputLabel, "Testing successfully completed! See the Console log for more info.");
    }

    // async void is acceptable here only because this is a top-level UI event handler.
    private async void Start_Clicked(object sender, EventArgs e)
    {
        try
        {
            await ExecuteTests();
        }
        catch (Exception ex)
        {
            // await resumes on the UI thread here (no ConfigureAwait(false)), so
            // DisplayAlert can be called directly. Using the caught exception's own
            // Message avoids the unhelpful AggregateException wrapper text that
            // task.Exception.Message would have produced.
            await DisplayAlert("Error", ex.Message, "OK");
        }
    }
}