Grammar Alert Class - Part 2

Now we put the grammar file to use. In this article you will see the encapsulated commands I have used to isolate voice activation and recognition.

<p>Grammar Alert Class - Part 2</p>

This class will alert any application about changes to the voice commands. The class uses two events as its main triggers: one that signals that speech has been received, and one that delivers the parsed command text so the calling application can dispatch it through switch-case statements.

 

using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Windows.ApplicationModel;
using Windows.Media.SpeechRecognition;
using Windows.Storage;

namespace PCIT.Universal.Media.Speech
{
    
    /// <summary>
    /// Raises speech-recognition events from a continuous SRGS-grammar recognition
    /// session so a host application can react to voice commands.
    /// Subscribers attach to <see cref="SpeechAlert"/> (recognizer state changes and
    /// semantic "command" values) and <see cref="CommandReceived"/> (recognized text).
    /// </summary>
    public class GrammarAlert
    {
        // Package-relative path of the SRGS grammar file compiled into the recognizer.
        private const string SRGS_FILE = "Grammar\\AttentionCommand.xml";

        SpeechRecognizer recognizer;

        /// <summary>
        /// Forwards recognized text to <see cref="CommandReceived"/> on the UI
        /// dispatcher, then restarts the continuous session.
        /// NOTE(review): nothing in this file subscribes this handler — confirm
        /// whether it is dead code or wired up by a caller elsewhere.
        /// </summary>
        private async void Command_ResultGenerated(SpeechContinuousRecognitionSession sender, SpeechContinuousRecognitionResultGeneratedEventArgs args)
        {
            // Only act on results the engine is reasonably confident about.
            if ((args.Result.Confidence == SpeechRecognitionConfidence.Medium) || (args.Result.Confidence == SpeechRecognitionConfidence.High))
            {
                // Marshal onto the UI dispatcher so subscribers may safely touch UI state.
                await Windows.ApplicationModel.Core.CoreApplication.MainView.CoreWindow.Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, async () =>
                {
                    // Capture once to avoid a race between the null check and the invoke.
                    var handler = CommandReceived;
                    if (handler != null)
                        await handler(args.Result.Text); // Text is already a string; ToString() was redundant.
                });
                await recognizer.ContinuousRecognitionSession.StartAsync();
            }
        }

        /// <summary>
        /// Creates the recognizer, loads and compiles the SRGS grammar from the app
        /// package, and starts continuous recognition when compilation succeeds.
        /// </summary>
        public async Task InitSpeech()
        {
            recognizer = new SpeechRecognizer();
            recognizer.ContinuousRecognitionSession.ResultGenerated += ContinuousRecognitionSession_ResultGenerated;

            // BUGFIX: String.Format(SRGS_FILE) was a no-op (the constant has no
            // format placeholders); use the constant directly.
            StorageFile grammarContentFile = await Package.Current.InstalledLocation.GetFileAsync(SRGS_FILE);
            SpeechRecognitionGrammarFileConstraint grammarConstraint = new SpeechRecognitionGrammarFileConstraint(grammarContentFile);
            recognizer.Constraints.Add(grammarConstraint);

            // Effectively disable the auto-stop-on-silence behavior so the session
            // keeps listening indefinitely.
            recognizer.ContinuousRecognitionSession.AutoStopSilenceTimeout = TimeSpan.FromDays(30);

            SpeechRecognitionCompilationResult compilationResult = await recognizer.CompileConstraintsAsync();
            if (compilationResult.Status == SpeechRecognitionResultStatus.Success)
            {
                await recognizer.ContinuousRecognitionSession.StartAsync();
            }
            // NOTE(review): a failed compilation is silently ignored here — consider
            // surfacing compilationResult.Status to the caller.
        }

        /// <summary>Relays text from an inner command recognizer to <see cref="CommandReceived"/>.</summary>
        private async Task CommandRecognizer_CommandReceived(string text)
        {
            var handler = CommandReceived;
            if (handler != null)
                await handler(text);
        }

        /// <summary>
        /// Handles results from the continuous session: raises <see cref="SpeechAlert"/>
        /// on the UI dispatcher with the SRGS semantic value keyed "command".
        /// </summary>
        private async void ContinuousRecognitionSession_ResultGenerated(SpeechContinuousRecognitionSession sender, SpeechContinuousRecognitionResultGeneratedEventArgs args)
        {
            if ((args.Result.Confidence == SpeechRecognitionConfidence.High) || (args.Result.Confidence == SpeechRecognitionConfidence.Medium))
            {
                await Windows.ApplicationModel.Core.CoreApplication.MainView.CoreWindow.Dispatcher.RunAsync(Windows.UI.Core.CoreDispatcherPriority.Normal, async () =>
                {
                    var handler = SpeechAlert;
                    if (handler != null)
                        // "command" is the semantic key defined in the SRGS grammar file.
                        await handler(args.Result.SemanticInterpretation.Properties["command"].First().ToString());
                });
            }
        }

        /// <summary>
        /// Raises <see cref="SpeechAlert"/> with the recognizer's new state name.
        /// BUGFIX: the original invoked SpeechAlert with no null check, throwing
        /// NullReferenceException whenever no subscriber was attached.
        /// </summary>
        private void Recognizer_StateChanged(SpeechRecognizer sender, SpeechRecognizerStateChangedEventArgs args)
        {
            // Fire-and-forget is intentional: the original also discarded the Task.
            _ = SpeechAlert?.Invoke(args.State.ToString());
        }

        /// <summary>Raised with recognizer state names and semantic command values.</summary>
        public event SpeechEvnetHandler SpeechAlert;

        /// <summary>Raised with recognized command text.</summary>
        public event SpeechEvnetHandler CommandReceived;

    }
    /// <summary>
    /// Asynchronous handler for speech/command notifications carrying the recognized
    /// or state text. NOTE(review): "Evnet" is a typo for "Event", but renaming this
    /// public type would break existing subscribers, so the name is preserved.
    /// </summary>
    public delegate Task SpeechEvnetHandler(string text);
    
}
Last Updated: 7/24/2016 12:00:00 AM