本文整理汇总了C#中GrammarBuilder.AppendRuleReference方法的典型用法代码示例。如果您正苦于以下问题:C# GrammarBuilder.AppendRuleReference方法的具体用法?C# GrammarBuilder.AppendRuleReference怎么用?C# GrammarBuilder.AppendRuleReference使用的例子?那么, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类GrammarBuilder
的用法示例。
在下文中一共展示了GrammarBuilder.AppendRuleReference方法的2个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C#代码示例。
示例1: KinectStart
/// <summary>
/// Initializes the Kinect speech recognition engine, loading either the
/// grammar XML file referenced by <c>pathToXML</c> or the in-memory
/// <c>dictionary</c> of choices, then attaches the sensor's audio stream.
/// Sets <c>kinnectStatus</c> to true on success.
/// </summary>
/// <exception cref="NullReferenceException">
/// Thrown when neither <c>pathToXML</c> nor <c>dictionary</c> is set, so no
/// grammar source is available.
/// </exception>
protected void KinectStart()
{
    RecognizerInfo ri = GetKinectRecognizer();
    if (null != ri)
    {
        // Populate the speech engine with keywords we are interested in.
        this.speechEngine = new SpeechRecognitionEngine(ri.Id);
        var gb = new GrammarBuilder { Culture = ri.Culture };

        if (pathToXML != null)
        {
            // Build the path only when it is actually needed; Path.Combine
            // handles the separator instead of manual "\\" concatenation.
            string path = Path.Combine(Directory.GetCurrentDirectory(), pathToXML);
            gb.AppendRuleReference(path);
        }
        else if (dictionary != null)
        {
            gb.Append(dictionary);
        }
        else
        {
            // NOTE(review): explicitly throwing NullReferenceException is an
            // anti-pattern; InvalidOperationException would be more accurate.
            // Type kept for compatibility with existing callers; a message is
            // added so the failure is diagnosable.
            throw new NullReferenceException(
                "KinectStart requires either pathToXML or dictionary to be set before it is called.");
        }

        var g = new Grammar(gb);
        speechEngine.LoadGrammar(g);

        // For long recognition sessions (a few hours or more), it may be
        // beneficial to turn off adaptation of the acoustic model. This
        // prevents recognition accuracy from degrading over time.
        speechEngine.UpdateRecognizerSetting("AdaptationOn", 0);

        this.audioStream = sensor.AudioSource.Start();
        speechEngine.SetInputToAudioStream(
            this.audioStream,
            new SpeechAudioFormatInfo(EncodingFormat.Pcm, 16000, 16, 1, 32000, 2, null));
        //speechEngine.RecognizeAsync(RecognizeMode.Multiple);
        kinnectStatus = true;
    }
    else
    {
        // No Kinect speech recognizer was found; leave kinnectStatus unset.
    }
}
示例2: SpeechRecognizer
/// <summary>
/// Creates a speech recognizer bound to the given transcript session:
/// resolves the recognition locale from configuration, builds the
/// recognition engine (falling back to the default locale on failure),
/// wires engine events, assembles the grammar set, and starts loading
/// grammars asynchronously.
/// </summary>
/// <param name="transcriptRecorder">Session that receives transcription results.</param>
public SpeechRecognizer(TranscriptRecorderSession transcriptRecorder)
{
    _transcriptRecorder = transcriptRecorder;
    _speechTranscript = new List<Microsoft.Speech.Recognition.RecognitionResult>();
    _isActive = false;
    _isRecognizing = false;

    // Connector that will feed call audio into the recognition engine.
    _speechRecognitionConnector = new SpeechRecognitionConnector();

    // Resolve the locale from configuration; fall back when unspecified.
    _currentSRLocale = ConfigurationManager.AppSettings[SpeechRecogLocaleKey];
    if (String.IsNullOrEmpty(_currentSRLocale))
    {
        NonBlockingConsole.WriteLine("No locale specified, using default locale for speech recognition: " + DefaultLocale);
        _currentSRLocale = DefaultLocale;
    }

    // Build the recognition engine. If construction fails, log the problem
    // and retry with the default locale.
    try
    {
        _speechRecognitionEngine = new Microsoft.Speech.Recognition.SpeechRecognitionEngine();
    }
    catch (Exception e)
    {
        NonBlockingConsole.WriteLine("Error: Unable to load SpeechRecognition locale: " + _currentSRLocale + ". Exception: " + e.ToString());
        // Use default locale
        NonBlockingConsole.WriteLine("Falling back to default locale for SpeechRecognitionEngine: " + DefaultLocale);
        _currentSRLocale = DefaultLocale;
        _speechRecognitionEngine = new SpeechRecognitionEngine();
    }

    // Subscribe to engine lifecycle events (method-group syntax).
    _speechRecognitionEngine.SpeechDetected += SpeechRecognitionEngine_SpeechDetected;
    _speechRecognitionEngine.RecognizeCompleted += SpeechRecognitionEngine_RecognizeCompleted;
    _speechRecognitionEngine.LoadGrammarCompleted += SpeechRecognitionEngine_LoadGrammarCompleted;

    _grammars = new List<Microsoft.Speech.Recognition.Grammar>();

    // TODO: Add default installed speech recognizer grammar
    // (might already be handled via compiling with a Recognition Settings File).

    // Add the default locale language grammar file when present on disk.
    String localeGrammarPath = Path.Combine(Environment.CurrentDirectory, @"en-US.cfgpp");
    if (File.Exists(localeGrammarPath))
    {
        NonBlockingConsole.WriteLine("SpeechRecognizer(). Adding locale language file at path: " + localeGrammarPath);
        var localeBuilder = new GrammarBuilder();
        localeBuilder.AppendRuleReference(localeGrammarPath);
        var localeLanguageGrammar = new Grammar(localeBuilder) { Name = "Local language grammar" };
        _grammars.Add(localeLanguageGrammar);
    }

    // Basic keyword grammar built from a fixed list of recognized phrases.
    string[] keywords =
    {
        "hello", "bye", "yes", "no", "help", "zero", "one", "two", "three",
        "four", "five", "six", "seven", "eight", "nine", "ten", "exit"
    };
    var keywordChoices = new Choices(keywords);
    var basicGrammar = new Grammar(new GrammarBuilder(keywordChoices)) { Name = "Basic Grammar" };
    _grammars.Add(basicGrammar);

    LoadSpeechGrammarAsync();
}