This article collects typical usage examples of the Python method httpClient.AsyncOpenHttp.make_xunfei_request. If you have been wondering what AsyncOpenHttp.make_xunfei_request does, how to call it, or where to find examples of it in use, the selected code examples below may help. You can also explore further usage examples of its containing class, httpClient.AsyncOpenHttp.
The following shows 1 code example of the AsyncOpenHttp.make_xunfei_request method; examples are sorted by popularity by default.
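Before the full example, here is a minimal sketch of how the method can be called. Only the make_xunfei_request call signature and the cancel() call are taken from Example 1 below; the wrapper class, the AsyncOpenHttp constructor argument, and the callback signature are assumptions made purely for illustration.

from httpClient import AsyncOpenHttp


class XunfeiRecognizer(object):
    # Hypothetical wrapper class, used only to illustrate the call pattern.
    def __init__(self, assistant):
        self.assistant = assistant  # assumed to expose a .language attribute
        # Passing a result callback to the constructor is an assumption about AsyncOpenHttp
        self.httpClient = AsyncOpenHttp(self.handle_result)
        self.current_request = None

    def handle_result(self, body, refId, dictation):
        # Hypothetical callback signature; process the recognition response here
        pass

    def recognize(self, flacBin, refId, dictation):
        # Call signature as seen in Example 1 below
        self.current_request = self.httpClient.make_xunfei_request(
            flacBin, refId, dictation,
            language=self.assistant.language, allowCurses=True)

    def cancel(self):
        # A pending request exposes cancel(), as shown at the end of Example 1
        if self.current_request is not None:
            self.current_request.cancel()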
Example 1: SiriProtocolHandler
# Required import: from httpClient import AsyncOpenHttp [as alias]
# Or: from httpClient.AsyncOpenHttp import make_xunfei_request [as alias]
#......... part of the code is omitted here .........
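        # The branches below dispatch the speech-related plist commands received from the device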
        elif ObjectIsCommand(plist, SpeechPacket):
            self.logger.debug("Decoding speech packet")
            speechPacket = SpeechPacket(plist)
            if speechPacket.refId in self.speech:
                (decoder, encoder, dictation) = self.speech[speechPacket.refId]
                if decoder:
                    pcm = decoder.decode(speechPacket.packets)
                else:
                    pcm = speechPacket.data  # assumed to already be raw PCM when no decoder is attached
                encoder.encode(pcm)
            else:
                self.logger.debug("Got a speech packet that did not match any current request")
        elif plist['class'] == 'StartCorrectedSpeechRequest':
            self.process_recognized_speech({u'hypotheses': [{'confidence': 1.0, 'utterance': plist['properties']['utterance']}]}, plist['aceId'], False)
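        # FinishSpeech marks the end of the audio stream: flush the encoder and send the FLAC audio for recognition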
        elif ObjectIsCommand(plist, FinishSpeech):
            self.logger.debug("End of speech received")
            finishSpeech = FinishSpeech(plist)
            if finishSpeech.refId in self.speech:
                (decoder, encoder, dictation) = self.speech[finishSpeech.refId]
                if decoder:
                    decoder.destroy()
                flacBin = None
                if encoder:
                    encoder.finish()
                    flacBin = encoder.getBinary()
                    encoder.destroy()
                del self.speech[finishSpeech.refId]
                if flacBin != None:
                    self.logger.info("Sending flac to google for recognition")
                    try:
                        self.current_google_request = self.httpClient.make_xunfei_request(flacBin, finishSpeech.refId, dictation, language=self.assistant.language, allowCurses=True)
                        #self.current_google_request = self.httpClient.make_google_request(flacBin, finishSpeech.refId, dictation, language=self.assistant.language, allowCurses=True)
                    except (AttributeError, TypeError):
                        self.logger.warning("Unable to find language record for this assistant. Try turning Siri off and then back on.")
                else:
                    self.logger.info("There was no speech")
            else:
                self.logger.debug("Got a finish speech packet that did not match any current request")
        elif ObjectIsCommand(plist, CancelRequest):
            # this is probably called when we need to kill a plugin
            # wait for thread to finish a send
            self.logger.debug("Should cancel current request")
            cancelRequest = CancelRequest(plist)
            if cancelRequest.refId in self.speech:
                (decoder, encoder, dictation) = self.speech[cancelRequest.refId]
                if decoder:
                    decoder.destroy()
                if encoder:
                    encoder.finish()
                    encoder.destroy()
                del self.speech[cancelRequest.refId]
            if self.current_google_request != None:
                self.current_google_request.cancel()
            # if a google request is running (follow-up listening...; the plugin might get killed there by the user)
            if self.current_running_plugin != None:
                if self.current_running_plugin.waitForResponse != None:
                    self.current_running_plugin._abortPluginRun()
                    self.current_running_plugin.waitForResponse.set()
            # if a plugin is running (processing, but not waiting for data from the device), we kill it
            if self.current_running_plugin != None:
                if self.current_running_plugin.waitForResponse == None: