This article collects typical usage examples of the Java class com.google.assistant.embedded.v1alpha1.ConverseConfig. If you are wondering how to use ConverseConfig in Java, or are looking for concrete examples of it in working code, the curated examples below should help.
The ConverseConfig class belongs to the com.google.assistant.embedded.v1alpha1 package. Four code examples of the class are shown below, sorted by popularity by default.
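To put the examples in context: a ConverseConfig never travels on its own. It is the payload of the first ConverseRequest written to the bidirectional Converse stream obtained from the generated EmbeddedAssistantGrpc stub, and every later request on that stream carries raw microphone audio. The sketch below shows that surrounding flow; it is not taken from the examples that follow, the class name and 16 kHz audio values are illustrative, and the OAuth2 call credentials that a real client must attach to the channel are omitted.

import com.google.assistant.embedded.v1alpha1.AudioInConfig;
import com.google.assistant.embedded.v1alpha1.AudioOutConfig;
import com.google.assistant.embedded.v1alpha1.ConverseConfig;
import com.google.assistant.embedded.v1alpha1.ConverseRequest;
import com.google.assistant.embedded.v1alpha1.ConverseResponse;
import com.google.assistant.embedded.v1alpha1.EmbeddedAssistantGrpc;
import io.grpc.ManagedChannel;
import io.grpc.ManagedChannelBuilder;
import io.grpc.stub.StreamObserver;

public class ConverseConfigOverview {
    public static void main(String[] args) {
        // Channel to the Assistant gRPC endpoint. A real client must also attach
        // OAuth2 call credentials; that setup is left out of this sketch.
        ManagedChannel channel =
                ManagedChannelBuilder.forTarget("embeddedassistant.googleapis.com").build();
        EmbeddedAssistantGrpc.EmbeddedAssistantStub stub = EmbeddedAssistantGrpc.newStub(channel);

        // Minimal response observer; real code would play audio_out and read the result.
        StreamObserver<ConverseResponse> responseObserver = new StreamObserver<ConverseResponse>() {
            @Override public void onNext(ConverseResponse response) { }
            @Override public void onError(Throwable t) { }
            @Override public void onCompleted() { }
        };

        // The first request on the stream carries only the ConverseConfig;
        // every following request carries a chunk of microphone audio via setAudioIn().
        StreamObserver<ConverseRequest> requestObserver = stub.converse(responseObserver);
        requestObserver.onNext(ConverseRequest.newBuilder()
                .setConfig(ConverseConfig.newBuilder()
                        .setAudioInConfig(AudioInConfig.newBuilder()
                                .setEncoding(AudioInConfig.Encoding.LINEAR16)
                                .setSampleRateHertz(16000)
                                .build())
                        .setAudioOutConfig(AudioOutConfig.newBuilder()
                                .setEncoding(AudioOutConfig.Encoding.LINEAR16)
                                .setSampleRateHertz(16000)
                                .setVolumePercentage(100)
                                .build())
                        .build())
                .build());
    }
}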
Example 1: run
import com.google.assistant.embedded.v1alpha1.ConverseConfig; // import the required package/class
@Override
public void run() {
    Log.i(TAG, "starting assistant request");
    mAudioRecord.startRecording();
    // Open the bidirectional Converse stream to the Assistant service.
    mAssistantRequestObserver = mAssistantService.converse(mAssistantResponseObserver);
    // Describe the microphone input and the expected synthesized-speech output.
    ConverseConfig.Builder converseConfigBuilder = ConverseConfig.newBuilder()
            .setAudioInConfig(AudioInConfig.newBuilder()
                    .setEncoding(ENCODING_INPUT)
                    .setSampleRateHertz(SAMPLE_RATE)
                    .build())
            .setAudioOutConfig(AudioOutConfig.newBuilder()
                    .setEncoding(ENCODING_OUTPUT)
                    .setSampleRateHertz(SAMPLE_RATE)
                    .setVolumePercentage(mVolumePercentage)
                    .build());
    // Resume the previous dialog turn if conversation state has been saved.
    if (mConversationState != null) {
        converseConfigBuilder.setConverseState(ConverseState.newBuilder()
                .setConversationState(mConversationState)
                .build());
    }
    // The first request on the stream carries only the config; audio follows.
    mAssistantRequestObserver.onNext(
            ConverseRequest.newBuilder()
                    .setConfig(converseConfigBuilder.build())
                    .build());
    mAssistantHandler.post(mStreamAssistantRequest);
}
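Example 1 only reads mConversationState; the code that fills it in is not part of the excerpt. In the v1alpha1 API the opaque state bytes for the next turn come back in the ConverseResult on the response stream, so a response observer along the following lines would typically capture them. This is a sketch that reuses the field names from Example 1, not code from the original source.

import android.util.Log;
import com.google.assistant.embedded.v1alpha1.ConverseResponse;
import com.google.protobuf.ByteString;
import io.grpc.stub.StreamObserver;

// Sketch: store the opaque conversation state returned by the Assistant so the
// next ConverseConfig can resume the same dialog.
StreamObserver<ConverseResponse> mAssistantResponseObserver =
        new StreamObserver<ConverseResponse>() {
            @Override
            public void onNext(ConverseResponse response) {
                if (response.hasResult()) {
                    ByteString state = response.getResult().getConversationState();
                    if (!state.isEmpty()) {
                        mConversationState = state;
                    }
                }
            }

            @Override
            public void onError(Throwable t) {
                Log.e(TAG, "converse error", t);
            }

            @Override
            public void onCompleted() {
                Log.i(TAG, "assistant response stream finished");
            }
        };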
Example 2: run
import com.google.assistant.embedded.v1alpha1.ConverseConfig; // import the required package/class
@Override
public void run() {
    Log.i(TAG, "starting assistant request");
    mAudioRecord.startRecording();
    mAssistantRequestObserver = mAssistantService.converse(mAssistantResponseObserver);
    // Reuse predefined audio configurations instead of building them inline.
    ConverseConfig.Builder converseConfigBuilder = ConverseConfig.newBuilder()
            .setAudioInConfig(ASSISTANT_AUDIO_REQUEST_CONFIG)
            .setAudioOutConfig(ASSISTANT_AUDIO_RESPONSE_CONFIG);
    mAssistantRequestObserver.onNext(
            ConverseRequest.newBuilder()
                    .setConfig(converseConfigBuilder.build())
                    .build());
    mAssistantHandler.post(mStreamAssistantRequest);
}
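Example 2 relies on two constants, ASSISTANT_AUDIO_REQUEST_CONFIG and ASSISTANT_AUDIO_RESPONSE_CONFIG, whose definitions are not included in the excerpt. A plausible definition, assuming 16 kHz LINEAR16 audio in both directions and full playback volume (values chosen for illustration, not taken from the original class), would be:

import com.google.assistant.embedded.v1alpha1.AudioInConfig;
import com.google.assistant.embedded.v1alpha1.AudioOutConfig;

// Assumed values: 16 kHz, 16-bit linear PCM for both the microphone input and
// the synthesized speech output, played back at full volume.
private static final int SAMPLE_RATE_HZ = 16000;

private static final AudioInConfig ASSISTANT_AUDIO_REQUEST_CONFIG =
        AudioInConfig.newBuilder()
                .setEncoding(AudioInConfig.Encoding.LINEAR16)
                .setSampleRateHertz(SAMPLE_RATE_HZ)
                .build();

private static final AudioOutConfig ASSISTANT_AUDIO_RESPONSE_CONFIG =
        AudioOutConfig.newBuilder()
                .setEncoding(AudioOutConfig.Encoding.LINEAR16)
                .setSampleRateHertz(SAMPLE_RATE_HZ)
                .setVolumePercentage(100)
                .build();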
Example 3: startConversation
import com.google.assistant.embedded.v1alpha1.ConverseConfig; // import the required package/class
/**
 * Starts a request to the Assistant.
 */
public void startConversation() {
    mAudioRecord.startRecording();
    // Notify the caller on its own handler that the request is starting.
    mRequestHandler.post(new Runnable() {
        @Override
        public void run() {
            mRequestCallback.onRequestStart();
        }
    });
    mAssistantHandler.post(new Runnable() {
        @Override
        public void run() {
            mAssistantRequestObserver = mAssistantService.converse(mAssistantResponseObserver);
            ConverseConfig.Builder converseConfigBuilder = ConverseConfig.newBuilder()
                    .setAudioInConfig(mAudioInConfig)
                    .setAudioOutConfig(mAudioOutConfig);
            // Resume the previous dialog turn if conversation state has been saved.
            if (mConversationState != null) {
                converseConfigBuilder.setConverseState(ConverseState.newBuilder()
                        .setConversationState(mConversationState)
                        .build());
            }
            mAssistantRequestObserver.onNext(
                    ConverseRequest.newBuilder()
                            .setConfig(converseConfigBuilder.build())
                            .build());
        }
    });
    // Start streaming microphone audio once the config has been sent.
    mAssistantHandler.post(mStreamAssistantRequest);
}
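Examples 1 and 3 both finish by posting mStreamAssistantRequest, which is where the microphone audio actually reaches the Assistant once the ConverseConfig has been sent. Its body is not shown in the excerpts; the sketch below shows what such a runnable typically does, where SAMPLE_BLOCK_SIZE and the surrounding fields are assumptions reusing the names from the examples.

import android.media.AudioRecord;
import android.util.Log;
import com.google.assistant.embedded.v1alpha1.ConverseRequest;
import com.google.protobuf.ByteString;
import java.nio.ByteBuffer;

// Sketch: read one block of PCM audio from the AudioRecord and send it on the
// open request stream as the audio_in payload of a ConverseRequest, then
// repost itself so audio keeps flowing until the conversation is stopped.
private final Runnable mStreamAssistantRequest = new Runnable() {
    @Override
    public void run() {
        ByteBuffer audioData = ByteBuffer.allocateDirect(SAMPLE_BLOCK_SIZE);
        int result = mAudioRecord.read(audioData, audioData.capacity(),
                AudioRecord.READ_BLOCKING);
        if (result < 0) {
            Log.e(TAG, "error reading from audio stream: " + result);
            return;
        }
        mAssistantRequestObserver.onNext(ConverseRequest.newBuilder()
                .setAudioIn(ByteString.copyFrom(audioData))
                .build());
        mAssistantHandler.post(mStreamAssistantRequest);
    }
};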
Example 4: startRecognizing
import com.google.assistant.embedded.v1alpha1.ConverseConfig; // import the required package/class
public void startRecognizing(int sampleRate) {
    if (mApi == null) {
        Log.w(TAG, "API not ready. Ignoring the request.");
        return;
    }
    for (Listener listener : mListeners) {
        listener.onRequestStart();
    }
    // Configure the API
    mRequestObserver = mApi.converse(mResponseObserver);
    ConverseConfig.Builder converseConfigBuilder = ConverseConfig.newBuilder()
            .setAudioInConfig(AudioInConfig.newBuilder()
                    .setEncoding(AudioInConfig.Encoding.LINEAR16)
                    .setSampleRateHertz(sampleRate)
                    .build())
            .setAudioOutConfig(AudioOutConfig.newBuilder()
                    .setEncoding(AudioOutConfig.Encoding.LINEAR16)
                    .setSampleRateHertz(sampleRate)
                    .setVolumePercentage(DEFAULT_VOLUME)
                    .build());
    if (vConversationState != null) {
        converseConfigBuilder.setConverseState(
                ConverseState.newBuilder()
                        .setConversationState(vConversationState)
                        .build());
    }
    mRequestObserver.onNext(ConverseRequest.newBuilder()
            .setConfig(converseConfigBuilder.build())
            .build());
}
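startRecognizing only opens the stream and sends the ConverseConfig; for the Assistant to finish recognizing the utterance, the request stream eventually has to be half-closed. A companion method along these lines (the method name is illustrative, not from the original class) would do that; the conversation state for the next call would be refreshed by a response observer like the one sketched after Example 1.

// Sketch: stop sending audio and half-close the request stream so the
// Assistant can complete recognition and stream back its response.
public void finishRecognizing() {
    if (mRequestObserver == null) {
        return;
    }
    mRequestObserver.onCompleted();
    mRequestObserver = null;
}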