本文整理汇总了C++中AudioNodeStream::SetBuffer方法的典型用法代码示例。如果您正苦于以下问题：C++ AudioNodeStream::SetBuffer方法的具体用法？C++ AudioNodeStream::SetBuffer怎么用？C++ AudioNodeStream::SetBuffer使用的例子？那么，这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类AudioNodeStream的用法示例。
在下文中一共展示了AudioNodeStream::SetBuffer方法的3个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C++代码示例。
示例1: ThreadSharedFloatArrayBufferList
// Sets the convolution impulse-response buffer and forwards it (plus its
// length and sample rate) to this node's AudioNodeStream engine.
// Throws NS_ERROR_DOM_SYNTAX_ERR for unsupported channel counts; a null
// aBuffer clears the stream's buffer.
void
ConvolverNode::SetBuffer(JSContext* aCx, AudioBuffer* aBuffer, ErrorResult& aRv)
{
if (aBuffer) {
switch (aBuffer->NumberOfChannels()) {
case 1:
case 2:
case 4:
// Supported number of channels
break;
default:
aRv.Throw(NS_ERROR_DOM_SYNTAX_ERR);
return;
}
}
mBuffer = aBuffer;
// Send the buffer to the stream
AudioNodeStream* ns = static_cast<AudioNodeStream*>(mStream.get());
MOZ_ASSERT(ns, "Why don't we have a stream here?");
if (mBuffer) {
uint32_t length = mBuffer->Length();
nsRefPtr<ThreadSharedFloatArrayBufferList> data =
mBuffer->GetThreadSharedChannelsForRate(aCx);
if (data && length < WEBAUDIO_BLOCK_SIZE) {
// For very small impulse response buffers, we need to pad the
// buffer with 0 to make sure that the Reverb implementation
// has enough data to compute FFTs from.
length = WEBAUDIO_BLOCK_SIZE;
// One flat allocation holds all channels, laid out contiguously:
// channel i starts at channelData + length * i.
// NOTE(review): malloc result is not null-checked before PodCopy writes
// to it — confirm whether infallible allocation is assumed here.
nsRefPtr<ThreadSharedFloatArrayBufferList> paddedBuffer =
new ThreadSharedFloatArrayBufferList(data->GetChannels());
float* channelData = (float*) malloc(sizeof(float) * length * data->GetChannels());
for (uint32_t i = 0; i < data->GetChannels(); ++i) {
// Copy the real samples, then zero-fill the tail up to a full block.
PodCopy(channelData + length * i, data->GetData(i), mBuffer->Length());
PodZero(channelData + length * i + mBuffer->Length(), WEBAUDIO_BLOCK_SIZE - mBuffer->Length());
// Only channel 0 owns the allocation (gets the free() deleter); the
// other channels alias into the same block with a null data-to-free.
paddedBuffer->SetData(i, (i == 0) ? channelData : nullptr, free, channelData);
}
data = paddedBuffer;
}
// Order matters: the engine must learn the (possibly padded) length and
// sample rate before it receives the buffer itself.
SendInt32ParameterToStream(ConvolverNodeEngine::BUFFER_LENGTH, length);
SendDoubleParameterToStream(ConvolverNodeEngine::SAMPLE_RATE,
mBuffer->SampleRate());
ns->SetBuffer(data.forget());
} else {
// Null buffer: drop the engine's current impulse response.
ns->SetBuffer(nullptr);
}
}
示例2: SendPeriodicWaveToStream
void OscillatorNode::SendPeriodicWaveToStream()
{
NS_ASSERTION(mType == OscillatorType::Custom,
"Sending custom waveform to engine thread with non-custom type");
AudioNodeStream* ns = static_cast<AudioNodeStream*>(mStream.get());
MOZ_ASSERT(ns, "Missing node stream.");
MOZ_ASSERT(mPeriodicWave, "Send called without PeriodicWave object.");
SendInt32ParameterToStream(OscillatorNodeEngine::PERIODICWAVE,
mPeriodicWave->DataLength());
nsRefPtr<ThreadSharedFloatArrayBufferList> data =
mPeriodicWave->GetThreadSharedBuffer();
ns->SetBuffer(data.forget());
}
示例3: SendOffsetAndDurationParametersToStream
// Forwards the current source buffer (or its absence) to the node stream.
// With a buffer set, the thread-shared channel data is sent and, if start()
// was already called, the offset/duration parameters follow. Without one,
// the engine's buffer-end is reset, the buffer cleared, and the node marked
// inactive. No stream means there is nothing to update.
void
AudioBufferSourceNode::SendBufferParameterToStream(JSContext* aCx)
{
AudioNodeStream* stream = mStream;
if (!stream) {
return;
}
if (!mBuffer) {
// No buffer: reset engine state and deactivate this node.
stream->SetInt32Parameter(BUFFEREND, 0);
stream->SetBuffer(nullptr);
MarkInactive();
return;
}
RefPtr<ThreadSharedFloatArrayBufferList> channels =
mBuffer->GetThreadSharedChannelsForRate(aCx);
stream->SetBuffer(channels.forget());
if (mStartCalled) {
// start() already ran, so replay its offset/duration to the engine.
SendOffsetAndDurationParametersToStream(stream);
}
}