本文整理汇总了C++中TrackInfo::GetAsAudioInfo方法的典型用法代码示例。如果您正苦于以下问题:C++ TrackInfo::GetAsAudioInfo方法的具体用法?C++ TrackInfo::GetAsAudioInfo怎么用?C++ TrackInfo::GetAsAudioInfo使用的例子?那么, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类TrackInfo
的用法示例。
在下文中一共展示了TrackInfo::GetAsAudioInfo方法的4个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C++代码示例。
示例1: SupportsMimeType
// Creates a MediaDataDecoder for the given track configuration.
// Audio: falls back to the in-tree Vorbis/Opus software decoders when the
// platform reports no native support for the MIME type; otherwise defers to
// CreateAudioDecoder(). Video: wraps H.264 in an H264Converter, uses the
// software VPXDecoder when the platform lacks native VP8/VP9 support, and
// defers to CreateVideoDecoder() for everything else.
// Returns nullptr when the config is neither audio nor video.
already_AddRefed<MediaDataDecoder>
PlatformDecoderModule::CreateDecoder(const TrackInfo& aConfig,
FlushableTaskQueue* aTaskQueue,
MediaDataDecoderCallback* aCallback,
layers::LayersBackend aLayersBackend,
layers::ImageContainer* aImageContainer)
{
  // Does this platform module claim native support for the MIME type?
  const bool platformSupported = SupportsMimeType(aConfig.mMimeType);

  nsRefPtr<MediaDataDecoder> decoder;

  if (const auto* audioInfo = aConfig.GetAsAudioInfo()) {
    // Prefer the bundled software decoders for Vorbis/Opus when the
    // platform has no native decoder of its own.
    if (!platformSupported && VorbisDataDecoder::IsVorbis(aConfig.mMimeType)) {
      decoder = new VorbisDataDecoder(*audioInfo, aTaskQueue, aCallback);
    } else if (!platformSupported &&
               OpusDataDecoder::IsOpus(aConfig.mMimeType)) {
      decoder = new OpusDataDecoder(*audioInfo, aTaskQueue, aCallback);
    } else {
      decoder = CreateAudioDecoder(*audioInfo, aTaskQueue, aCallback);
    }
    return decoder.forget();
  }

  const auto* videoInfo = aConfig.GetAsVideoInfo();
  if (!videoInfo) {
    // Neither audio nor video: nothing we can decode.
    return nullptr;
  }

  if (H264Converter::IsH264(aConfig)) {
    decoder = new H264Converter(this, *videoInfo, aLayersBackend,
                                aImageContainer, aTaskQueue, aCallback);
  } else if (!platformSupported && VPXDecoder::IsVPX(aConfig.mMimeType)) {
    decoder = new VPXDecoder(*videoInfo, aImageContainer, aTaskQueue,
                             aCallback);
  } else {
    decoder = CreateVideoDecoder(*videoInfo, aLayersBackend, aImageContainer,
                                 aTaskQueue, aCallback);
  }
  return decoder.forget();
}
示例2: CreateAudioDecoder
// Creates a MediaDataDecoder for the given track configuration.
// Audio configs go straight to CreateAudioDecoder(); video configs are
// wrapped in an H264Converter when the stream is H.264, otherwise handed to
// CreateVideoDecoder(). Returns nullptr when the config is neither audio
// nor video.
already_AddRefed<MediaDataDecoder>
PlatformDecoderModule::CreateDecoder(const TrackInfo& aConfig,
FlushableTaskQueue* aTaskQueue,
MediaDataDecoderCallback* aCallback,
layers::LayersBackend aLayersBackend,
layers::ImageContainer* aImageContainer)
{
  nsRefPtr<MediaDataDecoder> decoder;

  if (const auto* audioInfo = aConfig.GetAsAudioInfo()) {
    decoder = CreateAudioDecoder(*audioInfo, aTaskQueue, aCallback);
    return decoder.forget();
  }

  const auto* videoInfo = aConfig.GetAsVideoInfo();
  if (!videoInfo) {
    // Neither audio nor video: nothing we can decode.
    return nullptr;
  }

  if (H264Converter::IsH264(aConfig)) {
    decoder = new H264Converter(this, *videoInfo, aLayersBackend,
                                aImageContainer, aTaskQueue, aCallback);
  } else {
    decoder = CreateVideoDecoder(*videoInfo, aLayersBackend, aImageContainer,
                                 aTaskQueue, aCallback);
  }
  return decoder.forget();
}
示例3: DecoderCallbackFuzzingWrapper
// Creates a decoder through a specific PlatformDecoderModule.
// Audio configs are delegated directly to the PDM. For video, the callback
// may be wrapped in a DecoderCallbackFuzzingWrapper (controlled by
// sEnableFuzzingWrapper) for testing; H.264 streams go through an
// H264Converter, whose construction error is checked before the decoder is
// accepted. Returns nullptr on failure or for non-audio/non-video configs.
already_AddRefed<MediaDataDecoder>
PDMFactory::CreateDecoderWithPDM(PlatformDecoderModule* aPDM,
const TrackInfo& aConfig,
FlushableTaskQueue* aTaskQueue,
MediaDataDecoderCallback* aCallback,
layers::LayersBackend aLayersBackend,
layers::ImageContainer* aImageContainer)
{
  MOZ_ASSERT(aPDM);

  RefPtr<MediaDataDecoder> decoder;

  if (const auto* audioInfo = aConfig.GetAsAudioInfo()) {
    decoder = aPDM->CreateAudioDecoder(*audioInfo, aTaskQueue, aCallback);
    return decoder.forget();
  }

  const auto* videoInfo = aConfig.GetAsVideoInfo();
  if (!videoInfo) {
    // Neither audio nor video: nothing we can decode.
    return nullptr;
  }

  // Optionally interpose a fuzzing wrapper between decoder and callback
  // (testing aid that perturbs callback timing).
  MediaDataDecoderCallback* effectiveCallback = aCallback;
  RefPtr<DecoderCallbackFuzzingWrapper> fuzzingWrapper;
  if (sEnableFuzzingWrapper) {
    fuzzingWrapper = new DecoderCallbackFuzzingWrapper(aCallback);
    fuzzingWrapper->SetVideoOutputMinimumInterval(
      TimeDuration::FromMilliseconds(sVideoOutputMinimumInterval_ms));
    fuzzingWrapper->SetDontDelayInputExhausted(sDontDelayInputExhausted);
    effectiveCallback = fuzzingWrapper.get();
  }

  if (H264Converter::IsH264(aConfig)) {
    RefPtr<H264Converter> converter =
      new H264Converter(aPDM, *videoInfo, aLayersBackend, aImageContainer,
                        aTaskQueue, effectiveCallback);
    const nsresult rv = converter->GetLastError();
    // NS_ERROR_NOT_INITIALIZED means there wasn't enough AVCC data yet to
    // create the wrapped decoder — that's still acceptable. Any other
    // failure (e.g. missing WMF DLLs) means we must not use the converter.
    if (NS_SUCCEEDED(rv) || rv == NS_ERROR_NOT_INITIALIZED) {
      decoder = converter.forget();
    }
  } else {
    decoder = aPDM->CreateVideoDecoder(*videoInfo, aLayersBackend,
                                       aImageContainer, aTaskQueue,
                                       effectiveCallback);
  }

  // Only wrap a decoder we actually managed to create.
  if (fuzzingWrapper && decoder) {
    decoder = new DecoderFuzzingWrapper(decoder.forget(),
                                        fuzzingWrapper.forget());
  }
  return decoder.forget();
}
示例4: SupportsMimeType
// Creates a MediaDataDecoder for the given track configuration.
// Audio: falls back to the in-tree Vorbis/Opus software decoders when the
// platform reports no native support for the MIME type. Video: the callback
// may be wrapped in a DecoderCallbackFuzzingWrapper (controlled by
// sEnableFuzzingWrapper); H.264 goes through an H264Converter, VP8/VP9
// through the software VPXDecoder when unsupported natively, and everything
// else through CreateVideoDecoder(). Returns nullptr for configs that are
// neither audio nor video.
already_AddRefed<MediaDataDecoder>
PlatformDecoderModule::CreateDecoder(const TrackInfo& aConfig,
FlushableTaskQueue* aTaskQueue,
MediaDataDecoderCallback* aCallback,
layers::LayersBackend aLayersBackend,
layers::ImageContainer* aImageContainer)
{
  // Does this platform module claim native support for the MIME type?
  const bool platformSupported = SupportsMimeType(aConfig.mMimeType);

  nsRefPtr<MediaDataDecoder> decoder;

  if (const auto* audioInfo = aConfig.GetAsAudioInfo()) {
    // Prefer the bundled software decoders for Vorbis/Opus when the
    // platform has no native decoder of its own.
    if (!platformSupported && VorbisDataDecoder::IsVorbis(aConfig.mMimeType)) {
      decoder = new VorbisDataDecoder(*audioInfo, aTaskQueue, aCallback);
    } else if (!platformSupported &&
               OpusDataDecoder::IsOpus(aConfig.mMimeType)) {
      decoder = new OpusDataDecoder(*audioInfo, aTaskQueue, aCallback);
    } else {
      decoder = CreateAudioDecoder(*audioInfo, aTaskQueue, aCallback);
    }
    return decoder.forget();
  }

  const auto* videoInfo = aConfig.GetAsVideoInfo();
  if (!videoInfo) {
    // Neither audio nor video: nothing we can decode.
    return nullptr;
  }

  // Optionally interpose a fuzzing wrapper between decoder and callback
  // (testing aid that perturbs callback timing).
  MediaDataDecoderCallback* effectiveCallback = aCallback;
  nsRefPtr<DecoderCallbackFuzzingWrapper> fuzzingWrapper;
  if (sEnableFuzzingWrapper) {
    fuzzingWrapper = new DecoderCallbackFuzzingWrapper(aCallback);
    fuzzingWrapper->SetVideoOutputMinimumInterval(
      TimeDuration::FromMilliseconds(sVideoOutputMinimumInterval_ms));
    fuzzingWrapper->SetDontDelayInputExhausted(sDontDelayInputExhausted);
    effectiveCallback = fuzzingWrapper.get();
  }

  if (H264Converter::IsH264(aConfig)) {
    decoder = new H264Converter(this, *videoInfo, aLayersBackend,
                                aImageContainer, aTaskQueue,
                                effectiveCallback);
  } else if (!platformSupported && VPXDecoder::IsVPX(aConfig.mMimeType)) {
    decoder = new VPXDecoder(*videoInfo, aImageContainer, aTaskQueue,
                             effectiveCallback);
  } else {
    decoder = CreateVideoDecoder(*videoInfo, aLayersBackend, aImageContainer,
                                 aTaskQueue, effectiveCallback);
  }

  // Only wrap a decoder we actually managed to create.
  if (fuzzingWrapper && decoder) {
    decoder = new DecoderFuzzingWrapper(decoder.forget(),
                                        fuzzingWrapper.forget());
  }
  return decoder.forget();
}