This article collects typical usage examples of the C++ method CreateDecoderParams::VideoConfig. If you are looking for answers to questions such as how CreateDecoderParams::VideoConfig is used in practice, or what calling it looks like in real code, the selected examples below may help. You can also explore further usage examples of the class it belongs to, CreateDecoderParams.
The following presents 15 code examples of the CreateDecoderParams::VideoConfig method, sorted by popularity by default.
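Before the examples, here is a minimal, self-contained sketch of the pattern most of them follow: a decoder constructor calls `aParams.VideoConfig()` once and caches the returned `VideoInfo`, then reads fields such as the MIME type and display size from it. The `VideoInfo`, `CreateDecoderParams`, and `ToyDecoder` types below are simplified stand-ins for illustration only, not the actual Mozilla definitions.
// Illustrative sketch only: simplified stand-ins for Mozilla's VideoInfo /
// CreateDecoderParams, showing how VideoConfig() is typically consumed.
#include <iostream>
#include <string>

struct IntSize { int width; int height; };

struct VideoInfo {              // simplified stand-in for mozilla::VideoInfo
  std::string mMimeType;
  IntSize mDisplay;
};

struct CreateDecoderParams {    // simplified stand-in
  const VideoInfo& VideoConfig() const { return mConfig; }
  VideoInfo mConfig;
};

class ToyDecoder {              // hypothetical decoder mirroring the examples below
public:
  explicit ToyDecoder(const CreateDecoderParams& aParams)
    : mInfo(aParams.VideoConfig())                          // cache the video config
    , mIsVP9(aParams.VideoConfig().mMimeType == "video/vp9")
  {}

  void Describe() const {
    std::cout << mInfo.mMimeType << " "
              << mInfo.mDisplay.width << "x" << mInfo.mDisplay.height
              << (mIsVP9 ? " (VP9)" : "") << "\n";
  }

private:
  VideoInfo mInfo;
  bool mIsVP9;
};

int main() {
  CreateDecoderParams params{ {"video/vp9", {1280, 720}} };
  ToyDecoder decoder(params);
  decoder.Describe();           // prints: video/vp9 1280x720 (VP9)
  return 0;
}
The real examples below do the same thing with Mozilla's actual types, usually in a constructor initializer list or when forwarding the config to another decoder object.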
Example 1: mImageContainer
VPXDecoder::VPXDecoder(const CreateDecoderParams& aParams)
: mImageContainer(aParams.mImageContainer)
, mTaskQueue(aParams.mTaskQueue)
, mInfo(aParams.VideoConfig())
, mCodec(MimeTypeToCodec(aParams.VideoConfig().mMimeType))
{
MOZ_COUNT_CTOR(VPXDecoder);
PodZero(&mVPX);
PodZero(&mVPXAlpha);
}
Example 2: mImageContainer
VPXDecoder::VPXDecoder(const CreateDecoderParams& aParams)
: mImageContainer(aParams.mImageContainer),
mImageAllocator(aParams.mKnowsCompositor),
mTaskQueue(aParams.mTaskQueue),
mInfo(aParams.VideoConfig()),
mCodec(MimeTypeToCodec(aParams.VideoConfig().mMimeType)),
mLowLatency(
aParams.mOptions.contains(CreateDecoderParams::Option::LowLatency)) {
MOZ_COUNT_CTOR(VPXDecoder);
PodZero(&mVPX);
PodZero(&mVPXAlpha);
}
Example 3: mImageContainer
AOMDecoder::AOMDecoder(const CreateDecoderParams& aParams)
: mImageContainer(aParams.mImageContainer)
, mTaskQueue(aParams.mTaskQueue)
, mInfo(aParams.VideoConfig())
{
PodZero(&mCodec);
}
Example 4: AppleVTDecoder
already_AddRefed<MediaDataDecoder> AppleDecoderModule::CreateVideoDecoder(
const CreateDecoderParams& aParams) {
RefPtr<MediaDataDecoder> decoder =
new AppleVTDecoder(aParams.VideoConfig(), aParams.mTaskQueue,
aParams.mImageContainer, aParams.mOptions);
return decoder.forget();
}
Example 5: mPDM
H264Converter::H264Converter(PlatformDecoderModule* aPDM,
const CreateDecoderParams& aParams)
: mPDM(aPDM)
, mOriginalConfig(aParams.VideoConfig())
, mCurrentConfig(aParams.VideoConfig())
, mLayersBackend(aParams.mLayersBackend)
, mImageContainer(aParams.mImageContainer)
, mTaskQueue(aParams.mTaskQueue)
, mCallback(aParams.mCallback)
, mDecoder(nullptr)
, mGMPCrashHelper(aParams.mCrashHelper)
, mNeedAVCC(aPDM->DecoderNeedsConversion(aParams.mConfig) == PlatformDecoderModule::kNeedAVCC)
, mLastError(NS_OK)
{
CreateDecoder(aParams.mDiagnostics);
}
Example 6: mConfig
GMPVideoDecoderParams::GMPVideoDecoderParams(const CreateDecoderParams& aParams)
: mConfig(aParams.VideoConfig())
, mTaskQueue(aParams.mTaskQueue)
, mImageContainer(aParams.mImageContainer)
, mLayersBackend(aParams.GetLayersBackend())
, mCrashHelper(aParams.mCrashHelper)
{
}
Example 7: GonkMediaDataDecoder
already_AddRefed<MediaDataDecoder>
GonkDecoderModule::CreateVideoDecoder(const CreateDecoderParams& aParams)
{
RefPtr<MediaDataDecoder> decoder =
new GonkMediaDataDecoder(new GonkVideoDecoderManager(aParams.mImageContainer, aParams.VideoConfig()),
aParams.mCallback);
return decoder.forget();
}
Example 8: CreateVideoDecoder
// Decode thread.
already_AddRefed<MediaDataDecoder>
CreateVideoDecoder(const CreateDecoderParams& aParams) override {
const VideoInfo& config = aParams.VideoConfig();
BlankVideoDataCreator* creator = new BlankVideoDataCreator(
config.mDisplay.width, config.mDisplay.height, aParams.mImageContainer);
RefPtr<MediaDataDecoder> decoder =
new BlankMediaDataDecoder<BlankVideoDataCreator>(creator, aParams);
return decoder.forget();
}
Example 9: mConfig
GMPVideoDecoderParams::GMPVideoDecoderParams(const CreateDecoderParams& aParams)
: mConfig(aParams.VideoConfig())
, mTaskQueue(aParams.mTaskQueue)
, mCallback(nullptr)
, mAdapter(nullptr)
, mImageContainer(aParams.mImageContainer)
, mLayersBackend(aParams.mLayersBackend)
, mCrashHelper(aParams.mCrashHelper)
{}
Example 10: BlankMediaDataDecoder
BlankMediaDataDecoder(BlankMediaDataCreator* aCreator,
const CreateDecoderParams& aParams)
: mCreator(aCreator)
, mCallback(aParams.mCallback)
, mMaxRefFrames(aParams.mConfig.GetType() == TrackInfo::kVideoTrack &&
MP4Decoder::IsH264(aParams.mConfig.mMimeType)
? mp4_demuxer::H264::ComputeMaxRefFrames(aParams.VideoConfig().mExtraData)
: 0)
, mType(aParams.mConfig.GetType())
{
}
Example 11: mPDM
H264Converter::H264Converter(PlatformDecoderModule* aPDM,
const CreateDecoderParams& aParams)
: mPDM(aPDM)
, mOriginalConfig(aParams.VideoConfig())
, mCurrentConfig(aParams.VideoConfig())
, mKnowsCompositor(aParams.mKnowsCompositor)
, mImageContainer(aParams.mImageContainer)
, mTaskQueue(aParams.mTaskQueue)
, mDecoder(nullptr)
, mGMPCrashHelper(aParams.mCrashHelper)
, mLastError(NS_OK)
, mType(aParams.mType)
, mOnWaitingForKeyEvent(aParams.mOnWaitingForKeyEvent)
, mDecoderOptions(aParams.mOptions)
, mRate(aParams.mRate)
{
mLastError = CreateDecoder(mOriginalConfig, aParams.mDiagnostics);
if (mDecoder) {
MOZ_ASSERT(H264::HasSPS(mOriginalConfig.mExtraData));
// The video metadata contains out-of-band SPS/PPS (AVC1); store it.
mOriginalExtraData = mOriginalConfig.mExtraData;
}
}
Example 12: WMFVideoMFTManager
already_AddRefed<MediaDataDecoder>
WMFDecoderModule::CreateVideoDecoder(const CreateDecoderParams& aParams)
{
nsAutoPtr<WMFVideoMFTManager> manager(
new WMFVideoMFTManager(aParams.VideoConfig(),
aParams.mKnowsCompositor,
aParams.mImageContainer,
sDXVAEnabled));
if (!manager->Init()) {
return nullptr;
}
RefPtr<MediaDataDecoder> decoder =
new WMFMediaDataDecoder(manager.forget(), aParams.mTaskQueue, aParams.mCallback);
return decoder.forget();
}
Example 13:
already_AddRefed<MediaDataDecoder> AndroidDecoderModule::CreateVideoDecoder(
const CreateDecoderParams& aParams) {
// Temporary - forces use of VPXDecoder when alpha is present.
// Bug 1263836 will handle alpha scenario once implemented. It will shift
// the check for alpha to PDMFactory but not itself remove the need for a
// check.
if (aParams.VideoConfig().HasAlpha()) {
return nullptr;
}
nsString drmStubId;
if (mProxy) {
drmStubId = mProxy->GetMediaDrmStubId();
}
RefPtr<MediaDataDecoder> decoder =
RemoteDataDecoder::CreateVideoDecoder(aParams, drmStubId, mProxy);
return decoder.forget();
}
Example 14: result
already_AddRefed<MediaDataDecoder> RemoteDecoderModule::CreateVideoDecoder(
const CreateDecoderParams& aParams) {
LaunchRDDProcessIfNeeded();
if (!mManagerThread) {
return nullptr;
}
RefPtr<RemoteVideoDecoderChild> child = new RemoteVideoDecoderChild();
MediaResult result(NS_OK);
// We can use child as a ref here because this is a sync dispatch. In
// the error case for InitIPDL, we can't just let the RefPtr go out of
// scope at the end of the method because it will release the
// RemoteVideoDecoderChild on the wrong thread. This will assert in
// RemoteDecoderChild's destructor. Passing the RefPtr by reference
// allows us to release the RemoteVideoDecoderChild on the manager
// thread during this single dispatch.
RefPtr<Runnable> task =
NS_NewRunnableFunction("RemoteDecoderModule::CreateVideoDecoder", [&]() {
result = child->InitIPDL(aParams.VideoConfig(), aParams.mRate.mValue,
aParams.mOptions);
if (NS_FAILED(result)) {
// Release RemoteVideoDecoderChild here, while we're on
// manager thread. Don't just let the RefPtr go out of scope.
child = nullptr;
}
});
SyncRunnable::DispatchToThread(mManagerThread, task);
if (NS_FAILED(result)) {
if (aParams.mError) {
*aParams.mError = result;
}
return nullptr;
}
RefPtr<RemoteMediaDataDecoder> object = new RemoteMediaDataDecoder(
child, mManagerThread,
RemoteDecoderManagerChild::GetManagerAbstractThread());
return object.forget();
}
Example 15: RemoteVideoDecoder
already_AddRefed<MediaDataDecoder>
RemoteDecoderModule::CreateVideoDecoder(const CreateDecoderParams& aParams)
{
if (!aParams.mKnowsCompositor) {
return nullptr;
}
MediaDataDecoderCallback* callback = aParams.mCallback;
MOZ_ASSERT(callback->OnReaderTaskQueue());
RefPtr<RemoteVideoDecoder> object = new RemoteVideoDecoder(callback);
VideoInfo info = aParams.VideoConfig();
RefPtr<layers::KnowsCompositor> knowsCompositor = aParams.mKnowsCompositor;
VideoDecoderManagerChild::GetManagerThread()->Dispatch(NS_NewRunnableFunction([=]() {
object->mActor->InitIPDL(callback, info, knowsCompositor);
}), NS_DISPATCH_NORMAL);
return object.forget();
}