

C++ TrackInfo::GetAsVideoInfo Method Code Examples

This article collects typical usage examples of the C++ method TrackInfo::GetAsVideoInfo. If you are wondering what TrackInfo::GetAsVideoInfo does, how to call it, or how it is used in practice, the curated examples below should help. You can also look further into other usage examples of the containing class, TrackInfo.


The following presents 8 code examples of the TrackInfo::GetAsVideoInfo method, sorted by popularity by default.
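As every example below shows, GetAsVideoInfo() returns a pointer to the track's VideoInfo when the TrackInfo describes a video track, and nullptr otherwise, so callers either null-check the result or assert on it before dereferencing. The following is a minimal, self-contained sketch of that check-and-use pattern. The TrackInfo/VideoInfo stand-ins are deliberately simplified for illustration (the members mWidth/mHeight and the VideoTrackInfo demo type are assumptions, not the real Gecko definitions, which live in Gecko's MediaInfo.h).

#include <cstdio>
#include <string>

// Simplified stand-ins for Gecko's media classes, reduced to the members
// used in this sketch; not the real TrackInfo/VideoInfo definitions.
struct VideoInfo {
  std::string mMimeType;
  int mWidth = 0;
  int mHeight = 0;
};

struct TrackInfo {
  virtual ~TrackInfo() = default;
  // Returns the track's VideoInfo when it is a video track, nullptr otherwise.
  virtual const VideoInfo* GetAsVideoInfo() const { return nullptr; }
};

struct VideoTrackInfo : TrackInfo {
  VideoInfo mInfo;
  const VideoInfo* GetAsVideoInfo() const override { return &mInfo; }
};

// Typical caller pattern from the examples below: null-check the result
// before touching video-only fields.
void DescribeTrack(const TrackInfo& aConfig) {
  if (const VideoInfo* video = aConfig.GetAsVideoInfo()) {
    std::printf("video track %s (%dx%d)\n",
                video->mMimeType.c_str(), video->mWidth, video->mHeight);
  } else {
    std::printf("not a video track\n");
  }
}

int main() {
  VideoTrackInfo vp9;
  vp9.mInfo = {"video/vp9", 1920, 1080};
  DescribeTrack(vp9);
  DescribeTrack(TrackInfo());  // non-video track: GetAsVideoInfo() is nullptr
}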

Example 1:

void
WMFVideoMFTManager::ConfigurationChanged(const TrackInfo& aConfig)
{
  MOZ_ASSERT(aConfig.GetAsVideoInfo());
  mVideoInfo = *aConfig.GetAsVideoInfo();
  mImageSize = mVideoInfo.mImage;
}
Developer: (not listed), Project: (not listed), Lines: 7, Source: (not listed)

Example 2: CheckResult

 void
 AddMediaFormatChecker(const TrackInfo& aTrackConfig)
 {
   if (aTrackConfig.IsVideo()) {
     auto mimeType = aTrackConfig.GetAsVideoInfo()->mMimeType;
     RefPtr<MediaByteBuffer> extraData =
       aTrackConfig.GetAsVideoInfo()->mExtraData;
     AddToCheckList([mimeType, extraData]() {
       if (MP4Decoder::IsH264(mimeType)) {
         mp4_demuxer::SPSData spsdata;
         // WMF H.264 Video Decoder and Apple ATDecoder
         // do not support YUV444 format.
         // For consistency, all decoders should be checked.
         if (mp4_demuxer::H264::DecodeSPSFromExtraData(extraData, spsdata)
             && (spsdata.profile_idc == 244 /* Hi444PP */
                 || spsdata.chroma_format_idc == PDMFactory::kYUV444)) {
           return CheckResult(
             SupportChecker::Reason::kVideoFormatNotSupported,
             MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
                         RESULT_DETAIL("Decoder may not have the capability "
                                       "to handle the requested video format "
                                       "with YUV444 chroma subsampling.")));
         }
       }
       return CheckResult(SupportChecker::Reason::kSupported);
     });
   }
 }
Developer: bgrins, Project: gecko-dev, Lines: 28, Source: PDMFactory.cpp

Example 3: SupportsMimeType

already_AddRefed<MediaDataDecoder>
PlatformDecoderModule::CreateDecoder(const TrackInfo& aConfig,
                                     FlushableTaskQueue* aTaskQueue,
                                     MediaDataDecoderCallback* aCallback,
                                     layers::LayersBackend aLayersBackend,
                                     layers::ImageContainer* aImageContainer)
{
  nsRefPtr<MediaDataDecoder> m;

  bool hasPlatformDecoder = SupportsMimeType(aConfig.mMimeType);

  if (aConfig.GetAsAudioInfo()) {
    if (!hasPlatformDecoder && VorbisDataDecoder::IsVorbis(aConfig.mMimeType)) {
      m = new VorbisDataDecoder(*aConfig.GetAsAudioInfo(),
                                aTaskQueue,
                                aCallback);
    } else if (!hasPlatformDecoder && OpusDataDecoder::IsOpus(aConfig.mMimeType)) {
      m = new OpusDataDecoder(*aConfig.GetAsAudioInfo(),
                              aTaskQueue,
                              aCallback);
    } else {
      m = CreateAudioDecoder(*aConfig.GetAsAudioInfo(),
                             aTaskQueue,
                             aCallback);
    }
    return m.forget();
  }

  if (!aConfig.GetAsVideoInfo()) {
    return nullptr;
  }

  if (H264Converter::IsH264(aConfig)) {
    m = new H264Converter(this,
                          *aConfig.GetAsVideoInfo(),
                          aLayersBackend,
                          aImageContainer,
                          aTaskQueue,
                          aCallback);
  } else if (!hasPlatformDecoder && VPXDecoder::IsVPX(aConfig.mMimeType)) {
    m = new VPXDecoder(*aConfig.GetAsVideoInfo(),
                       aImageContainer,
                       aTaskQueue,
                       aCallback);
  } else {
    m = CreateVideoDecoder(*aConfig.GetAsVideoInfo(),
                           aLayersBackend,
                           aImageContainer,
                           aTaskQueue,
                           aCallback);
  }
  return m.forget();
}
Developer: norihirou, Project: gecko-dev, Lines: 53, Source: PlatformDecoderModule.cpp

Example 4: AddMediaFormatChecker

 void
 AddMediaFormatChecker(const TrackInfo& aTrackConfig)
 {
   if (aTrackConfig.IsVideo()) {
     auto mimeType = aTrackConfig.GetAsVideoInfo()->mMimeType;
     RefPtr<MediaByteBuffer> extraData = aTrackConfig.GetAsVideoInfo()->mExtraData;
     AddToCheckList(
       [mimeType, extraData]() {
         if (MP4Decoder::IsH264(mimeType)) {
           mp4_demuxer::SPSData spsdata;
           // WMF H.264 Video Decoder and Apple ATDecoder
           // do not support YUV444 format.
           // For consistency, all decoders should be checked.
           if (mp4_demuxer::H264::DecodeSPSFromExtraData(extraData, spsdata) &&
               spsdata.chroma_format_idc == PDMFactory::kYUV444) {
             return SupportChecker::Result::kVideoFormatNotSupported;
           }
         }
         return SupportChecker::Result::kSupported;
       });
   }
 }
Developer: nwgh, Project: gecko-dev, Lines: 22, Source: PDMFactory.cpp

Example 5: CreateAudioDecoder

already_AddRefed<MediaDataDecoder>
PlatformDecoderModule::CreateDecoder(const TrackInfo& aConfig,
                                     FlushableTaskQueue* aTaskQueue,
                                     MediaDataDecoderCallback* aCallback,
                                     layers::LayersBackend aLayersBackend,
                                     layers::ImageContainer* aImageContainer)
{
  nsRefPtr<MediaDataDecoder> m;

  if (aConfig.GetAsAudioInfo()) {
    m = CreateAudioDecoder(*aConfig.GetAsAudioInfo(),
                           aTaskQueue,
                           aCallback);
    return m.forget();
  }

  if (!aConfig.GetAsVideoInfo()) {
    return nullptr;
  }

  if (H264Converter::IsH264(aConfig)) {
    m = new H264Converter(this,
                          *aConfig.GetAsVideoInfo(),
                          aLayersBackend,
                          aImageContainer,
                          aTaskQueue,
                          aCallback);
  } else {
    m = CreateVideoDecoder(*aConfig.GetAsVideoInfo(),
                           aLayersBackend,
                           aImageContainer,
                           aTaskQueue,
                           aCallback);
  }
  return m.forget();
}
Developer: imace, Project: gecko-dev-speech, Lines: 36, Source: PlatformDecoderModule.cpp

Example 6:

bool
WMFDecoderModule::Supports(const TrackInfo& aTrackInfo,
                           DecoderDoctorDiagnostics* aDiagnostics) const
{
  if ((aTrackInfo.mMimeType.EqualsLiteral("audio/mp4a-latm") ||
       aTrackInfo.mMimeType.EqualsLiteral("audio/mp4")) &&
       WMFDecoderModule::HasAAC()) {
    return true;
  }
  if (MP4Decoder::IsH264(aTrackInfo.mMimeType) && WMFDecoderModule::HasH264()) {
    const VideoInfo* videoInfo = aTrackInfo.GetAsVideoInfo();
    MOZ_ASSERT(videoInfo);
    // Check Windows format constraints, based on:
    // https://msdn.microsoft.com/en-us/library/windows/desktop/dd797815(v=vs.85).aspx
    if (IsWin8OrLater()) {
      // Windows >7 supports at most 4096x2304.
      if (videoInfo->mImage.width > 4096 || videoInfo->mImage.height > 2304) {
        return false;
      }
    } else {
      // Windows <=7 supports at most 1920x1088.
      if (videoInfo->mImage.width > 1920 || videoInfo->mImage.height > 1088) {
        return false;
      }
    }
    return true;
  }
  if (aTrackInfo.mMimeType.EqualsLiteral("audio/mpeg") &&
      CanCreateWMFDecoder<CLSID_CMP3DecMediaObject>()) {
    return true;
  }
  if (MediaPrefs::PDMWMFIntelDecoderEnabled() && sDXVAEnabled) {
    if (VPXDecoder::IsVP8(aTrackInfo.mMimeType) &&
        CanCreateWMFDecoder<CLSID_WebmMfVp8Dec>()) {
      return true;
    }
    if (VPXDecoder::IsVP9(aTrackInfo.mMimeType) &&
        CanCreateWMFDecoder<CLSID_WebmMfVp9Dec>()) {
      return true;
    }
  }

  // Some unsupported codec.
  return false;
}
Developer: MichaelKohler, Project: gecko-dev, Lines: 45, Source: WMFDecoderModule.cpp

Example 7: DecoderCallbackFuzzingWrapper

already_AddRefed<MediaDataDecoder>
PDMFactory::CreateDecoderWithPDM(PlatformDecoderModule* aPDM,
                                 const TrackInfo& aConfig,
                                 FlushableTaskQueue* aTaskQueue,
                                 MediaDataDecoderCallback* aCallback,
                                 layers::LayersBackend aLayersBackend,
                                 layers::ImageContainer* aImageContainer)
{
  MOZ_ASSERT(aPDM);
  RefPtr<MediaDataDecoder> m;

  if (aConfig.GetAsAudioInfo()) {
    m = aPDM->CreateAudioDecoder(*aConfig.GetAsAudioInfo(),
                                 aTaskQueue,
                                 aCallback);
    return m.forget();
  }

  if (!aConfig.GetAsVideoInfo()) {
    return nullptr;
  }

  MediaDataDecoderCallback* callback = aCallback;
  RefPtr<DecoderCallbackFuzzingWrapper> callbackWrapper;
  if (sEnableFuzzingWrapper) {
    callbackWrapper = new DecoderCallbackFuzzingWrapper(aCallback);
    callbackWrapper->SetVideoOutputMinimumInterval(
      TimeDuration::FromMilliseconds(sVideoOutputMinimumInterval_ms));
    callbackWrapper->SetDontDelayInputExhausted(sDontDelayInputExhausted);
    callback = callbackWrapper.get();
  }

  if (H264Converter::IsH264(aConfig)) {
    RefPtr<H264Converter> h
      = new H264Converter(aPDM,
                          *aConfig.GetAsVideoInfo(),
                          aLayersBackend,
                          aImageContainer,
                          aTaskQueue,
                          callback);
    const nsresult rv = h->GetLastError();
    if (NS_SUCCEEDED(rv) || rv == NS_ERROR_NOT_INITIALIZED) {
      // The H264Converter either successfully created the wrapped decoder,
      // or there wasn't enough AVCC data to do so. Otherwise, there was some
      // problem, for example WMF DLLs were missing.
      m = h.forget();
    }
  } else {
    m = aPDM->CreateVideoDecoder(*aConfig.GetAsVideoInfo(),
                                 aLayersBackend,
                                 aImageContainer,
                                 aTaskQueue,
                                 callback);
  }

  if (callbackWrapper && m) {
    m = new DecoderFuzzingWrapper(m.forget(), callbackWrapper.forget());
  }

  return m.forget();
}
Developer: Shaif95, Project: gecko-dev, Lines: 61, Source: PDMFactory.cpp

Example 8: SupportsMimeType

already_AddRefed<MediaDataDecoder>
PlatformDecoderModule::CreateDecoder(const TrackInfo& aConfig,
                                     FlushableTaskQueue* aTaskQueue,
                                     MediaDataDecoderCallback* aCallback,
                                     layers::LayersBackend aLayersBackend,
                                     layers::ImageContainer* aImageContainer)
{
    nsRefPtr<MediaDataDecoder> m;

    bool hasPlatformDecoder = SupportsMimeType(aConfig.mMimeType);

    if (aConfig.GetAsAudioInfo()) {
        if (!hasPlatformDecoder && VorbisDataDecoder::IsVorbis(aConfig.mMimeType)) {
            m = new VorbisDataDecoder(*aConfig.GetAsAudioInfo(),
                                      aTaskQueue,
                                      aCallback);
        } else if (!hasPlatformDecoder && OpusDataDecoder::IsOpus(aConfig.mMimeType)) {
            m = new OpusDataDecoder(*aConfig.GetAsAudioInfo(),
                                    aTaskQueue,
                                    aCallback);
        } else {
            m = CreateAudioDecoder(*aConfig.GetAsAudioInfo(),
                                   aTaskQueue,
                                   aCallback);
        }
        return m.forget();
    }

    if (!aConfig.GetAsVideoInfo()) {
        return nullptr;
    }

    MediaDataDecoderCallback* callback = aCallback;
    nsRefPtr<DecoderCallbackFuzzingWrapper> callbackWrapper;
    if (sEnableFuzzingWrapper) {
        callbackWrapper = new DecoderCallbackFuzzingWrapper(aCallback);
        callbackWrapper->SetVideoOutputMinimumInterval(
            TimeDuration::FromMilliseconds(sVideoOutputMinimumInterval_ms));
        callbackWrapper->SetDontDelayInputExhausted(sDontDelayInputExhausted);
        callback = callbackWrapper.get();
    }

    if (H264Converter::IsH264(aConfig)) {
        m = new H264Converter(this,
                              *aConfig.GetAsVideoInfo(),
                              aLayersBackend,
                              aImageContainer,
                              aTaskQueue,
                              callback);
    } else if (!hasPlatformDecoder && VPXDecoder::IsVPX(aConfig.mMimeType)) {
        m = new VPXDecoder(*aConfig.GetAsVideoInfo(),
                           aImageContainer,
                           aTaskQueue,
                           callback);
    } else {
        m = CreateVideoDecoder(*aConfig.GetAsVideoInfo(),
                               aLayersBackend,
                               aImageContainer,
                               aTaskQueue,
                               callback);
    }

    if (callbackWrapper && m) {
        m = new DecoderFuzzingWrapper(m.forget(), callbackWrapper.forget());
    }

    return m.forget();
}
Developer: jreyles, Project: gecko-dev, Lines: 68, Source: PlatformDecoderModule.cpp


Note: The TrackInfo::GetAsVideoInfo examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets are taken from open-source projects contributed by their respective authors; copyright remains with the original authors, and redistribution and use should follow the License of the corresponding project. Do not reproduce without permission.