This page collects typical usage examples of the C++ method GrallocTextureClientOGL::SetMediaBuffer. If you have been wondering what GrallocTextureClientOGL::SetMediaBuffer does and how to use it, the hand-picked code examples below may help. You can also browse further usage examples of its containing class, GrallocTextureClientOGL.
Three code examples of GrallocTextureClientOGL::SetMediaBuffer are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better C++ examples.
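All three examples follow the same pattern: obtain a TextureClient that wraps the decoder's GraphicBuffer, downcast it to GrallocTextureClientOGL, attach the stagefright MediaBuffer via SetMediaBuffer, and register a recycle callback so the buffer can be handed back to the decoder once the compositor is done with it. Here is a minimal sketch of that pattern distilled from the examples; the helper name WrapDecodedFrame, the GonkNativeWindow parameter type, and the callback signature are assumptions for illustration, not part of the Gecko API:
// Minimal sketch of the shared SetMediaBuffer pattern (hypothetical helper;
// the callback signature mirrors the SetRecycleCallback calls below).
already_AddRefed<mozilla::layers::TextureClient>
WrapDecodedFrame(android::MediaBuffer* aBuffer,
                 const android::sp<GonkNativeWindow>& aNativeWindow,
                 void (*aRecycleCallback)(mozilla::layers::TextureClient*, void*),
                 void* aClosure)
{
  using namespace mozilla::layers;
  if (!aBuffer || !aBuffer->graphicBuffer().get()) {
    return nullptr; // Software-decoded frame; callers fall back to a YUV copy.
  }
  // Look up the TextureClient that wraps this GraphicBuffer.
  RefPtr<TextureClient> client =
    aNativeWindow->getTextureClientFromBuffer(aBuffer->graphicBuffer().get());
  if (!client) {
    return nullptr;
  }
  // Attach the MediaBuffer so it stays alive while the compositor reads the
  // frame, and arrange for it to be recycled afterwards.
  static_cast<GrallocTextureClientOGL*>(client.get())->SetMediaBuffer(aBuffer);
  client->SetRecycleCallback(aRecycleCallback, aClosure);
  return client.forget();
}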
Example 1: size
already_AddRefed<VideoData>
GonkVideoDecoderManager::CreateVideoDataFromGraphicBuffer(MediaBuffer* aSource,
                                                          gfx::IntRect& aPicture)
{
  sp<GraphicBuffer> srcBuffer(aSource->graphicBuffer());
  RefPtr<TextureClient> textureClient;

  if (mNeedsCopyBuffer) {
    // Copy buffer contents for bug 1199809.
    if (!mCopyAllocator) {
      mCopyAllocator = new TextureClientRecycleAllocator(ImageBridgeChild::GetSingleton());
    }
    if (!mCopyAllocator) {
      GVDM_LOG("Create buffer allocator failed!");
      return nullptr;
    }

    gfx::IntSize size(Align(aPicture.width, 2), Align(aPicture.height, 2));
    textureClient =
      mCopyAllocator->CreateOrRecycle(gfx::SurfaceFormat::YUV, size,
                                      BackendSelector::Content,
                                      TextureFlags::DEFAULT,
                                      ALLOC_DISALLOW_BUFFERTEXTURECLIENT);
    if (!textureClient) {
      GVDM_LOG("Copy buffer allocation failed!");
      return nullptr;
    }

    // Update size to match the allocated buffer's.
    aPicture.width = size.width;
    aPicture.height = size.height;

    sp<GraphicBuffer> destBuffer =
      static_cast<GrallocTextureClientOGL*>(textureClient.get())->GetGraphicBuffer();
    CopyGraphicBuffer(srcBuffer, destBuffer);
  } else {
    textureClient = mNativeWindow->getTextureClientFromBuffer(srcBuffer.get());
    textureClient->SetRecycleCallback(GonkVideoDecoderManager::RecycleCallback, this);
    GrallocTextureClientOGL* grallocClient = static_cast<GrallocTextureClientOGL*>(textureClient.get());
    grallocClient->SetMediaBuffer(aSource);
  }

  RefPtr<VideoData> data = VideoData::Create(mInfo.mVideo,
                                             mImageContainer,
                                             0,     // Filled later by caller.
                                             0,     // Filled later by caller.
                                             1,     // No way to pass sample duration from muxer
                                                    // to OMX codec, so we hardcode the duration here.
                                             textureClient,
                                             false, // Filled later by caller.
                                             -1,
                                             aPicture);
  return data.forget();
}
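The copy path above (enabled by mNeedsCopyBuffer for bug 1199809) relies on a CopyGraphicBuffer helper that the example does not show. Below is a hypothetical sketch of such a helper; the real one must handle vendor-specific YUV layouts plane by plane, so the single memcpy and the YV12 size arithmetic here are simplifying assumptions.
// Hypothetical sketch of the CopyGraphicBuffer helper used above. Assumes
// both buffers share the same YV12 layout and stride; a production version
// has to cope with vendor-specific gralloc formats.
static void
CopyGraphicBuffer(android::sp<android::GraphicBuffer>& aSource,
                  android::sp<android::GraphicBuffer>& aDestination)
{
  void* src = nullptr;
  void* dst = nullptr;
  // Map both buffers into CPU-accessible memory.
  aSource->lock(android::GraphicBuffer::USAGE_SW_READ_OFTEN, &src);
  aDestination->lock(android::GraphicBuffer::USAGE_SW_WRITE_OFTEN, &dst);
  // For 8-bit YV12 the Y plane is stride * height bytes and the two chroma
  // planes together add half of that again.
  size_t bytes = aSource->getStride() * aSource->getHeight() * 3 / 2;
  memcpy(dst, src, bytes);
  aDestination->unlock();
  aSource->unlock();
}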
Example 2: QueueFrameTimeOut
nsresult
GonkVideoDecoderManager::CreateVideoData(int64_t aStreamOffset, VideoData** v)
{
  *v = nullptr;
  nsRefPtr<VideoData> data;
  int64_t timeUs;
  int32_t keyFrame;

  if (mVideoBuffer == nullptr) {
    GVDM_LOG("Video Buffer is not valid!");
    return NS_ERROR_UNEXPECTED;
  }

  if (!mVideoBuffer->meta_data()->findInt64(kKeyTime, &timeUs)) {
    GVDM_LOG("Decoder did not return frame time");
    return NS_ERROR_UNEXPECTED;
  }

  int64_t duration;
  nsresult rv = QueueFrameTimeOut(timeUs, duration);
  NS_ENSURE_SUCCESS(rv, rv);

  if (mVideoBuffer->range_length() == 0) {
    // Some decoders may return spurious empty buffers that we just want to
    // ignore (quoted from Android's AwesomePlayer.cpp).
    ReleaseVideoBuffer();
    return NS_ERROR_NOT_AVAILABLE;
  }

  if (!mVideoBuffer->meta_data()->findInt32(kKeyIsSyncFrame, &keyFrame)) {
    keyFrame = 0;
  }

  gfx::IntRect picture = ToIntRect(mPicture);
  if (mFrameInfo.mWidth != mInitialFrame.width ||
      mFrameInfo.mHeight != mInitialFrame.height) {
    // The frame size differs from what the container reports. This is legal;
    // scale the crop rectangle so it keeps the same ratio relative to the
    // picture size reported by the container.
    picture.x = (mPicture.x * mFrameInfo.mWidth) / mInitialFrame.width;
    picture.y = (mPicture.y * mFrameInfo.mHeight) / mInitialFrame.height;
    picture.width = (mFrameInfo.mWidth * mPicture.width) / mInitialFrame.width;
    picture.height = (mFrameInfo.mHeight * mPicture.height) / mInitialFrame.height;
  }

  RefPtr<mozilla::layers::TextureClient> textureClient;
  if (mVideoBuffer->graphicBuffer().get()) {
    textureClient = mNativeWindow->getTextureClientFromBuffer(mVideoBuffer->graphicBuffer().get());
  }

  if (textureClient) {
    // Hardware path: hand the gralloc-backed frame straight to the compositor.
    GrallocTextureClientOGL* grallocClient = static_cast<GrallocTextureClientOGL*>(textureClient.get());
    grallocClient->SetMediaBuffer(mVideoBuffer);
    textureClient->SetRecycleCallback(GonkVideoDecoderManager::RecycleCallback, this);
    data = VideoData::Create(mInfo.mVideo,
                             mImageContainer,
                             aStreamOffset,
                             timeUs,
                             duration,
                             textureClient,
                             keyFrame,
                             -1,
                             picture);
  } else {
    // Software path: convert the raw decoder output to I420 if necessary.
    if (!mVideoBuffer->data()) {
      GVDM_LOG("No data in Video Buffer!");
      return NS_ERROR_UNEXPECTED;
    }
    uint8_t* yuv420p_buffer = (uint8_t*)mVideoBuffer->data();
    int32_t stride = mFrameInfo.mStride;
    int32_t slice_height = mFrameInfo.mSliceHeight;

    // Convert to OMX_COLOR_FormatYUV420Planar when the decoder emits
    // another color format.
    if (mFrameInfo.mColorFormat != OMX_COLOR_FormatYUV420Planar) {
      ARect crop;
      crop.top = 0;
      crop.bottom = mFrameInfo.mHeight;
      crop.left = 0;
      crop.right = mFrameInfo.mWidth;
      yuv420p_buffer = GetColorConverterBuffer(mFrameInfo.mWidth, mFrameInfo.mHeight);
      if (mColorConverter.convertDecoderOutputToI420(mVideoBuffer->data(),
            mFrameInfo.mWidth, mFrameInfo.mHeight, crop, yuv420p_buffer) != OK) {
        ReleaseVideoBuffer();
        GVDM_LOG("Color conversion failed!");
        return NS_ERROR_UNEXPECTED;
      }
      stride = mFrameInfo.mWidth;
      slice_height = mFrameInfo.mHeight;
    }

    size_t yuv420p_y_size = stride * slice_height;
    size_t yuv420p_u_size = ((stride + 1) / 2) * ((slice_height + 1) / 2);
    uint8_t* yuv420p_y = yuv420p_buffer;
    uint8_t* yuv420p_u = yuv420p_y + yuv420p_y_size;
    uint8_t* yuv420p_v = yuv420p_u + yuv420p_u_size;
    // This is the approximate byte position in the stream.
//......... (the rest of this example is omitted) .........
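The omitted tail of this example packs the three plane pointers computed above into the buffer descriptor consumed by VideoData::Create. Below is a plausible reconstruction; the exact YCbCrBuffer field names are assumptions based on Gecko's software-frame path, not the verbatim omitted code.
// Hypothetical continuation: describe the I420 planes for VideoData::Create.
VideoData::YCbCrBuffer b;
b.mPlanes[0].mData = yuv420p_y;           // Full-resolution luma plane.
b.mPlanes[0].mWidth = mFrameInfo.mWidth;
b.mPlanes[0].mHeight = mFrameInfo.mHeight;
b.mPlanes[0].mStride = stride;
b.mPlanes[0].mOffset = 0;
b.mPlanes[0].mSkip = 0;
for (int i = 1; i < 3; i++) {             // Chroma planes are subsampled 2x2.
  b.mPlanes[i].mData = (i == 1) ? yuv420p_u : yuv420p_v;
  b.mPlanes[i].mWidth = (mFrameInfo.mWidth + 1) / 2;
  b.mPlanes[i].mHeight = (mFrameInfo.mHeight + 1) / 2;
  b.mPlanes[i].mStride = (stride + 1) / 2;
  b.mPlanes[i].mOffset = 0;
  b.mPlanes[i].mSkip = 0;
}
data = VideoData::Create(mInfo.mVideo, mImageContainer, aStreamOffset,
                         timeUs, duration, b, keyFrame, -1, picture);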
Example 3: ReadVideo
bool OmxDecoder::ReadVideo(VideoFrame* aFrame, int64_t aTimeUs,
                           bool aKeyframeSkip, bool aDoSeek)
{
  if (!mVideoSource.get()) {
    return false;
  }

  ReleaseVideoBuffer();

  status_t err;
  if (aDoSeek) {
    {
      Mutex::Autolock autoLock(mSeekLock);
      ReleaseAllPendingVideoBuffersLocked();
      mIsVideoSeeking = true;
    }
    MediaSource::ReadOptions options;
    options.setSeekTo(aTimeUs, MediaSource::ReadOptions::SEEK_PREVIOUS_SYNC);
    err = mVideoSource->read(&mVideoBuffer, &options);
    {
      Mutex::Autolock autoLock(mSeekLock);
      mIsVideoSeeking = false;
      PostReleaseVideoBuffer(nullptr, FenceHandle());
    }
    aDoSeek = false;
  } else {
    err = mVideoSource->read(&mVideoBuffer);
  }

  aFrame->mSize = 0;

  if (err == OK) {
    int64_t timeUs;
    int32_t unreadable;
    int32_t keyFrame;
    size_t length = mVideoBuffer->range_length();

    if (!mVideoBuffer->meta_data()->findInt64(kKeyTime, &timeUs)) {
      NS_WARNING("OMX decoder did not return frame time");
      return false;
    }
    if (!mVideoBuffer->meta_data()->findInt32(kKeyIsSyncFrame, &keyFrame)) {
      keyFrame = 0;
    }
    if (!mVideoBuffer->meta_data()->findInt32(kKeyIsUnreadable, &unreadable)) {
      unreadable = 0;
    }

    RefPtr<mozilla::layers::TextureClient> textureClient;
    if (mVideoBuffer->graphicBuffer().get()) {
      textureClient = mNativeWindow->getTextureClientFromBuffer(mVideoBuffer->graphicBuffer().get());
    }

    if (textureClient) {
      // Manually increment the reference count to keep the MediaBuffer
      // alive while the TextureClient is in use.
      mVideoBuffer->add_ref();
      GrallocTextureClientOGL* grallocClient = static_cast<GrallocTextureClientOGL*>(textureClient.get());
      grallocClient->SetMediaBuffer(mVideoBuffer);
      // Set the recycle callback for the TextureClient.
      textureClient->SetRecycleCallback(OmxDecoder::RecycleCallback, this);
      {
        Mutex::Autolock autoLock(mPendingVideoBuffersLock);
        // Remember the TextureClient until it has been recycled.
        MOZ_ASSERT(mPendingRecycleTexutreClients.find(textureClient) == mPendingRecycleTexutreClients.end());
        mPendingRecycleTexutreClients.insert(textureClient);
      }
      aFrame->mGraphicBuffer = textureClient;
      aFrame->mRotation = mVideoRotation;
      aFrame->mTimeUs = timeUs;
      aFrame->mKeyFrame = keyFrame;
      aFrame->Y.mWidth = mVideoWidth;
      aFrame->Y.mHeight = mVideoHeight;
      // Release OmxDecoder's own reference so it no longer holds the video
      // buffer; the MediaBuffer's ref count drops from 2 to 1.
      ReleaseVideoBuffer();
    } else if (length > 0) {
      char* data = static_cast<char*>(mVideoBuffer->data()) + mVideoBuffer->range_offset();
      if (unreadable) {
        LOG(PR_LOG_DEBUG, "video frame is unreadable");
      }
      if (!ToVideoFrame(aFrame, timeUs, data, length, keyFrame)) {
        return false;
      }
    }
    // Check whether this frame is valid; if not, mark it to be skipped.
    if ((aKeyframeSkip && timeUs < aTimeUs) || length == 0) {
      aFrame->mShouldSkip = true;
    }
  } else if (err == INFO_FORMAT_CHANGED) {
    // If the format changed, update our cached info.
    if (!SetVideoFormat()) {
//......... (the rest of this example is omitted) .........
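Example 3 only shows one half of the add_ref() bookkeeping. When the compositor finishes with the frame, the registered OmxDecoder::RecycleCallback has to balance that extra reference. The sketch below is a hypothetical reconstruction: PostReleaseVideoBuffer is taken from the seek path above, while GetMediaBuffer and ClearMediaBuffer on GrallocTextureClientOGL are assumed counterparts of SetMediaBuffer.
// Hypothetical sketch of the recycle callback registered in ReadVideo().
// It detaches the MediaBuffer, forgets the pending TextureClient, and posts
// the buffer release back to the decoder, balancing the add_ref() above.
/* static */ void
OmxDecoder::RecycleCallback(TextureClient* aClient, void* aClosure)
{
  OmxDecoder* decoder = static_cast<OmxDecoder*>(aClosure);
  GrallocTextureClientOGL* grallocClient =
    static_cast<GrallocTextureClientOGL*>(aClient);
  aClient->ClearRecycleCallback(); // Fire at most once per frame.
  {
    Mutex::Autolock autoLock(decoder->mPendingVideoBuffersLock);
    // Matches the insert performed in ReadVideo().
    decoder->mPendingRecycleTexutreClients.erase(aClient);
  }
  // Hand the MediaBuffer back together with the compositor's release fence;
  // the posted task eventually calls MediaBuffer::release().
  decoder->PostReleaseVideoBuffer(grallocClient->GetMediaBuffer(), FenceHandle());
  grallocClient->ClearMediaBuffer();
}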