本文整理汇总了C++中GrallocTextureClientOGL类的典型用法代码示例。如果您正苦于以下问题:C++ GrallocTextureClientOGL类的具体用法?C++ GrallocTextureClientOGL怎么用?C++ GrallocTextureClientOGL使用的例子?那么, 这里精选的类代码示例或许可以为您提供帮助。
在下文中一共展示了GrallocTextureClientOGL类的9个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C++代码示例。
示例1: printf_stderr
// Pushes the newest frame of the layer's SurfaceStream to the compositor.
// Cross-process (Gonk) path: the consumer side of the stream is a gralloc
// SharedSurface whose TextureClient is forwarded over IPC.  In-process path:
// a StreamTextureClientOGL wrapping the stream is created lazily and reused.
void
CanvasClientSurfaceStream::Update(gfx::IntSize aSize, ClientCanvasLayer* aLayer)
{
GLScreenBuffer* screen = aLayer->mGLContext->Screen();
SurfaceStream* stream = screen->Stream();
// Any process other than the default (chrome) process must share surfaces
// across the process boundary.
bool isCrossProcess = !(XRE_GetProcessType() == GeckoProcessType_Default);
bool bufferCreated = false;
if (isCrossProcess) {
#ifdef MOZ_WIDGET_GONK
SharedSurface* surf = stream->SwapConsumer();
if (!surf) {
printf_stderr("surf is null post-SwapConsumer!\n");
return;
}
// Only gralloc-backed surfaces can be shared over IPC on Gonk.
if (surf->Type() != SharedSurfaceType::Gralloc) {
printf_stderr("Unexpected non-Gralloc SharedSurface in IPC path!");
MOZ_ASSERT(false);
return;
}
SharedSurface_Gralloc* grallocSurf = SharedSurface_Gralloc::Cast(surf);
GrallocTextureClientOGL* grallocTextureClient =
static_cast<GrallocTextureClientOGL*>(grallocSurf->GetTextureClient());
// If IPDLActor is null means this TextureClient didn't AddTextureClient yet
if (!grallocTextureClient->GetIPDLActor()) {
grallocTextureClient->SetTextureFlags(mTextureInfo.mTextureFlags);
AddTextureClient(grallocTextureClient);
}
// AddTextureClient() above may have failed, so re-check the actor before
// forwarding the texture.
if (grallocTextureClient->GetIPDLActor()) {
GetForwarder()->UseTexture(this, grallocTextureClient);
}
#else
printf_stderr("isCrossProcess, but not MOZ_WIDGET_GONK! Someone needs to write some code!");
MOZ_ASSERT(false);
#endif
} else {
// In-process: lazily create a stream-backed texture client, keep it in
// mBuffer, and reuse it on subsequent updates.
if (!mBuffer) {
StreamTextureClientOGL* textureClient =
new StreamTextureClientOGL(mTextureInfo.mTextureFlags);
textureClient->InitWith(stream);
mBuffer = textureClient;
bufferCreated = true;
}
// Drop the freshly created buffer if compositor registration failed.
if (bufferCreated && !AddTextureClient(mBuffer)) {
mBuffer = nullptr;
}
if (mBuffer) {
GetForwarder()->UseTexture(this, mBuffer);
}
}
// Notify the layer a frame was delivered.
aLayer->Painted();
}
示例2:
/* static */
// Invoked by the compositor when it is done with a recycled TextureClient:
// detach the callback and hand the underlying MediaBuffer back to the
// decoder manager for release.
void
GonkVideoDecoderManager::RecycleCallback(TextureClient* aClient, void* aClosure)
{
  auto* manager = static_cast<GonkVideoDecoderManager*>(aClosure);
  auto* grallocClient = static_cast<GrallocTextureClientOGL*>(aClient);
  aClient->ClearRecycleCallback();
  manager->PostReleaseVideoBuffer(grallocClient->GetMediaBuffer());
}
示例3: MOZ_ASSERT
/* static */
// Compositor-side recycle hook: detaches the callback, grabs the release
// fence for the buffer, and posts the MediaBuffer (with fence) back to the
// decoder manager for release.
void
GonkVideoDecoderManager::RecycleCallback(TextureClient* aClient, void* aClosure)
{
  MOZ_ASSERT(aClient && !aClient->IsDead());
  auto* manager = static_cast<GonkVideoDecoderManager*>(aClosure);
  auto* grallocClient = static_cast<GrallocTextureClientOGL*>(aClient);
  aClient->ClearRecycleCallback();
  // Take (and reset) the release fence so the decoder waits for the
  // compositor's reads before reusing the buffer.
  FenceHandle fence = aClient->GetAndResetReleaseFenceHandle();
  manager->PostReleaseVideoBuffer(grallocClient->GetMediaBuffer(), fence);
}
示例4: srcBuffer
// Wraps a decoder-output GraphicBuffer into a VideoData.
//
// When mNeedsCopyBuffer is set (workaround for bug 1199809) the frame is
// copied into a freshly allocated gralloc buffer and aPicture is updated to
// the 2-aligned copy dimensions.  Otherwise the decoder's own buffer is used
// directly and recycled through RecycleCallback once the compositor is done.
//
// @param aSource   decoder output buffer; ownership stays with the caller
//                  (non-copy path stashes it on the texture client).
// @param aPicture  in/out display rect; may be resized in the copy path.
// @return the new VideoData, or nullptr on allocation failure.
already_AddRefed<VideoData>
GonkVideoDecoderManager::CreateVideoDataFromGraphicBuffer(MediaBuffer* aSource,
                                                          gfx::IntRect& aPicture)
{
  sp<GraphicBuffer> srcBuffer(aSource->graphicBuffer());
  RefPtr<TextureClient> textureClient;

  if (mNeedsCopyBuffer) {
    // Copy buffer contents for bug 1199809.
    if (!mCopyAllocator) {
      mCopyAllocator = new TextureClientRecycleAllocator(ImageBridgeChild::GetSingleton());
    }
    if (!mCopyAllocator) {
      GVDM_LOG("Create buffer allocator failed!");
      return nullptr;
    }
    // Gralloc YUV buffers require even dimensions.
    gfx::IntSize size(Align(aPicture.width, 2) , Align(aPicture.height, 2));
    textureClient =
      mCopyAllocator->CreateOrRecycle(gfx::SurfaceFormat::YUV, size,
                                      BackendSelector::Content,
                                      TextureFlags::DEFAULT,
                                      ALLOC_DISALLOW_BUFFERTEXTURECLIENT);
    if (!textureClient) {
      GVDM_LOG("Copy buffer allocation failed!");
      return nullptr;
    }
    // Update size to match buffer's.
    aPicture.width = size.width;
    aPicture.height = size.height;
    sp<GraphicBuffer> destBuffer =
      static_cast<GrallocTextureClientOGL*>(textureClient.get())->GetGraphicBuffer();
    CopyGraphicBuffer(srcBuffer, destBuffer);
  } else {
    textureClient = mNativeWindow->getTextureClientFromBuffer(srcBuffer.get());
    // Bug fix: the original dereferenced the result unconditionally, so a
    // lookup failure crashed here; fail gracefully like the copy path does.
    if (!textureClient) {
      GVDM_LOG("getTextureClientFromBuffer failed!");
      return nullptr;
    }
    textureClient->SetRecycleCallback(GonkVideoDecoderManager::RecycleCallback, this);
    GrallocTextureClientOGL* grallocClient = static_cast<GrallocTextureClientOGL*>(textureClient.get());
    // Keep the MediaBuffer alive until the compositor recycles the texture.
    grallocClient->SetMediaBuffer(aSource);
  }

  RefPtr<VideoData> data = VideoData::Create(mInfo.mVideo,
                                             mImageContainer,
                                             0, // Filled later by caller.
                                             0, // Filled later by caller.
                                             1, // No way to pass sample duration from muxer to
                                                // OMX codec, so we hardcode the duration here.
                                             textureClient,
                                             false, // Filled later by caller.
                                             -1,
                                             aPicture);
  return data.forget();
}
示例5: ReleaseVideoBuffer
// Tears down all media resources: rejects the pending resource promise,
// drains any TextureClients still awaiting recycle, frees pending video
// buffers, and stops/clears the sources and native window.
void OmxDecoder::ReleaseMediaResources() {
mMediaResourcePromise.RejectIfExists(true, __func__);
ReleaseVideoBuffer();
ReleaseAudioBuffer();
{
Mutex::Autolock autoLock(mPendingVideoBuffersLock);
// Debug builds assert the set is already empty; release builds recover
// below by pushing the buffers back for release.
MOZ_ASSERT(mPendingRecycleTexutreClients.empty());
// Release all pending recycle TextureClients, if they are not recycled yet.
// This should not happen. See Bug 1042308.
if (!mPendingRecycleTexutreClients.empty()) {
printf_stderr("OmxDecoder::ReleaseMediaResources -- TextureClients are not recycled yet\n");
for (std::set<TextureClient*>::iterator it=mPendingRecycleTexutreClients.begin();
it!=mPendingRecycleTexutreClients.end(); it++)
{
GrallocTextureClientOGL* client = static_cast<GrallocTextureClientOGL*>(*it);
client->ClearRecycleCallback();
// Re-queue the buffer (with its release fence) so it is freed with the
// other pending buffers below.
if (client->GetMediaBuffer()) {
mPendingVideoBuffers.push(BufferItem(client->GetMediaBuffer(), client->GetAndResetReleaseFenceHandle()));
}
}
mPendingRecycleTexutreClients.clear();
}
}
{
// Free all pending video buffers.
Mutex::Autolock autoLock(mSeekLock);
ReleaseAllPendingVideoBuffersLocked();
}
if (mVideoSource.get()) {
mVideoSource->stop();
mVideoSource.clear();
}
if (mAudioSource.get()) {
mAudioSource->stop();
mAudioSource.clear();
}
mNativeWindowClient.clear();
mNativeWindow.clear();
// Reset this variable to make the first seek go to the previous keyframe
// when resuming
mLastSeekTime = -1;
}
示例6: ReleaseVideoBuffer
// Tears down all media resources: drains any TextureClients still awaiting
// recycle, frees pending video buffers, and stops/clears the sources and
// native window.  (Older variant: no resource promise, and fences are read
// with GetReleaseFenceHandle rather than get-and-reset.)
void OmxDecoder::ReleaseMediaResources() {
ReleaseVideoBuffer();
ReleaseAudioBuffer();
{
Mutex::Autolock autoLock(mPendingVideoBuffersLock);
// Debug builds assert the set is already empty; release builds recover
// below by pushing the buffers back for release.
MOZ_ASSERT(mPendingRecycleTexutreClients.empty());
// Release all pending recycle TextureClients, if they are not recycled yet.
// This should not happen. See Bug 1042308.
if (!mPendingRecycleTexutreClients.empty()) {
printf_stderr("OmxDecoder::ReleaseMediaResources -- TextureClients are not recycled yet\n");
for (std::set<TextureClient*>::iterator it=mPendingRecycleTexutreClients.begin();
it!=mPendingRecycleTexutreClients.end(); it++)
{
GrallocTextureClientOGL* client = static_cast<GrallocTextureClientOGL*>(*it);
client->ClearRecycleCallback();
// Re-queue the buffer (with its release fence) so it is freed with the
// other pending buffers below.
if (client->GetMediaBuffer()) {
mPendingVideoBuffers.push(BufferItem(client->GetMediaBuffer(), client->GetReleaseFenceHandle()));
}
}
mPendingRecycleTexutreClients.clear();
}
}
{
// Free all pending video buffers.
Mutex::Autolock autoLock(mSeekLock);
ReleaseAllPendingVideoBuffersLocked();
}
if (mVideoSource.get()) {
mVideoSource->stop();
mVideoSource.clear();
}
if (mAudioSource.get()) {
mAudioSource->stop();
mAudioSource.clear();
}
mNativeWindowClient.clear();
mNativeWindow.clear();
}
示例7: autoLock
// Handles a TextureClient handed back by the compositor: removes it from the
// pending-recycle set, queues its MediaBuffer (plus release fence) for
// release, then pokes the decoder's ALooper to do the actual release off
// this thread.
void OmxDecoder::RecycleCallbackImp(TextureClient* aClient)
{
aClient->ClearRecycleCallback();
{
// Scope the lock: it must be dropped before posting the AMessage below.
Mutex::Autolock autoLock(mPendingVideoBuffersLock);
// Ignore clients we no longer track (e.g. already torn down).
if (mPendingRecycleTexutreClients.find(aClient) == mPendingRecycleTexutreClients.end()) {
printf_stderr("OmxDecoder::RecycleCallbackImp -- TextureClient is not pending recycle\n");
return;
}
mPendingRecycleTexutreClients.erase(aClient);
GrallocTextureClientOGL* client = static_cast<GrallocTextureClientOGL*>(aClient);
if (client->GetMediaBuffer()) {
mPendingVideoBuffers.push(BufferItem(client->GetMediaBuffer(), client->GetReleaseFenceHandle()));
}
}
sp<AMessage> notify =
new AMessage(kNotifyPostReleaseVideoBuffer, mReflector->id());
// post AMessage to OmxDecoder via ALooper.
notify->post();
}
示例8: GVDM_LOG
nsresult
GonkVideoDecoderManager::CreateVideoData(int64_t aStreamOffset, VideoData **v)
{
*v = nullptr;
nsRefPtr<VideoData> data;
int64_t timeUs;
int32_t keyFrame;
if (mVideoBuffer == nullptr) {
GVDM_LOG("Video Buffer is not valid!");
return NS_ERROR_UNEXPECTED;
}
if (!mVideoBuffer->meta_data()->findInt64(kKeyTime, &timeUs)) {
GVDM_LOG("Decoder did not return frame time");
return NS_ERROR_UNEXPECTED;
}
int64_t duration;
nsresult rv = QueueFrameTimeOut(timeUs, duration);
NS_ENSURE_SUCCESS(rv, rv);
if (mVideoBuffer->range_length() == 0) {
// Some decoders may return spurious empty buffers that we just want to ignore
// quoted from Android's AwesomePlayer.cpp
ReleaseVideoBuffer();
return NS_ERROR_NOT_AVAILABLE;
}
if (!mVideoBuffer->meta_data()->findInt32(kKeyIsSyncFrame, &keyFrame)) {
keyFrame = 0;
}
gfx::IntRect picture = ToIntRect(mPicture);
if (mFrameInfo.mWidth != mInitialFrame.width ||
mFrameInfo.mHeight != mInitialFrame.height) {
// Frame size is different from what the container reports. This is legal,
// and we will preserve the ratio of the crop rectangle as it
// was reported relative to the picture size reported by the container.
picture.x = (mPicture.x * mFrameInfo.mWidth) / mInitialFrame.width;
picture.y = (mPicture.y * mFrameInfo.mHeight) / mInitialFrame.height;
picture.width = (mFrameInfo.mWidth * mPicture.width) / mInitialFrame.width;
picture.height = (mFrameInfo.mHeight * mPicture.height) / mInitialFrame.height;
}
RefPtr<mozilla::layers::TextureClient> textureClient;
if ((mVideoBuffer->graphicBuffer().get())) {
textureClient = mNativeWindow->getTextureClientFromBuffer(mVideoBuffer->graphicBuffer().get());
}
if (textureClient) {
GrallocTextureClientOGL* grallocClient = static_cast<GrallocTextureClientOGL*>(textureClient.get());
grallocClient->SetMediaBuffer(mVideoBuffer);
textureClient->SetRecycleCallback(GonkVideoDecoderManager::RecycleCallback, this);
data = VideoData::Create(mInfo.mVideo,
mImageContainer,
aStreamOffset,
timeUs,
duration,
textureClient,
keyFrame,
-1,
picture);
} else {
if (!mVideoBuffer->data()) {
GVDM_LOG("No data in Video Buffer!");
return NS_ERROR_UNEXPECTED;
}
uint8_t *yuv420p_buffer = (uint8_t *)mVideoBuffer->data();
int32_t stride = mFrameInfo.mStride;
int32_t slice_height = mFrameInfo.mSliceHeight;
// Converts to OMX_COLOR_FormatYUV420Planar
if (mFrameInfo.mColorFormat != OMX_COLOR_FormatYUV420Planar) {
ARect crop;
crop.top = 0;
crop.bottom = mFrameInfo.mHeight;
crop.left = 0;
crop.right = mFrameInfo.mWidth;
yuv420p_buffer = GetColorConverterBuffer(mFrameInfo.mWidth, mFrameInfo.mHeight);
if (mColorConverter.convertDecoderOutputToI420(mVideoBuffer->data(),
mFrameInfo.mWidth, mFrameInfo.mHeight, crop, yuv420p_buffer) != OK) {
ReleaseVideoBuffer();
GVDM_LOG("Color conversion failed!");
return NS_ERROR_UNEXPECTED;
}
stride = mFrameInfo.mWidth;
slice_height = mFrameInfo.mHeight;
}
size_t yuv420p_y_size = stride * slice_height;
size_t yuv420p_u_size = ((stride + 1) / 2) * ((slice_height + 1) / 2);
uint8_t *yuv420p_y = yuv420p_buffer;
uint8_t *yuv420p_u = yuv420p_y + yuv420p_y_size;
uint8_t *yuv420p_v = yuv420p_u + yuv420p_u_size;
// This is the approximate byte position in the stream.
//.........这里部分代码省略.........
示例9: MOZ_ASSERT
// Creates a gralloc-backed SharedSurface: allocates a GrallocTextureClientOGL
// for GL rendering, wraps its GraphicBuffer in an EGLImage, and binds that
// image to a freshly generated GL texture owned by the returned surface.
// Returns nullptr if the required EGL extensions are missing or any
// allocation step fails.
SharedSurface_Gralloc*
SharedSurface_Gralloc::Create(GLContext* prodGL,
const GLFormats& formats,
const gfx::IntSize& size,
bool hasAlpha,
ISurfaceAllocator* allocator)
{
// One-time hookup of the readpixels-fence pref cache.
static bool runOnce = true;
if (runOnce) {
sForceReadPixelsToFence = false;
mozilla::Preferences::AddBoolVarCache(&sForceReadPixelsToFence,
"gfx.gralloc.fence-with-readpixels");
runOnce = false;
}
GLLibraryEGL* egl = &sEGLLibrary;
MOZ_ASSERT(egl);
DEBUG_PRINT("SharedSurface_Gralloc::Create -------\n");
if (!HasExtensions(egl, prodGL))
return nullptr;
// Pick the optimal image format for the requested alpha mode.
gfxContentType type = hasAlpha ? GFX_CONTENT_COLOR_ALPHA
: GFX_CONTENT_COLOR;
gfxImageFormat format
= gfxPlatform::GetPlatform()->OptimalFormatForContent(type);
GrallocTextureClientOGL* grallocTC =
new GrallocTextureClientOGL(
allocator,
gfx::ImageFormatToSurfaceFormat(format),
TEXTURE_FLAGS_DEFAULT);
// NOTE(review): on this failure path grallocTC is neither deleted nor
// Released, unlike the EGLImage failure path below — looks like a leak;
// confirm against TextureClient's ownership rules.
if (!grallocTC->AllocateForGLRendering(size)) {
return nullptr;
}
sp<GraphicBuffer> buffer = grallocTC->GetGraphicBuffer();
EGLDisplay display = egl->Display();
EGLClientBuffer clientBuffer = buffer->getNativeBuffer();
EGLint attrs[] = {
LOCAL_EGL_NONE, LOCAL_EGL_NONE
};
// Wrap the gralloc buffer in an EGLImage so GL can sample from it.
EGLImage image = egl->fCreateImage(display,
EGL_NO_CONTEXT,
LOCAL_EGL_NATIVE_BUFFER_ANDROID,
clientBuffer, attrs);
if (!image) {
grallocTC->DropTextureData()->DeallocateSharedData(allocator);
return nullptr;
}
prodGL->MakeCurrent();
GLuint prodTex = 0;
prodGL->fGenTextures(1, &prodTex);
ScopedBindTexture autoTex(prodGL, prodTex);
prodGL->fTexParameteri(LOCAL_GL_TEXTURE_2D, LOCAL_GL_TEXTURE_MIN_FILTER, LOCAL_GL_LINEAR);
prodGL->fTexParameteri(LOCAL_GL_TEXTURE_2D, LOCAL_GL_TEXTURE_MAG_FILTER, LOCAL_GL_LINEAR);
prodGL->fTexParameteri(LOCAL_GL_TEXTURE_2D, LOCAL_GL_TEXTURE_WRAP_S, LOCAL_GL_CLAMP_TO_EDGE);
prodGL->fTexParameteri(LOCAL_GL_TEXTURE_2D, LOCAL_GL_TEXTURE_WRAP_T, LOCAL_GL_CLAMP_TO_EDGE);
// Bind the EGLImage to the texture; the image can then be destroyed, the
// texture keeps the underlying buffer alive.
prodGL->fEGLImageTargetTexture2D(LOCAL_GL_TEXTURE_2D, image);
egl->fDestroyImage(display, image);
SharedSurface_Gralloc *surf = new SharedSurface_Gralloc(prodGL, size, hasAlpha, egl, allocator, grallocTC, prodTex);
DEBUG_PRINT("SharedSurface_Gralloc::Create: success -- surface %p, GraphicBuffer %p.\n", surf, buffer.get());
return surf;
}