当前位置: 首页>>代码示例>>C++>>正文


C++ android::sp类代码示例

本文整理汇总了C++中android::sp的典型用法代码示例。如果您正苦于以下问题:C++ sp类的具体用法?C++ sp怎么用?C++ sp使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。


在下文中一共展示了sp类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C++代码示例。

示例1: postData

// Forwards camera data callbacks from the camera HAL to the registered C
// listener (raw and compressed image payloads only), then hands the buffer
// back to the camera service.
void CameraControl::postData(
		int32_t msg_type,
		const android::sp<android::IMemory>& data,
		camera_frame_metadata_t* metadata)
{
	REPORT_FUNCTION();

	if (!listener)
		return;

	if (msg_type == CAMERA_MSG_RAW_IMAGE) {
		if (listener->on_data_raw_image_cb)
			listener->on_data_raw_image_cb(data->pointer(), data->size(), listener->context);
	} else if (msg_type == CAMERA_MSG_COMPRESSED_IMAGE) {
		if (listener->on_data_compressed_image_cb)
			listener->on_data_compressed_image_cb(data->pointer(), data->size(), listener->context);
	}

	// Return the IMemory buffer so the camera service can reuse it.
	camera->releaseRecordingFrame(data);
}
开发者ID:F35X70,项目名称:libhybris,代码行数:25,代码来源:camera_compatibility_layer.cpp

示例2:

// Records aReleaseFence as the fence that must signal before this texture's
// buffer can be reused.  If a fence is already pending, the two are merged.
// Returns true when a fence was stored, false when none was provided or the
// merge failed.
bool
TextureHostOGL::SetReleaseFence(const android::sp<android::Fence>& aReleaseFence)
{
  // HWC might not provide Fence.
  // In this case, HWC implicitly handles buffer's fence.
  if (!aReleaseFence.get() || !aReleaseFence->isValid()) {
    return false;
  }

  if (mReleaseFence.get()) {
    android::sp<android::Fence> combined = android::Fence::merge(
        android::String8::format("TextureHostOGL"),
        mReleaseFence, aReleaseFence);
    if (combined.get()) {
      mReleaseFence = combined;
      return true;
    }
    // synchronization is broken, the best we can do is hope fences
    // signal in order so the new fence will act like a union.
    // This error handling is same as android::ConsumerBase does.
    mReleaseFence = aReleaseFence;
    return false;
  }

  // No fence pending yet; just adopt the new one.
  mReleaseFence = aReleaseFence;
  return true;
}
开发者ID:Acidburn0zzz,项目名称:tor-browser,代码行数:26,代码来源:TextureHostOGL.cpp

示例3: submitRequestFrame

// Dispatches a pipeline frame to the root node of its DAG.  While a flush is
// in progress the frame is flushed instead of queued.  Returns OK on success,
// DEAD_OBJECT when the frame's node map or root node is unusable.
MERROR
PipelineModelBase::
submitRequestFrame(
    android::sp<IPipelineFrame> pFrame
)
{
    sp<IPipelineNodeMap const> const pNodeMap = pFrame->getPipelineNodeMap();
    if  ( pNodeMap == 0 || pNodeMap->isEmpty() ) {
        MY_LOGE("[frameNo:%d] Bad PipelineNodeMap:%p", pFrame->getFrameNo(), pNodeMap.get());
        return DEAD_OBJECT;
    }

    IPipelineDAG::NodeObj_T const rootObj = pFrame->getPipelineDAG().getRootNode();
    sp<IPipelineNode> const pRootNode = pNodeMap->nodeAt(rootObj.val);
    if  ( pRootNode == 0 ) {
        MY_LOGE("[frameNo:%d] Bad root node", pFrame->getFrameNo());
        return DEAD_OBJECT;
    }

    // Hold the flush read-lock so mInFlush cannot flip while we decide
    // whether this frame is queued for processing or flushed through.
    RWLock::AutoRLock _l(mflushLock);
    return (mInFlush == MTRUE) ? pRootNode->flush(pFrame)
                               : pRootNode->queue(pFrame);
}
开发者ID:,项目名称:,代码行数:29,代码来源:

示例4: unbind

// Detaches this binder from the visualizer by clearing its internal
// periodic-capture listener.  Returns OSLMP_RESULT_ILLEGAL_ARGUMENT when no
// visualizer instance is supplied.
int OpenSLMediaPlayerVisualizerJNIBinder::unbind(const android::sp<OpenSLMediaPlayerVisualizer> &visualizer) noexcept
{
    if (visualizer.get()) {
        return visualizer->setInternalPeriodicCaptureThreadEventListener(nullptr, 0, false, false);
    }
    return OSLMP_RESULT_ILLEGAL_ARGUMENT;
}
开发者ID:HKingz,项目名称:android-openslmediaplayer,代码行数:7,代码来源:OpenSLMediaPlayerVisualizerJNIBinder.cpp

示例5: bind

// Attaches this binder to the visualizer as its periodic-capture listener,
// requesting the given capture rate and data kinds (waveform and/or FFT).
// Returns OSLMP_RESULT_ILLEGAL_ARGUMENT when no visualizer is supplied.
int OpenSLMediaPlayerHQVisualizerJNIBinder::bind(const android::sp<OpenSLMediaPlayerHQVisualizer> &visualizer,
                                                 uint32_t rate, bool waveform, bool fft) noexcept
{
    if (visualizer.get()) {
        return visualizer->setInternalPeriodicCaptureThreadEventListener(this, rate, waveform, fft);
    }
    return OSLMP_RESULT_ILLEGAL_ARGUMENT;
}
开发者ID:HKingz,项目名称:android-openslmediaplayer,代码行数:9,代码来源:OpenSLMediaPlayerHQVisualizerJNIBinder.cpp

示例6: android_Player_setPlayState

/**
 * Drives the wrapped android::GenericPlayer to the requested OpenMAX AL play
 * state, tracking (and advancing) the object's lifecycle state so that
 * prepare() is issued exactly once before pause()/play().
 *
 * pre-condition: gp != 0
 *
 * @param gp        strong pointer to the wrapped Android player (non-null).
 * @param playState one of SL_PLAYSTATE_STOPPED / _PAUSED / _PLAYING; other
 *                  values are assumed to have been rejected by the caller.
 * @param pObjState in/out lifecycle state; set to ANDROID_PREPARING when this
 *                  call kicks off asynchronous preparation.
 * @return XA_RESULT_SUCCESS in all cases.
 */
XAresult android_Player_setPlayState(const android::sp<android::GenericPlayer> &gp,
        SLuint32 playState,
        AndroidObjectState* pObjState)
{
    XAresult result = XA_RESULT_SUCCESS;
    AndroidObjectState objState = *pObjState;

    switch (playState) {
     case SL_PLAYSTATE_STOPPED: {
         SL_LOGV("setting AVPlayer to SL_PLAYSTATE_STOPPED");
         gp->stop();
         }
         break;
     case SL_PLAYSTATE_PAUSED: {
         SL_LOGV("setting AVPlayer to SL_PLAYSTATE_PAUSED");
         switch(objState) {
         case ANDROID_UNINITIALIZED:
             // First transition out of uninitialized: start async prepare;
             // the player stays paused once preparation completes.
             *pObjState = ANDROID_PREPARING;
             gp->prepare();
             break;
         case ANDROID_PREPARING:
             // Prepare already in flight; nothing more to do here.
             break;
         case ANDROID_READY:
             gp->pause();
             break;
         default:
             SL_LOGE("Android object in invalid state");
             break;
         }
         }
         break;
     case SL_PLAYSTATE_PLAYING: {
         SL_LOGV("setting AVPlayer to SL_PLAYSTATE_PLAYING");
         switch(objState) {
         case ANDROID_UNINITIALIZED:
             // Not yet prepared: kick off preparation, then fall through so
             // play() is requested immediately after.
             *pObjState = ANDROID_PREPARING;
             gp->prepare();
             // intended fall through
         case ANDROID_PREPARING:
             // intended fall through
         case ANDROID_READY:
             gp->play();
             break;
         default:
             SL_LOGE("Android object in invalid state");
             break;
         }
         }
         break;
     default:
         // checked by caller, should not happen
         break;
     }

    return result;
}
开发者ID:DARKPOP,项目名称:frameworks_wilhelm,代码行数:59,代码来源:MediaPlayer_to_android.cpp

示例7: setVideoSurfaceTexture

    // Wires the supplied SurfaceTexture up as the video sink: sizes its
    // buffer queue to 5 buffers, installs our frame-available listener, and
    // hands the buffer queue to the underlying Android MediaPlayer.
    android::status_t setVideoSurfaceTexture(const android::sp<android::SurfaceTexture> &surfaceTexture)
    {
        REPORT_FUNCTION();

        texture = surfaceTexture;
        surfaceTexture->getBufferQueue()->setBufferCount(5);
        texture->setFrameAvailableListener(frame_listener);

        return MediaPlayer::setVideoSurfaceTexture(surfaceTexture->getBufferQueue());
    }
开发者ID:CoryXie,项目名称:ubuntu-phablet_libhybris,代码行数:10,代码来源:media_compatibility_layer.cpp

示例8: SurfaceTexture_setSurfaceTexture

// Replaces the strong reference held in thiz->mSurfaceTexture.  The incoming
// object is referenced before the previous one is released so that re-setting
// the same SurfaceTexture cannot transiently destroy it.
static void SurfaceTexture_setSurfaceTexture(
    /* [in] */ CSurfaceTexture* thiz,
    /* [in] */ const android::sp<android::SurfaceTexture>& surfaceTexture)
{
    android::SurfaceTexture* const previous =
        (android::SurfaceTexture*)thiz->mSurfaceTexture;
    android::SurfaceTexture* const incoming = surfaceTexture.get();

    if (incoming != NULL) {
        incoming->incStrong(thiz);
    }
    if (previous != NULL) {
        previous->decStrong(thiz);
    }
    // NOTE(review): the pointer is stored in an Int32 field, which assumes a
    // 32-bit process — confirm against CSurfaceTexture's declaration.
    thiz->mSurfaceTexture = (Int32)incoming;
}
开发者ID:TheTypoMaster,项目名称:ElastosRDK5_0,代码行数:13,代码来源:CSurfaceTexture.cpp

示例9: SurfaceTexture_setSurfaceTexture

// Replaces the strong reference held in thiz->mSurfaceTexture with the given
// GLConsumer.  The new consumer is referenced before the old one is released
// so that re-setting the same object stays safe.  This function's own address
// serves as the RefBase ownership id.
static void SurfaceTexture_setSurfaceTexture(
    /* [in] */ CSurfaceTexture* thiz,
    /* [in] */ const android::sp<GLConsumer>& surfaceTexture)
{
    GLConsumer* const previous = (GLConsumer*)thiz->mSurfaceTexture;
    GLConsumer* const incoming = surfaceTexture.get();

    if (incoming != NULL) {
        incoming->incStrong((void*)SurfaceTexture_setSurfaceTexture);
    }
    if (previous != NULL) {
        previous->decStrong((void*)SurfaceTexture_setSurfaceTexture);
    }
    thiz->mSurfaceTexture = (Int64)incoming;
}
开发者ID:,项目名称:,代码行数:13,代码来源:

示例10: doPlaybackOrRecord

// Writes the not-yet-consumed portion of |buffer| to the local PCM device.
// At most mSizes bytes are written per call; mSizes is clamped down when less
// data remains.  Returns false when pcm_write reports an error.
bool AudioPlaybackLocal::doPlaybackOrRecord(android::sp<Buffer>& buffer)
{
    const size_t remaining = buffer->amountToHandle();
    if (remaining < (size_t)mSizes) {
        mSizes = remaining;
    }

    if (pcm_write(mPcmHandle, buffer->getUnhanledData(), mSizes)) {
        LOGE("AudioPlaybackLocal error %s", pcm_get_error(mPcmHandle));
        return false;
    }

    buffer->increaseHandled(mSizes);
    LOGV("AudioPlaybackLocal::doPlaybackOrRecord %d", buffer->amountHandled());
    return true;
}
开发者ID:Abocer,项目名称:android-4.2_r1,代码行数:13,代码来源:AudioPlaybackLocal.cpp

示例11: SurfaceTexture_setFrameAvailableListener

// Replaces the strong reference held in thiz->mFrameAvailableListener.
// The incoming listener is referenced before the old one is released so
// that re-setting the same listener stays safe.
static void SurfaceTexture_setFrameAvailableListener(
    /* [in] */ CSurfaceTexture* thiz,
    /* [in] */ android::sp<GLConsumer::FrameAvailableListener> listener)
{
    GLConsumer::FrameAvailableListener* const p = (GLConsumer::FrameAvailableListener*)thiz->mFrameAvailableListener;
    if (listener.get()) {
        // FIX: use this function's own address as the RefBase ownership id,
        // matching the sibling setters.  The original passed
        // SurfaceTexture_setSurfaceTexture here (copy-paste slip), which
        // mis-attributes the reference in RefBase ownership tracking.
        listener->incStrong((void*)SurfaceTexture_setFrameAvailableListener);
    }
    if (p) {
        p->decStrong((void*)SurfaceTexture_setFrameAvailableListener);
    }
    thiz->mFrameAvailableListener = (Int64)listener.get();
}
开发者ID:,项目名称:,代码行数:13,代码来源:

示例12: SurfaceTexture_setProducer

// Replaces the strong reference held in thiz->mProducer.  The new producer
// is referenced before the old one is released so that re-setting the same
// producer cannot transiently destroy it.
static void SurfaceTexture_setProducer(
    /* [in] */ CSurfaceTexture* thiz,
    /* [in] */ const android::sp<IGraphicBufferProducer>& producer)
{
    IGraphicBufferProducer* const previous =
        (IGraphicBufferProducer*)thiz->mProducer;
    IGraphicBufferProducer* const incoming = producer.get();

    if (incoming != NULL) {
        incoming->incStrong((void*)SurfaceTexture_setProducer);
    }
    if (previous != NULL) {
        previous->decStrong((void*)SurfaceTexture_setProducer);
    }
    thiz->mProducer = (Int64)incoming;
}
开发者ID:,项目名称:,代码行数:14,代码来源:

示例13: android_fxSend_attach

/**
 * Attaches (or detaches) an auxiliary audio effect to the player's output and
 * applies the requested send level.  Handles three player configurations:
 * decoded-audio players (mAPlayer), PCM players (mAudioTrack), and players
 * not yet fully configured, for which the setting is stored and deferred.
 *
 * pre-condition:
 *    ap != NULL
 *    for media players:
 *      ap->mAPlayer != 0
 *      ap->mTrackPlayer->mAudioTrack == 0
 *    for buffer queue players:
 *      ap->mAPlayer == 0
 *      ap->mTrackPlayer->mAudioTrack != 0 is optional; if no track yet then the setting is deferred
 *
 * @param ap        the audio player to modify (non-null, see pre-condition).
 * @param attach    true to attach the effect, false to detach it.
 * @param pFx       the auxiliary effect; INVALID_OPERATION when null.
 * @param sendLevel desired send level in millibels.
 * @return NO_ERROR on success (or deferral), INVALID_OPERATION when pFx is
 *         null, otherwise the status from the underlying AudioTrack calls.
 */
android::status_t android_fxSend_attach(CAudioPlayer* ap, bool attach,
        const android::sp<android::AudioEffect>& pFx, SLmillibel sendLevel) {

    if (pFx == 0) {
        return android::INVALID_OPERATION;
    }

    // There are 3 cases:
    //  mAPlayer != 0 && mAudioTrack == 0 means playing decoded audio
    //  mAPlayer == 0 && mAudioTrack != 0 means playing PCM audio
    //  mAPlayer == 0 && mAudioTrack == 0 means player not fully configured yet
    // The asserts document and verify this.
    if (ap->mAPlayer != 0) {
        assert(ap->mTrackPlayer->mAudioTrack == 0);
        if (attach) {
            ap->mAPlayer->attachAuxEffect(pFx->id());
            ap->mAPlayer->setAuxEffectSendLevel( sles_to_android_amplification(sendLevel) );
        } else {
            // effect id 0 means "no aux effect" to the player
            ap->mAPlayer->attachAuxEffect(0);
        }
        return android::NO_ERROR;
    }

    if (ap->mTrackPlayer->mAudioTrack == 0) {
        // the player doesn't have an AudioTrack at the moment, so store this info to use it
        // when the AudioTrack becomes available
        if (attach) {
            ap->mAuxEffect = pFx;
        } else {
            ap->mAuxEffect.clear();
        }
        // we keep track of the send level, independently of the current audio player level
        ap->mAuxSendLevel = sendLevel - ap->mVolume.mLevel;
        return android::NO_ERROR;
    }

    if (attach) {
        android::status_t status = ap->mTrackPlayer->mAudioTrack->attachAuxEffect(pFx->id());
        //SL_LOGV("attachAuxEffect(%d) returned %d", pFx->id(), status);
        if (android::NO_ERROR == status) {
            // only apply the send level once the effect is actually attached
            status =
                ap->mTrackPlayer->mAudioTrack->setAuxEffectSendLevel(
                        sles_to_android_amplification(sendLevel) );
        }
        return status;
    } else {
        return ap->mTrackPlayer->mAudioTrack->attachAuxEffect(0);
    }
}
开发者ID:MIPS,项目名称:frameworks-wilhelm,代码行数:59,代码来源:android_Effect.cpp

示例14: onDisplayConnected

    // Called by the native media server when a remote display connects.
    // Builds an android::Surface over the given surface texture, mirrors it
    // into an Elastos ISurface and forwards the connection notification.
    virtual void onDisplayConnected(
        /* [in] */ const android::sp<android::ISurfaceTexture>& surfaceTexture,
        /* [in] */ uint32_t width,
        /* [in] */ uint32_t height,
        /* [in] */ uint32_t flags)
    {
        if (surfaceTexture == NULL) {
            return;
        }

        android::sp<android::Surface> nativeSurface(new android::Surface(surfaceTexture));
        if (nativeSurface == NULL) {
            Logger::E("NativeRemoteDisplayClient", "Could not create Surface from surface texture %p provided by media server.",
                surfaceTexture.get());
            return;
        }

        AutoPtr<ISurface> surfaceObj;
        CSurface::New((ISurface**)&surfaceObj);
        if (surfaceObj == NULL) {
            Logger::E("NativeRemoteDisplayClient", "Could not create instance of Surface from ISurfaceTexture.");
            return;
        }

        // Hand the native surface pointer over to the Elastos wrapper.
        surfaceObj->SetSurface((Handle32)nativeSurface.get());

        mRemoteDisplayObjGlobal->NotifyDisplayConnected(surfaceObj, width, height, flags);

        CheckAndClearExceptionFromCallback("notifyDisplayConnected");
    }
开发者ID:TheTypoMaster,项目名称:ElastosRDK5_0,代码行数:30,代码来源:CRemoteDisplay.cpp

示例15: onDisplayConnected

    // Called by the native media server when a remote display connects.
    // Wraps the supplied buffer producer in an android::Surface, creates an
    // Elastos ISurface and forwards the connection notification (including
    // the session id).
    virtual void onDisplayConnected(
        /* [in] */ const android::sp<android::IGraphicBufferProducer>& bufferProducer,
        /* [in] */ uint32_t width,
        /* [in] */ uint32_t height,
        /* [in] */ uint32_t flags,
        /* [in] */ uint32_t session)
    {
        if (bufferProducer == NULL) {
            return;
        }

        android::sp<android::Surface> nativeSurface(new android::Surface(bufferProducer));
        if (nativeSurface == NULL) {
            Logger::E("NativeRemoteDisplayClient", "Could not create Surface from surface texture %p provided by media server.",
                bufferProducer.get());
            return;
        }

        AutoPtr<ISurface> surfaceObj;
        CSurface::New((ISurface**)&surfaceObj);
        if (surfaceObj == NULL) {
            Logger::E("NativeRemoteDisplayClient", "Could not create instance of Surface from ISurfaceTexture.");
            return;
        }

        // NOTE(review): unlike the ISurfaceTexture variant, the native
        // surface is never passed to surfaceObj here (no SetSurface call) —
        // confirm this is intentional.
        mRemoteDisplayObjGlobal->NotifyDisplayConnected(surfaceObj, width, height, flags, session);

        CheckAndClearExceptionFromCallback("notifyDisplayConnected");
    }
开发者ID:elastos,项目名称:Elastos5,代码行数:29,代码来源:CRemoteDisplay.cpp


注:本文中的android::sp类示例由纯净天空整理自Github/MSDocs等开源代码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。