本文整理汇总了C++中sp::findCString方法的典型用法代码示例。如果您正苦于以下问题:C++ sp::findCString方法的具体用法?C++ sp::findCString怎么用?C++ sp::findCString使用的例子?那么,这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类sp的用法示例。
在下文中一共展示了sp::findCString方法的9个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C++代码示例。
示例1: AMessage
// Creates and configures the underlying DashCodec for the track described by
// |meta|. Must be called at most once per Decoder instance (mCodec must still
// be NULL). Video decoders get a dedicated looper; audio shares this handler's
// looper.
void DashPlayer::Decoder::configure(const sp<MetaData> &meta) {
    CHECK(mCodec == NULL);

    const char *mime;
    CHECK(meta->findCString(kKeyMIMEType, &mime));
    ALOGV("@@@@:: Decoder::configure :: mime is --- %s ---",mime);

    sp<AMessage> notifyMsg =
        new AMessage(kWhatCodecNotify, id());

    sp<AMessage> format = makeFormat(meta);

    if (mNativeWindow != NULL) {
        format->setObject("native-window", mNativeWindow);
    }

    // Current video decoders do not return from OMX_FillThisBuffer
    // quickly, violating the OpenMAX specs, until that is remedied
    // we need to invest in an extra looper to free the main event
    // queue.
    bool isVideo = !strncasecmp(mime, "video/", 6);

    // NOTE: a previous revision re-looked-up the MIME type into a shadowed
    // local for the audio case and discarded the result; that dead code has
    // been removed — |mime| above is already validated for both cases.

    ALOGV("@@@@:: DashCodec created ");
    mCodec = new DashCodec;

    bool needDedicatedLooper = false;
    if (isVideo){
        needDedicatedLooper = true;
        if(mCodecLooper == NULL) {
            ALOGV("@@@@:: Creating Looper for %s",(isVideo?"Video":"Audio"));
            mCodecLooper = new ALooper;
            mCodecLooper->setName("DashPlayerDecoder");
            mCodecLooper->start(false, false, ANDROID_PRIORITY_AUDIO);
        }
    }

    (needDedicatedLooper ? mCodecLooper : looper())->registerHandler(mCodec);

    mCodec->setNotificationMessage(notifyMsg);
    mCodec->initiateSetup(format);
}
示例2: if
// Constructs an AudioSource for one of the supported compressed tunnel
// formats (AMR-NB/WB, QCELP, EVRC). The MIME type, channel count and sample
// rate must all be present in |meta|; any other MIME type aborts via CHECK.
AudioSource::AudioSource( audio_source_t inputSource, const sp<MetaData>& meta )
    : mStarted(false),
      mPrevSampleTimeUs(0),
      mNumFramesReceived(0),
      mNumClientOwnedBuffers(0),
      mFormat(AUDIO_FORMAT_PCM_16_BIT),
      mMime(MEDIA_MIMETYPE_AUDIO_RAW) {
    ALOGE("SK: in AudioSource : inputSource: %d", inputSource);

    const char *mimeType;
    CHECK( meta->findCString( kKeyMIMEType, &mimeType ) );
    mMime = mimeType;

    // Channel count and sample rate are the only per-format knobs the
    // tunnel formats below support; both are mandatory.
    int32_t channels = 0;
    int32_t sampleRate = 0;
    CHECK( meta->findInt32( kKeyChannelCount, &channels ) );
    CHECK( meta->findInt32( kKeySampleRate, &sampleRate ) );
    mSampleRate = sampleRate;

    // Map the MIME type to its audio format and fixed frame size.
    int32_t frameSize = -1;
    if ( !strcasecmp( mimeType, MEDIA_MIMETYPE_AUDIO_AMR_NB ) ) {
        mFormat = AUDIO_FORMAT_AMR_NB;
        frameSize = AMR_FRAMESIZE;
    } else if ( !strcasecmp( mimeType, MEDIA_MIMETYPE_AUDIO_QCELP ) ) {
        mFormat = AUDIO_FORMAT_QCELP;
        frameSize = QCELP_FRAMESIZE;
    } else if ( !strcasecmp( mimeType, MEDIA_MIMETYPE_AUDIO_EVRC ) ) {
        mFormat = AUDIO_FORMAT_EVRC;
        frameSize = EVRC_FRAMESIZE;
    } else if ( !strcasecmp( mimeType, MEDIA_MIMETYPE_AUDIO_AMR_WB ) ) {
        mFormat = AUDIO_FORMAT_AMR_WB;
        frameSize = AMR_WB_FRAMESIZE;
    } else {
        CHECK(0);   // unsupported MIME type
    }
    // Every supported format buffers ten frames.
    mMaxBufferSize = frameSize * 10;

    mAutoRampStartUs = 0;

    CHECK(channels == 1 || channels == 2);

    mRecord = new AudioRecord(
                  inputSource, sampleRate, mFormat,
                  channels > 1 ? AUDIO_CHANNEL_IN_STEREO
                               : AUDIO_CHANNEL_IN_MONO,
                  4*mMaxBufferSize/channels/frameSize,
                  AudioRecordCallbackFunction,
                  this);

    mInitCheck = mRecord->initCheck();
}
示例3: mIsAudio
// Classifies the source as audio or video from the MIME type in |meta|.
// Any MIME that is neither "audio/*" nor "video/*" aborts via CHECK.
AnotherPacketSource::AnotherPacketSource(const sp<MetaData> &meta)
    : mIsAudio(false),
      mFormat(meta),
      mEOSResult(OK) {
    const char *mimeType;
    CHECK(meta->findCString(kKeyMIMEType, &mimeType));

    if (strncasecmp("audio/", mimeType, 6) == 0) {
        mIsAudio = true;
    } else {
        // Not audio — must be video.
        CHECK(strncasecmp("video/", mimeType, 6) == 0);
    }
}
示例4: mIsAudio
// Builds a DASH packet source; mIsAudio is derived from the MIME type.
// Unlike AnotherPacketSource, non-audio MIME types are tolerated (treated
// as video/other) rather than CHECK'd.
DashPacketSource::DashPacketSource(const sp<MetaData> &meta)
    : mIsAudio(false),
      mFormat(meta),
      mEOSResult(OK),
      mStreamPID(0),
      mProgramPID(0),
      mFirstPTS(0) {
    const char *mimeType;
    CHECK(meta->findCString(kKeyMIMEType, &mimeType));

    mIsAudio = (strncasecmp("audio/", mimeType, 6) == 0);
}
示例5: setFormat
// Late-binds the stream format. May only be called while mFormat is still
// NULL. A NULL |meta| leaves the source unclassified (mIsAudio == false);
// otherwise the MIME type must be "audio/*" or "video/*".
void AnotherPacketSource::setFormat(const sp<MetaData> &meta) {
    CHECK(mFormat == NULL);

    mIsAudio = false;
    if (meta == NULL) {
        return;
    }
    mFormat = meta;

    const char *mimeType;
    CHECK(meta->findCString(kKeyMIMEType, &mimeType));

    if (strncasecmp("audio/", mimeType, 6) == 0) {
        mIsAudio = true;
    } else {
        // Not audio — must be video.
        CHECK(strncasecmp("video/", mimeType, 6) == 0);
    }
}
示例6: OMXCodecProxy
// static
// Factory: wraps video tracks in an OMXCodecProxy. Returns nullptr when the
// MIME type is missing or the track is not "video/*".
sp<OMXCodecProxy> OMXCodecProxy::Create(
        const sp<IOMX> &omx,
        const sp<MetaData> &meta, bool createEncoder,
        const sp<MediaSource> &source,
        const char *matchComponentName,
        uint32_t flags,
        const sp<ANativeWindow> &nativeWindow)
{
    const char *mime;
    if (!meta->findCString(kKeyMIMEType, &mime)) {
        return nullptr;
    }
    if (strncasecmp(mime, "video/", 6) != 0) {
        // Only video tracks are proxied.
        return nullptr;
    }
    return new OMXCodecProxy(omx, meta, createEncoder, source,
                             matchComponentName, flags, nativeWindow);
}
示例7: prepare
// Prepares the recording pipeline: camera -> encoder -> RTP writer.
// On any failure, stop() is invoked to tear down partially-built state and
// the failing status is returned.
status_t VideoRecorderCtrl::prepare(const sp<MetaData> &videoMetaData, void *cbData, int remote_port,
                                    int vRTPSocket, int vRTCPSocket, psdpcb sdpcb){
    F_LOG;

    // Initialize lookup targets: the findCString/findPointer results are not
    // checked, so without this a missing key would leave uninitialized stack
    // garbage in these pointers (undefined behavior downstream).
    const char *remoteIP = NULL;
    Surface *previewSurface = NULL;
    status_t retval;

    videoMetaData->findCString(kKeyP2PRemoteIP, (const char**)&remoteIP);
    videoMetaData->findPointer(kKeyP2PPreviewSurface, (void **) &previewSurface);

    CHECK_EQ(m_omxClient.connect(), OK);
    m_IOMX = m_omxClient.interface();

    retval = m_cameraCtrl.setupCamera(videoMetaData, previewSurface);
    if (retval == OK) {
        retval = m_encoderCtrl.setupEncoder(m_IOMX, m_cameraCtrl.GetCameraSource(), videoMetaData);
        if (retval == OK) {
            m_pRTPWriter = new ARTPStreamer(remote_port, vRTPSocket, vRTCPSocket, remoteIP, cbData, sdpcb);
            if (m_pRTPWriter != NULL) {
                retval = m_pRTPWriter->addSource(m_encoderCtrl.GetEncoderMediaSource().get());
                if (retval != OK) {
                    LOGE("RTPWriter->addSource failed !!!");
                }
            } else {
                LOGE("Unable to allocate ARTPWriter !!!");
                retval = NO_MEMORY;
            }
        } else {
            LOGE("m_encoderCtrl.setupEncoder failed !!!");
        }
    } else {
        LOGE("cameraCtrl.setupCamera failed !!!");
    }

    // Any failure above unwinds whatever was set up.
    if (retval != OK)
        stop();

    return retval;
}
示例8: CHECK
// Returns true iff some hardware decoder for the track's MIME type
// advertises OMX_COLOR_FormatYUV420Planar among its color formats.
static bool isYUV420PlanarSupported(
        OMXClient *client,
        const sp<MetaData> &trackMeta) {
    const char *mime;
    CHECK(trackMeta->findCString(kKeyMIMEType, &mime));

    Vector<CodecCapabilities> caps;
    if (QueryCodecs(client->interface(), mime,
                    true, /* queryDecoders */
                    true, /* hwCodecOnly */
                    &caps) != OK) {
        return false;
    }

    for (size_t capIdx = 0; capIdx < caps.size(); ++capIdx) {
        const CodecCapabilities &cap = caps[capIdx];
        for (size_t fmtIdx = 0; fmtIdx < cap.mColorFormats.size(); ++fmtIdx) {
            if (cap.mColorFormats[fmtIdx] == OMX_COLOR_FormatYUV420Planar) {
                return true;
            }
        }
    }
    return false;
}
示例9: configureVideoCodec
// Applies QC-specific video decoder configuration: frame-packing mode,
// timestamp reordering for VC1/MPEG4/AVI, and sync-frame (thumbnail) decode
// mode. No-op for non-QC components.
void ExtendedCodec::configureVideoCodec(
        const sp<MetaData> &meta, sp<IOMX> OMXhandle,
        const uint32_t flags, IOMX::node_id nodeID, char* componentName ) {
    if (strncmp(componentName, "OMX.qcom.", 9)) {
        //do nothing for non QC component
        return;
    }

    int32_t arbitraryMode = 0;
    bool success = meta->findInt32(kKeyUseArbitraryMode, &arbitraryMode);
    bool useFrameByFrameMode = true; //default option
    if (success && arbitraryMode) {
        useFrameByFrameMode = false;
    }

    if (useFrameByFrameMode) {
        ALOGI("Enable frame by frame mode");
        OMX_QCOM_PARAM_PORTDEFINITIONTYPE portFmt;
        // FIX: initialize nSize/nVersion (and zero the struct) before handing
        // it to the component — was previously uninitialized, unlike the
        // reorder struct below.
        InitOMXParams(&portFmt);
        portFmt.nPortIndex = kPortIndexInput;
        portFmt.nFramePackingFormat = OMX_QCOM_FramePacking_OnlyOneCompleteFrame;
        status_t err = OMXhandle->setParameter(
                nodeID, (OMX_INDEXTYPE)OMX_QcomIndexPortDefn, (void *)&portFmt, sizeof(portFmt));
        if(err != OK) {
            ALOGW("Failed to set frame packing format on component");
        }
    } else {
        ALOGI("Decoder should be in arbitrary mode");
    }

    // Enable timestamp reordering for AVI file type, mpeg4 and vc1 codec types
    const char *fileFormat;
    success = meta->findCString(kKeyFileFormat, &fileFormat);
    if (!strcmp(componentName, "OMX.qcom.video.decoder.vc1") ||
        !strcmp(componentName, "OMX.qcom.video.decoder.mpeg4") ||
        (success && !strncmp(fileFormat, "video/avi", 9))) {
        ALOGI("Enabling timestamp reordering");
        QOMX_INDEXTIMESTAMPREORDER reorder;
        InitOMXParams(&reorder);
        reorder.nPortIndex = kPortIndexOutput;
        reorder.bEnable = OMX_TRUE;
        status_t err = OMXhandle->setParameter(nodeID,
                (OMX_INDEXTYPE)OMX_QcomIndexParamEnableTimeStampReorder,
                (void *)&reorder, sizeof(reorder));
        if(err != OK) {
            ALOGW("Failed to enable timestamp reordering");
        }
    }

    // Enable Sync-frame decode mode for thumbnails
    if (flags & OMXCodec::kClientNeedsFramebuffer) {
        ALOGV("Enabling thumbnail mode.");
        QOMX_ENABLETYPE enableType;
        OMX_INDEXTYPE indexType;

        status_t err = OMXhandle->getExtensionIndex(
                nodeID, OMX_QCOM_INDEX_PARAM_VIDEO_SYNCFRAMEDECODINGMODE,
                &indexType);
        if(err != OK) {
            ALOGW("Failed to get extension for SYNCFRAMEDECODINGMODE");
            return;
        }

        enableType.bEnable = OMX_TRUE;
        err = OMXhandle->setParameter(nodeID,indexType,
                (void *)&enableType, sizeof(enableType));
        if(err != OK) {
            ALOGW("Failed to get extension for SYNCFRAMEDECODINGMODE");
            return;
        }
        ALOGI("Thumbnail mode enabled.");
    }
}