本文整理汇总了C++中sp::findInt32方法的典型用法代码示例。如果您正苦于以下问题:C++ sp::findInt32方法的具体用法?C++ sp::findInt32怎么用?C++ sp::findInt32使用的例子?那么, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类sp
的用法示例。
在下文中一共展示了sp::findInt32方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C++代码示例。
示例1: FindData
// Refresh this track's crypto parameters from |aMetaData|. The track is
// marked valid only when the crypto mode, the default IV size and the key
// are all present; lookups stop at the first missing field.
void
CryptoTrack::Update(sp<MetaData>& aMetaData)
{
  valid = false;
  if (!aMetaData->findInt32(kKeyCryptoMode, &mode)) {
    return;
  }
  if (!aMetaData->findInt32(kKeyCryptoDefaultIVSize, &iv_size)) {
    return;
  }
  valid = FindData(aMetaData, kKeyCryptoKey, &key);
}
示例2: overrideComponentName
// Redirect selected tracks to the FFMPEG OMX decoders for formats the
// default components cannot handle: WMV version 7 (wmvVersion == 1),
// WMA tracks that carry no encode options, and AAC MAIN profile.
void FFMPEGSoftCodec::overrideComponentName(
        uint32_t /*quirks*/, const sp<AMessage> &msg, AString* componentName, AString* mime, int32_t isEncoder) {
    const char* mimeStr = mime->c_str();

    const bool isWMV = !strncasecmp(mimeStr, MEDIA_MIMETYPE_VIDEO_WMV,
            strlen(MEDIA_MIMETYPE_VIDEO_WMV));
    int32_t wmvVersion = 0;
    if (isWMV && msg->findInt32(ExtendedCodec::getMsgKey(kKeyWMVVersion), &wmvVersion)) {
        ALOGD("Found WMV version key %d", wmvVersion);
        if (wmvVersion == 1) {
            ALOGD("Use FFMPEG for unsupported WMV track");
            componentName->setTo("OMX.ffmpeg.wmv.decoder");
        }
    }

    // The remaining overrides only apply to decoding.
    if (!isEncoder) {
        const bool isWMA = !strncasecmp(mimeStr, MEDIA_MIMETYPE_AUDIO_WMA,
                strlen(MEDIA_MIMETYPE_AUDIO_WMA));
        int32_t encodeOptions = 0;
        // Absence of the encode-options key marks an unsupported WMA variant.
        if (isWMA && !msg->findInt32(ExtendedCodec::getMsgKey(kKeyWMAEncodeOpt), &encodeOptions)) {
            ALOGD("Use FFMPEG for unsupported WMA track");
            componentName->setTo("OMX.ffmpeg.wma.decoder");
        }

        // Google's decoder doesn't support MAIN profile
        const bool isAAC = !strncasecmp(mimeStr, MEDIA_MIMETYPE_AUDIO_AAC,
                strlen(MEDIA_MIMETYPE_AUDIO_AAC));
        int32_t aacProfile = 0;
        if (isAAC && msg->findInt32(ExtendedCodec::getMsgKey(kKeyAACAOT), &aacProfile)
                && aacProfile == OMX_AUDIO_AACObjectMain) {
            ALOGD("Use FFMPEG for AAC MAIN profile");
            componentName->setTo("OMX.ffmpeg.aac.decoder");
        }
    }
}
示例3: setFFmpegVideoFormat
// Configure the FFMPEG video decoder's input port with the codec id and
// frame dimensions carried in |msg|. Returns the first OMX error, or OK.
status_t FFMPEGSoftCodec::setFFmpegVideoFormat(
        const sp<AMessage> &msg, sp<IOMX> OMXhandle, IOMX::node_id nodeID)
{
    int32_t codec_id = 0;
    int32_t width = 0;
    int32_t height = 0;
    OMX_VIDEO_PARAM_FFMPEGTYPE param;

    ALOGD("setFFmpegVideoFormat");

    // These keys are mandatory for an FFMPEG video track.
    CHECK(msg->findInt32(ExtendedCodec::getMsgKey(kKeyCodecId), &codec_id));
    CHECK(msg->findInt32(ExtendedCodec::getMsgKey(kKeyWidth), &width));
    CHECK(msg->findInt32(ExtendedCodec::getMsgKey(kKeyHeight), &height));

    // BUGFIX: "&param" was HTML-mangled to "¶m" in the original snippet,
    // which does not compile; restored throughout.
    InitOMXParams(&param);
    param.nPortIndex = kPortIndexInput;

    status_t err = OMXhandle->getParameter(
            nodeID, OMX_IndexParamVideoFFmpeg, &param, sizeof(param));
    if (err != OK)
        return err;

    param.eCodecId = codec_id;
    param.nWidth = width;
    param.nHeight = height;

    err = OMXhandle->setParameter(
            nodeID, OMX_IndexParamVideoFFmpeg, &param, sizeof(param));
    return err;
}
示例4: SendMetaDataToHal
// Forward codec metadata (sample rate, channel mask, bit rate, encoder
// delay/padding) to the audio HAL via the sink's parameter string. Keys
// absent from |aMeta| are simply skipped.
void AudioOffloadPlayer::SendMetaDataToHal(sp<AudioSink>& aSink,
                                           const sp<MetaData>& aMeta)
{
  int32_t sampleRate = 0;
  int32_t bitRate = 0;
  int32_t channelMask = 0;
  int32_t delaySamples = 0;
  int32_t paddingSamples = 0;

  CHECK(aSink.get());

  AudioParameter param = AudioParameter();

  // Add a HAL key/value pair only when the metadata actually carries the key.
  auto addIfPresent = [&aMeta, &param](uint32_t metaKey, const char* halKey,
                                       int32_t* value) {
    if (aMeta->findInt32(metaKey, value)) {
      param.addInt(String8(halKey), *value);
    }
  };

  addIfPresent(kKeySampleRate, AUDIO_OFFLOAD_CODEC_SAMPLE_RATE, &sampleRate);
  addIfPresent(kKeyChannelMask, AUDIO_OFFLOAD_CODEC_NUM_CHANNEL, &channelMask);
  addIfPresent(kKeyBitRate, AUDIO_OFFLOAD_CODEC_AVG_BIT_RATE, &bitRate);
  addIfPresent(kKeyEncoderDelay, AUDIO_OFFLOAD_CODEC_DELAY_SAMPLES, &delaySamples);
  addIfPresent(kKeyEncoderPadding, AUDIO_OFFLOAD_CODEC_PADDING_SAMPLES,
               &paddingSamples);

  AUDIO_OFFLOAD_LOG(PR_LOG_DEBUG, ("SendMetaDataToHal: bitRate %d,"
      " sampleRate %d, chanMask %d, delaySample %d, paddingSample %d", bitRate,
      sampleRate, channelMask, delaySamples, paddingSamples));

  aSink->SetParameters(param.toString());
}
示例5: setDTSFormat
// Configure the OMX DTS audio input port. Channel count and sample rate
// are mandatory in |msg|; the raw PCM output path is set up first via
// setRawAudioFormat(). Returns the first failing status, or OK.
status_t FFMPEGSoftCodec::setDTSFormat(
        const sp<AMessage> &msg, sp<IOMX> OMXhandle, IOMX::node_id nodeID)
{
    int32_t numChannels = 0;
    int32_t sampleRate = 0;
    // Removed unused local `bitsPerSample` from the original.
    OMX_AUDIO_PARAM_DTSTYPE param;

    CHECK(msg->findInt32(ExtendedCodec::getMsgKey(kKeyChannelCount), &numChannels));
    CHECK(msg->findInt32(ExtendedCodec::getMsgKey(kKeySampleRate), &sampleRate));

    ALOGV("Channels: %d, SampleRate: %d",
            numChannels, sampleRate);

    status_t err = setRawAudioFormat(msg, OMXhandle, nodeID);
    if (err != OK)
        return err;

    // BUGFIX: "&param" was HTML-mangled to "¶m" in the original snippet,
    // which does not compile; restored throughout.
    InitOMXParams(&param);
    param.nPortIndex = kPortIndexInput;

    err = OMXhandle->getParameter(
            nodeID, OMX_IndexParamAudioDts, &param, sizeof(param));
    if (err != OK)
        return err;

    param.nChannels = numChannels;
    param.nSamplingRate = sampleRate;

    return OMXhandle->setParameter(
            nodeID, OMX_IndexParamAudioDts, &param, sizeof(param));
}
示例6: CHECK
void NuPlayer::Renderer::onQueueEOS(const sp<AMessage> &msg) {
int32_t audio;
CHECK(msg->findInt32("audio", &audio));
if (dropBufferWhileFlushing(audio, msg)) {
return;
}
int32_t finalResult;
CHECK(msg->findInt32("finalResult", &finalResult));
QueueEntry entry;
entry.mOffset = 0;
entry.mFinalResult = finalResult;
if (audio) {
if (mAudioQueue.empty() && mSyncQueues) {
syncQueuesDone();
}
mAudioQueue.push_back(entry);
postDrainAudioQueue();
} else {
if (mVideoQueue.empty() && mSyncQueues) {
syncQueuesDone();
}
mVideoQueue.push_back(entry);
postDrainVideoQueue();
}
}
示例7: onReconnect
// Handler for a deferred reconnect request. Verifies the request still
// belongs to the current connection attempt, then retries the socket
// connection against the previously resolved address list (mAddrHeader).
// Posts the outcome back through the "reply" message.
void ARTSPConnection::onReconnect(const sp<AMessage> &msg) {
    ALOGV("onReconnect");
    sp<AMessage> reply;
    CHECK(msg->findMessage("reply", &reply));
    int32_t connectionID;
    CHECK(msg->findInt32("connection-id", &connectionID));
    if ((connectionID != mConnectionID) || mState != CONNECTING) {
        // While we were attempting to connect, the attempt was
        // cancelled.
        reply->setInt32("result", -ECONNABORTED);
        reply->post();
        // Free the cached getaddrinfo() result; it belongs to the stale attempt.
        if (mAddrHeader != NULL) {
            freeaddrinfo((struct addrinfo *)mAddrHeader);
            mAddrHeader = NULL;
        }
        return;
    }
    int32_t port;
    CHECK(msg->findInt32("port", &port));
    if (!createSocketAndConnect(mAddrHeader, port, reply)) {
        ALOGV("Failed to reconnect");
        // errno is read immediately after the failed connect attempt, before
        // any other call can overwrite it.
        reply->setInt32("result", -errno);
        mState = DISCONNECTED;
        mSocket = -1;
        reply->post();
        freeaddrinfo((struct addrinfo *)mAddrHeader);
        mAddrHeader = NULL;
    }
}
示例8: mYUVMode
// Software renderer for CedarX decoder output: reads the frame format,
// size and rotation from |meta| and configures |nativeWindow| for
// CPU-written YV12 buffers at the video's dimensions.
CedarXSoftwareRenderer::CedarXSoftwareRenderer(
        const sp<ANativeWindow> &nativeWindow, const sp<MetaData> &meta)
    : mYUVMode(None),
      mNativeWindow(nativeWindow) {
    int32_t tmp;
    // Color format, width and height are mandatory in the decoder metadata;
    // the CHECKs also populate mWidth/mHeight as a side effect.
    CHECK(meta->findInt32(kKeyColorFormat, &tmp));
    mColorFormat = (OMX_COLOR_FORMATTYPE)tmp;
    //CHECK(meta->findInt32(kKeyScreenID, &screenID));
    //CHECK(meta->findInt32(kKeyColorFormat, &halFormat));
    CHECK(meta->findInt32(kKeyWidth, &mWidth));
    CHECK(meta->findInt32(kKeyHeight, &mHeight));
    // Rotation is optional; default to no rotation.
    int32_t rotationDegrees;
    if (!meta->findInt32(kKeyRotation, &rotationDegrees)) {
        rotationDegrees = 0;
    }
    int halFormat;
    size_t bufWidth, bufHeight;
    // Buffers are always requested in YV12 at the full video size.
    halFormat = HAL_PIXEL_FORMAT_YV12;
    bufWidth = mWidth;
    bufHeight = mHeight;
    CHECK(mNativeWindow != NULL);
    // Usage: frames are written by the CPU and consumed by the GPU/display;
    // the CPU never reads them back.
    CHECK_EQ(0,
            native_window_set_usage(
            mNativeWindow.get(),
            GRALLOC_USAGE_SW_READ_NEVER | GRALLOC_USAGE_SW_WRITE_OFTEN
            | GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_EXTERNAL_DISP));
    CHECK_EQ(0,
            native_window_set_scaling_mode(
            mNativeWindow.get(),
            NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW));
    // Width must be multiple of 32???
    CHECK_EQ(0, native_window_set_buffers_geometry(
                mNativeWindow.get(),
                bufWidth,
                bufHeight,
                halFormat));
    // Map rotation degrees to HAL transform flags; unrecognized values fall
    // back to no transform.
    uint32_t transform;
    switch (rotationDegrees) {
        case 0: transform = 0; break;
        case 90: transform = HAL_TRANSFORM_ROT_90; break;
        case 180: transform = HAL_TRANSFORM_ROT_180; break;
        case 270: transform = HAL_TRANSFORM_ROT_270; break;
        default: transform = 0; break;
    }
    if (transform) {
        CHECK_EQ(0, native_window_set_buffers_transform(
                mNativeWindow.get(), transform));
    }
}
示例9: CHECK
// Build the codec-format AMessage from |meta|: converts the base metadata,
// forwards streaming/DRM/decode-order flags, attaches AAC codec-specific
// data as "csd-0", and collects all "csd-N" buffers into mCSD.
sp<AMessage> DashPlayer::Decoder::makeFormat(const sp<MetaData> &meta) {
    CHECK(mCSD.isEmpty());

    sp<AMessage> msg;
    uint32_t type;
    const void *data;
    size_t size;
    CHECK_EQ(convertMetaDataToMessage(meta, &msg), (status_t)OK);

    int32_t value;
    if (meta->findInt32(kKeySmoothStreaming, &value)) {
        msg->setInt32("smooth-streaming", value);
    }

    // NOTE(review): for the two keys below, presence alone enables the
    // feature — the value read into |value| is ignored. Presumably
    // intentional; confirm against the consumers of these message keys.
    if (meta->findInt32(kKeyIsDRM, &value)) {
        msg->setInt32("secure-op", 1);
    }

    if (meta->findInt32(kKeyRequiresSecureBuffers, &value)) {
        msg->setInt32("requires-secure-buffers", 1);
    }

    if (meta->findInt32(kKeyEnableDecodeOrder, &value)) {
        msg->setInt32("decodeOrderEnable", value);
    }

    if (meta->findData(kKeyAacCodecSpecificData, &type, &data, &size)) {
        if (size > 0 && data != NULL) {
            sp<ABuffer> buffer = new ABuffer(size);
            if (buffer != NULL) {
                memcpy(buffer->data(), data, size);
                buffer->meta()->setInt32("csd", true);
                buffer->meta()->setInt64("timeUs", 0);
                msg->setBuffer("csd-0", buffer);
            } else {
                ALOGE("kKeyAacCodecSpecificData ABuffer Allocation failed");
            }
        } else {
            ALOGE("Not a valid data pointer or size == 0");
        }
    }

    mCSDIndex = 0;
    for (size_t i = 0;; ++i) {
        sp<ABuffer> csd;
        // BUGFIX: passing a size_t through the "%d" varargs specifier is
        // undefined behavior on LP64 targets; cast to int (the produced key
        // strings "csd-0", "csd-1", ... are unchanged).
        if (!msg->findBuffer(StringPrintf("csd-%d", static_cast<int>(i)).c_str(), &csd)) {
            break;
        }
        mCSD.push(csd);
    }

    return msg;
}
示例10: onMessageReceived
void SimpleSoftOMXComponent::onMessageReceived(const sp<AMessage> &msg) {
Mutex::Autolock autoLock(mLock);
uint32_t msgType = msg->what();
ALOGV("msgType = %d", msgType);
switch (msgType) {
case kWhatSendCommand:
{
int32_t cmd, param;
CHECK(msg->findInt32("cmd", &cmd));
CHECK(msg->findInt32("param", ¶m));
onSendCommand((OMX_COMMANDTYPE)cmd, (OMX_U32)param);
break;
}
case kWhatEmptyThisBuffer:
case kWhatFillThisBuffer:
{
OMX_BUFFERHEADERTYPE *header;
CHECK(msg->findPointer("header", (void **)&header));
CHECK(mState == OMX_StateExecuting && mTargetState == mState);
bool found = false;
size_t portIndex = (kWhatEmptyThisBuffer == msgType)?
header->nInputPortIndex: header->nOutputPortIndex;
PortInfo *port = &mPorts.editItemAt(portIndex);
for (size_t j = 0; j < port->mBuffers.size(); ++j) {
BufferInfo *buffer = &port->mBuffers.editItemAt(j);
if (buffer->mHeader == header) {
CHECK(!buffer->mOwnedByUs);
buffer->mOwnedByUs = true;
CHECK((msgType == kWhatEmptyThisBuffer
&& port->mDef.eDir == OMX_DirInput)
|| (port->mDef.eDir == OMX_DirOutput));
port->mQueue.push_back(buffer);
onQueueFilled(portIndex);
found = true;
break;
}
}
CHECK(found);
break;
}
default:
TRESPASS();
break;
}
}
示例11: getColorConfigFromFormat
// static
void ColorUtils::getColorConfigFromFormat(
const sp<AMessage> &format, int32_t *range, int32_t *standard, int32_t *transfer) {
if (!format->findInt32("color-range", range)) {
*range = kColorRangeUnspecified;
}
if (!format->findInt32("color-standard", standard)) {
*standard = kColorStandardUnspecified;
}
if (!format->findInt32("color-transfer", transfer)) {
*transfer = kColorTransferUnspecified;
}
}
示例12: setupCamera
// Open camera 0, apply the video size and frame rate from |videoMetaData|,
// attach |previewSurface| for preview, and record which preview frame
// rates the driver supports in mSupportedFps.
// Returns OK on success, UNKNOWN_ERROR if the camera cannot be opened.
status_t CameraController::setupCamera(const sp<MetaData> &videoMetaData, const sp<Surface> &previewSurface ) {
    F_LOG;
    // Initialize so a missing metadata key cannot leave these uninitialized
    // (the findInt32 return values are deliberately ignored below).
    int width = 0, height = 0, fps = 0;
    videoMetaData->findInt32(kKeyWidth, &width);
    videoMetaData->findInt32(kKeyHeight, &height);
    videoMetaData->findInt32(kKeySampleRate, &fps);
    // videoMetaData->findInt32(kKeyBitRate, &mVideoBitRate);
    //videoMetaData->findCString(kKeyMIMEType, &mVideoEncoder); //should be MEDIA_MIME_TYPE_VIDEO_AVC

    mCamera = android::Camera::connect(0);
    LOGD("After Camera::connect ");
    if (mCamera == NULL) {
        LOGE("************************* Failed to open camera ************************* ");
        return UNKNOWN_ERROR;
    }

    android::String8 s = mCamera->getParameters();
    mCameraParams = new android::CameraParameters(s);
    LOGV("Getting camera parameters");

    char buf[50];
    // snprintf instead of sprintf to guarantee no overflow of buf.
    snprintf(buf, sizeof(buf), "%ux%u", width, height);
    mCameraParams->set("video-size", buf);
    mCameraParams->set("preview-format","yuv420sp");
    mCameraParams->setPreviewSize(width, height);
    mCameraParams->setPreviewFrameRate(fps);
    LOGV("Setting camera params preview_size:%dx%d FPS:%d", width, height, fps);
    mCamera->setParameters(mCameraParams->flatten());
    mCameraSource = android::CameraSource::CreateFromCamera(mCamera);
    LOGV("Setting preview");
    mCamera->setPreviewDisplay(previewSurface);

    // Get supported preview frame rates from camera driver
    memset(mSupportedFps, 0, sizeof(mSupportedFps));
    const char *fpsValues = mCameraParams->get(CameraParameters::KEY_SUPPORTED_PREVIEW_FRAME_RATES);
    LOGV("Supported camera preview framerates: %s", fpsValues);
    if (fpsValues != NULL) {
        // BUGFIX: the original allocated strlen(fpsValues) bytes — no room
        // for the terminating NUL, so strcpy overflowed the heap buffer —
        // and never freed the allocation. It also never checked fpsValues
        // for NULL, and a negative atoi() result indexed out of bounds.
        const size_t len = strlen(fpsValues) + 1;
        char *tokenString = new char[len];
        memcpy(tokenString, fpsValues, len);
        for (char *fpsToken = strtok(tokenString, ","); fpsToken != NULL;
                fpsToken = strtok(NULL, ",")) {
            const int rate = atoi(fpsToken);
            if (rate >= 0 && rate < MAX_FRAME_RATE_VALUES) {
                mSupportedFps[rate] = 1;
            }
        }
        delete[] tokenString;
    }

    mInitialized = true;
    setFramerate(fps);
    return OK;
}
示例13: copyColorConfig
// static
// Copy whichever of the three color-aspect keys are present in |source|
// over to |target|; absent keys are left untouched.
void ColorUtils::copyColorConfig(const sp<AMessage> &source, sp<AMessage> &target) {
    // 0 values are unspecified
    static const char* const kColorKeys[] = {
        "color-range",
        "color-standard",
        "color-transfer",
    };
    for (const char* key : kColorKeys) {
        int32_t value;
        if (source->findInt32(key, &value)) {
            target->setInt32(key, value);
        }
    }
}
示例14: onDisconnected
// Tear down the RTSP session after the connection has dropped: unregister
// and release the handler, record the failure, complete any pending
// disconnect request, notify the listener, and drop the media tracks.
void RTSPSource::onDisconnected(const sp<AMessage> &msg) {
    status_t err;
    CHECK(msg != NULL);
    // The poster always attaches a non-OK "result" code.
    CHECK(msg->findInt32("result", &err));
    CHECK_NE(err, (status_t)OK);
    CHECK(mLooper != NULL);
    CHECK(mHandler != NULL);
    mLooper->unregisterHandler(mHandler->id());
    mHandler.clear();
    mState = DISCONNECTED;
    mFinalResult = err;
    // A disconnect may already have been requested before the connection
    // died; finish it now that the handler is gone.
    if (mDisconnectReplyID != 0) {
        finishDisconnectIfPossible();
    }
    if (mListener) {
        // err is always set to UNKNOWN_ERROR from
        // Android right now, rename err to NS_ERROR_NET_TIMEOUT.
        mListener->OnDisconnected(0, NS_ERROR_NET_TIMEOUT);
    }
    mAudioTrack = NULL;
    mVideoTrack = NULL;
    mTracks.clear();
}
示例15: onDecoderNotify
// Handle a notification from a per-track decoder. Only output-buffer-ready
// notifications are expected; anything else is a fatal programming error.
void DirectRenderer::onDecoderNotify(const sp<AMessage> &msg) {
    size_t trackIndex;
    CHECK(msg->findSize("trackIndex", &trackIndex));

    int32_t what;
    CHECK(msg->findInt32("what", &what));

    if (what != DecoderContext::kWhatOutputBufferReady) {
        TRESPASS();
        return;
    }

    size_t index;
    CHECK(msg->findSize("index", &index));

    int64_t timeUs;
    CHECK(msg->findInt64("timeUs", &timeUs));

    sp<ABuffer> buffer;
    CHECK(msg->findBuffer("buffer", &buffer));

    queueOutputBuffer(trackIndex, index, timeUs, buffer);
}