本文整理汇总了C++中sp::getCameraId方法的典型用法代码示例。如果您正苦于以下问题:C++ sp::getCameraId方法的具体用法?C++ sp::getCameraId怎么用?C++ sp::getCameraId使用的例子?那么, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类sp
的用法示例。
在下文中一共展示了sp::getCameraId方法的3个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C++代码示例。
示例1: mClient
// Constructs a StreamingProcessor bound to a Camera2Client.
// Caches the client's camera device handle and camera id, and initializes
// all preview/recording bookkeeping to idle defaults: no active request,
// not paused, no streams allocated (NO_STREAM), request ids seeded from the
// Camera2Client start constants, and the recording heap fully free.
StreamingProcessor::StreamingProcessor(sp<Camera2Client> client):
mClient(client),
mDevice(client->getCameraDevice()),
mId(client->getCameraId()),
mActiveRequest(NONE),
mPaused(false),
mPreviewRequestId(Camera2Client::kPreviewRequestIdStart),
mPreviewStreamId(NO_STREAM),
mRecordingRequestId(Camera2Client::kRecordingRequestIdStart),
mRecordingStreamId(NO_STREAM),
mRecordingFrameAvailable(false),
mRecordingHeapCount(kDefaultRecordingHeapCount),
mRecordingHeapFree(kDefaultRecordingHeapCount)
{
}
示例2: processFaceDetect
// Parses face-detection results out of a capture-result metadata buffer and,
// when face detection is enabled, forwards the detected faces to the client's
// face-detection callback.
//
// @param frame   capture result metadata to read the face tags from
// @param client  client whose parameters are consulted and who is notified
// @return OK on success (including "no faces" / "tag absent"), BAD_VALUE when
//         a required face tag is missing or the face count exceeds the
//         device-reported maximum.
// NOTE(review): block is truncated in this view (see omission marker at the
// end) — the face-assembly loop continues past what is shown here.
status_t FrameProcessor::processFaceDetect(const CameraMetadata &frame,
const sp<Camera2Client> &client) {
status_t res = BAD_VALUE;
ATRACE_CALL();
camera_metadata_ro_entry_t entry;
bool enableFaceDetect;
{
// Scoped lock: only hold the shared parameters while reading the flag.
SharedParameters::Lock l(client->getParameters());
enableFaceDetect = l.mParameters.enableFaceDetect;
}
entry = frame.find(ANDROID_STATISTICS_FACE_DETECT_MODE);
// TODO: This should be an error once implementations are compliant
if (entry.count == 0) {
return OK;
}
uint8_t faceDetectMode = entry.data.u8[0];
camera_frame_metadata metadata;
Vector<camera_face_t> faces;
metadata.number_of_faces = 0;
if (enableFaceDetect &&
faceDetectMode != ANDROID_STATISTICS_FACE_DETECT_MODE_OFF) {
// Lock held for the rest of the block: fastInfo.maxFaces is read below.
SharedParameters::Lock l(client->getParameters());
entry = frame.find(ANDROID_STATISTICS_FACE_RECTANGLES);
if (entry.count == 0) {
// No faces this frame
/* warning: locks SharedCameraCallbacks */
callbackFaceDetection(client, metadata);
return OK;
}
// Each face rectangle is 4 int32 values, so count/4 faces were reported.
metadata.number_of_faces = entry.count / 4;
if (metadata.number_of_faces >
l.mParameters.fastInfo.maxFaces) {
ALOGE("%s: Camera %d: More faces than expected! (Got %d, max %d)",
__FUNCTION__, client->getCameraId(),
metadata.number_of_faces, l.mParameters.fastInfo.maxFaces);
return res;
}
const int32_t *faceRects = entry.data.i32;
entry = frame.find(ANDROID_STATISTICS_FACE_SCORES);
if (entry.count == 0) {
ALOGE("%s: Camera %d: Unable to read face scores",
__FUNCTION__, client->getCameraId());
return res;
}
const uint8_t *faceScores = entry.data.u8;
const int32_t *faceLandmarks = NULL;
const int32_t *faceIds = NULL;
// Landmarks and ids are only reported in FULL face-detect mode.
if (faceDetectMode == ANDROID_STATISTICS_FACE_DETECT_MODE_FULL) {
entry = frame.find(ANDROID_STATISTICS_FACE_LANDMARKS);
if (entry.count == 0) {
ALOGE("%s: Camera %d: Unable to read face landmarks",
__FUNCTION__, client->getCameraId());
return res;
}
faceLandmarks = entry.data.i32;
entry = frame.find(ANDROID_STATISTICS_FACE_IDS);
if (entry.count == 0) {
ALOGE("%s: Camera %d: Unable to read face IDs",
__FUNCTION__, client->getCameraId());
return res;
}
faceIds = entry.data.i32;
}
// Crop region is needed to map sensor face coordinates; presumably used
// for coordinate translation further below — TODO confirm in full source.
entry = frame.find(ANDROID_SCALER_CROP_REGION);
if (entry.count < 4) {
ALOGE("%s: Camera %d: Unable to read crop region (count = %d)",
__FUNCTION__, client->getCameraId(), entry.count);
return res;
}
Parameters::CropRegion scalerCrop = {
static_cast<float>(entry.data.i32[0]),
static_cast<float>(entry.data.i32[1]),
static_cast<float>(entry.data.i32[2]),
static_cast<float>(entry.data.i32[3])};
faces.setCapacity(metadata.number_of_faces);
size_t maxFaces = metadata.number_of_faces;
for (size_t i = 0; i < maxFaces; i++) {
// Score 0 marks an invalid/empty face slot: drop it from the count.
if (faceScores[i] == 0) {
metadata.number_of_faces--;
continue;
}
if (faceScores[i] > 100) {
ALOGW("%s: Face index %zu with out of range score %d",
__FUNCTION__, i, faceScores[i]);
}
//......... remainder of this function omitted in this excerpt .........
示例3: ALOGV
/**
 * Extracts the 3A (AF / AE / AWB) modes and states from a capture result and
 * notifies the client of any transition since the last processed frame.
 *
 * @param frame   capture result carrying the metadata and (on device API
 *                >= 3.2) the trigger ids in mResultExtras
 * @param client  client to notify of AE/AF/AWB state changes
 * @return OK on success or when the frame was already processed;
 *         BAD_VALUE when required 3A tags are missing from the metadata.
 */
status_t FrameProcessor::process3aState(const CaptureResult &frame,
        const sp<Camera2Client> &client) {
    ATRACE_CALL();
    const CameraMetadata &metadata = frame.mMetadata;
    camera_metadata_ro_entry_t entry;
    int cameraId = client->getCameraId();

    entry = metadata.find(ANDROID_REQUEST_FRAME_COUNT);
    // Guard before dereferencing: reading entry.data.i32[0] with count == 0
    // is undefined behavior if the tag is absent from the result.
    if (entry.count == 0) {
        ALOGE("%s: Camera %d: No frame count in capture result metadata",
                __FUNCTION__, cameraId);
        return BAD_VALUE;
    }
    int32_t frameNumber = entry.data.i32[0];

    // Don't send 3A notifications for the same frame number twice
    if (frameNumber <= mLast3AFrameNumber) {
        ALOGV("%s: Already sent 3A for frame number %d, skipping",
                __FUNCTION__, frameNumber);
        return OK;
    }
    mLast3AFrameNumber = frameNumber;

    // Get 3A states from result metadata
    bool gotAllStates = true;
    AlgState new3aState;

    // TODO: Also use AE mode, AE trigger ID
    gotAllStates &= get3aResult<uint8_t>(metadata, ANDROID_CONTROL_AF_MODE,
            &new3aState.afMode, frameNumber, cameraId);
    gotAllStates &= get3aResult<uint8_t>(metadata, ANDROID_CONTROL_AWB_MODE,
            &new3aState.awbMode, frameNumber, cameraId);
    gotAllStates &= get3aResult<uint8_t>(metadata, ANDROID_CONTROL_AE_STATE,
            &new3aState.aeState, frameNumber, cameraId);
    gotAllStates &= get3aResult<uint8_t>(metadata, ANDROID_CONTROL_AF_STATE,
            &new3aState.afState, frameNumber, cameraId);
    gotAllStates &= get3aResult<uint8_t>(metadata, ANDROID_CONTROL_AWB_STATE,
            &new3aState.awbState, frameNumber, cameraId);

    // Newer devices deliver trigger ids via result extras; legacy devices
    // report them as metadata tags.
    if (client->getCameraDeviceVersion() >= CAMERA_DEVICE_API_VERSION_3_2) {
        new3aState.afTriggerId = frame.mResultExtras.afTriggerId;
        new3aState.aeTriggerId = frame.mResultExtras.precaptureTriggerId;
    } else {
        gotAllStates &= get3aResult<int32_t>(metadata, ANDROID_CONTROL_AF_TRIGGER_ID,
                &new3aState.afTriggerId, frameNumber, cameraId);
        gotAllStates &= get3aResult<int32_t>(metadata, ANDROID_CONTROL_AE_PRECAPTURE_ID,
                &new3aState.aeTriggerId, frameNumber, cameraId);
    }

    if (!gotAllStates) return BAD_VALUE;

    // Notify only on transitions, comparing against the previous frame's state.
    if (new3aState.aeState != m3aState.aeState) {
        ALOGV("%s: Camera %d: AE state %d->%d",
                __FUNCTION__, cameraId,
                m3aState.aeState, new3aState.aeState);
        client->notifyAutoExposure(new3aState.aeState, new3aState.aeTriggerId);
    }

    if (new3aState.afState != m3aState.afState ||
            new3aState.afMode != m3aState.afMode ||
            new3aState.afTriggerId != m3aState.afTriggerId) {
        ALOGV("%s: Camera %d: AF state %d->%d. AF mode %d->%d. Trigger %d->%d",
                __FUNCTION__, cameraId,
                m3aState.afState, new3aState.afState,
                m3aState.afMode, new3aState.afMode,
                m3aState.afTriggerId, new3aState.afTriggerId);
        client->notifyAutoFocus(new3aState.afState, new3aState.afTriggerId);
    }

    if (new3aState.awbState != m3aState.awbState ||
            new3aState.awbMode != m3aState.awbMode) {
        ALOGV("%s: Camera %d: AWB state %d->%d. AWB mode %d->%d",
                __FUNCTION__, cameraId,
                m3aState.awbState, new3aState.awbState,
                m3aState.awbMode, new3aState.awbMode);
        // NOTE(review): AWB notification reuses the AE trigger id — mirrors
        // the original code; confirm against the client API before changing.
        client->notifyAutoWhitebalance(new3aState.awbState,
                new3aState.aeTriggerId);
    }

    m3aState = new3aState;

    return OK;
}