This article collects typical usage examples of the C++ method KeyedVector::editValueFor. If you are wondering what KeyedVector::editValueFor does, how to call it, or what real uses of it look like, the curated code samples below may help. You can also browse further usage examples of the containing class, KeyedVector.
Six code examples of KeyedVector::editValueFor are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better C++ code samples.
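Before the examples, note what the method actually does: editValueFor() returns a mutable reference to the value stored under a given key, so changes made through that reference persist in the container, unlike valueFor(), which is read-only. The key must already be present; calling editValueFor() with a missing key is a fatal error, so guard the call with indexOfKey() when in doubt. The following is a minimal, self-contained sketch of that pattern; it is not taken from the samples below, and the table variable and sketch() function are made up purely for illustration.
#include <utils/KeyedVector.h>
#include <utils/String8.h>

using android::KeyedVector;
using android::String8;

// Hypothetical illustration of KeyedVector::editValueFor (not from the samples below).
static void sketch() {
    KeyedVector<int, String8> table;
    table.add(1, String8("first"));

    // editValueFor() requires the key to be present; with a missing key it is
    // a fatal error, so check with indexOfKey() first when unsure.
    if (table.indexOfKey(1) >= 0) {
        String8& value = table.editValueFor(1);   // mutable reference into the vector
        value.append(" (edited)");                // modifies the stored entry in place
    }
}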
Example 1: removeStream
int JTvInputHal::removeStream(int deviceId, int streamId) {
    KeyedVector<int, Connection>& connections = mConnections.editValueFor(deviceId);
    if (connections.indexOfKey(streamId) < 0) {
        return BAD_VALUE;
    }
    Connection& connection = connections.editValueFor(streamId);
    if (connection.mSurface == NULL) {
        // Nothing to do
        return NO_ERROR;
    }
    if (connection.mThread != NULL) {
        connection.mThread->shutdown();
        connection.mThread.clear();
    }
    if (mDevice->close_stream(mDevice, deviceId, streamId) != 0) {
        ALOGE("Couldn't remove stream");
        return BAD_VALUE;
    }
    if (connection.mSourceHandle != NULL) {
        connection.mSourceHandle.clear();
    }
    if (connection.mSurface != NULL) {
        // Detach the sideband stream (only while the surface is still valid)
        // before dropping the reference.
        if (Surface::isValid(connection.mSurface)) {
            connection.mSurface->setSidebandStream(NULL);
        }
        connection.mSurface.clear();
    }
    return NO_ERROR;
}
Example 2: request
status_t GPUHardware::request(int pid, const sp<IGPUCallback>& callback,
        ISurfaceComposer::gpu_info_t* gpu)
{
    if (callback == 0)
        return BAD_VALUE;
    sp<IMemory> gpuHandle;
    LOGD("pid %d requesting gpu core (owner = %d)", pid, mOwner);
    Mutex::Autolock _l(mLock);
    status_t err = requestLocked(pid);
    if (err == NO_ERROR) {
        // it's guaranteed to be there, by construction
        Client& client = mClients.editValueFor(pid);
        registerCallbackLocked(callback, client);
        gpu->count = 2;
        gpu->regions[0].region = client.smi.map();
        gpu->regions[1].region = client.ebi.map();
        gpu->regs = client.reg.map();
        gpu->regions[0].reserved = 0;
        gpu->regions[1].reserved = GPU_RESERVED_SIZE;
        if (gpu->regs != 0) {
            //LOGD("gpu core granted to pid %d, handle base=%p",
            //        mOwner, gpu->regs->pointer());
        }
        mCallback = callback;
    } else {
        LOGW("couldn't grant gpu core to pid %d", pid);
    }
    return err;
}
Example 3: onStreamConfigurationsChanged
void JTvInputHal::onStreamConfigurationsChanged(int deviceId) {
    {
        Mutex::Autolock autoLock(&mLock);
        KeyedVector<int, Connection>& connections = mConnections.editValueFor(deviceId);
        for (size_t i = 0; i < connections.size(); ++i) {
            removeStream(deviceId, connections.keyAt(i));
        }
        connections.clear();
    }
    JNIEnv* env = AndroidRuntime::getJNIEnv();
    env->CallVoidMethod(
            mThiz,
            gTvInputHalClassInfo.streamConfigsChanged,
            deviceId);
}
Example 4: onCaptured
void JTvInputHal::onCaptured(int deviceId, int streamId, uint32_t seq, bool succeeded) {
    sp<BufferProducerThread> thread;
    {
        Mutex::Autolock autoLock(&mLock);
        KeyedVector<int, Connection>& connections = mConnections.editValueFor(deviceId);
        Connection& connection = connections.editValueFor(streamId);
        if (connection.mThread == NULL) {
            ALOGE("capture thread not existing.");
            return;
        }
        thread = connection.mThread;
    }
    thread->onCaptured(seq, succeeded);
    if (seq == 0) {
        JNIEnv* env = AndroidRuntime::getJNIEnv();
        env->CallVoidMethod(
                mThiz,
                gTvInputHalClassInfo.firstFrameCaptured,
                deviceId,
                streamId);
    }
}
Example 5: addOrUpdateStream
int JTvInputHal::addOrUpdateStream(int deviceId, int streamId, const sp<Surface>& surface) {
    KeyedVector<int, Connection>& connections = mConnections.editValueFor(deviceId);
    if (connections.indexOfKey(streamId) < 0) {
        connections.add(streamId, Connection());
    }
    Connection& connection = connections.editValueFor(streamId);
    if (connection.mSurface == surface) {
        // Nothing to do
        return NO_ERROR;
    }
    // Clear the surface in the connection.
    if (connection.mSurface != NULL) {
        if (connection.mStreamType == TV_STREAM_TYPE_INDEPENDENT_VIDEO_SOURCE) {
            if (Surface::isValid(connection.mSurface)) {
                connection.mSurface->setSidebandStream(NULL);
            }
        }
        connection.mSurface.clear();
    }
    if (connection.mSourceHandle == NULL && connection.mThread == NULL) {
        // Need to configure stream
        int numConfigs = 0;
        const tv_stream_config_t* configs = NULL;
        if (mDevice->get_stream_configurations(
                mDevice, deviceId, &numConfigs, &configs) != 0) {
            ALOGE("Couldn't get stream configs");
            return UNKNOWN_ERROR;
        }
        int configIndex = -1;
        for (int i = 0; i < numConfigs; ++i) {
            if (configs[i].stream_id == streamId) {
                configIndex = i;
                break;
            }
        }
        if (configIndex == -1) {
            ALOGE("Cannot find a config with given stream ID: %d", streamId);
            return BAD_VALUE;
        }
        connection.mStreamType = configs[configIndex].type;

        tv_stream_t stream;
        stream.stream_id = configs[configIndex].stream_id;
        if (connection.mStreamType == TV_STREAM_TYPE_BUFFER_PRODUCER) {
            stream.buffer_producer.width = configs[configIndex].max_video_width;
            stream.buffer_producer.height = configs[configIndex].max_video_height;
        }
        if (mDevice->open_stream(mDevice, deviceId, &stream) != 0) {
            ALOGE("Couldn't add stream");
            return UNKNOWN_ERROR;
        }
        if (connection.mStreamType == TV_STREAM_TYPE_INDEPENDENT_VIDEO_SOURCE) {
            connection.mSourceHandle = NativeHandle::create(
                    stream.sideband_stream_source_handle, false);
        } else if (connection.mStreamType == TV_STREAM_TYPE_BUFFER_PRODUCER) {
            if (connection.mThread != NULL) {
                connection.mThread->shutdown();
            }
            connection.mThread = TvInputHalFactory::get()->createBufferProducerThread(
                    mDevice, deviceId, &stream);
            if (connection.mThread == NULL) {
                ALOGE("No memory for BufferProducerThread");
                // clean up
                if (mDevice->close_stream(mDevice, deviceId, streamId) != 0) {
                    ALOGE("Couldn't remove stream");
                }
                return NO_MEMORY;
            }
        }
    }
    connection.mSurface = surface;
    if (connection.mStreamType == TV_STREAM_TYPE_INDEPENDENT_VIDEO_SOURCE) {
        connection.mSurface->setSidebandStream(connection.mSourceHandle);
    } else if (connection.mStreamType == TV_STREAM_TYPE_BUFFER_PRODUCER) {
        if (NO_ERROR != connection.mThread->setSurface(surface)) {
            ALOGE("failed to setSurface");
            // clean up
            connection.mThread.clear();
            if (mDevice->close_stream(mDevice, deviceId, streamId) != 0) {
                ALOGE("Couldn't remove stream");
            }
            if (connection.mSurface != NULL) {
                connection.mSurface.clear();
            }
            return UNKNOWN_ERROR;
        }
        connection.mThread->run();
    }
    return NO_ERROR;
}
Example 6: decode
static int decode(
        const android::sp<android::ALooper> &looper,
        const char *path,
        bool useAudio,
        bool useVideo,
        const android::sp<android::Surface> &surface) {
    using namespace android;

    static int64_t kTimeout = 500ll;

    sp<NuMediaExtractor> extractor = new NuMediaExtractor;
    if (extractor->setDataSource(path) != OK) {
        fprintf(stderr, "unable to instantiate extractor.\n");
        return 1;
    }

    KeyedVector<size_t, CodecState> stateByTrack;

    bool haveAudio = false;
    bool haveVideo = false;
    for (size_t i = 0; i < extractor->countTracks(); ++i) {
        sp<AMessage> format;
        status_t err = extractor->getTrackFormat(i, &format);
        CHECK_EQ(err, (status_t)OK);

        AString mime;
        CHECK(format->findString("mime", &mime));

        bool isAudio = !strncasecmp(mime.c_str(), "audio/", 6);
        bool isVideo = !strncasecmp(mime.c_str(), "video/", 6);

        if (useAudio && !haveAudio && isAudio) {
            haveAudio = true;
        } else if (useVideo && !haveVideo && isVideo) {
            haveVideo = true;
        } else {
            continue;
        }

        ALOGV("selecting track %zu", i);

        err = extractor->selectTrack(i);
        CHECK_EQ(err, (status_t)OK);

        CodecState *state =
            &stateByTrack.editValueAt(stateByTrack.add(i, CodecState()));

        state->mNumBytesDecoded = 0;
        state->mNumBuffersDecoded = 0;
        state->mIsAudio = isAudio;

        state->mCodec = MediaCodec::CreateByType(
                looper, mime.c_str(), false /* encoder */);
        CHECK(state->mCodec != NULL);

        err = state->mCodec->configure(
                format, isVideo ? surface : NULL,
                NULL /* crypto */,
                0 /* flags */);
        CHECK_EQ(err, (status_t)OK);

        state->mSignalledInputEOS = false;
        state->mSawOutputEOS = false;
    }

    CHECK(!stateByTrack.isEmpty());

    int64_t startTimeUs = ALooper::GetNowUs();

    for (size_t i = 0; i < stateByTrack.size(); ++i) {
        CodecState *state = &stateByTrack.editValueAt(i);

        sp<MediaCodec> codec = state->mCodec;
        CHECK_EQ((status_t)OK, codec->start());
        CHECK_EQ((status_t)OK, codec->getInputBuffers(&state->mInBuffers));
        CHECK_EQ((status_t)OK, codec->getOutputBuffers(&state->mOutBuffers));

        ALOGV("got %zu input and %zu output buffers",
              state->mInBuffers.size(), state->mOutBuffers.size());
    }

    bool sawInputEOS = false;
    for (;;) {
        if (!sawInputEOS) {
            size_t trackIndex;
            status_t err = extractor->getSampleTrackIndex(&trackIndex);
            if (err != OK) {
                ALOGV("saw input eos");
                sawInputEOS = true;
            } else {
                CodecState *state = &stateByTrack.editValueFor(trackIndex);
                size_t index;
                err = state->mCodec->dequeueInputBuffer(&index, kTimeout);
//......... the rest of this example has been omitted .........