This article collects and organizes typical usage examples of the C++ KeyedVector::size method. If you are wondering how KeyedVector::size is used in C++, what it does, or what real calls to it look like, the hand-picked code examples below may help. You can also explore further usage examples of KeyedVector, the class this method belongs to.
A total of 11 code examples of the KeyedVector::size method are shown below; by default they are sorted by popularity. You can upvote the examples you like or find useful, and your ratings help the system recommend better C++ code examples.
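Before diving into the examples, here is a minimal sketch of the pattern they all share: size() bounds an index loop over keyAt()/valueAt(). This is illustrative only and assumes the AOSP libutils headers (utils/KeyedVector.h, utils/String8.h, utils/Log.h); the function name dumpHeaders and the log tag are made up for the sake of the example.

#define LOG_TAG "KeyedVectorSizeDemo"   // hypothetical tag; must be defined before utils/Log.h
#include <utils/KeyedVector.h>
#include <utils/String8.h>
#include <utils/Log.h>

using android::KeyedVector;
using android::String8;

// size() reports how many key/value pairs the vector currently holds;
// keyAt(i)/valueAt(i) give positional access to the i-th entry.
static void dumpHeaders(const KeyedVector<String8, String8>& headers) {
    if (headers.size() == 0) {
        ALOGD("no headers");
        return;
    }
    for (size_t i = 0; i < headers.size(); ++i) {
        ALOGD("header %zu: %s=%s",
                i, headers.keyAt(i).string(), headers.valueAt(i).string());
    }
}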
Example 1: binderDied
void GPUHardware::binderDied(const wp<IBinder>& who)
{
    Mutex::Autolock _l(mLock);
    pid_t pid = mRegisteredClients.valueFor(who);
    if (pid != 0) {
        ssize_t index = mClients.indexOfKey(pid);
        if (index >= 0) {
            //LOGD("*** removing client at %d", index);
            Client& client(mClients.editValueAt(index));
            client.revokeAllHeaps(); // not really needed in theory
            mClients.removeItemsAt(index);
            if (mClients.size() == 0) {
                //LOGD("*** was last client closing everything");
                mCallback.clear();
                mAllocator.clear();
                mCurrentAllocator.clear();
                mSMIHeap.clear();
                mREGHeap.clear();
                // NOTE: we cannot clear the EBI heap because surfaceflinger
                // itself may be using it, since this is where surfaces
                // are allocated. if we're in the middle of compositing
                // a surface (even if its process just died), we cannot
                // rip the heap under our feet.
                mOwner = NO_OWNER;
            }
        }
    }
}
Example 2: codecResultsToXml
static AString codecResultsToXml(const KeyedVector<AString, CodecSettings> &results) {
    AString ret;
    for (size_t i = 0; i < results.size(); ++i) {
        AString name;
        AString mime;
        if (!splitString(results.keyAt(i), " ", &name, &mime)) {
            continue;
        }
        AString codec =
                AStringPrintf(" <MediaCodec name=\"%s\" type=\"%s\" update=\"true\" >\n",
                        name.c_str(),
                        mime.c_str());
        ret.append(codec);
        const CodecSettings &settings = results.valueAt(i);
        for (size_t i = 0; i < settings.size(); ++i) {
            // WARNING: we assume all the settings are "Limit". Currently we have only one type
            // of setting in this case, which is "max-supported-instances".
            AString setting = AStringPrintf(
                    " <Limit name=\"%s\" value=\"%s\" />\n",
                    settings.keyAt(i).c_str(),
                    settings.valueAt(i).c_str());
            ret.append(setting);
        }
        ret.append(" </MediaCodec>\n");
    }
    return ret;
}
Example 3: receiveFinishedSignals
status_t NativeInputEventSender::receiveFinishedSignals(JNIEnv* env) {
    if (kDebugDispatchCycle) {
        ALOGD("channel '%s' ~ Receiving finished signals.", getInputChannelName());
    }
    ScopedLocalRef<jobject> senderObj(env, NULL);
    bool skipCallbacks = false;
    for (;;) {
        uint32_t publishedSeq;
        bool handled;
        status_t status = mInputPublisher.receiveFinishedSignal(&publishedSeq, &handled);
        if (status) {
            if (status == WOULD_BLOCK) {
                return OK;
            }
            ALOGE("channel '%s' ~ Failed to consume finished signals. status=%d",
                    getInputChannelName(), status);
            return status;
        }
        ssize_t index = mPublishedSeqMap.indexOfKey(publishedSeq);
        if (index >= 0) {
            uint32_t seq = mPublishedSeqMap.valueAt(index);
            mPublishedSeqMap.removeItemsAt(index);
            if (kDebugDispatchCycle) {
                ALOGD("channel '%s' ~ Received finished signal, seq=%u, handled=%s, "
                        "pendingEvents=%zu.",
                        getInputChannelName(), seq, handled ? "true" : "false",
                        mPublishedSeqMap.size());
            }
            if (!skipCallbacks) {
                if (!senderObj.get()) {
                    senderObj.reset(jniGetReferent(env, mSenderWeakGlobal));
                    if (!senderObj.get()) {
                        ALOGW("channel '%s' ~ Sender object was finalized "
                                "without being disposed.", getInputChannelName());
                        return DEAD_OBJECT;
                    }
                }
                env->CallVoidMethod(senderObj.get(),
                        gInputEventSenderClassInfo.dispatchInputEventFinished,
                        jint(seq), jboolean(handled));
                if (env->ExceptionCheck()) {
                    ALOGE("Exception dispatching finished signal.");
                    skipCallbacks = true;
                }
            }
        }
    }
}
Example 4: KeyedVectorToHashMap
static jobject KeyedVectorToHashMap (JNIEnv *env, KeyedVector<String8, String8> const &map) {
    jclass clazz = gFields.hashmapClassId;
    jobject hashMap = env->NewObject(clazz, gFields.hashmap.init);
    for (size_t i = 0; i < map.size(); ++i) {
        jstring jkey = env->NewStringUTF(map.keyAt(i).string());
        jstring jvalue = env->NewStringUTF(map.valueAt(i).string());
        env->CallObjectMethod(hashMap, gFields.hashmap.put, jkey, jvalue);
        env->DeleteLocalRef(jkey);
        env->DeleteLocalRef(jvalue);
    }
    return hashMap;
}
Example 5: exportResultsToXML
void exportResultsToXML(
        const char *fileName,
        const CodecSettings &global_results,
        const KeyedVector<AString, CodecSettings> &encoder_results,
        const KeyedVector<AString, CodecSettings> &decoder_results) {
    if (global_results.size() == 0 && encoder_results.size() == 0 && decoder_results.size() == 0) {
        return;
    }
    AString overrides;
    overrides.append(getProfilingVersionString());
    overrides.append("\n");
    overrides.append("<MediaCodecs>\n");
    if (global_results.size() > 0) {
        overrides.append(" <Settings>\n");
        overrides.append(globalResultsToXml(global_results));
        overrides.append(" </Settings>\n");
    }
    if (encoder_results.size() > 0) {
        overrides.append(" <Encoders>\n");
        overrides.append(codecResultsToXml(encoder_results));
        overrides.append(" </Encoders>\n");
    }
    if (decoder_results.size() > 0) {
        overrides.append(" <Decoders>\n");
        overrides.append(codecResultsToXml(decoder_results));
        overrides.append(" </Decoders>\n");
    }
    overrides.append("</MediaCodecs>\n");
    FILE *f = fopen(fileName, "wb");
    if (f == NULL) {
        ALOGE("Failed to open %s for writing.", fileName);
        return;
    }
    if (fwrite(overrides.c_str(), 1, overrides.size(), f) != overrides.size()) {
        ALOGE("Failed to write to %s.", fileName);
    }
    fclose(f);
}
Example 6: dump_heaps
void HeapCache::dump_heaps()
{
    Mutex::Autolock _l(mHeapCacheLock);
    int c = mHeapCache.size();
    for (int i=0 ; i<c ; i++) {
        const heap_info_t& info = mHeapCache.valueAt(i);
        BpMemoryHeap const* h(static_cast<BpMemoryHeap const *>(info.heap.get()));
        ALOGD("hey=%p, heap=%p, count=%d, (fd=%d, base=%p, size=%d)",
                mHeapCache.keyAt(i).unsafe_get(),
                info.heap.get(), info.count,
                h->mHeapId, h->mBase, h->mSize);
    }
}
Example 7: pathStr
static void
android_media_MediaPlayer_setDataSourceAndHeaders(
        JNIEnv *env, jobject thiz, jobject httpServiceBinderObj, jstring path,
        jobjectArray keys, jobjectArray values) {
    sp<MediaPlayer> mp = getMediaPlayer(env, thiz);
    if (mp == NULL ) {
        jniThrowException(env, "java/lang/IllegalStateException", NULL);
        return;
    }
    if (path == NULL) {
        jniThrowException(env, "java/lang/IllegalArgumentException", NULL);
        return;
    }
    const char *tmp = env->GetStringUTFChars(path, NULL);
    if (tmp == NULL) { // Out of memory
        return;
    }
    ALOGV("setDataSource: path %s", tmp);
    String8 pathStr(tmp);
    env->ReleaseStringUTFChars(path, tmp);
    tmp = NULL;
    // We build a KeyedVector out of the key and val arrays
    KeyedVector<String8, String8> headersVector;
    if (!ConvertKeyValueArraysToKeyedVector(
            env, keys, values, &headersVector)) {
        return;
    }
    sp<IMediaHTTPService> httpService;
    if (httpServiceBinderObj != NULL) {
        sp<IBinder> binder = ibinderForJavaObject(env, httpServiceBinderObj);
        httpService = interface_cast<IMediaHTTPService>(binder);
    }
    status_t opStatus =
            mp->setDataSource(
                    httpService,
                    pathStr,
                    headersVector.size() > 0? &headersVector : NULL);
    process_media_player_call(
            env, thiz, opStatus, "java/io/IOException",
            "setDataSource failed." );
}
Example 8: autoLock
status_t MPEG2TSExtractor::feedMore() {
    Mutex::Autolock autoLock(mLock);
    uint8_t packet[kTSPacketSize];
    ssize_t n = mDataSource->readAt(mOffset, packet, kTSPacketSize);
    if (n < (ssize_t)kTSPacketSize) {
        if (n >= 0) {
            mParser->signalEOS(ERROR_END_OF_STREAM);
        }
        return (n < 0) ? (status_t)n : ERROR_END_OF_STREAM;
    }
    ATSParser::SyncEvent event(mOffset);
    mOffset += n;
    status_t err = mParser->feedTSPacket(packet, kTSPacketSize, &event);
    if (event.isInit()) {
        for (size_t i = 0; i < mSourceImpls.size(); ++i) {
            if (mSourceImpls[i].get() == event.getMediaSource().get()) {
                KeyedVector<int64_t, off64_t> *syncPoints = &mSyncPoints.editItemAt(i);
                syncPoints->add(event.getTimeUs(), event.getOffset());
                // We're keeping the size of the sync points at most 5mb per track.
                size_t size = syncPoints->size();
                if (size >= 327680) {
                    int64_t firstTimeUs = syncPoints->keyAt(0);
                    int64_t lastTimeUs = syncPoints->keyAt(size - 1);
                    if (event.getTimeUs() - firstTimeUs > lastTimeUs - event.getTimeUs()) {
                        syncPoints->removeItemsAt(0, 4096);
                    } else {
                        syncPoints->removeItemsAt(size - 4096, 4096);
                    }
                }
                break;
            }
        }
    }
    return err;
}
Example 9: generate
void generate(const KeyedVector<String8, Vector<SplitDescription> >& splits, const String8& base) {
    Vector<SplitDescription> allSplits;
    const size_t apkSplitCount = splits.size();
    for (size_t i = 0; i < apkSplitCount; i++) {
        allSplits.appendVector(splits[i]);
    }
    const SplitSelector selector(allSplits);
    KeyedVector<SplitDescription, sp<Rule> > rules(selector.getRules());
    bool first = true;
    fprintf(stdout, "[\n");
    for (size_t i = 0; i < apkSplitCount; i++) {
        if (splits.keyAt(i) == base) {
            // Skip the base.
            continue;
        }
        if (!first) {
            fprintf(stdout, ",\n");
        }
        first = false;
        sp<Rule> masterRule = new Rule();
        masterRule->op = Rule::OR_SUBRULES;
        const Vector<SplitDescription>& splitDescriptions = splits[i];
        const size_t splitDescriptionCount = splitDescriptions.size();
        for (size_t j = 0; j < splitDescriptionCount; j++) {
            masterRule->subrules.add(rules.valueFor(splitDescriptions[j]));
        }
        masterRule = Rule::simplify(masterRule);
        fprintf(stdout, " {\n \"path\": \"%s\",\n \"rules\": %s\n }",
                splits.keyAt(i).string(),
                masterRule->toJson(2).string());
    }
    fprintf(stdout, "\n]\n");
}
Example 10: write
status_t TiffWriter::write(Output* out, StripSource** sources, size_t sourcesCount,
        Endianness end) {
    status_t ret = OK;
    EndianOutput endOut(out, end);
    if (mIfd == NULL) {
        ALOGE("%s: Tiff header is empty.", __FUNCTION__);
        return BAD_VALUE;
    }
    uint32_t totalSize = getTotalSize();
    KeyedVector<uint32_t, uint32_t> offsetVector;
    for (size_t i = 0; i < mNamedIfds.size(); ++i) {
        if (mNamedIfds[i]->uninitializedOffsets()) {
            uint32_t stripSize = mNamedIfds[i]->getStripSize();
            if (mNamedIfds[i]->setStripOffset(totalSize) != OK) {
                ALOGE("%s: Could not set strip offsets.", __FUNCTION__);
                return BAD_VALUE;
            }
            totalSize += stripSize;
            WORD_ALIGN(totalSize);
            offsetVector.add(mNamedIfds.keyAt(i), totalSize);
        }
    }
    size_t offVecSize = offsetVector.size();
    if (offVecSize != sourcesCount) {
        ALOGE("%s: Mismatch between number of IFDs with uninitialized strips (%zu) and"
                " sources (%zu).", __FUNCTION__, offVecSize, sourcesCount);
        return BAD_VALUE;
    }
    BAIL_ON_FAIL(writeFileHeader(endOut), ret);
    uint32_t offset = FILE_HEADER_SIZE;
    sp<TiffIfd> ifd = mIfd;
    while(ifd != NULL) {
        BAIL_ON_FAIL(ifd->writeData(offset, &endOut), ret);
        offset += ifd->getSize();
        ifd = ifd->getNextIfd();
    }
    if (LOG_NDEBUG == 0) {
        log();
    }
    for (size_t i = 0; i < offVecSize; ++i) {
        uint32_t ifdKey = offsetVector.keyAt(i);
        uint32_t sizeToWrite = mNamedIfds[ifdKey]->getStripSize();
        bool found = false;
        for (size_t j = 0; j < sourcesCount; ++j) {
            if (sources[j]->getIfd() == ifdKey) {
                if ((ret = sources[i]->writeToStream(endOut, sizeToWrite)) != OK) {
                    ALOGE("%s: Could not write to stream, received %d.", __FUNCTION__, ret);
                    return ret;
                }
                ZERO_TILL_WORD(&endOut, sizeToWrite, ret);
                found = true;
                break;
            }
        }
        if (!found) {
            ALOGE("%s: No stream for byte strips for IFD %u", __FUNCTION__, ifdKey);
            return BAD_VALUE;
        }
        assert(offsetVector[i] == endOut.getCurrentOffset());
    }
    return ret;
}
Example 11: decode
static int decode(
        const android::sp<android::ALooper> &looper,
        const char *path,
        bool useAudio,
        bool useVideo,
        const android::sp<android::Surface> &surface) {
    using namespace android;
    static int64_t kTimeout = 500ll;
    sp<NuMediaExtractor> extractor = new NuMediaExtractor;
    if (extractor->setDataSource(path) != OK) {
        fprintf(stderr, "unable to instantiate extractor.\n");
        return 1;
    }
    KeyedVector<size_t, CodecState> stateByTrack;
    bool haveAudio = false;
    bool haveVideo = false;
    for (size_t i = 0; i < extractor->countTracks(); ++i) {
        sp<AMessage> format;
        status_t err = extractor->getTrackFormat(i, &format);
        CHECK_EQ(err, (status_t)OK);
        AString mime;
        CHECK(format->findString("mime", &mime));
        bool isAudio = !strncasecmp(mime.c_str(), "audio/", 6);
        bool isVideo = !strncasecmp(mime.c_str(), "video/", 6);
        if (useAudio && !haveAudio && isAudio) {
            haveAudio = true;
        } else if (useVideo && !haveVideo && isVideo) {
            haveVideo = true;
        } else {
            continue;
        }
        ALOGV("selecting track %d", i);
        err = extractor->selectTrack(i);
        CHECK_EQ(err, (status_t)OK);
        CodecState *state =
            &stateByTrack.editValueAt(stateByTrack.add(i, CodecState()));
        state->mNumBytesDecoded = 0;
        state->mNumBuffersDecoded = 0;
        state->mIsAudio = isAudio;
        state->mCodec = MediaCodec::CreateByType(
                looper, mime.c_str(), false /* encoder */);
        CHECK(state->mCodec != NULL);
        err = state->mCodec->configure(
                format, isVideo ? surface : NULL,
                NULL /* crypto */,
                0 /* flags */);
        CHECK_EQ(err, (status_t)OK);
        state->mSignalledInputEOS = false;
        state->mSawOutputEOS = false;
    }
    CHECK(!stateByTrack.isEmpty());
    int64_t startTimeUs = ALooper::GetNowUs();
    for (size_t i = 0; i < stateByTrack.size(); ++i) {
        CodecState *state = &stateByTrack.editValueAt(i);
        sp<MediaCodec> codec = state->mCodec;
        CHECK_EQ((status_t)OK, codec->start());
        CHECK_EQ((status_t)OK, codec->getInputBuffers(&state->mInBuffers));
        CHECK_EQ((status_t)OK, codec->getOutputBuffers(&state->mOutBuffers));
        ALOGV("got %d input and %d output buffers",
                state->mInBuffers.size(), state->mOutBuffers.size());
    }
    bool sawInputEOS = false;
    for (;;) {
        if (!sawInputEOS) {
            size_t trackIndex;
            status_t err = extractor->getSampleTrackIndex(&trackIndex);
            if (err != OK) {
                ALOGV("saw input eos");
                sawInputEOS = true;
            } else {
                CodecState *state = &stateByTrack.editValueFor(trackIndex);
                size_t index;
                err = state->mCodec->dequeueInputBuffer(&index, kTimeout);
//......... (remainder of this example omitted) .........