本文整理汇总了C++中StringTokenizer::getNext方法的典型用法代码示例。如果您正苦于以下问题:C++ StringTokenizer::getNext方法的具体用法?C++ StringTokenizer::getNext怎么用?C++ StringTokenizer::getNext使用的例子?那么,这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类 StringTokenizer 的用法示例。
在下文中一共展示了StringTokenizer::getNext方法的2个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C++代码示例。
示例1: GetQuery
// Builds a language model from the indexed text in the range [start, end].
// Issues an "@get[filtered] start end" query against the index, tokenizes
// every response line on spaces, and accumulates term statistics via
// addTerm/updateTerm. The final corpusSize is recomputed from the collected
// term frequencies (tag tokens are excluded, so the initial estimate is wrong).
//
// index   - index to read from (queried with GOD privileges)
// start   - first offset of the range (inclusive)
// end     - last offset of the range (inclusive)
// stemmed - whether the terms fed into this model are stemmed
LanguageModel::LanguageModel(Index *index, offset start, offset end, bool stemmed) {
	char line[Query::MAX_RESPONSELINE_LENGTH + 4];
	initialize();
	this->corpusSize = (end - start + 1);
	this->documentCount = 1;
	this->stemmed = stemmed;

	// Build the query body "start end". snprintf (instead of sprintf)
	// guarantees we never overrun the 64-byte buffer.
	char queryBody[64];
	snprintf(queryBody, sizeof(queryBody), OFFSET_FORMAT " " OFFSET_FORMAT, start, end);
	const char *modifiers[2] = { "filtered", NULL };

	// Stack allocation instead of new/delete: no leak possible, and the
	// query is destroyed automatically when the constructor returns.
	GetQuery query(index, "get", modifiers, queryBody, Index::GOD, -1);
	if (query.parse()) {
		// Canary byte: assert below verifies getNextLine never overruns the buffer.
		line[sizeof(line) - 1] = 0;
		while (query.getNextLine(line)) {
			assert(line[sizeof(line) - 1] == 0);
			// Tokenize on spaces; skip markup tokens (anything containing '<' or '>').
			StringTokenizer tok(line, " ");
			while (tok.hasNext()) {
				char *t = tok.getNext();
				if ((strchr(t, '<') == NULL) && (strchr(t, '>') == NULL)) {
					if (getTermID(t) < 0)
						addTerm(t, 1, 1);    // first occurrence: TF = 1, DF = 1
					else
						updateTerm(t, 1, 0); // already known: bump TF only
				}
			}
		}
	}
	else {
		log(LOG_ERROR, LOG_ID, "Parsing failed in LanguageModel(Index*, offset, offset, bool)");
		fprintf(stderr, "%lld %lld\n", static_cast<long long>(start), static_cast<long long>(end));
	}

	// Recompute the corpus size from the term frequencies actually collected.
	this->corpusSize = 0;
	for (int i = 0; i < termSlotsUsed; i++)
		this->corpusSize += terms[i].termFrequency;
} //end of LanguageModel(Index*, offset, offset, bool)
示例2: processCoreQueryDocLevel
void DesktopQuery::processCoreQueryDocLevel() {
const char *getMod[2] = {"filtered", NULL};
char body[256];
char *mod[2];
mod[0] = (char*)malloc(32);
sprintf(mod[0], "count=%d", count);
mod[1] = NULL;
char *qs = queryString;
if (strstr(qs, " by ") != NULL)
qs = strstr(qs, " by ") + 4;
if (strstr(qs, " with ") != NULL)
*strstr(qs, " with ") = 0;
TerabyteQuery *q = new TerabyteQuery(index, "bm25tera", (const char**)mod, qs, (VisibleExtents*)NULL, -1);
if (!q->parse()) {
results = typed_malloc(ScoredExtent, 1);
count = 0;
}
else {
count = q->getCount();
results = typed_malloc(ScoredExtent, count);
for (int i = 0; i < count; i++) {
results[i] = q->getResult(i);
results[i].containerFrom = results[i].from;
results[i].containerTo = results[i].to;
if ((i < resultStart) || (i > resultEnd))
continue;
double maxProxiScore = 0.0;
sprintf(body, OFFSET_FORMAT " " OFFSET_FORMAT, results[i].from, results[i].to);
// construct an @get query for the current result extent and use it to
// find a good passage to return
GetQuery *gq = new GetQuery(index, "get", getMod, body, visibleExtents, -1);
if (!gq->parse())
results[i].to = MIN(results[i].from + 32, results[i].to);
else {
char buffer[FilteredInputStream::MAX_FILTERED_RANGE_SIZE + 32];
if (!gq->getNextLine(buffer))
results[i].to = MIN(results[i].from + 32, results[i].to);
else {
char *queryTerms[MAX_SCORER_COUNT];
offset lastForTerm[MAX_SCORER_COUNT];
for (int k = 0; k < elementCount; k++) {
queryTerms[k] = elementQueries[k]->getQueryString();
int outPos = 0;
for (int inPos = 0; queryTerms[k][inPos] != 0; inPos++)
if ((queryTerms[k][inPos] <= 0) || (queryTerms[k][inPos] > '"'))
queryTerms[k][outPos++] = queryTerms[k][inPos];
queryTerms[k][outPos] = 0;
lastForTerm[k] = -999999999;
}
offset pos = results[i].from;
// tokenize the result of the @get query and try to find a good passage within it
StringTokenizer *tok = new StringTokenizer(buffer, " ");
char *token;
while ((token = tok->getNext()) != NULL) {
for (int k = 0; k < elementCount; k++)
if (Stemmer::stemEquivalent(token, queryTerms[k], LANGUAGE_ENGLISH))
lastForTerm[k] = pos;
offset start = pos;
double score = 0.0;
for (int t = 0; t < elementCount; t++)
if (lastForTerm[t] >= pos - 12) {
start = MIN(lastForTerm[t], start);
score += internalWeights[t] + 100 - (pos - lastForTerm[t]);
}
if (score > maxProxiScore) {
maxProxiScore = score;
results[i].from = start;
results[i].to = pos;
}
pos++;
}
delete tok;
results[i].to = MIN(results[i].to, results[i].from + 12);
for (int k = 0; k < elementCount; k++)
free(queryTerms[k]);
}
} // end else [gq->parse() == true]
delete gq;
}
} // end else [q->parse() == true]
delete q;
} // end of exexuteQueryDocLevel()