本文整理汇总了C++中DocumentPtr::add方法的典型用法代码示例。如果您正苦于以下问题:C++ DocumentPtr::add方法的具体用法?C++ DocumentPtr::add怎么用?C++ DocumentPtr::add使用的例子?那么,这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类DocumentPtr的用法示例。
在下文中一共展示了DocumentPtr::add方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C++代码示例。
示例1: PrefixInBooleanQueryFixture
PrefixInBooleanQueryFixture()
{
directory = newLucene<RAMDirectory>();
IndexWriterPtr writer = newLucene<IndexWriter>(directory, newLucene<WhitespaceAnalyzer>(), true, IndexWriter::MaxFieldLengthLIMITED);
for (int32_t i = 0; i < 5137; ++i)
{
DocumentPtr doc = newLucene<Document>();
doc->add(newLucene<Field>(FIELD, L"meaninglessnames", Field::STORE_YES, Field::INDEX_NOT_ANALYZED));
writer->addDocument(doc);
}
{
DocumentPtr doc = newLucene<Document>();
doc->add(newLucene<Field>(FIELD, L"tangfulin", Field::STORE_YES, Field::INDEX_NOT_ANALYZED));
writer->addDocument(doc);
}
for (int32_t i = 5138; i < 11377; ++i)
{
DocumentPtr doc = newLucene<Document>();
doc->add(newLucene<Field>(FIELD, L"meaninglessnames", Field::STORE_YES, Field::INDEX_NOT_ANALYZED));
writer->addDocument(doc);
}
{
DocumentPtr doc = newLucene<Document>();
doc->add(newLucene<Field>(FIELD, L"tangfulin", Field::STORE_YES, Field::INDEX_NOT_ANALYZED));
writer->addDocument(doc);
}
writer->close();
}
示例2: ParallelTermEnumTestFixture
ParallelTermEnumTestFixture()
{
RAMDirectoryPtr rd1 = newLucene<RAMDirectory>();
IndexWriterPtr iw1 = newLucene<IndexWriter>(rd1, newLucene<SimpleAnalyzer>(), true, IndexWriter::MaxFieldLengthLIMITED);
DocumentPtr doc = newLucene<Document>();
doc->add(newLucene<Field>(L"field1", L"the quick brown fox jumps", Field::STORE_YES, Field::INDEX_ANALYZED));
doc->add(newLucene<Field>(L"field2", L"the quick brown fox jumps", Field::STORE_YES, Field::INDEX_ANALYZED));
doc->add(newLucene<Field>(L"field4", L"", Field::STORE_NO, Field::INDEX_ANALYZED));
iw1->addDocument(doc);
iw1->close();
RAMDirectoryPtr rd2 = newLucene<RAMDirectory>();
IndexWriterPtr iw2 = newLucene<IndexWriter>(rd2, newLucene<SimpleAnalyzer>(), true, IndexWriter::MaxFieldLengthLIMITED);
doc = newLucene<Document>();
doc->add(newLucene<Field>(L"field0", L"", Field::STORE_NO, Field::INDEX_ANALYZED));
doc->add(newLucene<Field>(L"field1", L"the fox jumps over the lazy dog", Field::STORE_YES, Field::INDEX_ANALYZED));
doc->add(newLucene<Field>(L"field3", L"the fox jumps over the lazy dog", Field::STORE_YES, Field::INDEX_ANALYZED));
iw2->addDocument(doc);
iw2->close();
this->ir1 = IndexReader::open(rd1, true);
this->ir2 = IndexReader::open(rd2, true);
}
示例3: FieldCacheSanityCheckerTestFixture
FieldCacheSanityCheckerTestFixture()
{
RAMDirectoryPtr dirA = newLucene<RAMDirectory>();
RAMDirectoryPtr dirB = newLucene<RAMDirectory>();
IndexWriterPtr wA = newLucene<IndexWriter>(dirA, newLucene<WhitespaceAnalyzer>(), true, IndexWriter::MaxFieldLengthLIMITED);
IndexWriterPtr wB = newLucene<IndexWriter>(dirB, newLucene<WhitespaceAnalyzer>(), true, IndexWriter::MaxFieldLengthLIMITED);
int64_t theLong = LLONG_MAX;
double theDouble = DBL_MAX;
uint8_t theByte = UCHAR_MAX;
int32_t theInt = INT_MAX;
for (int32_t i = 0; i < NUM_DOCS; ++i)
{
DocumentPtr doc = newLucene<Document>();
doc->add(newLucene<Field>(L"theLong", StringUtils::toString(theLong--), Field::STORE_NO, Field::INDEX_NOT_ANALYZED));
doc->add(newLucene<Field>(L"theDouble", StringUtils::toString(theDouble--), Field::STORE_NO, Field::INDEX_NOT_ANALYZED));
doc->add(newLucene<Field>(L"theByte", StringUtils::toString(theByte--), Field::STORE_NO, Field::INDEX_NOT_ANALYZED));
doc->add(newLucene<Field>(L"theInt", StringUtils::toString(theInt--), Field::STORE_NO, Field::INDEX_NOT_ANALYZED));
if (i % 3 == 0)
wA->addDocument(doc);
else
wB->addDocument(doc);
}
wA->close();
wB->close();
readerA = IndexReader::open(dirA, true);
readerB = IndexReader::open(dirB, true);
readerX = newLucene<MultiReader>(newCollection<IndexReaderPtr>(readerA, readerB));
}
示例4: runTest
// Stress test: run two indexer threads and two searcher threads concurrently
// against a single index, then verify that none of them hit an exception.
static void runTest(DirectoryPtr directory)
{
    AnalyzerPtr analyzer = newLucene<SimpleAnalyzer>();
    IndexWriterPtr writer = newLucene<MockIndexWriter>(directory, analyzer, true, IndexWriter::MaxFieldLengthUNLIMITED);
    // Small buffers and merge factor force frequent flushes/merges under load.
    writer->setMaxBufferedDocs(7);
    writer->setMergeFactor(3);

    // Establish a base index of 100 docs, committing periodically mid-stream.
    for (int32_t docNum = 0; docNum < 100; ++docNum)
    {
        DocumentPtr doc = newLucene<Document>();
        doc->add(newLucene<Field>(L"id", StringUtils::toString(docNum), Field::STORE_YES, Field::INDEX_NOT_ANALYZED));
        doc->add(newLucene<Field>(L"contents", intToEnglish(docNum), Field::STORE_NO, Field::INDEX_ANALYZED));
        if ((docNum - 1) % 7 == 0)
            writer->commit();
        writer->addDocument(doc);
    }
    writer->commit();

    // Sanity-check the base index size before starting the worker threads.
    IndexReaderPtr reader = IndexReader::open(directory, true);
    BOOST_CHECK_EQUAL(100, reader->numDocs());
    reader->close();

    // Launch the four workers; each is started immediately after creation.
    Collection<TimedThreadPtr> threads(Collection<TimedThreadPtr>::newInstance(4));
    IndexerThreadPtr indexerThread1 = newLucene<IndexerThread>(writer);
    threads[0] = indexerThread1;
    indexerThread1->start();
    IndexerThreadPtr indexerThread2 = newLucene<IndexerThread>(writer);
    threads[1] = indexerThread2;
    indexerThread2->start();
    SearcherThreadPtr searcherThread1 = newLucene<SearcherThread>(directory);
    threads[2] = searcherThread1;
    searcherThread1->start();
    SearcherThreadPtr searcherThread2 = newLucene<SearcherThread>(directory);
    threads[3] = searcherThread2;
    searcherThread2->start();

    indexerThread1->join();
    indexerThread2->join();
    searcherThread1->join();
    searcherThread2->join();
    writer->close();

    BOOST_CHECK(!indexerThread1->failed);  // hit unexpected exception in indexer1
    BOOST_CHECK(!indexerThread2->failed);  // hit unexpected exception in indexer2
    BOOST_CHECK(!searcherThread1->failed); // hit unexpected exception in search1
    BOOST_CHECK(!searcherThread2->failed); // hit unexpected exception in search2
}
示例5: doWork
virtual void doWork()
{
// Update all 100 docs
for (int32_t i = 0; i < 100; ++i)
{
DocumentPtr d = newLucene<Document>();
d->add(newLucene<Field>(L"id", StringUtils::toString(i), Field::STORE_YES, Field::INDEX_NOT_ANALYZED));
d->add(newLucene<Field>(L"contents", intToEnglish(i), Field::STORE_NO, Field::INDEX_ANALYZED));
writer->updateDocument(newLucene<Term>(L"id", StringUtils::toString(i)), d);
}
}
示例6: createIndexNoClose
// Near-real-time reader test: a reader obtained from IndexWriter::getReader()
// must reflect updates immediately, and isCurrent() must report false as soon
// as the underlying index changes after the reader was opened.
TEST_F(IndexWriterReaderTest, testUpdateDocument) {
bool optimize = true;
DirectoryPtr dir1 = newLucene<MockRAMDirectory>();
IndexWriterPtr writer = newLucene<IndexWriter>(dir1, newLucene<WhitespaceAnalyzer>(), IndexWriter::MaxFieldLengthLIMITED);
// create the index
createIndexNoClose(!optimize, L"index1", writer);
// get a reader
IndexReaderPtr r1 = writer->getReader();
EXPECT_TRUE(r1->isCurrent());
// Replace document 10's "id" field (original value id10) with 8000.
String id10 = r1->document(10)->getField(L"id")->stringValue();
DocumentPtr newDoc = r1->document(10);
newDoc->removeField(L"id");
newDoc->add(newLucene<Field>(L"id", StringUtils::toString(8000), Field::STORE_YES, Field::INDEX_NOT_ANALYZED));
writer->updateDocument(newLucene<Term>(L"id", id10), newDoc);
// The update makes r1 stale...
EXPECT_TRUE(!r1->isCurrent());
// ...but a freshly obtained NRT reader sees the change.
IndexReaderPtr r2 = writer->getReader();
EXPECT_TRUE(r2->isCurrent());
EXPECT_EQ(0, count(newLucene<Term>(L"id", id10), r2));
EXPECT_EQ(1, count(newLucene<Term>(L"id", StringUtils::toString(8000)), r2));
r1->close();
writer->close();
// Closing the writer does not invalidate the already-open NRT reader.
EXPECT_TRUE(r2->isCurrent());
// A directory-opened reader agrees with the NRT reader's view of the update.
IndexReaderPtr r3 = IndexReader::open(dir1, true);
EXPECT_TRUE(r3->isCurrent());
EXPECT_TRUE(r2->isCurrent());
EXPECT_EQ(0, count(newLucene<Term>(L"id", id10), r3));
EXPECT_EQ(1, count(newLucene<Term>(L"id", StringUtils::toString(8000)), r3));
// Reopen the writer and add a document: both readers stay current until the
// writer's close() commits the change, at which point they become stale.
writer = newLucene<IndexWriter>(dir1, newLucene<WhitespaceAnalyzer>(), IndexWriter::MaxFieldLengthLIMITED);
DocumentPtr doc = newLucene<Document>();
doc->add(newLucene<Field>(L"field", L"a b c", Field::STORE_NO, Field::INDEX_ANALYZED));
writer->addDocument(doc);
EXPECT_TRUE(r2->isCurrent());
EXPECT_TRUE(r3->isCurrent());
writer->close();
EXPECT_TRUE(!r2->isCurrent());
EXPECT_TRUE(!r3->isCurrent());
r2->close();
r3->close();
dir1->close();
}
示例7: runTest
// Repeatedly bulk-loads documents and runs NUM_THREADS concurrent optimize
// workers against one writer, checking the doc count and optimized state
// after every iteration.
void runTest(DirectoryPtr directory, MergeSchedulerPtr merger)
{
    IndexWriterPtr writer = newLucene<IndexWriter>(directory, analyzer, true, IndexWriter::MaxFieldLengthUNLIMITED);
    writer->setMaxBufferedDocs(2);
    if (merger)
        writer->setMergeScheduler(merger);

    for (int32_t iteration = 0; iteration < NUM_ITER; ++iteration)
    {
        // Load 200 docs with merging effectively disabled (huge merge factor).
        writer->setMergeFactor(1000);
        for (int32_t docNum = 0; docNum < 200; ++docNum)
        {
            DocumentPtr doc = newLucene<Document>();
            doc->add(newLucene<Field>(L"id", StringUtils::toString(docNum), Field::STORE_YES, Field::INDEX_NOT_ANALYZED));
            doc->add(newLucene<Field>(L"contents", intToEnglish(docNum), Field::STORE_NO, Field::INDEX_ANALYZED));
            writer->addDocument(doc);
        }
        writer->setMergeFactor(4);

        // Create all workers first, then start them all, then join them all.
        Collection<LuceneThreadPtr> workers = Collection<LuceneThreadPtr>::newInstance(NUM_THREADS);
        for (int32_t workerNum = 0; workerNum < NUM_THREADS; ++workerNum)
            workers[workerNum] = newLucene<OptimizeThread>(NUM_ITER2, iteration, workerNum, writer, writer);
        for (int32_t workerNum = 0; workerNum < NUM_THREADS; ++workerNum)
            workers[workerNum]->start();
        for (int32_t workerNum = 0; workerNum < NUM_THREADS; ++workerNum)
            workers[workerNum]->join();

        // Each worker adds documents; this closed form is the cumulative total
        // across all completed iterations.
        int32_t expectedDocCount = (int32_t)((1 + iteration) * (200 + 8 * NUM_ITER2 * (int32_t)(((double)NUM_THREADS / 2.0) * (double)(1 + NUM_THREADS))));
        BOOST_CHECK_EQUAL(expectedDocCount, writer->maxDoc());

        // Reopen the writer in append mode and verify the optimized index.
        writer->close();
        writer = newLucene<IndexWriter>(directory, analyzer, false, IndexWriter::MaxFieldLengthUNLIMITED);
        writer->setMaxBufferedDocs(2);
        IndexReaderPtr reader = IndexReader::open(directory, true);
        BOOST_CHECK(reader->isOptimized());
        BOOST_CHECK_EQUAL(expectedDocCount, reader->numDocs());
        reader->close();
    }
    writer->close();
}
示例8: createDocument
// Build a test document: an "id" key, the owning index's name (with term
// vectors), then numFields analysed text fields whose content keeps growing
// as terms are appended to the shared buffer.
static DocumentPtr createDocument(int32_t n, const String& indexName, int32_t numFields) {
    DocumentPtr document = newLucene<Document>();
    document->add(newLucene<Field>(L"id", StringUtils::toString(n), Field::STORE_YES, Field::INDEX_NOT_ANALYZED));
    document->add(newLucene<Field>(L"indexname", indexName, Field::STORE_YES, Field::INDEX_NOT_ANALYZED, Field::TERM_VECTOR_WITH_POSITIONS_OFFSETS));
    StringStream contents;
    contents << L"a" << n;
    document->add(newLucene<Field>(L"field1", contents.str(), Field::STORE_YES, Field::INDEX_ANALYZED, Field::TERM_VECTOR_WITH_POSITIONS_OFFSETS));
    contents << L" b" << n;
    // field2..fieldN all share the longer "a<n> b<n>" content.
    for (int32_t fieldNum = 1; fieldNum < numFields; ++fieldNum)
        document->add(newLucene<Field>(L"field" + StringUtils::toString(fieldNum + 1), contents.str(), Field::STORE_YES, Field::INDEX_ANALYZED, Field::TERM_VECTOR_WITH_POSITIONS_OFFSETS));
    return document;
}
示例9: createDocument
// Build a test document exercising three index modes for the same content
// (analysed, not-analysed-no-norms, and not indexed), plus numFields extra
// analysed fields whose content grows as terms are appended.
DocumentPtr createDocument(int32_t n, int32_t numFields) {
    DocumentPtr document = newLucene<Document>();
    StringStream contents;
    contents << L"a" << n;
    document->add(newLucene<Field>(L"field1", contents.str(), Field::STORE_YES, Field::INDEX_ANALYZED));
    document->add(newLucene<Field>(L"fielda", contents.str(), Field::STORE_YES, Field::INDEX_NOT_ANALYZED_NO_NORMS));
    document->add(newLucene<Field>(L"fieldb", contents.str(), Field::STORE_YES, Field::INDEX_NO));
    contents << L" b" << n;
    // field2..fieldN all share the longer "a<n> b<n>" content.
    for (int32_t fieldNum = 1; fieldNum < numFields; ++fieldNum)
        document->add(newLucene<Field>(L"field" + StringUtils::toString(fieldNum + 1), contents.str(), Field::STORE_YES, Field::INDEX_ANALYZED));
    return document;
}
示例10: createDocument
// Pair the analysed text with a second-resolution timestamp field.
DocumentPtr createDocument(const String& text, int64_t time) {
    DocumentPtr document = newLucene<Document>();
    // Analysed, stored text content.
    document->add(newLucene<Field>(TEXT_FIELD, text, Field::STORE_YES, Field::INDEX_ANALYZED));
    // Date/time encoded at second resolution so it sorts/filters lexically.
    String encodedTime = DateTools::timeToString(time, DateTools::RESOLUTION_SECOND);
    document->add(newLucene<Field>(DATE_TIME_FIELD, encodedTime, Field::STORE_YES, Field::INDEX_NOT_ANALYZED));
    return document;
}
示例11: checkAgainstRAMDirectory
/// Build a randomish document for both RAMDirectory and MemoryIndex, and run all the queries against it.
void checkAgainstRAMDirectory() {
StringStream fooField;
StringStream termField;
// add up to 250 terms to field "foo"
int32_t fieldCount = random->nextInt(250) + 1;
for (int32_t i = 0; i < fieldCount; ++i) {
fooField << L" " << randomTerm();
}
// add up to 250 terms to field "foo"
int32_t termCount = random->nextInt(250) + 1;
for (int32_t i = 0; i < termCount; ++i) {
termField << L" " << randomTerm();
}
RAMDirectoryPtr ramdir = newLucene<RAMDirectory>();
AnalyzerPtr analyzer = randomAnalyzer();
IndexWriterPtr writer = newLucene<IndexWriter>(ramdir, analyzer, IndexWriter::MaxFieldLengthUNLIMITED);
DocumentPtr doc = newLucene<Document>();
FieldPtr field1 = newLucene<Field>(L"foo", fooField.str(), Field::STORE_NO, Field::INDEX_ANALYZED);
FieldPtr field2 = newLucene<Field>(L"term", termField.str(), Field::STORE_NO, Field::INDEX_ANALYZED);
doc->add(field1);
doc->add(field2);
writer->addDocument(doc);
writer->close();
MemoryIndexPtr memory = newLucene<MemoryIndex>();
memory->addField(L"foo", fooField.str(), analyzer);
memory->addField(L"term", termField.str(), analyzer);
checkAllQueries(memory, ramdir, analyzer);
}
示例12: createIndex
void createIndex(int32_t numHits) {
int32_t numDocs = 500;
DirectoryPtr directory = newLucene<SeekCountingDirectory>();
IndexWriterPtr writer = newLucene<IndexWriter>(directory, newLucene<WhitespaceAnalyzer>(), true, IndexWriter::MaxFieldLengthLIMITED);
writer->setUseCompoundFile(false);
writer->setMaxBufferedDocs(10);
for (int32_t i = 0; i < numDocs; ++i) {
DocumentPtr doc = newLucene<Document>();
String content;
if (i % (numDocs / numHits) == 0) {
// add a document that matches the query "term1 term2"
content = term1 + L" " + term2;
} else if (i % 15 == 0) {
// add a document that only contains term1
content = term1 + L" " + term1;
} else {
// add a document that contains term2 but not term 1
content = term3 + L" " + term2;
}
doc->add(newLucene<Field>(field, content, Field::STORE_YES, Field::INDEX_ANALYZED));
writer->addDocument(doc);
}
// make sure the index has only a single segment
writer->optimize();
writer->close();
SegmentReaderPtr reader = SegmentReader::getOnlySegmentReader(directory);
searcher = newLucene<IndexSearcher>(reader);
}
示例13:
// Ten identical documents "a b": seeking to either term must walk all ten
// docs and report the term's position (0 for "a", 1 for "b") in each.
TEST_F(LazyProxSkippingTest, testSeek) {
    DirectoryPtr directory = newLucene<RAMDirectory>();
    IndexWriterPtr writer = newLucene<IndexWriter>(directory, newLucene<WhitespaceAnalyzer>(), true, IndexWriter::MaxFieldLengthLIMITED);
    for (int32_t docNum = 0; docNum < 10; ++docNum) {
        DocumentPtr doc = newLucene<Document>();
        doc->add(newLucene<Field>(field, L"a b", Field::STORE_YES, Field::INDEX_ANALYZED));
        writer->addDocument(doc);
    }
    writer->close();

    IndexReaderPtr reader = IndexReader::open(directory, true);
    TermPositionsPtr tp = reader->termPositions();

    // Seek to a term, then verify doc ids and the single position per doc.
    auto expectPositions = [&](const wchar_t* termText, int32_t expectedPosition) {
        tp->seek(newLucene<Term>(field, termText));
        for (int32_t docNum = 0; docNum < 10; ++docNum) {
            tp->next();
            EXPECT_EQ(tp->doc(), docNum);
            EXPECT_EQ(tp->nextPosition(), expectedPosition);
        }
    };
    expectPositions(L"b", 1);
    expectPositions(L"a", 0);
}
示例14: FilterPtr
// Boolean scorer smoke test: (category:1 OR category:2) AND NOT category:9
// against four single-category docs must return exactly two hits.
TEST_F(BooleanScorerTest, testMethod) {
    static const String FIELD = L"category";
    RAMDirectoryPtr directory = newLucene<RAMDirectory>();
    Collection<String> categories = newCollection<String>(L"1", L"2", L"3", L"4");
    IndexWriterPtr writer = newLucene<IndexWriter>(directory, newLucene<WhitespaceAnalyzer>(), true, IndexWriter::MaxFieldLengthLIMITED);
    for (int32_t docNum = 0; docNum < categories.size(); ++docNum) {
        DocumentPtr doc = newLucene<Document>();
        doc->add(newLucene<Field>(FIELD, categories[docNum], Field::STORE_YES, Field::INDEX_NOT_ANALYZED));
        writer->addDocument(doc);
    }
    writer->close();

    // Inner disjunction: category is "1" or "2".
    BooleanQueryPtr innerQuery = newLucene<BooleanQuery>();
    innerQuery->add(newLucene<TermQuery>(newLucene<Term>(FIELD, L"1")), BooleanClause::SHOULD);
    innerQuery->add(newLucene<TermQuery>(newLucene<Term>(FIELD, L"2")), BooleanClause::SHOULD);

    // Outer query requires the disjunction and excludes the absent "9".
    BooleanQueryPtr outerQuery = newLucene<BooleanQuery>();
    outerQuery->add(innerQuery, BooleanClause::MUST);
    outerQuery->add(newLucene<TermQuery>(newLucene<Term>(FIELD, L"9")), BooleanClause::MUST_NOT);

    IndexSearcherPtr searcher = newLucene<IndexSearcher>(directory, true);
    Collection<ScoreDocPtr> hits = searcher->search(outerQuery, FilterPtr(), 1000)->scoreDocs;
    EXPECT_EQ(2, hits.size());
}
示例15: setup
/// One-time setup to initialise static members
void setup() {
    // Raise the clause limit to the theoretical maximum required for the
    // 8-bit precision step (see docs for the number).
    BooleanQuery::setMaxClauseCount(3 * 255 * 2 + 255);
    directory = newLucene<RAMDirectory>();
    IndexWriterPtr writer = newLucene<IndexWriter>(directory, newLucene<WhitespaceAnalyzer>(), true, IndexWriter::MaxFieldLengthUNLIMITED);

    // Stored fields at several trie precision steps; the INT_MAX step serves
    // as the no-trie baseline (hence "fieldNoTrie").
    NumericFieldPtr field8 = newLucene<NumericField>(L"field8", 8, Field::STORE_YES, true);
    NumericFieldPtr field4 = newLucene<NumericField>(L"field4", 4, Field::STORE_YES, true);
    NumericFieldPtr field2 = newLucene<NumericField>(L"field2", 2, Field::STORE_YES, true);
    NumericFieldPtr fieldNoTrie = newLucene<NumericField>(L"field" + StringUtils::toString(INT_MAX), INT_MAX, Field::STORE_YES, true);

    // Unstored ascending fields (step 1, starting at -noDocs/2) used to test
    // the correct splitting of ranges and inclusive/exclusive bounds.
    NumericFieldPtr ascfield8 = newLucene<NumericField>(L"ascfield8", 8, Field::STORE_NO, true);
    NumericFieldPtr ascfield4 = newLucene<NumericField>(L"ascfield4", 4, Field::STORE_NO, true);
    NumericFieldPtr ascfield2 = newLucene<NumericField>(L"ascfield2", 2, Field::STORE_NO, true);

    // One reusable document carries all seven fields; their values are
    // rewritten before each addDocument call below.
    DocumentPtr doc = newLucene<Document>();
    doc->add(field8);
    doc->add(field4);
    doc->add(field2);
    doc->add(fieldNoTrie);
    doc->add(ascfield8);
    doc->add(ascfield4);
    doc->add(ascfield2);

    // Index noDocs documents with increasing int values.
    for (int32_t docNum = 0; docNum < noDocs; ++docNum) {
        int32_t value = distance * docNum + startOffset;
        field8->setIntValue(value);
        field4->setIntValue(value);
        field2->setIntValue(value);
        fieldNoTrie->setIntValue(value);
        value = docNum - (noDocs / 2);
        ascfield8->setIntValue(value);
        ascfield4->setIntValue(value);
        ascfield2->setIntValue(value);
        writer->addDocument(doc);
    }
    writer->optimize();
    writer->close();
    searcher = newLucene<IndexSearcher>(directory, true);
}