This article collects typical usage examples of the Java class org.apache.lucene.facet.FacetsConfig. If you are wondering what FacetsConfig is for, how to use it, or where to find usage examples, the curated class examples below may help.
FacetsConfig belongs to the org.apache.lucene.facet package. The 14 code examples below are drawn from open-source projects and are sorted by popularity by default.
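Before the project-specific examples, here is a minimal, self-contained sketch of the typical FacetsConfig workflow that most of the examples below follow: configure the dimensions, build each document against a taxonomy writer, and count facet values at search time. It assumes a Lucene 5.x/6.x-era API matching these examples; the "Author" dimension, the in-memory directories, and the class name FacetsConfigBasicUsage are illustrative assumptions, not taken from any project below.
import java.io.IOException;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.facet.FacetField;
import org.apache.lucene.facet.Facets;
import org.apache.lucene.facet.FacetsCollector;
import org.apache.lucene.facet.FacetsConfig;
import org.apache.lucene.facet.taxonomy.FastTaxonomyFacetCounts;
import org.apache.lucene.facet.taxonomy.TaxonomyReader;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyReader;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.RAMDirectory;

public class FacetsConfigBasicUsage {
    public static void main(String[] args) throws IOException {
        Directory indexDir = new RAMDirectory();  // illustrative in-memory directories
        Directory taxoDir = new RAMDirectory();

        // 1. Configure facet dimensions once and reuse the config for indexing and searching
        FacetsConfig config = new FacetsConfig();
        config.setMultiValued("Author", true);    // a document may carry several Author values

        // 2. Index documents: build(taxoWriter, doc) rewrites FacetFields into taxonomy ordinals
        IndexWriter writer = new IndexWriter(indexDir, new IndexWriterConfig(new StandardAnalyzer()));
        DirectoryTaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir);
        Document doc = new Document();
        doc.add(new FacetField("Author", "Bob"));
        doc.add(new FacetField("Author", "Lisa"));
        writer.addDocument(config.build(taxoWriter, doc));
        writer.close();
        taxoWriter.close();

        // 3. Search side: collect hits, then count facet values per dimension
        DirectoryReader reader = DirectoryReader.open(indexDir);
        TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoDir);
        FacetsCollector fc = new FacetsCollector();
        new IndexSearcher(reader).search(new MatchAllDocsQuery(), fc);
        Facets facets = new FastTaxonomyFacetCounts(taxoReader, config, fc);
        System.out.println(facets.getTopChildren(10, "Author"));
        reader.close();
        taxoReader.close();
    }
}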
Example 1: writeLuceneIndexForProject
import org.apache.lucene.facet.FacetsConfig; // import the required package/class
/**
* Stores features from the specified feature file in the specified project's Lucene index.
* Sample query: featureId:rs44022* AND (variationType:del OR variationType:ins)
*
* @param featureFileId the ID of the FeatureFile whose features should be saved
* @param projectId the ID of the project for which to write the index
* @param entries a list of FeatureIndexEntry objects to write to the index
* @throws IOException if the index cannot be written
*/
public void writeLuceneIndexForProject(final Long featureFileId, final long projectId,
final List<? extends FeatureIndexEntry> entries) throws IOException {
try (
StandardAnalyzer analyzer = new StandardAnalyzer();
Directory index = fileManager.createIndexForProject(projectId);
IndexWriter writer = new IndexWriter(index, new IndexWriterConfig(analyzer).setOpenMode(
IndexWriterConfig.OpenMode.CREATE_OR_APPEND))
) {
FacetsConfig facetsConfig = new FacetsConfig();
facetsConfig.setIndexFieldName(FeatureIndexFields.CHR_ID.getFieldName(),
FeatureIndexFields.FACET_CHR_ID.getFieldName());
for (FeatureIndexEntry entry : entries) {
Document document = new Document();
addCommonDocumentFields(document, entry, featureFileId);
if (entry instanceof VcfIndexEntry) {
addVcfDocumentFields(document, entry);
}
writer.addDocument(facetsConfig.build(document));
}
}
}
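As a follow-up to the example above: setIndexFieldName routes the CHR_ID facet dimension into the dedicated FACET_CHR_ID index field, and because build(document) is called without a taxonomy writer, the facet values presumably come from SortedSetDocValuesFacetField instances added in addCommonDocumentFields. Under that assumption, a hedged sketch of counting features per chromosome could look like the hypothetical method below (only the FeatureIndexFields constants and fileManager come from the example; imports are omitted).
// Sketch only: assumes the indexed documents carry SortedSetDocValuesFacetField values
// for the CHR_ID dimension, stored in the FACET_CHR_ID field as configured above.
public FacetResult countFeaturesByChromosome(final long projectId) throws IOException {
    try (Directory index = fileManager.createIndexForProject(projectId);
         DirectoryReader reader = DirectoryReader.open(index)) {
        IndexSearcher searcher = new IndexSearcher(reader);
        SortedSetDocValuesReaderState state = new DefaultSortedSetDocValuesReaderState(
                reader, FeatureIndexFields.FACET_CHR_ID.getFieldName());
        FacetsCollector facetsCollector = new FacetsCollector();
        searcher.search(new MatchAllDocsQuery(), facetsCollector);
        Facets facets = new SortedSetDocValuesFacetCounts(state, facetsCollector);
        // top chromosomes by number of indexed feature entries
        return facets.getTopChildren(10, FeatureIndexFields.CHR_ID.getFieldName());
    }
}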
Example 2: process
import org.apache.lucene.facet.FacetsConfig; // import the required package/class
/**
* Processes an intermediate form by carrying out, on the Lucene instance of
* the shard, the deletes and the inserts (a RAM index) contained in the form.
* @param form the intermediate form containing deletes and a RAM index
* @param facetsConfig the facets configuration used to remap facet ordinals, or null if the form carries no taxonomy
* @throws IOException if the form cannot be merged into the shard index
*/
public void process(IntermediateForm form, FacetsConfig facetsConfig) throws IOException {
if (facetsConfig != null) {
DirectoryTaxonomyWriter.OrdinalMap map = new DirectoryTaxonomyWriter.MemoryOrdinalMap();
// merge the taxonomies
taxoWriter.addTaxonomy(form.getTaxoDirectory(), map);
int[] ordinalMap = map.getMap();
DirectoryReader reader = DirectoryReader.open(form.getDirectory());
try {
List<AtomicReaderContext> leaves = reader.leaves();
int numReaders = leaves.size();
AtomicReader[] wrappedLeaves = new AtomicReader[numReaders];
for (int i = 0; i < numReaders; i++) {
wrappedLeaves[i] = new OrdinalMappingAtomicReader(leaves.get(i).reader(), ordinalMap, facetsConfig);
}
writer.addIndexes(new MultiReader(wrappedLeaves));
} finally {
reader.close();
}
} else {
writer.addIndexes(new Directory[] { form.getDirectory() });
}
numForms++;
}
Example 3: setUp
import org.apache.lucene.facet.FacetsConfig; // import the required package/class
@Override
@Before
public void setUp() throws Exception {
super.setUp();
publishIndexDir = newDirectory();
publishTaxoDir = newDirectory();
handlerIndexDir = newMockDirectory();
handlerTaxoDir = newMockDirectory();
clientWorkDir = createTempDir("replicationClientTest");
sourceDirFactory = new PerSessionDirectoryFactory(clientWorkDir);
replicator = new LocalReplicator();
callback = new IndexAndTaxonomyReadyCallback(handlerIndexDir, handlerTaxoDir);
handler = new IndexAndTaxonomyReplicationHandler(handlerIndexDir, handlerTaxoDir, callback);
client = new ReplicationClient(replicator, handler, sourceDirFactory);
IndexWriterConfig conf = newIndexWriterConfig(null);
conf.setIndexDeletionPolicy(new SnapshotDeletionPolicy(conf.getIndexDeletionPolicy()));
publishIndexWriter = new IndexWriter(publishIndexDir, conf);
publishTaxoWriter = new SnapshotDirectoryTaxonomyWriter(publishTaxoDir);
config = new FacetsConfig();
config.setHierarchical("A", true);
}
Example 4: merge
import org.apache.lucene.facet.FacetsConfig; // import the required package/class
/**
* Merges the given taxonomy and index directories and commits the changes to
* the given writers.
*/
public static void merge(Directory srcIndexDir, Directory srcTaxoDir, OrdinalMap map, IndexWriter destIndexWriter,
DirectoryTaxonomyWriter destTaxoWriter, FacetsConfig srcConfig) throws IOException {
// merge the taxonomies
destTaxoWriter.addTaxonomy(srcTaxoDir, map);
int[] ordinalMap = map.getMap();
DirectoryReader reader = DirectoryReader.open(srcIndexDir);
try {
List<AtomicReaderContext> leaves = reader.leaves();
int numReaders = leaves.size();
AtomicReader[] wrappedLeaves = new AtomicReader[numReaders];
for (int i = 0; i < numReaders; i++) {
wrappedLeaves[i] = new OrdinalMappingAtomicReader(leaves.get(i).reader(), ordinalMap, srcConfig);
}
destIndexWriter.addIndexes(new MultiReader(wrappedLeaves));
// commit changes to taxonomy and index respectively.
destTaxoWriter.commit();
destIndexWriter.commit();
} finally {
reader.close();
}
}
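For reference, a hedged usage sketch of the merge helper above (this mirrors the pattern of Lucene's own TaxonomyMergeUtils; the enclosing class and the source/destination variables are assumptions): create a MemoryOrdinalMap so source taxonomy ordinals can be remapped to destination ordinals, then hand everything to merge.
// Illustrative call site; srcIndexDir, srcTaxoDir, destIndexWriter and destTaxoWriter are assumed to exist.
DirectoryTaxonomyWriter.OrdinalMap ordinalMap = new DirectoryTaxonomyWriter.MemoryOrdinalMap();
merge(srcIndexDir, srcTaxoDir, ordinalMap, destIndexWriter, destTaxoWriter, new FacetsConfig());
// merge(...) commits both writers, so readers opened afterwards on the destination
// index and taxonomy will see the merged facet data.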
Example 5: getAllDims
import org.apache.lucene.facet.FacetsConfig; // import the required package/class
@Override
public List<FacetResult> getAllDims(int topN) throws IOException {
int ord = children[TaxonomyReader.ROOT_ORDINAL];
List<FacetResult> results = new ArrayList<>();
while (ord != TaxonomyReader.INVALID_ORDINAL) {
String dim = taxoReader.getPath(ord).components[0];
FacetsConfig.DimConfig dimConfig = config.getDimConfig(dim);
if (dimConfig.indexFieldName.equals(indexFieldName)) {
FacetResult result = getTopChildren(topN, dim);
if (result != null) {
results.add(result);
}
}
ord = siblings[ord];
}
// Sort by highest value, tie break by dim:
Collections.sort(results, BY_VALUE_THEN_DIM);
return results;
}
Example 6: testMixedTypesInSameIndexField
import org.apache.lucene.facet.FacetsConfig; // import the required package/class
public void testMixedTypesInSameIndexField() throws Exception {
Directory dir = newDirectory();
Directory taxoDir = newDirectory();
TaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir);
FacetsConfig config = new FacetsConfig();
RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
Document doc = new Document();
doc.add(new IntAssociationFacetField(14, "a", "x"));
doc.add(new FloatAssociationFacetField(55.0f, "b", "y"));
try {
writer.addDocument(config.build(taxoWriter, doc));
fail("did not hit expected exception");
} catch (IllegalArgumentException exc) {
// expected
}
IOUtils.close(writer, taxoWriter, dir, taxoDir);
}
Example 7: testNoHierarchy
import org.apache.lucene.facet.FacetsConfig; // import the required package/class
public void testNoHierarchy() throws Exception {
Directory dir = newDirectory();
Directory taxoDir = newDirectory();
TaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir);
FacetsConfig config = new FacetsConfig();
config.setHierarchical("a", true);
RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
Document doc = new Document();
doc.add(new IntAssociationFacetField(14, "a", "x"));
try {
writer.addDocument(config.build(taxoWriter, doc));
fail("did not hit expected exception");
} catch (IllegalArgumentException exc) {
// expected
}
IOUtils.close(writer, taxoWriter, dir, taxoDir);
}
Example 8: testRequireDimCount
import org.apache.lucene.facet.FacetsConfig; // import the required package/class
public void testRequireDimCount() throws Exception {
Directory dir = newDirectory();
Directory taxoDir = newDirectory();
TaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir);
FacetsConfig config = new FacetsConfig();
config.setRequireDimCount("a", true);
RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
Document doc = new Document();
doc.add(new IntAssociationFacetField(14, "a", "x"));
try {
writer.addDocument(config.build(taxoWriter, doc));
fail("did not hit expected exception");
} catch (IllegalArgumentException exc) {
// expected
}
IOUtils.close(writer, taxoWriter, dir, taxoDir);
}
Example 9: testReallyNoNormsForDrillDown
import org.apache.lucene.facet.FacetsConfig; // import the required package/class
public void testReallyNoNormsForDrillDown() throws Exception {
Directory dir = newDirectory();
Directory taxoDir = newDirectory();
IndexWriterConfig iwc = newIndexWriterConfig(new MockAnalyzer(random()));
iwc.setSimilarity(new PerFieldSimilarityWrapper() {
final Similarity sim = new DefaultSimilarity();
@Override
public Similarity get(String name) {
assertEquals("field", name);
return sim;
}
});
TaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir, IndexWriterConfig.OpenMode.CREATE);
RandomIndexWriter writer = new RandomIndexWriter(random(), dir, iwc);
FacetsConfig config = new FacetsConfig();
Document doc = new Document();
doc.add(newTextField("field", "text", Field.Store.NO));
doc.add(new FacetField("a", "path"));
writer.addDocument(config.build(taxoWriter, doc));
IOUtils.close(writer, taxoWriter, dir, taxoDir);
}
Example 10: testDetectHierarchicalField
import org.apache.lucene.facet.FacetsConfig; // import the required package/class
public void testDetectHierarchicalField() throws Exception {
Directory dir = newDirectory();
Directory taxoDir = newDirectory();
TaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir, IndexWriterConfig.OpenMode.CREATE);
RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
FacetsConfig config = new FacetsConfig();
Document doc = new Document();
doc.add(newTextField("field", "text", Field.Store.NO));
doc.add(new FacetField("a", "path", "other"));
try {
config.build(taxoWriter, doc);
fail("did not hit expected exception");
} catch (IllegalArgumentException iae) {
// expected
}
IOUtils.close(writer, taxoWriter, dir, taxoDir);
}
Example 11: testDetectMultiValuedField
import org.apache.lucene.facet.FacetsConfig; // import the required package/class
public void testDetectMultiValuedField() throws Exception {
Directory dir = newDirectory();
Directory taxoDir = newDirectory();
TaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir, IndexWriterConfig.OpenMode.CREATE);
RandomIndexWriter writer = new RandomIndexWriter(random(), dir);
FacetsConfig config = new FacetsConfig();
Document doc = new Document();
doc.add(newTextField("field", "text", Field.Store.NO));
doc.add(new FacetField("a", "path"));
doc.add(new FacetField("a", "path2"));
try {
config.build(taxoWriter, doc);
fail("did not hit expected exception");
} catch (IllegalArgumentException iae) {
// expected
}
IOUtils.close(writer, taxoWriter, dir, taxoDir);
}
Example 12: testChildCount
import org.apache.lucene.facet.FacetsConfig; // import the required package/class
public void testChildCount() throws Exception {
// LUCENE-4885: FacetResult.numValidDescendants was not set properly by FacetsAccumulator
Directory indexDir = newDirectory();
Directory taxoDir = newDirectory();
DirectoryTaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir);
IndexWriter iw = new IndexWriter(indexDir, newIndexWriterConfig(new MockAnalyzer(random())));
FacetsConfig config = new FacetsConfig();
for (int i = 0; i < 10; i++) {
Document doc = new Document();
doc.add(new FacetField("a", Integer.toString(i)));
iw.addDocument(config.build(taxoWriter, doc));
}
DirectoryReader r = DirectoryReader.open(iw, true);
DirectoryTaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoWriter);
FacetsCollector sfc = new FacetsCollector();
newSearcher(r).search(new MatchAllDocsQuery(), sfc);
Facets facets = getTaxonomyFacetCounts(taxoReader, config, sfc);
assertEquals(10, facets.getTopChildren(2, "a").childCount);
IOUtils.close(taxoWriter, iw, taxoReader, taxoDir, r, indexDir);
}
Example 13: indexTwoDocs
import org.apache.lucene.facet.FacetsConfig; // import the required package/class
private void indexTwoDocs(TaxonomyWriter taxoWriter, IndexWriter indexWriter, FacetsConfig config, boolean withContent) throws Exception {
for (int i = 0; i < 2; i++) {
Document doc = new Document();
if (withContent) {
doc.add(new StringField("f", "a", Field.Store.NO));
}
if (config != null) {
doc.add(new FacetField("A", Integer.toString(i)));
indexWriter.addDocument(config.build(taxoWriter, doc));
} else {
indexWriter.addDocument(doc);
}
}
indexWriter.commit();
}
Example 14: addFacets
import org.apache.lucene.facet.FacetsConfig; // import the required package/class
private static void addFacets(Document doc, FacetsConfig config, boolean updateTermExpectedCounts)
throws IOException {
List<FacetField> docCategories = randomCategories(random());
for (FacetField ff : docCategories) {
doc.add(ff);
String cp = ff.dim + "/" + ff.path[0];
allExpectedCounts.put(cp, allExpectedCounts.get(cp) + 1);
if (updateTermExpectedCounts) {
termExpectedCounts.put(cp, termExpectedCounts.get(cp) + 1);
}
}
// add 1 to each NO_PARENTS dimension
allExpectedCounts.put(CP_B, allExpectedCounts.get(CP_B) + 1);
allExpectedCounts.put(CP_C, allExpectedCounts.get(CP_C) + 1);
allExpectedCounts.put(CP_D, allExpectedCounts.get(CP_D) + 1);
if (updateTermExpectedCounts) {
termExpectedCounts.put(CP_B, termExpectedCounts.get(CP_B) + 1);
termExpectedCounts.put(CP_C, termExpectedCounts.get(CP_C) + 1);
termExpectedCounts.put(CP_D, termExpectedCounts.get(CP_D) + 1);
}
}