This article collects typical usage examples of the Java method org.apache.lucene.facet.taxonomy.TaxonomyWriter.addCategory. If you are wondering what TaxonomyWriter.addCategory does in Java, how to use it, or where to find examples, the curated code samples below may help. You can also explore further usage examples for the containing class, org.apache.lucene.facet.taxonomy.TaxonomyWriter.
Below are 8 code examples of TaxonomyWriter.addCategory, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Java code examples.
Example 1: processAssocFacetFields
import org.apache.lucene.facet.taxonomy.TaxonomyWriter; // import the package/class the method depends on
private void processAssocFacetFields(TaxonomyWriter taxoWriter,
    Map<String,List<AssociationFacetField>> byField, Document doc)
    throws IOException {
  for (Map.Entry<String,List<AssociationFacetField>> ent : byField.entrySet()) {
    byte[] bytes = new byte[16];
    int upto = 0;
    String indexFieldName = ent.getKey();
    for (AssociationFacetField field : ent.getValue()) {
      // NOTE: we don't add parents for associations
      checkTaxoWriter(taxoWriter);
      FacetLabel label = new FacetLabel(field.dim, field.path);
      int ordinal = taxoWriter.addCategory(label);
      if (upto + 4 > bytes.length) {
        bytes = ArrayUtil.grow(bytes, upto + 4);
      }
      // big-endian:
      bytes[upto++] = (byte) (ordinal >> 24);
      bytes[upto++] = (byte) (ordinal >> 16);
      bytes[upto++] = (byte) (ordinal >> 8);
      bytes[upto++] = (byte) ordinal;
      if (upto + field.assoc.length > bytes.length) {
        bytes = ArrayUtil.grow(bytes, upto + field.assoc.length);
      }
      System.arraycopy(field.assoc.bytes, field.assoc.offset, bytes, upto, field.assoc.length);
      upto += field.assoc.length;
      // Drill down:
      for (int i = 1; i <= label.length; i++) {
        doc.add(new StringField(indexFieldName, pathToString(label.components, i), Field.Store.NO));
      }
    }
    doc.add(new BinaryDocValuesField(indexFieldName, new BytesRef(bytes, 0, upto)));
  }
}
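This method appears to be the private helper behind FacetsConfig.build that handles association facets, so applications never call it directly. A minimal caller-side sketch, assuming the standard Lucene facet module; the dimension name and value are illustrative:

import java.io.IOException;
import org.apache.lucene.document.Document;
import org.apache.lucene.facet.FacetsConfig;
import org.apache.lucene.facet.taxonomy.IntAssociationFacetField;
import org.apache.lucene.facet.taxonomy.directory.DirectoryTaxonomyWriter;
import org.apache.lucene.store.Directory;

private Document indexWithAssociation(Directory taxoDir) throws IOException {
  DirectoryTaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir);
  FacetsConfig config = new FacetsConfig();
  config.setMultiValued("tags", true); // optional: needed only if the dim appears more than once per doc
  Document doc = new Document();
  // Associate the int value 3 with the category tags/lucene:
  doc.add(new IntAssociationFacetField(3, "tags", "lucene"));
  // build() routes association fields through processAssocFacetFields,
  // which calls taxoWriter.addCategory for each label:
  Document indexedDoc = config.build(taxoWriter, doc);
  taxoWriter.commit();
  return indexedDoc;
}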
Example 2: processAssocFacetFields
import org.apache.lucene.facet.taxonomy.TaxonomyWriter; // import the package/class the method depends on
private void processAssocFacetFields(TaxonomyWriter taxoWriter,
    Map<String,List<AssociationFacetField>> byField, Document doc)
    throws IOException {
  for (Map.Entry<String,List<AssociationFacetField>> ent : byField.entrySet()) {
    byte[] bytes = new byte[16];
    int upto = 0;
    String indexFieldName = ent.getKey();
    for (AssociationFacetField field : ent.getValue()) {
      // NOTE: we don't add parents for associations
      checkTaxoWriter(taxoWriter);
      int ordinal = taxoWriter.addCategory(new FacetLabel(field.dim, field.path));
      if (upto + 4 > bytes.length) {
        bytes = ArrayUtil.grow(bytes, upto + 4);
      }
      // big-endian:
      bytes[upto++] = (byte) (ordinal >> 24);
      bytes[upto++] = (byte) (ordinal >> 16);
      bytes[upto++] = (byte) (ordinal >> 8);
      bytes[upto++] = (byte) ordinal;
      if (upto + field.assoc.length > bytes.length) {
        bytes = ArrayUtil.grow(bytes, upto + field.assoc.length);
      }
      System.arraycopy(field.assoc.bytes, field.assoc.offset, bytes, upto, field.assoc.length);
      upto += field.assoc.length;
      // Drill down:
      FacetLabel cp = new FacetLabel(field.dim, field.path);
      for (int i = 1; i <= cp.length; i++) {
        doc.add(new StringField(indexFieldName, pathToString(cp.components, i), Field.Store.NO));
      }
    }
    doc.add(new BinaryDocValuesField(indexFieldName, new BytesRef(bytes, 0, upto)));
  }
}
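Both variants of this method write each ordinal big-endian immediately ahead of its association payload, so a consumer walks the BinaryDocValues bytes in the same order. A minimal decode sketch for one pair, assuming a 4-byte int association (offsets illustrative; a real reader loops until the bytes are exhausted):

int ordinal = ((bytes[0] & 0xFF) << 24)
            | ((bytes[1] & 0xFF) << 16)
            | ((bytes[2] & 0xFF) << 8)
            |  (bytes[3] & 0xFF);
int assocValue = ((bytes[4] & 0xFF) << 24) // payload starts right after the ordinal
               | ((bytes[5] & 0xFF) << 16)
               | ((bytes[6] & 0xFF) << 8)
               |  (bytes[7] & 0xFF);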
Example 3: processFacetFields
import org.apache.lucene.facet.taxonomy.TaxonomyWriter; // import the package/class the method depends on
private void processFacetFields(TaxonomyWriter taxoWriter, Map<String,List<FacetField>> byField, Document doc) throws IOException {
  for (Map.Entry<String,List<FacetField>> ent : byField.entrySet()) {
    String indexFieldName = ent.getKey();
    //System.out.println("  indexFieldName=" + indexFieldName + " fields=" + ent.getValue());
    IntsRefBuilder ordinals = new IntsRefBuilder();
    for (FacetField facetField : ent.getValue()) {
      FacetsConfig.DimConfig ft = getDimConfig(facetField.dim);
      if (facetField.path.length > 1 && ft.hierarchical == false) {
        throw new IllegalArgumentException("dimension \"" + facetField.dim + "\" is not hierarchical yet has " + facetField.path.length + " components");
      }
      FacetLabel cp = new FacetLabel(facetField.dim, facetField.path);
      checkTaxoWriter(taxoWriter);
      int ordinal = taxoWriter.addCategory(cp);
      ordinals.append(ordinal);
      //System.out.println("ords[" + (ordinals.length-1) + "]=" + ordinal);
      //System.out.println("  add cp=" + cp);
      if (ft.multiValued && (ft.hierarchical || ft.requireDimCount)) {
        //System.out.println("  add parents");
        // Add all parents too:
        int parent = taxoWriter.getParent(ordinal);
        while (parent > 0) {
          ordinals.append(parent);
          parent = taxoWriter.getParent(parent);
        }
        if (ft.requireDimCount == false) {
          // Remove last (dimension) ord:
          ordinals.setLength(ordinals.length() - 1);
        }
      }
      // Drill down:
      for (int i = 1; i <= cp.length; i++) {
        doc.add(new StringField(indexFieldName, pathToString(cp.components, i), Field.Store.NO));
      }
    }
    // Facet counts:
    // DocValues are considered stored fields:
    doc.add(new BinaryDocValuesField(indexFieldName, dedupAndEncode(ordinals.get())));
  }
}
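The branches above are driven entirely by the per-dimension DimConfig flags. A configuration sketch using the public FacetsConfig setters (the dimension name is illustrative):

FacetsConfig config = new FacetsConfig();
config.setHierarchical("path", true);    // permit multi-component facet paths
config.setMultiValued("path", true);     // enables the add-all-parents loop above
config.setRequireDimCount("path", true); // keeps the dimension's own ordinal for counting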
Example 4: doTestReadRecreatedTaxonomy
import org.apache.lucene.facet.taxonomy.TaxonomyWriter; // import the package/class the method depends on
private void doTestReadRecreatedTaxonomy(Random random, boolean closeReader) throws Exception {
  Directory dir = null;
  TaxonomyWriter tw = null;
  TaxonomyReader tr = null;
  // prepare a few categories
  int n = 10;
  FacetLabel[] cp = new FacetLabel[n];
  for (int i = 0; i < n; i++) {
    cp[i] = new FacetLabel("a", Integer.toString(i));
  }
  try {
    dir = newDirectory();
    tw = new DirectoryTaxonomyWriter(dir);
    tw.addCategory(new FacetLabel("a"));
    tw.close();
    tr = new DirectoryTaxonomyReader(dir);
    int baseNumCategories = tr.getSize();
    for (int i = 0; i < n; i++) {
      int k = random.nextInt(n);
      tw = new DirectoryTaxonomyWriter(dir, OpenMode.CREATE);
      for (int j = 0; j <= k; j++) {
        tw.addCategory(cp[j]);
      }
      tw.close();
      if (closeReader) {
        tr.close();
        tr = new DirectoryTaxonomyReader(dir);
      } else {
        TaxonomyReader newtr = TaxonomyReader.openIfChanged(tr);
        assertNotNull(newtr);
        tr.close();
        tr = newtr;
      }
      assertEquals("Wrong #categories in taxonomy (i=" + i + ", k=" + k + ")", baseNumCategories + 1 + k, tr.getSize());
    }
  } finally {
    IOUtils.close(tr, tw, dir);
  }
}
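The expected size baseNumCategories + 1 + k works out because addCategory implicitly creates any missing parent categories and is idempotent for labels that already exist. A minimal sketch of both properties (assumes a LuceneTestCase-style newDirectory() helper like the one above):

TaxonomyWriter tw = new DirectoryTaxonomyWriter(newDirectory());
int ord1 = tw.addCategory(new FacetLabel("a", "0")); // also creates the parent "a" if missing
int ord2 = tw.addCategory(new FacetLabel("a", "0")); // existing label: the same ordinal comes back
assert ord1 == ord2;
tw.close();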
Example 5: testGrowingTaxonomy
import org.apache.lucene.facet.taxonomy.TaxonomyWriter; // import the package/class the method depends on
/**
 * This test addresses a bug in a previous version: if a TFC cache was
 * written to disk, the taxonomy then grew (while the index did not change),
 * and the TFC cache was re-read from disk, an exception was thrown, because
 * the integers are read off disk according to the taxonomy size, which had
 * changed.
 */
@Test
public void testGrowingTaxonomy() throws Exception {
  Directory indexDir = newDirectory();
  Directory taxoDir = newDirectory();
  // Create our index/taxonomy writers
  IndexWriter indexWriter = new IndexWriter(indexDir, newIndexWriterConfig(TEST_VERSION_CURRENT, null));
  TaxonomyWriter taxoWriter = new DirectoryTaxonomyWriter(taxoDir);
  FacetIndexingParams iParams = new FacetIndexingParams() {
    @Override
    public int getPartitionSize() {
      return 2;
    }
  };
  // Add a facet to the index
  addFacets(iParams, indexWriter, taxoWriter, "a", "b");
  // Commit changes
  indexWriter.commit();
  taxoWriter.commit();
  DirectoryReader indexReader = DirectoryReader.open(indexDir);
  TaxonomyReader taxoReader = new DirectoryTaxonomyReader(taxoDir);
  // Create TFC and write cache to disk
  File outputFile = _TestUtil.createTempFile("test", "tmp", TEMP_DIR);
  TFC.store(outputFile, indexReader, taxoReader, iParams);
  // Make the taxonomy grow without touching the index
  for (int i = 0; i < 10; i++) {
    taxoWriter.addCategory(new CategoryPath("foo", Integer.toString(i)));
  }
  taxoWriter.commit();
  TaxonomyReader newTaxoReader = TaxonomyReader.openIfChanged(taxoReader);
  assertNotNull(newTaxoReader);
  taxoReader.close();
  taxoReader = newTaxoReader;
  initCache();
  // With the bug, this next call would throw an exception
  TFC.load(outputFile, indexReader, taxoReader, iParams);
  TotalFacetCounts totalCounts = TFC.getTotalCounts(indexReader, taxoReader, iParams);
  assertReadFromDisc(totalCounts, 0, "after reading from disk.");
  outputFile.delete();
  IOUtils.close(indexWriter, taxoWriter, indexReader, taxoReader);
  IOUtils.close(indexDir, taxoDir);
}
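The core idiom exercised here is growing the taxonomy and refreshing the reader to observe the new size, which is exactly the mismatch that invalidated the on-disk cache. A minimal sketch against the same Lucene 4.x CategoryPath API (label names illustrative):

int before = taxoReader.getSize();
taxoWriter.addCategory(new CategoryPath("foo", "extra")); // parents are created only once
taxoWriter.commit();
TaxonomyReader refreshed = TaxonomyReader.openIfChanged(taxoReader);
// refreshed is non-null and refreshed.getSize() > before: this size change
// is what tripped the old TFC disk-cache reading code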
Example 6: doTestReadRecreatedTaxonomy
import org.apache.lucene.facet.taxonomy.TaxonomyWriter; // import the package/class the method depends on
private void doTestReadRecreatedTaxonomy(Random random, boolean closeReader) throws Exception {
  Directory dir = null;
  TaxonomyWriter tw = null;
  TaxonomyReader tr = null;
  // prepare a few categories
  int n = 10;
  CategoryPath[] cp = new CategoryPath[n];
  for (int i = 0; i < n; i++) {
    cp[i] = new CategoryPath("a", Integer.toString(i));
  }
  try {
    dir = newDirectory();
    tw = new DirectoryTaxonomyWriter(dir);
    tw.addCategory(new CategoryPath("a"));
    tw.close();
    tr = new DirectoryTaxonomyReader(dir);
    int baseNumCategories = tr.getSize();
    for (int i = 0; i < n; i++) {
      int k = random.nextInt(n);
      tw = new DirectoryTaxonomyWriter(dir, OpenMode.CREATE);
      for (int j = 0; j <= k; j++) {
        tw.addCategory(cp[j]);
      }
      tw.close();
      if (closeReader) {
        tr.close();
        tr = new DirectoryTaxonomyReader(dir);
      } else {
        TaxonomyReader newtr = TaxonomyReader.openIfChanged(tr);
        assertNotNull(newtr);
        tr.close();
        tr = newtr;
      }
      assertEquals("Wrong #categories in taxonomy (i=" + i + ", k=" + k + ")", baseNumCategories + 1 + k, tr.getSize());
    }
  } finally {
    IOUtils.close(tr, tw, dir);
  }
}
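This is the same test as Example 4, written against the older facet API in which FacetLabel was still named CategoryPath (the rename came, to the best of our knowledge, with the Lucene 4.7 facet overhaul). The addCategory calls line up one-to-one:

tw.addCategory(new CategoryPath("a", "0")); // older facet API (pre-rename)
tw.addCategory(new FacetLabel("a", "0"));   // newer facet API (post-rename)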
Example 7: processFacetFields
import org.apache.lucene.facet.taxonomy.TaxonomyWriter; // import the package/class the method depends on
private void processFacetFields(TaxonomyWriter taxoWriter, Map<String,List<FacetField>> byField, Document doc) throws IOException {
  for (Map.Entry<String,List<FacetField>> ent : byField.entrySet()) {
    String indexFieldName = ent.getKey();
    //System.out.println("  indexFieldName=" + indexFieldName + " fields=" + ent.getValue());
    IntsRef ordinals = new IntsRef(32);
    for (FacetField facetField : ent.getValue()) {
      FacetsConfig.DimConfig ft = getDimConfig(facetField.dim);
      if (facetField.path.length > 1 && ft.hierarchical == false) {
        throw new IllegalArgumentException("dimension \"" + facetField.dim + "\" is not hierarchical yet has " + facetField.path.length + " components");
      }
      FacetLabel cp = new FacetLabel(facetField.dim, facetField.path);
      checkTaxoWriter(taxoWriter);
      int ordinal = taxoWriter.addCategory(cp);
      if (ordinals.length == ordinals.ints.length) {
        ordinals.grow(ordinals.length + 1);
      }
      ordinals.ints[ordinals.length++] = ordinal;
      //System.out.println("ords[" + (ordinals.length-1) + "]=" + ordinal);
      //System.out.println("  add cp=" + cp);
      if (ft.multiValued && (ft.hierarchical || ft.requireDimCount)) {
        //System.out.println("  add parents");
        // Add all parents too:
        int parent = taxoWriter.getParent(ordinal);
        while (parent > 0) {
          if (ordinals.ints.length == ordinals.length) {
            ordinals.grow(ordinals.length + 1);
          }
          ordinals.ints[ordinals.length++] = parent;
          parent = taxoWriter.getParent(parent);
        }
        if (ft.requireDimCount == false) {
          // Remove last (dimension) ord:
          ordinals.length--;
        }
      }
      // Drill down:
      for (int i = 1; i <= cp.length; i++) {
        doc.add(new StringField(indexFieldName, pathToString(cp.components, i), Field.Store.NO));
      }
    }
    // Facet counts:
    // DocValues are considered stored fields:
    doc.add(new BinaryDocValuesField(indexFieldName, dedupAndEncode(ordinals)));
  }
}
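This is an older revision of Example 3: the grow-and-append bookkeeping done by hand on the IntsRef here is what IntsRefBuilder encapsulates in the newer code. The equivalent modern form, for comparison:

IntsRefBuilder ordinals = new IntsRefBuilder();
ordinals.append(ordinal); // grows the backing int[] automatically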
Example 8: addFacetCategories
import org.apache.lucene.facet.taxonomy.TaxonomyWriter; // import the package/class the method depends on
private static void addFacetCategories(final TaxonomyWriter taxonomyWriter) throws IOException {
  for (ObjectType objectType : ObjectType.values()) {
    final FacetLabel facetLabel = new FacetLabel("object-type", objectType.getName());
    taxonomyWriter.addCategory(facetLabel);
  }
}
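This helper pre-registers one "object-type" category per enum constant, a common way to ensure all ordinals exist before any document is indexed. A caller-side sketch (assumes the application's ObjectType enum from above and a taxonomy Directory; commit() persists the new labels):

private void registerCategories(Directory taxoDir) throws IOException {
  TaxonomyWriter taxonomyWriter = new DirectoryTaxonomyWriter(taxoDir);
  addFacetCategories(taxonomyWriter);
  taxonomyWriter.commit(); // make the categories visible to new TaxonomyReaders
  taxonomyWriter.close();
}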