本文整理汇总了Java中org.apache.lucene.util.mutable.MutableValueInt类的典型用法代码示例。如果您正苦于以下问题:Java MutableValueInt类的具体用法?Java MutableValueInt怎么用?Java MutableValueInt使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。
MutableValueInt类属于org.apache.lucene.util.mutable包,在下文中一共展示了MutableValueInt类的8个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: put
import org.apache.lucene.util.mutable.MutableValueInt; //导入依赖的package包/类
@Override
Leaf<K, V> put(K key, int hash, int hashBits, V value, MutableValueInt newValue) {
assert hashBits <= 0 : hashBits;
int slot = -1;
for (int i = 0; i < keys.length; i++) {
if (key.equals(keys[i])) {
slot = i;
break;
}
}
final K[] keys2;
final V[] values2;
if (slot < 0) {
keys2 = appendElement(keys, key);
values2 = appendElement(values, value);
newValue.value = 1;
} else {
keys2 = replace(keys, slot, key);
values2 = replace(values, slot, value);
}
return new Leaf<>(keys2, values2);
}
示例2: putExisting
import org.apache.lucene.util.mutable.MutableValueInt; //导入依赖的package包/类
// Copy-on-write insert into a slot that is already occupied. The occupant may be
// a sub-node (recurse), the same key (replace), or a different key whose hash
// prefix collides at this depth (push both entries one level down). This node is
// never mutated; a fresh InnerNode is returned.
private InnerNode<K, V> putExisting(K key, int hash, int hashBits, int slot, V value, MutableValueInt newValue) {
// Copy both backing arrays up front so the returned node shares no mutable state.
final K[] keys2 = Arrays.copyOf(keys, keys.length);
final Object[] subNodes2 = Arrays.copyOf(subNodes, subNodes.length);
final Object previousValue = subNodes2[slot];
if (previousValue instanceof Node) {
// insert recursively
assert keys[slot] == null;
subNodes2[slot] = ((Node<K, V>) previousValue).put(key, hash, hashBits, value, newValue);
} else if (keys[slot].equals(key)) {
// replace the existing entry
subNodes2[slot] = value;
} else {
// hash collision
final K previousKey = keys[slot];
// Recompute the occupant's remaining hash bits at this depth; assumes
// TOTAL_HASH_BITS is the full hash width consumed so far — NOTE(review):
// constant declared outside this view, confirm against the class.
final int previousHash = previousKey.hashCode() >>> (TOTAL_HASH_BITS - hashBits);
Node<K, V> subNode = newSubNode(hashBits);
// Re-insert the old entry first, then the new one, into the fresh sub-node.
// Both are "new" inserts there, so newValue ends up 1 — matching the single
// net size increase caused by the new key.
subNode = subNode.put(previousKey, previousHash, hashBits, (V) previousValue, newValue);
subNode = subNode.put(key, hash, hashBits, value, newValue);
// Slot now holds a sub-node, so its key marker must be cleared.
keys2[slot] = null;
subNodes2[slot] = subNode;
}
return new InnerNode<>(mask, keys2, subNodes2);
}
示例3: getValueFiller
import org.apache.lucene.util.mutable.MutableValueInt; //导入依赖的package包/类
@Override
public ValueFiller getValueFiller() {
    return new ValueFiller() {
        // Single reusable holder: fillValue overwrites it per document so no
        // per-call allocation is needed.
        private final MutableValueInt current = new MutableValueInt();

        @Override
        public MutableValue getValue() {
            return current;
        }

        @Override
        public void fillValue(int doc) {
            current.exists = exists(doc);
            current.value = intVal(doc);
        }
    };
}
示例4: copyAndPut
import org.apache.lucene.util.mutable.MutableValueInt; //导入依赖的package包/类
/**
* Associate <code>key</code> with <code>value</code> and return a new copy
* of the hash table. The current hash table is not modified.
*/
/**
 * Associate <code>key</code> with <code>value</code> and return a new copy
 * of the hash table. The current hash table is not modified.
 */
public CopyOnWriteHashMap<K, V> copyAndPut(K key, V value) {
    if (key == null) {
        throw new IllegalArgumentException("null keys are not supported");
    }
    if (value == null) {
        throw new IllegalArgumentException("null values are not supported");
    }
    // sizeDelta is filled by the trie: 1 for a fresh key, 0 for a replacement.
    final MutableValueInt sizeDelta = new MutableValueInt();
    final InnerNode<K, V> updatedRoot = root.put(key, key.hashCode(), TOTAL_HASH_BITS, value, sizeDelta);
    return new CopyOnWriteHashMap<>(updatedRoot, size + sizeDelta.value);
}
示例5: copyAndPut
import org.apache.lucene.util.mutable.MutableValueInt; //导入依赖的package包/类
/**
* Associate <code>key</code> with <code>value</code> and return a new copy
* of the hash table. The current hash table is not modified.
*/
/**
 * Associate <code>key</code> with <code>value</code> and return a new copy
 * of the hash table. The current hash table is not modified.
 */
public CopyOnWriteHashMap<K, V> copyAndPut(K key, V value) {
    // Explicit guards; same IllegalArgumentException and messages that
    // Preconditions.checkArgument would produce.
    if (key == null) {
        throw new IllegalArgumentException("null keys are not supported");
    }
    if (value == null) {
        throw new IllegalArgumentException("null values are not supported");
    }
    // sizeDelta becomes 1 when a new key was inserted, 0 on replacement.
    final MutableValueInt sizeDelta = new MutableValueInt();
    final InnerNode<K, V> updatedRoot = root.put(key, key.hashCode(), TOTAL_HASH_BITS, value, sizeDelta);
    return new CopyOnWriteHashMap<>(updatedRoot, size + sizeDelta.value);
}
示例6: getResults
import org.apache.lucene.util.mutable.MutableValueInt; //导入依赖的package包/类
/**
 * Ranks the terms collected in <code>map</code> by TF*IDF, keeping only the
 * top <code>numResults</code> entries whose term frequency is at least
 * {@code minTermFreq}.
 *
 * @param fieldName  field against which IDF is computed
 * @param map        term (char[]) -> term-frequency counter
 * @param numResults maximum number of results to return
 * @return results ordered best-first
 * @throws RuntimeException wrapping any IOException from the IDF lookup
 */
private List<TermIDF> getResults(String fieldName,
        CharArrayMap<MutableValueInt> map, int numResults) {
    TFIDFPriorityQueue queue = new TFIDFPriorityQueue(numResults);
    IDFIndexCalc idfCalc = new IDFIndexCalc(searcher.getIndexReader());
    int tf = -1;
    double idf = -1.0;
    int minTf = minTermFreq;
    String text = null;
    //make more efficient
    // Term reusableTerm = new Term(fieldName, "");
    for (Map.Entry<Object, MutableValueInt> entry : map.entrySet()) {
        tf = entry.getValue().value;
        // Skip terms below the frequency floor before paying for an IDF lookup.
        if (tf < minTf)
            continue;
        // CharArrayMap keys are raw char[]; materialize a String for Term/TermIDF.
        text = new String((char[]) entry.getKey());
        // calculate idf for potential phrase
        try {
            idf = idfCalc.singleTermIDF(new Term(fieldName, text));
        } catch (IOException e) {
            // Preserve the original exception as the cause instead of
            // flattening it to a message string.
            throw new RuntimeException("Error trying to calculate IDF: " + e.getMessage(), e);
        }
        int estimatedDF = (int) Math.max(1, Math.round(idfCalc.unIDF(idf)));
        TermIDF r = new TermIDF(text, estimatedDF, tf, idf);
        queue.insertWithOverflow(r);
    }
    // The queue pops worst-first; prepending reverses into best-first order.
    List<TermIDF> results = new LinkedList<>();
    while (queue.size() > 0) {
        results.add(0, queue.pop());
    }
    return results;
}
示例7: getResults
import org.apache.lucene.util.mutable.MutableValueInt; //导入依赖的package包/类
/**
 * Merges the document-frequency ({@code df}) and term-frequency ({@code tf})
 * maps into a sorted list of {@link TermDFTF}, truncated to
 * {@code numResults} entries.
 *
 * @return sorted results, at most {@code numResults} long
 */
@Override
public List<TermDFTF> getResults() {
    List<TermDFTF> list = new ArrayList<>();
    for (Map.Entry<String, MutableValueInt> entry : df.entrySet()) {
        String key = entry.getKey();
        int docFreq = entry.getValue().value;
        // A term may appear in df without a tf entry; treat that as tf == 0.
        MutableValueInt mutTF = tf.get(key);
        int termFreq = (mutTF == null) ? 0 : mutTF.value;
        list.add(new TermDFTF(key, docFreq, termFreq));
    }
    Collections.sort(list);
    //if list is short enough, return now
    if (list.size() <= numResults) {
        return list;
    }
    // Copy only the top numResults entries (subList is a view, so snapshot it).
    return new ArrayList<>(list.subList(0, numResults));
}
示例8: getResults
import org.apache.lucene.util.mutable.MutableValueInt; //导入依赖的package包/类
/**
 * Ranks collected terms by TF*IDF, keeping the top {@code numResults}
 * entries whose term frequency is at least {@code minTermFreq}.
 *
 * <p>Can throw RuntimeException if there is an IOException while
 * calculating the IDFs; the IOException is attached as the cause.
 *
 * @return results ordered best-first
 */
public List<TermIDF> getResults() {
    TFIDFPriorityQueue queue = new TFIDFPriorityQueue(numResults);
    int tf = -1;
    double idf = -1.0;
    int minTf = minTermFreq;
    String text = "";
    // Reused across iterations to avoid allocating a Term per entry.
    Term reusableTerm = new Term(getFieldName(), "");
    for (Map.Entry<String, MutableValueInt> entry : tfs.entrySet()) {
        tf = entry.getValue().value;
        // Skip low-frequency terms before the (potentially costly) IDF lookup.
        if (tf < minTf)
            continue;
        text = entry.getKey();
        // calculate idf for potential phrase
        double[] stats;
        try {
            stats = idfCalc.multiTermIDF(text, reusableTerm);
        } catch (IOException e) {
            // Keep the original IOException as the cause rather than
            // discarding it behind a message string.
            throw new RuntimeException("Error trying to calculate IDF: " + e.getMessage(), e);
        }
        idf = stats[0];
        int estimatedDF = (int) Math.max(1, Math.round(idfCalc.unIDF(idf)));
        TermIDF r = new TermIDF(text, estimatedDF, tf, idf);
        queue.insertWithOverflow(r);
    }
    // The queue pops worst-first; prepending reverses into best-first order.
    List<TermIDF> results = new LinkedList<>();
    while (queue.size() > 0) {
        results.add(0, queue.pop());
    }
    return results;
}