This page collects typical usage examples of the Java class com.carrotsearch.hppc.ObjectIntHashMap. If you are wondering what ObjectIntHashMap is for, how to use it, or what real-world code that uses it looks like, the curated class examples below may help.
The ObjectIntHashMap class belongs to the com.carrotsearch.hppc package. The sections below show 15 code examples of the class, sorted by popularity by default.
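Before the examples, here is a minimal sketch of the most common ObjectIntHashMap operations (primitive int values without boxing, addTo/putOrAdd for counting, cursor-based iteration). It is not taken from any of the projects below; the keys and counts are invented for illustration.

import com.carrotsearch.hppc.ObjectIntHashMap;
import com.carrotsearch.hppc.cursors.ObjectIntCursor;

ObjectIntHashMap<String> counts = new ObjectIntHashMap<>();
counts.put("apple", 3);                       // store a primitive int value, no boxing
counts.addTo("apple", 2);                     // increment in place; inserts the delta if the key is absent
counts.putOrAdd("pear", 1, 1);                // insert 1 if absent, otherwise add 1 to the existing value
int missing = counts.getOrDefault("kiwi", 0); // absent keys fall back to the supplied default
for (ObjectIntCursor<String> c : counts) {
    System.out.println(c.key + " -> " + c.value); // cursor iteration avoids per-entry allocation
}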
Example 1: assignLabels
import com.carrotsearch.hppc.ObjectIntHashMap; // import the required package/class
static void assignLabels(AnnotationProvider annotationProvider,
                         List<DefaultDomain> domains) {
    domains.stream()
           .forEach(domain -> {
               ObjectIntMap<String> wordCounts = new ObjectIntHashMap<>();
               domain.forEachAttribute(attributeIndex -> {
                   String label = annotationProvider.getAttributeLabel(attributeIndex);
                   if (label == null) {
                       return;
                   }
                   countWords(wordCounts, label);
               });
               List<String> topWords = computeTopWords(wordCounts);
               domain.name = String.join(" ", topWords);
           });
}
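The countWords helper called above is not shown on this page. A hypothetical implementation, assuming whitespace-separated labels, could accumulate counts with addTo:

// Hypothetical helper (not part of the original source): split the label on whitespace
// and count each word; addTo inserts-or-increments in a single call.
static void countWords(ObjectIntMap<String> wordCounts, String label) {
    for (String word : label.toLowerCase().split("\\s+")) {
        if (!word.isEmpty()) {
            wordCounts.addTo(word, 1);
        }
    }
}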
Example 2: readObject
import com.carrotsearch.hppc.ObjectIntHashMap; // import the required package/class
private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
    lock = new ReentrantReadWriteLock();
    lock.writeLock().lock();
    try {
        int version = in.readInt();
        int size = in.readInt();
        entries = new ArrayList(size);
        map = new ObjectIntHashMap(size);
        for (int i = 0; i < size; i++) {
            Object o = in.readObject();
            map.put(o, i);
            entries.add(o);
        }
        growthStopped = in.readBoolean();
        entryClass = (Class) in.readObject();
        if (version > 0) { // instance id added in version 1
            instanceId = (VMID) in.readObject();
        }
    } finally {
        lock.writeLock().unlock();
    }
}
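The matching writeObject is not shown here; assuming it mirrors the read order above (version, size, entries, growthStopped, entryClass, instanceId), it would look roughly like this sketch:

// Assumed counterpart, sketched from the read order above; the version constant is a guess.
private void writeObject(ObjectOutputStream out) throws IOException {
    lock.readLock().lock();
    try {
        out.writeInt(1);                  // serialization version
        out.writeInt(entries.size());
        for (Object o : entries) {
            out.writeObject(o);
        }
        out.writeBoolean(growthStopped);
        out.writeObject(entryClass);
        out.writeObject(instanceId);      // present since version 1
    } finally {
        lock.readLock().unlock();
    }
}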
Example 3: ensureNoNullKeys
import com.carrotsearch.hppc.ObjectIntHashMap; // import the required package/class
public static <V> ObjectIntHashMap<V> ensureNoNullKeys(int capacity, float loadFactor) {
    return new ObjectIntHashMap<V>(capacity, loadFactor) {
        @Override
        public int put(V key, int value) {
            if (key == null) {
                throw new IllegalArgumentException("Map key must not be null");
            }
            return super.put(key, value);
        }
    };
}
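For illustration, a call site for the factory above might look like the following; a null key fails fast instead of being silently accepted:

// Usage sketch (capacity and load factor chosen arbitrarily).
ObjectIntHashMap<String> safeMap = ensureNoNullKeys(16, 0.75f);
safeMap.put("ok", 1);    // accepted
safeMap.put(null, 2);    // throws IllegalArgumentException("Map key must not be null")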
Example 4: OsStats
import com.carrotsearch.hppc.ObjectIntHashMap; // import the required package/class
/**
 * Build the stats from information about each node.
 */
private OsStats(List<NodeInfo> nodeInfos, List<NodeStats> nodeStatsList) {
    this.names = new ObjectIntHashMap<>();
    int availableProcessors = 0;
    int allocatedProcessors = 0;
    for (NodeInfo nodeInfo : nodeInfos) {
        availableProcessors += nodeInfo.getOs().getAvailableProcessors();
        allocatedProcessors += nodeInfo.getOs().getAllocatedProcessors();
        if (nodeInfo.getOs().getName() != null) {
            names.addTo(nodeInfo.getOs().getName(), 1);
        }
    }
    this.availableProcessors = availableProcessors;
    this.allocatedProcessors = allocatedProcessors;
    long totalMemory = 0;
    long freeMemory = 0;
    for (NodeStats nodeStats : nodeStatsList) {
        if (nodeStats.getOs() != null) {
            long total = nodeStats.getOs().getMem().getTotal().getBytes();
            if (total > 0) {
                totalMemory += total;
            }
            long free = nodeStats.getOs().getMem().getFree().getBytes();
            if (free > 0) {
                freeMemory += free;
            }
        }
    }
    this.mem = new org.elasticsearch.monitor.os.OsStats.Mem(totalMemory, freeMemory);
}
Example 5: JvmStats
import com.carrotsearch.hppc.ObjectIntHashMap; // import the required package/class
/**
 * Build from lists of information about each node.
 */
private JvmStats(List<NodeInfo> nodeInfos, List<NodeStats> nodeStatsList) {
    this.versions = new ObjectIntHashMap<>();
    long threads = 0;
    long maxUptime = 0;
    long heapMax = 0;
    long heapUsed = 0;
    for (NodeInfo nodeInfo : nodeInfos) {
        versions.addTo(new JvmVersion(nodeInfo.getJvm()), 1);
    }
    for (NodeStats nodeStats : nodeStatsList) {
        org.elasticsearch.monitor.jvm.JvmStats js = nodeStats.getJvm();
        if (js == null) {
            continue;
        }
        if (js.getThreads() != null) {
            threads += js.getThreads().getCount();
        }
        maxUptime = Math.max(maxUptime, js.getUptime().millis());
        if (js.getMem() != null) {
            heapUsed += js.getMem().getHeapUsed().getBytes();
            heapMax += js.getMem().getHeapMax().getBytes();
        }
    }
    this.threads = threads;
    this.maxUptime = maxUptime;
    this.heapUsed = heapUsed;
    this.heapMax = heapMax;
}
Example 6: createString
import com.carrotsearch.hppc.ObjectIntHashMap; // import the required package/class
private String createString(String[] tokens, Map<String, List<BytesRef>> payloads, int encoding, char delimiter) {
    String resultString = "";
    ObjectIntHashMap<String> payloadCounter = new ObjectIntHashMap<>();
    for (String token : tokens) {
        if (!payloadCounter.containsKey(token)) {
            payloadCounter.putIfAbsent(token, 0);
        } else {
            payloadCounter.put(token, payloadCounter.get(token) + 1);
        }
        resultString = resultString + token;
        BytesRef payload = payloads.get(token).get(payloadCounter.get(token));
        if (payload.length > 0) {
            resultString = resultString + delimiter;
            switch (encoding) {
                case 0: {
                    resultString = resultString + Float.toString(PayloadHelper.decodeFloat(payload.bytes, payload.offset));
                    break;
                }
                case 1: {
                    resultString = resultString + Integer.toString(PayloadHelper.decodeInt(payload.bytes, payload.offset));
                    break;
                }
                case 2: {
                    resultString = resultString + payload.utf8ToString();
                    break;
                }
                default: {
                    throw new ElasticsearchException("unsupported encoding type");
                }
            }
        }
        resultString = resultString + " ";
    }
    return resultString;
}
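The containsKey/putIfAbsent/put sequence above just maintains a running per-token index. For comparison (not part of the original test code), the same bookkeeping can be expressed with a single putOrAdd call, which inserts 0 for an unseen token and increments existing entries:

// Equivalent sketch: putOrAdd returns the value after the update,
// i.e. 0 for the first occurrence of a token, 1 for the second, and so on.
int payloadIndex = payloadCounter.putOrAdd(token, 0, 1);
BytesRef payload = payloads.get(token).get(payloadIndex);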
Example 7: JvmStats
import com.carrotsearch.hppc.ObjectIntHashMap; // import the required package/class
JvmStats() {
    versions = new ObjectIntHashMap<>();
    threads = 0;
    maxUptime = 0;
    heapMax = 0;
    heapUsed = 0;
}
Example 8: readFrom
import com.carrotsearch.hppc.ObjectIntHashMap; // import the required package/class
@Override
public void readFrom(StreamInput in) throws IOException {
    int size = in.readVInt();
    versions = new ObjectIntHashMap<>(size);
    for (; size > 0; size--) {
        versions.addTo(JvmVersion.readJvmVersion(in), in.readVInt());
    }
    threads = in.readVLong();
    maxUptime = in.readVLong();
    heapUsed = in.readVLong();
    heapMax = in.readVLong();
}
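The corresponding writeTo is not included on this page; assuming it mirrors the read order above, a sketch would be:

// Assumed counterpart to readFrom above: size, then each version/count pair, then the counters.
@Override
public void writeTo(StreamOutput out) throws IOException {
    out.writeVInt(versions.size());
    for (ObjectIntCursor<JvmVersion> v : versions) {
        v.key.writeTo(out);              // assumes JvmVersion is itself writable
        out.writeVInt(v.value);
    }
    out.writeVLong(threads);
    out.writeVLong(maxUptime);
    out.writeVLong(heapUsed);
    out.writeVLong(heapMax);
}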
Example 9: initVarMap
import com.carrotsearch.hppc.ObjectIntHashMap; // import the required package/class
/**
 * creates a mapping between concept pairs and indices used in the ILP
 *
 * @param predictions
 */
private ObjectIntMap<CPair> initVarMap(ObjectDoubleMap<CPair> predictions) {
    ObjectIntMap<CPair> varMap = new ObjectIntHashMap<CPair>();
    int x = 0;
    for (ObjectCursor<CPair> p : predictions.keys()) {
        varMap.put(p.value, x);
        x++;
    }
    return varMap;
}
Example 10: SubgraphILPFast
import com.carrotsearch.hppc.ObjectIntHashMap; // import the required package/class
public SubgraphILPFast(List<Concept> concepts, List<Proposition> propositions, int maxSize, int maxTime,
        Set<Concept> goldConcepts, double penalty) {
    super(concepts, propositions, maxSize);
    if (maxTime > 0)
        this.maxTime = maxTime;
    this.goldConcepts = goldConcepts;
    this.penalty = penalty;
    if (goldConcepts != null)
        this.withPenalty = true;
    long start = System.currentTimeMillis();
    Logger.getGlobal().log(Level.INFO, "Creating ILP");
    this.conceptIds = new ObjectIntHashMap<Concept>(concepts.size());
    for (int i = 0; i < this.concepts.size(); i++)
        this.conceptIds.put(this.concepts.get(i), i);
    Set<CPair> edges = new HashSet<CPair>();
    for (Proposition p : this.propositions) {
        edges.add(new CPair(p.sourceConcept, p.targetConcept));
        edges.add(new CPair(p.targetConcept, p.sourceConcept));
    }
    this.edges = new ArrayList<CPair>(edges);
    this.createProblem();
    double duration = (System.currentTimeMillis() - start) / 1000.0;
    Logger.getGlobal().log(Level.INFO, "- done: " + duration);
}
Example 11: SubgraphILP
import com.carrotsearch.hppc.ObjectIntHashMap; // import the required package/class
public SubgraphILP(List<Concept> concepts, List<Proposition> propositions, int maxSize) {
    super(concepts, propositions, maxSize);
    long start = System.currentTimeMillis();
    Logger.getGlobal().log(Level.INFO, "Creating ILP");
    this.conceptIds = new ObjectIntHashMap<Concept>(concepts.size());
    for (int i = 0; i < this.concepts.size(); i++)
        this.conceptIds.put(this.concepts.get(i), i);
    this.relations = this.propositions.stream().map(p -> new CPair(p.sourceConcept, p.targetConcept))
            .collect(Collectors.toSet());
    this.edges = new CPair[this.concepts.size() * this.concepts.size()];
    int id = 0;
    for (Concept c1 : this.concepts) {
        this.edges[id++] = new CPair(null, c1);
        for (Concept c2 : this.concepts) {
            if (c1 != c2) {
                this.edges[id++] = new CPair(c1, c2);
            }
        }
    }
    this.createProblem();
    double duration = (System.currentTimeMillis() - start) / 1000.0;
    Logger.getGlobal().log(Level.INFO, "- done: " + duration);
}
Example 12: calculateInformationGain
import com.carrotsearch.hppc.ObjectIntHashMap; // import the required package/class
protected static double calculateInformationGain(
        ObjectIntHashMap<Double> cIn, ObjectIntHashMap<Double> cOut,
        double class_entropy,
        double total_c_in,
        double total) {
    double total_c_out = (total - total_c_in);
    return class_entropy
            - total_c_in / total * entropy(cIn, total_c_in)
            - total_c_out / total * entropy(cOut, total_c_out);
}
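The entropy helper used above is not shown. A hypothetical implementation over a class-count histogram stored in an ObjectIntHashMap would compute the usual Shannon entropy:

// Hypothetical helper: Shannon entropy (base 2) of the class counts in 'counts',
// where 'total' is the sum of those counts; empty splits contribute zero.
protected static double entropy(ObjectIntHashMap<Double> counts, double total) {
    if (total <= 0) {
        return 0;
    }
    double entropy = 0;
    for (ObjectIntCursor<Double> c : counts) {
        if (c.value > 0) {
            double p = c.value / total;
            entropy -= p * (Math.log(p) / Math.log(2));
        }
    }
    return entropy;
}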
Example 13: moveElement
import com.carrotsearch.hppc.ObjectIntHashMap; // import the required package/class
protected int moveElement(
        List<ValueLabel> element,
        ObjectIntHashMap<Double> cIn, ObjectIntHashMap<Double> cOut,
        int pos) {
    cIn.putOrAdd(element.get(pos).label, 1, 1);
    cOut.putOrAdd(element.get(pos).label, -1, -1);
    return 1;
}
Example 14: checkIdCoverage
import com.carrotsearch.hppc.ObjectIntHashMap; // import the required package/class
void checkIdCoverage(File file) throws IOException {
    CyNetworkView view = session.getNetworkView();
    CyNetwork network = view.getModel();
    CyTable nodeTable = network.getDefaultNodeTable();
    List<String> names = SafeUtil.getStringColumnNames(nodeTable)
                                 .collect(Collectors.toList());
    CyNodeTableVisitor visitor = new CyNodeTableVisitor(nodeTable, names);
    String path = file.getPath();
    AnalyzeAnnotationConsumer consumer = new AnalyzeAnnotationConsumer() {
        @Override
        public void accept(IdMappingResult result) {
            idMappingResult = result;
            IntIntMap coverage = result.coverage;
            int topIndex = Util.getTopKey(coverage, -1);
            columnCoverage = new ObjectIntHashMap<>();
            coverage.forEach((Consumer<? super IntIntCursor>) c -> columnCoverage.addTo(names.get(c.key), c.value));
            nodeIds.setSelectedIndex(topIndex);
            try {
                String canonicalPath = file.getCanonicalPath();
                lastAnnotationPath = canonicalPath;
                notifyListeners(file);
            } catch (IOException e) {
                fail(e, "Unexpected error");
            }
        }
    };
    SimpleTaskFactory taskFactory = new SimpleTaskFactory(() -> new AnalyzeAnnotationTask(path, visitor, consumer));
    taskManager.execute(taskFactory.createTaskIterator());
}
Example 15: check
import com.carrotsearch.hppc.ObjectIntHashMap; // import the required package/class
@SuppressWarnings("PMD.DataflowAnomalyAnalysis")
public void check() {
    int color = DEFAULT_COLOR;
    ObjectIntMap<AbstractStage> colors = new ObjectIntHashMap<AbstractStage>();
    ThreadPainter threadPainter = new ThreadPainter();
    Traverser traverser = new Traverser(threadPainter);
    for (AbstractStage threadableStage : threadableStages) {
        color++;
        colors.put(threadableStage, color);
        threadPainter.reset(colors, color, threadableStages);
        traverser.traverse(threadableStage);
    }
}