This article collects typical usage examples of the Java method gnu.trove.TIntIterator.next. If you are wondering exactly how to use TIntIterator.next, how it behaves, or what calling code looks like, the curated examples below may help. You can also read further about usage of the containing class, gnu.trove.TIntIterator.
Below, a total of 10 code examples of the TIntIterator.next method are shown, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Java code examples.
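Before the examples below, here is a minimal, self-contained sketch of the hasNext()/next() loop that all of them share. It is not taken from any of the listed projects; it assumes the Trove 2.x package layout (gnu.trove.*) used throughout this page, and the set contents are made up for illustration.

import gnu.trove.TIntHashSet;
import gnu.trove.TIntIterator;

public class TIntIteratorNextDemo {
    public static void main(String[] args) {
        TIntHashSet values = new TIntHashSet(new int[] { 3, 1, 4, 1, 5 });
        int sum = 0;
        TIntIterator it = values.iterator();
        while (it.hasNext()) {
            // next() returns a primitive int, so no boxing is involved
            sum += it.next();
        }
        System.out.println("sum = " + sum);
    }
}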
Example 1: collapse
import gnu.trove.TIntIterator; // import the package/class the method depends on
public void collapse(LinearBekGraph graph) {
  // drop every edge that was recorded for the tails of this fragment
  for (int upNodeIndex : myTailEdges.keys()) {
    for (int downNodeIndex : myTailEdges.get(upNodeIndex)) {
      removeEdge(graph, upNodeIndex, downNodeIndex);
    }
  }

  // connect each tail node to the left child: add the edge if it is missing, otherwise replace the existing one
  TIntIterator it = myTails.iterator();
  while (it.hasNext()) {
    int tail = it.next();
    if (!LinearGraphUtils.getDownNodes(graph, tail).contains(myLeftChild)) {
      addEdge(graph, tail, myLeftChild);
    }
    else {
      replaceEdge(graph, tail, myLeftChild);
    }
  }

  removeEdge(graph, myParent, myLeftChild);
}
Example 2: constructEdges
import gnu.trove.TIntIterator; // import the package/class the method depends on
@Override
protected UndirectedWeightedListGraph constructEdges() {
    int n = this.metricSpace.size();
    TIntHashSet neighborhood = null;

    UndirectedWeightedListGraph graph = new UndirectedWeightedListGraph(n);

    for (int i = 0; i < n; i++) {
        // obtain the neighborhood of the i-th point
        neighborhood = this.metricSpace.getClosedNeighborhood(metricSpace.getPoint(i), this.maxDistance);

        // get the pairwise distances of the points and store them
        TIntIterator iterator = neighborhood.iterator();
        while (iterator.hasNext()) {
            int j = iterator.next();
            if (i == j) {
                continue;
            }
            double distance = this.metricSpace.distance(i, j);
            graph.addEdge(i, j, distance);
        }
    }

    return graph;
}
Example 3: computeIntersection
import gnu.trove.TIntIterator; // import the package/class the method depends on
/**
* This function computes the intersection between two sets of
* integers.
*
* @param set1 the first set
* @param set2 the second set
* @return a TIntSet containing elements common to both input sets
*/
public static TIntHashSet computeIntersection(TIntHashSet set1, TIntHashSet set2) {
    TIntHashSet smallerSet = null;
    TIntHashSet largerSet = null;

    /*
     * Let's identify the smaller and larger sets,
     * so that we only need to iterate through the smaller one.
     */
    if (set1.size() < set2.size()) {
        smallerSet = set1;
        largerSet = set2;
    } else {
        smallerSet = set2;
        largerSet = set1;
    }

    TIntHashSet intersection = new TIntHashSet();
    TIntIterator iterator = smallerSet.iterator();
    while (iterator.hasNext()) {
        int element = iterator.next();
        if (largerSet.contains(element)) {
            intersection.add(element);
        }
    }

    return intersection;
}
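A quick sketch of how this helper might be called (the set contents here are illustrative only, not taken from the original project):

TIntHashSet a = new TIntHashSet(new int[] { 1, 2, 3, 4 });
TIntHashSet b = new TIntHashSet(new int[] { 3, 4, 5 });
TIntHashSet common = computeIntersection(a, b); // contains 3 and 4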
Example 4: pullUpIndices
import gnu.trove.TIntIterator; // import the package/class the method depends on
protected static TIntHashSet pullUpIndices(TIntHashSet subIndices, TIntHashSet indices) {
    TIntHashSet result = new TIntHashSet();
    TIntIterator iterator = subIndices.iterator();
    int[] indicesArray = indices.toArray();
    while (iterator.hasNext()) {
        // map each position in the sub-selection to the corresponding member of the full index array
        int subIndex = iterator.next();
        result.add(indicesArray[subIndex]);
    }
    return result;
}
Example 5: highestCommonPhaseNumber
import gnu.trove.TIntIterator; // import the package/class the method depends on
private int highestCommonPhaseNumber(TIntHashSet phaseSet) {
    int hcpn = -1;
    // Trove iterators cannot use for() constructs
    TIntIterator i = phaseSet.iterator();
    while (i.hasNext()) {
        int phase = i.next();
        hcpn = phase > hcpn ? phase : hcpn;
    }
    return hcpn;
}
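As the comment points out, Trove's primitive sets are not Iterable, so an enhanced for loop cannot be used on phaseSet directly; the explicit TIntIterator is the usual workaround. If a callback style is acceptable, an alternative sketch (assuming the gnu.trove 2.x forEach/TIntProcedure API, not taken from the original project) would be:

private int highestCommonPhaseNumber(TIntHashSet phaseSet) {
    final int[] hcpn = { -1 };
    phaseSet.forEach(new TIntProcedure() {
        public boolean execute(int phase) {
            // keep the largest phase number seen so far
            if (phase > hcpn[0]) hcpn[0] = phase;
            return true; // returning true continues the iteration
        }
    });
    return hcpn[0];
}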
Example 6: DataEntitySearchState
import gnu.trove.TIntIterator; // import the package/class the method depends on
public DataEntitySearchState() {
    GraphicalEventAppearance render = renderEvents.last();
    // Always skip the graphics context
    for (int arg = 1; arg < render.event.getNumberOfArgumentProducers(); arg++) {
        String name = render.event.getArgumentName(arg);
        Invoke invoke = (Invoke) trace.getInstruction(render.event.getEventID());
        QualifiedClassName argumentType = invoke.getMethodInvoked().getParsedDescriptor().getTypeOfArgumentNumber(arg - 1);
        if (argumentType == QualifiedClassName.INT)
            continue;
        Value value = trace.getOperandStackValue(render.event.getEventID(), arg);
        if (value.hasEventID()) {
            dependencies.add(value.getEventID());
            while (dependencies.size() > 0) {
                // Go through each dependency waiting to be analyzed and find its root dependencies.
                TIntIterator iterator = dependencies.iterator();
                while (iterator.hasNext()) {
                    int eventID = iterator.next();
                    Instruction inst = trace.getInstruction(eventID);
                    visited.add(eventID);
                    boolean isFieldUse = inst instanceof GETFIELD;
                    QualifiedClassName fieldClassname = isFieldUse ? ((GETFIELD) inst).getFieldref().getClassname() : null;
                    // HAAAAAAAAAAAACK!
                    boolean isProjectClass = fieldClassname != null && !fieldClassname.getText().startsWith("java") && !fieldClassname.getText().startsWith("sun");
                    // Add operand stack dependencies if this isn't a field reference, or if it is, only if the field is part of the program (and not the SDK)
                    if (!isFieldUse || isProjectClass) {
                        for (Value vp : trace.getOperandStackDependencies(eventID))
                            if (vp != null && vp.getEventID() >= 0)
                                handleDependency(vp.getEventID());
                    }
                    int heapDependencyID = trace.getHeapDependency(eventID);
                    if (heapDependencyID >= 0 && !visited.contains(heapDependencyID))
                        handleDependency(heapDependencyID);
                    IntegerVector objectDependencies = trace.getUnrecordedInvocationDependencyIDs(eventID);
                    if (objectDependencies != null) {
                        for (int i = 0; i < objectDependencies.size(); i++)
                            handleDependency(objectDependencies.get(i));
                    }
                }
                // Now that we've gone through these, make the new ones the next to iterate through and clear the new dependency set.
                TIntHashSet temp = newDependencies;
                dependencies.clear();
                newDependencies = dependencies;
                dependencies = temp;
            }
        }
    }
}
Example 7: dirichletMultinomialLikelihoodRatio
import gnu.trove.TIntIterator; // import the package/class the method depends on
/** What is the probability that these two observations were drawn from
* the same multinomial with symmetric Dirichlet prior alpha, relative
* to the probability that they were drawn from different multinomials
* both drawn from this Dirichlet?
*/
public static double dirichletMultinomialLikelihoodRatio(TIntIntHashMap countsX,
                                                         TIntIntHashMap countsY,
                                                         double alpha, double alphaSum) {
    // The likelihood for one DCM is
    //
    //      Gamma( alpha_sum )       prod Gamma( alpha + N_i )
    //     --------------------- x ----------------------------
    //      prod Gamma ( alpha )     Gamma ( alpha_sum + N )
    //
    // When we divide this by the product of two other DCMs with the same
    // alpha parameter, the first term in the numerator cancels with the
    // first term in the denominator. Then moving the remaining alpha-only
    // term to the numerator, we get
    //
    //        prod Gamma(alpha)           prod Gamma( alpha + X_i + Y_i )
    //       --------------------- x -------------------------------------
    //        Gamma (alpha_sum)           Gamma( alpha_sum + X_sum + Y_sum )
    //     ----------------------------------------------------------
    //        prod Gamma(alpha + X_i)         prod Gamma(alpha + Y_i)
    //       --------------------------- x -----------------------------
    //        Gamma( alpha_sum + X_sum )     Gamma( alpha_sum + Y_sum )

    double logLikelihood = 0.0;
    double logGammaAlpha = logGamma(alpha);

    int totalX = 0;
    int totalY = 0;

    int key, x, y;

    TIntHashSet distinctKeys = new TIntHashSet();
    distinctKeys.addAll(countsX.keys());
    distinctKeys.addAll(countsY.keys());

    TIntIterator iterator = distinctKeys.iterator();
    while (iterator.hasNext()) {
        key = iterator.next();

        x = 0;
        if (countsX.containsKey(key)) {
            x = countsX.get(key);
        }

        y = 0;
        if (countsY.containsKey(key)) {
            y = countsY.get(key);
        }

        totalX += x;
        totalY += y;

        logLikelihood += logGamma(alpha) + logGamma(alpha + x + y)
            - logGamma(alpha + x) - logGamma(alpha + y);
    }

    logLikelihood += logGamma(alphaSum + totalX) + logGamma(alphaSum + totalY)
        - logGamma(alphaSum) - logGamma(alphaSum + totalX + totalY);

    return logLikelihood;
}
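For reference, the quantity the loop above accumulates in log space can be transcribed from the comment and code into standard notation as

\[
\frac{P(X, Y \mid \text{same multinomial})}{P(X)\,P(Y)}
  \;=\;
  \frac{\prod_i \Gamma(\alpha)\,\Gamma(\alpha + X_i + Y_i)}
       {\prod_i \Gamma(\alpha + X_i)\,\Gamma(\alpha + Y_i)}
  \cdot
  \frac{\Gamma(\alpha_{\mathrm{sum}} + X_{\mathrm{sum}})\,\Gamma(\alpha_{\mathrm{sum}} + Y_{\mathrm{sum}})}
       {\Gamma(\alpha_{\mathrm{sum}})\,\Gamma(\alpha_{\mathrm{sum}} + X_{\mathrm{sum}} + Y_{\mathrm{sum}})},
\]

where the product runs over the distinct keys of the two count maps; the method returns the logarithm of this ratio.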
Example 8: findParametersToRemove
import gnu.trove.TIntIterator; // import the package/class the method depends on
@NotNull
public static TIntArrayList findParametersToRemove(@NotNull PsiMethod method,
                                                   @NotNull final PsiExpression expr,
                                                   @Nullable final PsiExpression[] occurences) {
  final PsiParameter[] parameters = method.getParameterList().getParameters();
  if (parameters.length == 0) return new TIntArrayList();

  PsiMethod[] overridingMethods = OverridingMethodsSearch.search(method, true).toArray(PsiMethod.EMPTY_ARRAY);
  final PsiMethod[] allMethods = ArrayUtil.append(overridingMethods, method);

  final TIntHashSet suspects = new TIntHashSet();
  expr.accept(new JavaRecursiveElementWalkingVisitor() {
    @Override public void visitReferenceExpression(final PsiReferenceExpression expression) {
      super.visitReferenceExpression(expression);
      PsiElement resolved = expression.resolve();
      if (resolved instanceof PsiParameter) {
        int i = ArrayUtil.find(parameters, resolved);
        if (i != -1) {
          suspects.add(i);
        }
      }
    }
  });

  final TIntIterator iterator = suspects.iterator();
  while (iterator.hasNext()) {
    final int paramNum = iterator.next();
    for (PsiMethod psiMethod : allMethods) {
      PsiParameter[] psiParameters = psiMethod.getParameterList().getParameters();
      if (paramNum >= psiParameters.length) continue;
      PsiParameter parameter = psiParameters[paramNum];
      if (!ReferencesSearch.search(parameter, parameter.getResolveScope(), false).forEach(new Processor<PsiReference>() {
        public boolean process(final PsiReference reference) {
          PsiElement element = reference.getElement();
          boolean stillCanBeRemoved = false;
          if (element != null) {
            stillCanBeRemoved = isAncestor(expr, element, false) || PsiUtil.isInsideJavadocComment(getPhysical(element));
            if (!stillCanBeRemoved && occurences != null) {
              for (PsiExpression occurence : occurences) {
                if (isAncestor(occurence, element, false)) {
                  stillCanBeRemoved = true;
                  break;
                }
              }
            }
          }
          if (!stillCanBeRemoved) {
            iterator.remove();
            return false;
          }
          return true;
        }
      })) break;
    }
  }

  return new TIntArrayList(suspects.toArray());
}
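Example 8 also exercises the other half of the TIntIterator contract: calling iterator.remove() inside the Processor drops the current parameter index from suspects while iteration is still in progress, which is why the code walks the live set through its iterator rather than a copied array. A stripped-down version of that pattern (contents illustrative only, not from the original project):

TIntHashSet suspects = new TIntHashSet(new int[] { 0, 1, 2 });
TIntIterator iterator = suspects.iterator();
while (iterator.hasNext()) {
    int paramNum = iterator.next();
    if (paramNum == 1) {
        iterator.remove(); // removes the element from the underlying set, not just from the iterator
    }
}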
Example 9: addCofaces
import gnu.trove.TIntIterator; // import the package/class the method depends on
/**
* This function inductively adds all of the cofaces of the simplex tau to
* the complex. For more information about this algorithm, consult the paper
* "Fast Construction of the Vietoris-Rips Complex" by Afra Zomorodian.
*
* @param G
* the neighborhood graph
* @param k
* the maximum allowable dimension
* @param tau
* the current simplex to add
* @param N
* the lower neighbors to investigate
* @param filtrationValue
* the filtration value of the current simplex, tau
*/
protected void addCofaces(UndirectedWeightedListGraph G, int k, Simplex tau, TIntHashSet N, double filtrationValue) {
    Simplex newSimplex = null;
    if (this.indices != null) {
        newSimplex = HomologyUtility.convertIndices(tau, this.indices);
    } else {
        newSimplex = tau;
    }

    // add the current simplex to the complex
    this.storageStructure.addElement(newSimplex, this.converter.getFiltrationIndex(filtrationValue));

    // exit if the dimension is the maximum allowed
    if (tau.getDimension() >= k) {
        return;
    }

    double weight = 0;
    TIntIterator iterator = N.iterator();
    TIntHashSet M;

    // iterate through the lower neighborhood
    while (iterator.hasNext()) {
        int v = iterator.next();

        // create a new simplex by appending
        // ie. sigma = tau U {v}
        Simplex sigma = new Simplex(HomologyUtility.appendToArray(tau.getVertices(), v));

        // compute the intersection between N and the lower neighbors of v
        M = HomologyUtility.computeIntersection(N, G.getLowerNeighbors(v));

        // compute the weight of the simplex sigma
        // the weight is defined to be the maximum weight of all of the simplex's faces
        if (sigma.getDimension() == 1) {
            int i = sigma.getVertices()[0];
            int j = sigma.getVertices()[1];
            weight = G.getWeight(i, j);
        } else if (sigma.getDimension() > 1) {
            weight = filtrationValue;
            int[] tauVertices = tau.getVertices();
            for (int tauVertex : tauVertices) {
                weight = this.converter.computeInducedFiltrationValue(weight, G.getWeight(tauVertex, v));
            }
        }

        // recurse: add the cofaces of sigma
        this.addCofaces(G, k, sigma, M, weight);
    }
}
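In terms of filtration values, the weight handed to the recursive call follows the rule stated in the comment: the new simplex \(\sigma = \tau \cup \{v\}\) takes the maximum filtration value of its faces. Assuming computeInducedFiltrationValue takes the larger of its two arguments (as the comment suggests; the exact behaviour depends on the converter), this amounts to

\[
w(\sigma) \;=\; \max\Big(w(\tau),\; \max_{u \in \tau} d(u, v)\Big),
\]

where \(d(u, v)\) is the edge weight G.getWeight(u, v).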
Example 10: dirichletMultinomialLikelihoodRatio
import gnu.trove.TIntIterator; // import the package/class the method depends on
/**
* What is the probability that these two observations were drawn from the
* same multinomial with symmetric Dirichlet prior alpha, relative to the
* probability that they were drawn from different multinomials both drawn
* from this Dirichlet?
*/
public static double dirichletMultinomialLikelihoodRatio(TIntIntHashMap countsX,
                                                         TIntIntHashMap countsY,
                                                         double alpha, double alphaSum) {
    // The likelihood for one DCM is
    //
    //      Gamma( alpha_sum )       prod Gamma( alpha + N_i )
    //     --------------------- x ----------------------------
    //      prod Gamma ( alpha )     Gamma ( alpha_sum + N )
    //
    // When we divide this by the product of two other DCMs with the same
    // alpha parameter, the first term in the numerator cancels with the
    // first term in the denominator. Then moving the remaining alpha-only
    // term to the numerator, we get
    //
    //        prod Gamma(alpha)           prod Gamma( alpha + X_i + Y_i )
    //       --------------------- x -------------------------------------
    //        Gamma (alpha_sum)           Gamma( alpha_sum + X_sum + Y_sum )
    //     ----------------------------------------------------------
    //        prod Gamma(alpha + X_i)         prod Gamma(alpha + Y_i)
    //       --------------------------- x -----------------------------
    //        Gamma( alpha_sum + X_sum )     Gamma( alpha_sum + Y_sum )

    double logLikelihood = 0.0;
    double logGammaAlpha = logGamma(alpha);

    int totalX = 0;
    int totalY = 0;

    int key, x, y;

    TIntHashSet distinctKeys = new TIntHashSet();
    distinctKeys.addAll(countsX.keys());
    distinctKeys.addAll(countsY.keys());

    TIntIterator iterator = distinctKeys.iterator();
    while (iterator.hasNext()) {
        key = iterator.next();

        x = 0;
        if (countsX.containsKey(key)) {
            x = countsX.get(key);
        }

        y = 0;
        if (countsY.containsKey(key)) {
            y = countsY.get(key);
        }

        totalX += x;
        totalY += y;

        logLikelihood += logGamma(alpha) + logGamma(alpha + x + y)
            - logGamma(alpha + x) - logGamma(alpha + y);
    }

    logLikelihood += logGamma(alphaSum + totalX) + logGamma(alphaSum + totalY)
        - logGamma(alphaSum) - logGamma(alphaSum + totalX + totalY);

    return logLikelihood;
}