本文整理汇总了Java中org.apache.commons.lang.mutable.MutableDouble.setValue方法的典型用法代码示例。如果您正苦于以下问题:Java MutableDouble.setValue方法的具体用法?Java MutableDouble.setValue怎么用?Java MutableDouble.setValue使用的例子?那么,这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类 org.apache.commons.lang.mutable.MutableDouble 的用法示例。
在下文中一共展示了MutableDouble.setValue方法的5个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: accumulateStoreMetric
import org.apache.commons.lang.mutable.MutableDouble; //导入方法依赖的package包/类
/**
 * Used to accumulate store metrics across multiple regions in a region
 * server. These metrics are not "persistent", i.e. we keep overriding them
 * on every update instead of incrementing, so we need to accumulate them in
 * a temporary map before pushing them to the global metric collection.
 *
 * @param tmpMap a temporary map for accumulating store metrics
 * @param storeMetricType the store metric type to increment
 * @param val the value to add to the metric
 */
public void accumulateStoreMetric(final Map<String, MutableDouble> tmpMap,
StoreMetricType storeMetricType, double val) {
final String key = getStoreMetricName(storeMetricType);
// Single lookup instead of get()-then-get(): reuse the fetched entry
// to accumulate, or create a new one seeded with this value.
final MutableDouble sum = tmpMap.get(key);
if (sum == null) {
tmpMap.put(key, new MutableDouble(val));
} else {
sum.add(val);
}
if (this == ALL_SCHEMA_METRICS) {
// also compute the max value across all Stores on this server
final String maxKey = getStoreMetricNameMax(storeMetricType);
final MutableDouble cur = tmpMap.get(maxKey);
if (cur == null) {
tmpMap.put(maxKey, new MutableDouble(val));
} else if (cur.doubleValue() < val) {
cur.setValue(val);
}
} else {
// Per-schema metrics also roll up into the server-wide aggregate.
ALL_SCHEMA_METRICS.accumulateStoreMetric(tmpMap, storeMetricType, val);
}
}
示例2: process
import org.apache.commons.lang.mutable.MutableDouble; //导入方法依赖的package包/类
/**
 * Consumes one (merchant, value) pair and maintains the current SMA entry
 * for that merchant: the first value seen for a key seeds a new holder,
 * every later value overwrites the existing holder in place.
 */
@Override
public void process(KeyValPair<MerchantKey, Double> tuple)
{
final MutableDouble sma = currentSMAMap.get(tuple.getKey());
if (sma != null) {
// Key already tracked: update the current SMA value in place.
sma.setValue(tuple.getValue());
return;
}
// First SMA for this key: store a fresh mutable holder.
double seed = tuple.getValue();
currentSMAMap.put(tuple.getKey(), new MutableDouble(seed));
}
示例3: simpleHighPeaks
import org.apache.commons.lang.mutable.MutableDouble; //导入方法依赖的package包/类
/**
 * Marks local maxima ("high peaks") in periodData; every index maps to its
 * value when it is a peak, otherwise to 0d. When a smoothed ceiling series is
 * supplied (non-empty), a peak must also sit on or above that ceiling.
 * As a side effect, amountBelowSmoothingCeiling is set to the count of
 * interior points strictly below the ceiling divided by the full series length.
 */
private Map<Integer, Double> simpleHighPeaks(Double[] periodData, Double[] periodSmoothedCeiling, MutableDouble amountBelowSmoothingCeiling) {
final boolean unconstrained = periodSmoothedCeiling.length == 0;
final int last = periodData.length - 1;
final SortedMap<Integer, Double> peaks = new TreeMap<Integer, Double>();
peaks.put(0, 0d);
Double runningMax = -1d;
for (int idx = 1; idx < last; idx++) {
if (unconstrained || periodSmoothedCeiling[idx] > periodData[idx]) {
amountBelowSmoothingCeiling.increment();
}
final boolean abovePrev = periodData[idx] > periodData[idx - 1];
final boolean aboveSucc = periodData[idx] > periodData[idx + 1];
final boolean aboveCeiling = unconstrained || periodSmoothedCeiling[idx] <= periodData[idx];
peaks.put(idx, (abovePrev && aboveSucc && aboveCeiling) ? periodData[idx] : 0d);
if (periodData[idx] > runningMax) {
runningMax = periodData[idx];
}
}
// The final point counts as a peak when it matches or exceeds every interior
// value and (if constrained) lies on or above the smoothed ceiling.
final boolean lastIsPeak = periodData[last] >= runningMax
&& (unconstrained || periodSmoothedCeiling[last] <= periodData[last]);
peaks.put(last, lastIsPeak ? periodData[last] : 0d);
// Convert the raw below-ceiling count into a ratio of the series length.
amountBelowSmoothingCeiling.setValue(amountBelowSmoothingCeiling.doubleValue() / periodData.length);
return peaks;
}
示例4: simpleLowTroughs
import org.apache.commons.lang.mutable.MutableDouble; //导入方法依赖的package包/类
/**
 * Marks local minima ("low troughs") in pData; every index maps to its value
 * when it is a trough, otherwise to 0d. When a smoothed floor series is
 * supplied (non-empty), a trough must also sit on or below that floor.
 * As a side effect, amountAboveSmoothingFloor is set to the count of interior
 * points strictly above the floor divided by the full series length.
 * NOTE(review): the last element uses a strict {@code <} against the interior
 * minimum, whereas simpleHighPeaks uses {@code >=} against the maximum —
 * asymmetry preserved as-is; confirm whether it is intentional.
 */
private Map<Integer, Double> simpleLowTroughs(Double[] pData, Double[] periodSmoothedFloor, MutableDouble amountAboveSmoothingFloor) {
final boolean unconstrained = periodSmoothedFloor.length == 0;
final int last = pData.length - 1;
final SortedMap<Integer, Double> troughs = new TreeMap<Integer, Double>();
troughs.put(0, 0d);
Double runningMin = Double.MAX_VALUE;
for (int idx = 1; idx < last; idx++) {
if (unconstrained || periodSmoothedFloor[idx] < pData[idx]) {
amountAboveSmoothingFloor.increment();
}
final boolean belowPrev = pData[idx] < pData[idx - 1];
final boolean belowSucc = pData[idx] < pData[idx + 1];
final boolean belowFloor = unconstrained || periodSmoothedFloor[idx] >= pData[idx];
troughs.put(idx, (belowPrev && belowSucc && belowFloor) ? pData[idx] : 0d);
if (pData[idx] < runningMin) {
runningMin = pData[idx];
}
}
// The final point counts as a trough when it is strictly below every interior
// value and (if constrained) lies on or below the smoothed floor.
final boolean lastIsTrough = pData[last] < runningMin
&& (unconstrained || periodSmoothedFloor[last] >= pData[last]);
troughs.put(last, lastIsTrough ? pData[last] : 0d);
// Convert the raw above-floor count into a ratio of the series length.
amountAboveSmoothingFloor.setValue(amountAboveSmoothingFloor.doubleValue() / pData.length);
return troughs;
}
示例5: doWork
import org.apache.commons.lang.mutable.MutableDouble; //导入方法依赖的package包/类
/**
 * Entry point: iteratively genotypes hom-alt sites and estimates contamination.
 * Starting from INITIAL_CONTAMINATION_GUESS, it alternates between (a) calling
 * hom-alt sites per copy-number segment given the current contamination
 * estimate and (b) re-estimating contamination from those sites, until the
 * estimate changes by less than CONTAMINATION_CONVERGENCE_THRESHOLD or
 * MAX_ITERATIONS is reached. The final estimate and its error are written to
 * {@code outputTable}.
 *
 * @return the literal string "SUCCESS" on completion
 */
@Override
public Object doWork() {
final List<PileupSummary> sites = filterSites(PileupSummary.readFromFile(inputPileupSummariesTable));
// used the matched normal to genotype (i.e. find hom alt sites) if available
final List<PileupSummary> genotypingSites = matchedPileupSummariesTable == null ? sites :
filterSites(PileupSummary.readFromFile(matchedPileupSummariesTable));
// we partition the genome into contiguous allelic copy-number segments in order to infer the local minor
// allele fraction at each site. This is important because a minor allele fraction close to 1/2 (neutral)
// allows hets and hom alts to be distinguished easily, while a low minor allele fraction makes it harder
// to discriminate. It is crucial to know which site are true hom alts and which sites are hets with
// loss of heterozygosity. We do this for the genotyping sample because that is the sample from which
// the hom alts are deduced.
final List<List<PileupSummary>> genotypingSegments = findSegments(genotypingSites);
List<PileupSummary> homAltGenotypingSites = new ArrayList<>();
final MutableDouble genotypingContamination = new MutableDouble(INITIAL_CONTAMINATION_GUESS);
// Fixed-point iteration: the hom-alt calls depend on the contamination
// estimate, and the contamination estimate depends on the hom-alt calls.
for (int iteration = 0; iteration < MAX_ITERATIONS; iteration++) {
List<List<PileupSummary>> homAltSitesBySegment = Arrays.asList(new ArrayList<>());
final MutableDouble minorAlleleFractionThreshold = new MutableDouble(STRICT_LOH_MAF_THRESHOLD);
// Relax the minor-allele-fraction threshold step by step (starting strict)
// until enough hom-alt sites are found or the threshold bottoms out at 0.
while (homAltSitesBySegment.stream().mapToInt(List::size).sum() < DESIRED_MINIMUM_HOM_ALT_COUNT && minorAlleleFractionThreshold.doubleValue() > 0) {
homAltSitesBySegment = genotypingSegments.stream()
.map(segment -> segmentHomAlts(segment, genotypingContamination.doubleValue(), minorAlleleFractionThreshold.doubleValue()))
.collect(Collectors.toList());
minorAlleleFractionThreshold.subtract(MINOR_ALLELE_FRACTION_STEP_SIZE);
}
homAltGenotypingSites = homAltSitesBySegment.stream().flatMap(List::stream).collect(Collectors.toList());
final double newGenotypingContamination = calculateContamination(homAltGenotypingSites, errorRate(genotypingSites)).getLeft();
// Stop once successive contamination estimates agree within tolerance.
if (Math.abs(newGenotypingContamination - genotypingContamination.doubleValue()) < CONTAMINATION_CONVERGENCE_THRESHOLD) {
break;
}
genotypingContamination.setValue(newGenotypingContamination);
}
// Map the hom-alt calls (made on the genotyping sample) back onto the
// primary sample's sites, then compute the final contamination and error.
final List<PileupSummary> homAltSites = subsetSites(sites, homAltGenotypingSites);
final Pair<Double, Double> contaminationAndError = calculateContamination(homAltSites, errorRate(sites));
final double contamination = contaminationAndError.getLeft();
final double error = contaminationAndError.getRight();
ContaminationRecord.writeToFile(Arrays.asList(new ContaminationRecord(ContaminationRecord.Level.WHOLE_BAM.toString(), contamination, error)), outputTable);
return "SUCCESS";
}