This page collects typical usage examples of the Java method org.apache.hadoop.io.DoubleWritable.get. If you are unsure what DoubleWritable.get does, how to call it, or what it looks like in practice, the curated examples below should help. You can also explore the enclosing class, org.apache.hadoop.io.DoubleWritable, in more depth.
The following 15 code examples of DoubleWritable.get are shown, sorted by popularity by default.
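Before the examples, here is a minimal, self-contained sketch of what DoubleWritable.get returns: the primitive double held by the Writable, preserved across a serialize/deserialize round trip (the class name is illustrative):

import java.io.IOException;
import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.DoubleWritable;

public class DoubleWritableRoundTrip {
    public static void main(String[] args) throws IOException {
        DoubleWritable original = new DoubleWritable(3.14);

        // serialize into an in-memory buffer
        DataOutputBuffer out = new DataOutputBuffer();
        original.write(out);

        // deserialize into a fresh instance
        DataInputBuffer in = new DataInputBuffer();
        in.reset(out.getData(), out.getLength());
        DoubleWritable copy = new DoubleWritable();
        copy.readFields(in);

        System.out.println(copy.get()); // prints 3.14: get() unwraps the primitive double
    }
}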
Example 1: compute

import org.apache.hadoop.io.DoubleWritable; // import the class this method depends on

// Vertex-centric PageRank step (Hama-style vertex API): initialize the rank
// uniformly in superstep 0, then fold incoming contributions into the damped
// update and forward an equal share of the rank to each neighbor.
@Override
public void compute(Iterable<DoubleWritable> messages) throws IOException {
    if (this.getSuperstepCount() == 0) {
        this.setValue(new DoubleWritable(1.0 / this.getNumVertices()));
    } else {
        double pageRankSum = 0;
        for (DoubleWritable message : messages) {
            pageRankSum += message.get();
        }
        double alpha = (1.0 - DAMPING_FACTOR) / this.getNumVertices();
        setValue(new DoubleWritable(alpha + (pageRankSum * DAMPING_FACTOR)));
    }
    long edges = this.getEdges().size();
    this.sendMessageToNeighbors(new DoubleWritable(this.getValue().get() / edges));
}

Developer: tayllan, Project: comparative-study-of-frameworks-for-parallel-processing-of-graphs, Lines: 20, Source: PageRank.java
Example 2: compute

import org.apache.hadoop.io.DoubleWritable; // import the class this method depends on

// Single-source shortest paths (Giraph-style vertex API): vertex 1 is the
// source; each vertex keeps the minimum distance seen so far and propagates
// improvements along its out-edges before voting to halt.
@Override
public void compute(Vertex<LongWritable, DoubleWritable, FloatWritable> vertex, Iterable<DoubleWritable> messages) throws IOException {
    if (getSuperstep() == 0) {
        vertex.setValue(new DoubleWritable(Integer.MAX_VALUE));
    }
    double minDist = (vertex.getId().get() == 1) ? 0d : Integer.MAX_VALUE;
    for (DoubleWritable message : messages) {
        if (message.get() < minDist) {
            minDist = message.get();
        }
    }
    if ((int) minDist < (int) vertex.getValue().get()) {
        vertex.setValue(new DoubleWritable(minDist));
        for (Edge<LongWritable, FloatWritable> edge : vertex.getEdges()) {
            double distance = minDist + edge.getValue().get();
            this.sendMessage(edge.getTargetVertexId(), new DoubleWritable(distance));
        }
    }
    vertex.voteToHalt();
}

Developer: tayllan, Project: comparative-study-of-frameworks-for-parallel-processing-of-graphs, Lines: 26, Source: ShortestPath.java
Example 3: reduce

import org.apache.hadoop.io.DoubleWritable; // import the class this method depends on

protected void reduce(Text key, Iterable<DoubleWritable> values, Context context)
        throws IOException, InterruptedException {
    // sum all values for this key
    double total = 0;
    for (DoubleWritable value : values) {
        total += value.get();
    }
    context.write(key, new DoubleWritable(total));
}
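A reducer like this is wired up with the standard MapReduce driver boilerplate. A minimal sketch, assuming the reduce method above lives in a class named SumReducer and that a mapper emitting <Text, DoubleWritable> pairs exists (both class names are hypothetical):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class SumJob {
    public static void main(String[] args) throws Exception {
        Job job = Job.getInstance(new Configuration(), "sum per key");
        job.setJarByClass(SumJob.class);
        job.setMapperClass(SumMapper.class);   // hypothetical mapper
        job.setReducerClass(SumReducer.class); // the reducer shown above
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(DoubleWritable.class);
        FileInputFormat.addInputPath(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}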
Example 4: readFields

import org.apache.hadoop.io.DoubleWritable; // import the class this method depends on

// Deserialize the fields of a custom Writable, in exactly the order the
// matching write method emitted them: one Text, eight ints, three doubles.
public void readFields(DataInput dataInput) throws IOException {
    Text text = new Text();
    text.readFields(dataInput);
    wifiProb = text.toString();
    IntWritable intReader = new IntWritable();
    intReader.readFields(dataInput);
    inNoOutWifi = intReader.get();
    intReader.readFields(dataInput);
    inNoOutStore = intReader.get();
    intReader.readFields(dataInput);
    outNoInWifi = intReader.get();
    intReader.readFields(dataInput);
    outNoInStore = intReader.get();
    intReader.readFields(dataInput);
    inAndOutWifi = intReader.get();
    intReader.readFields(dataInput);
    inAndOutStore = intReader.get();
    intReader.readFields(dataInput);
    stayInWifi = intReader.get();
    intReader.readFields(dataInput);
    stayInStore = intReader.get();
    DoubleWritable doubleWritable = new DoubleWritable();
    doubleWritable.readFields(dataInput);
    jumpRate = doubleWritable.get();
    doubleWritable.readFields(dataInput);
    deepVisit = doubleWritable.get();
    doubleWritable.readFields(dataInput);
    inStoreRate = doubleWritable.get();
}
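A Writable must serialize its fields in exactly the order readFields consumes them. A sketch of the write method implied by the deserialization order above (field names are taken from readFields; the rest of the class is not shown in the excerpt):

public void write(DataOutput dataOutput) throws IOException {
    new Text(wifiProb).write(dataOutput);
    new IntWritable(inNoOutWifi).write(dataOutput);
    new IntWritable(inNoOutStore).write(dataOutput);
    new IntWritable(outNoInWifi).write(dataOutput);
    new IntWritable(outNoInStore).write(dataOutput);
    new IntWritable(inAndOutWifi).write(dataOutput);
    new IntWritable(inAndOutStore).write(dataOutput);
    new IntWritable(stayInWifi).write(dataOutput);
    new IntWritable(stayInStore).write(dataOutput);
    new DoubleWritable(jumpRate).write(dataOutput);
    new DoubleWritable(deepVisit).write(dataOutput);
    new DoubleWritable(inStoreRate).write(dataOutput);
}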
Example 5: reduce

import org.apache.hadoop.io.DoubleWritable; // import the class this method depends on

@Override
public void reduce(Text key, Iterable<DoubleWritable> values, Context context)
        throws IOException, InterruptedException {
    // key = user:movieA
    // value = <subSum, subSub>
    // accumulate as a double; summing DoubleWritable values into an int
    // would silently truncate the fractional part
    double sum = 0;
    for (DoubleWritable value : values) {
        sum += value.get();
    }
    context.write(key, new DoubleWritable(sum));
}
Example 6: reduce

import org.apache.hadoop.io.DoubleWritable; // import the class this method depends on

@Override
public void reduce(Text key, Iterable<DoubleWritable> values, Context context)
        throws IOException, InterruptedException {
    double sum = 0;
    for (DoubleWritable value : values) {
        sum += value.get();
    }
    // round the sum to four decimal places before emitting
    DecimalFormat df = new DecimalFormat("#.0000");
    sum = Double.valueOf(df.format(sum));
    context.write(key, new DoubleWritable(sum));
}
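DecimalFormat is used here only to round the sum to four decimal places; note that it rounds HALF_EVEN by default. An equivalent without the string detour (this variant rounds HALF_UP at exact ties, so the two can differ on boundary values):

sum = Math.round(sum * 10000.0) / 10000.0;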
Example 7: reduce

import org.apache.hadoop.io.DoubleWritable; // import the class this method depends on

@Override
protected void reduce(LongWritable key, Iterable<DoubleWritable> amounts, Context context)
        throws IOException, InterruptedException {
    // keeping only the core logic here
    double totalValue = 0.0;
    for (DoubleWritable amount : amounts) {
        totalValue += amount.get();
    }
    context.write(NullWritable.get(), new OrderWritable(key.get(), totalValue));
}
Example 8: reduce

import org.apache.hadoop.io.DoubleWritable; // import the class this method depends on

@Override
public void reduce(NullWritable n, Iterable<DoubleWritable> values, Context context)
        throws IOException, InterruptedException {
    // sum the per-component contributions into the squared 2-norm
    double norm2 = 0;
    for (DoubleWritable v : values) {
        norm2 += v.get();
    }
    context.write(n, new DoubleWritable(norm2));
}
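This reducer sums per-component contributions into a squared 2-norm. A minimal sketch of a mapper that could feed it, assuming one vector component per input line (the input format and class name are assumptions):

import java.io.IOException;
import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

public class SquareMapper extends Mapper<LongWritable, Text, NullWritable, DoubleWritable> {
    private final DoubleWritable outValue = new DoubleWritable();

    @Override
    protected void map(LongWritable key, Text value, Context context)
            throws IOException, InterruptedException {
        double component = Double.parseDouble(value.toString().trim());
        outValue.set(component * component); // emit v_i^2; the reducer sums these into ||v||^2
        context.write(NullWritable.get(), outValue);
    }
}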
Example 9: getValue

import org.apache.hadoop.io.DoubleWritable; // import the class this method depends on

// Look up the stored double for a URL; a MapFile-style reader.get fills
// `value` in place and returns null when the key is absent, in which case
// this method falls back to 0.
public static double getValue(String url) throws IOException {
    DoubleWritable value = new DoubleWritable(0);
    if (reader.get(new Text(url), value) == null) {
        return 0;
    }
    return value.get();
}
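The static reader field is not shown in the excerpt; the two-argument lookup signature matches org.apache.hadoop.io.MapFile.Reader, whose get fills the value argument and returns null on a miss. A hedged initialization sketch (the path is a placeholder):

Configuration conf = new Configuration();
MapFile.Reader reader = new MapFile.Reader(new Path("/data/pageranks"), conf);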
Example 10: reduce

import org.apache.hadoop.io.DoubleWritable; // import the class this method depends on

public void reduce(Text key, Iterable<DoubleWritable> values, Context context) throws IOException, InterruptedException {
    double pageRank = 0;
    double dampFactor = 0.85;
    for (DoubleWritable value : values) {
        pageRank += value.get();
    }
    // damped PageRank update: rank = (1 - d) + d * sum of incoming contributions
    pageRank = 1 - dampFactor + dampFactor * pageRank;
    context.write(key, new DoubleWritable(pageRank));
}
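This reducer applies the damped update rank = (1 - d) + d * sum(contributions). A sketch of the kind of mapper that typically produces those contributions, assuming adjacency-list input lines of the form "page<TAB>rank<TAB>out1,out2,..." (the input format and class name are assumptions, and a real job would also re-emit the link structure for the next iteration):

import java.io.IOException;
import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

public class PageRankMapper extends Mapper<LongWritable, Text, Text, DoubleWritable> {
    @Override
    protected void map(LongWritable key, Text value, Context context)
            throws IOException, InterruptedException {
        String[] parts = value.toString().split("\t");
        double rank = Double.parseDouble(parts[1]);
        String[] outLinks = parts[2].split(",");
        // each out-link receives an equal share of this page's current rank
        for (String target : outLinks) {
            context.write(new Text(target), new DoubleWritable(rank / outLinks.length));
        }
    }
}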
Example 11: reduce

import org.apache.hadoop.io.DoubleWritable; // import the class this method depends on

public void reduce(Text key, Iterable<DoubleWritable> values, Context context) throws IOException, InterruptedException {
    // sum the partial products of the url vector and the pattern vector
    double urlPatternDotProduct = 0;
    for (DoubleWritable value : values) {
        urlPatternDotProduct += value.get();
    }
    String url = key.toString();
    // normalize by the url vector's modulus to get a cosine-style score,
    // then weight it by the url's PageRank
    double urlPatternCosDistance = urlPatternDotProduct / (urlModulus.getValue(url));
    double pageRank = pageRanks.getValue(url);
    double urlWeight = Math.pow(urlPatternCosDistance, pageRank) + pageRank * 10000;
    context.write(key, new DoubleWritable(urlWeight));
}
Example 12: reduce

import org.apache.hadoop.io.DoubleWritable; // import the class this method depends on

public void reduce(Text key, Iterable<DoubleWritable> values, Context context)
        throws IOException, InterruptedException {
    // average the stock prices observed for this key
    double total = 0;
    int instances = 0;
    for (DoubleWritable stockPrice : values) {
        total += stockPrice.get();
        instances++;
    }
    outValue.set(total / (double) instances);
    context.write(key, outValue);
}
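The outValue field is declared outside the excerpt; reusing one Writable across calls is the usual Hadoop idiom for avoiding a per-record allocation. The assumed declaration:

// assumed field on the reducer class (not shown above)
private final DoubleWritable outValue = new DoubleWritable();

Note that an average is not associative, so this reducer cannot be reused as a combiner; combining would require carrying (sum, count) pairs instead.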
Example 13: run

import org.apache.hadoop.io.DoubleWritable; // import the class this method depends on

// Override run() rather than reduce() to accumulate across every key in the
// partition and emit a single totals record at the end.
@Override
public void run(Context context) throws IOException, InterruptedException {
    double keySum = 0.0;
    double valueSum = 0.0;
    while (context.nextKey()) {
        keySum += context.getCurrentKey().get();
        for (DoubleWritable value : context.getValues()) {
            valueSum += value.get();
        }
    }
    outKey.set(keySum);
    outValue.set(valueSum);
    context.write(outKey, outValue);
}
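Because run() is overridden instead of reduce(), the task walks every key in its partition and writes one totals record; with more than one reducer you get one such record per partition. The outKey and outValue fields are declared outside the excerpt, and the input key type must itself be DoubleWritable for getCurrentKey().get() to compile. Assumed declarations:

// assumed fields on the reducer class (not shown above)
private final DoubleWritable outKey = new DoubleWritable();
private final DoubleWritable outValue = new DoubleWritable();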
Example 14: readNextKeyValuePairs

import org.apache.hadoop.io.DoubleWritable; // import the class this method depends on

// Read the next <double, count> pair from a sorted stream, transparently
// injecting the zero entry (which is stored only as a count in metadata,
// not as a record in the stream) at its place in sort order.
public boolean readNextKeyValuePairs(DoubleWritable readKey, IntWritable readValue)
    throws IOException
{
    boolean ret = true;
    try {
        if (contain0s && justFound0) {
            // replay the pair that was stashed when the zero entry was injected
            readKey.set(keyAfterZero.get());
            readValue.set(valueAfterZero.get());
            contain0s = false;
        } else {
            readKey.readFields(currentStream);
            readValue.readFields(currentStream);
        }
    } catch (EOFException e) {
        // case in which zero is the maximum value in the matrix:
        // the zero entry is not present in the input sorted matrix,
        // but still needs to be accounted for
        if (contain0s && !justFound0) {
            justFound0 = true;
            readKey.set(0);
            readValue.set((int) numZeros);
        } else {
            ret = false;
        }
    }
    if (contain0s && !justFound0 && readKey.get() >= 0) {
        // first non-negative key read: stash it and emit the zero entry first
        justFound0 = true;
        keyAfterZero.set(readKey.get());
        valueAfterZero.set(readValue.get());
        readKey.set(0);
        readValue.set((int) numZeros);
    }
    return ret;
}
Example 15: pickValueWeight

import org.apache.hadoop.io.DoubleWritable; // import the class this method depends on

// Pick the value (and its weight) at quantile p from a sorted, partitioned
// set of <value, count> files, optionally averaging two adjacent values.
public static double[] pickValueWeight(String dir, MetaDataNumItemsByEachReducer metadata, double p, boolean average)
    throws IOException
{
    long[] counts = metadata.getNumItemsArray();
    long[] ranges = new long[counts.length];
    ranges[0] = counts[0];
    for (int i = 1; i < counts.length; i++)
        ranges[i] = ranges[i - 1] + counts[i];
    long total = ranges[ranges.length - 1];
    // do averaging only if it is asked for and the total weight is even
    average = average && (total % 2 == 0);
    // locate the partition that contains the p-th item
    int currentPart = 0;
    double cum_weight = 0;
    long pos = (long) Math.ceil(total * p);
    while (ranges[currentPart] < pos) {
        currentPart++;
        cum_weight += ranges[currentPart];
    }
    // offset of the target item within its partition
    int offset;
    if (currentPart > 0)
        offset = (int) (pos - ranges[currentPart - 1] - 1);
    else
        offset = (int) pos - 1;
    // find the output file corresponding to that partition
    Path path = new Path(dir);
    FileSystem fs = IOUtilFunctions.getFileSystem(path);
    FileStatus[] files = fs.listStatus(path);
    Path fileToRead = null;
    for (FileStatus file : files)
        if (file.getPath().toString().endsWith(Integer.toString(currentPart))) {
            fileToRead = file.getPath();
            break;
        }
    if (fileToRead == null)
        throw new RuntimeException("cannot read partition " + currentPart);
    int buffsz = 64 * 1024;
    DoubleWritable readKey = new DoubleWritable();
    IntWritable readValue = new IntWritable();
    FSDataInputStream currentStream = null;
    double ret = -1;
    try {
        currentStream = fs.open(fileToRead, buffsz);
        // zeros are stored only in metadata, so tell the reader to inject them
        boolean contain0s = false;
        long numZeros = 0;
        if (currentPart == metadata.getPartitionOfZero()) {
            contain0s = true;
            numZeros = metadata.getNumberOfZero();
        }
        ReadWithZeros reader = new ReadWithZeros(currentStream, contain0s, numZeros);
        // scan forward until the cumulative count covers the target offset
        int numRead = 0;
        while (numRead <= offset) {
            reader.readNextKeyValuePairs(readKey, readValue);
            numRead += readValue.get();
            cum_weight += readValue.get();
        }
        ret = readKey.get();
        if (average) {
            if (numRead <= offset + 1) {
                // the quantile falls between two entries; average with the next value
                reader.readNextKeyValuePairs(readKey, readValue);
                cum_weight += readValue.get();
                ret = (ret + readKey.get()) / 2;
            }
        }
    }
    finally {
        IOUtilFunctions.closeSilently(currentStream);
    }
    return new double[] {ret, (average ? -1 : readValue.get()), (average ? -1 : cum_weight)};
}
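The returned triple is {the value at quantile p, that value's weight, the cumulative weight scanned so far}; when average is true the last two slots are set to -1, since an averaged result no longer corresponds to a single stored entry.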