當前位置: 首頁>>代碼示例>>Java>>正文


Java DoubleWritable.get方法代碼示例

本文整理匯總了Java中org.apache.hadoop.io.DoubleWritable.get方法的典型用法代碼示例。如果您正苦於以下問題:Java DoubleWritable.get方法的具體用法?Java DoubleWritable.get怎麼用?Java DoubleWritable.get使用的例子?那麼, 這裏精選的方法代碼示例或許可以為您提供幫助。您也可以進一步了解該方法所在org.apache.hadoop.io.DoubleWritable的用法示例。


在下文中一共展示了DoubleWritable.get方法的15個代碼示例,這些例子默認根據受歡迎程度排序。您可以為喜歡或者感覺有用的代碼點讚,您的評價將有助於系統推薦出更棒的Java代碼示例。

示例1: compute

import org.apache.hadoop.io.DoubleWritable; //導入方法依賴的package包/類
@Override
public void compute(Iterable<DoubleWritable> messages) throws IOException {
	// Superstep 0: seed every vertex with an equal share of the total rank.
	if (this.getSuperstepCount() == 0) {
		this.setValue(new DoubleWritable(1.0 / this.getNumVertices()));
	} else {
		// Accumulate the rank contributions received from in-neighbors.
		double incomingRank = 0;
		for (DoubleWritable message : messages) {
			incomingRank += message.get();
		}

		// Standard PageRank update: (1 - d) / N + d * sum(incoming).
		double baseRank = (1.0 - DAMPING_FACTOR) / this.getNumVertices();
		setValue(new DoubleWritable(baseRank + (incomingRank * DAMPING_FACTOR)));
	}

	// Spread this vertex's current rank evenly across its outgoing edges.
	long outDegree = this.getEdges().size();
	this.sendMessageToNeighbors(new DoubleWritable(this.getValue().get() / outDegree));
}
 
開發者ID:tayllan,項目名稱:comparative-study-of-frameworks-for-parallel-processing-of-graphs,代碼行數:20,代碼來源:PageRank.java

示例2: compute

import org.apache.hadoop.io.DoubleWritable; //導入方法依賴的package包/類
@Override
public void compute(Vertex<LongWritable, DoubleWritable, FloatWritable> vertex, Iterable<DoubleWritable> messages) throws IOException {
	// First superstep: initialize every vertex distance to "infinity".
	if (getSuperstep() == 0) {
		vertex.setValue(new DoubleWritable(Integer.MAX_VALUE));
	}
	// Source vertex (id == 1) starts at distance 0; all others at infinity.
	double minDist = (vertex.getId().get() == 1) ? 0d : Integer.MAX_VALUE;
	for (DoubleWritable message : messages) {
		if (message.get() < minDist) {
			minDist = message.get();
		}
	}
	// BUGFIX: compare as doubles. The original cast both sides to int, which
	// truncated fractional distances (edge weights are floats), so e.g. a
	// candidate of 2.1 against a stored 2.9 both truncated to 2 and a genuine
	// improvement was silently skipped.
	if (minDist < vertex.getValue().get()) {
		vertex.setValue(new DoubleWritable(minDist));

		// Relax every outgoing edge with the improved distance.
		for (Edge<LongWritable, FloatWritable> edge : vertex.getEdges()) {
			double distance = minDist + edge.getValue().get();

			this.sendMessage(
				edge.getTargetVertexId(),
				new DoubleWritable(distance)
			);
		}
	}
	// Halt; the vertex is reactivated only by incoming messages.
	vertex.voteToHalt();
}
 
開發者ID:tayllan,項目名稱:comparative-study-of-frameworks-for-parallel-processing-of-graphs,代碼行數:26,代碼來源:ShortestPath.java

示例3: reduce

import org.apache.hadoop.io.DoubleWritable; //導入方法依賴的package包/類
/**
 * Sums every double value associated with {@code key} and emits the total.
 */
protected void reduce(Text key, Iterable<DoubleWritable> values,
		Context context) throws IOException, InterruptedException {
	double sum = 0;
	for (DoubleWritable v : values) {
		sum += v.get();
	}
	context.write(key, new DoubleWritable(sum));
}
 
開發者ID:amritbhat786,項目名稱:DocIT,代碼行數:9,代碼來源:Total.java

示例4: readFields

import org.apache.hadoop.io.DoubleWritable; //導入方法依賴的package包/類
/**
 * Deserializes this element from {@code dataInput}.
 *
 * The read order must exactly mirror the corresponding write order:
 * one Text (wifiProb), eight IntWritables (the in/out/stay counters),
 * then three DoubleWritables (the three rate metrics).
 */
public void readFields(DataInput dataInput) throws IOException {
    // The wifi probe identifier is stored first as a Hadoop Text.
    Text text = new Text();
    text.readFields(dataInput);
    wifiProb = text.toString();

    // A single IntWritable is reused for all eight counter fields.
    IntWritable intReader = new IntWritable();

    intReader.readFields(dataInput);
    inNoOutWifi = intReader.get();
    intReader.readFields(dataInput);
    inNoOutStore = intReader.get();

    intReader.readFields(dataInput);
    outNoInWifi = intReader.get();
    intReader.readFields(dataInput);
    outNoInStore = intReader.get();


    intReader.readFields(dataInput);
    inAndOutWifi = intReader.get();
    intReader.readFields(dataInput);
    inAndOutStore = intReader.get();

    intReader.readFields(dataInput);
    stayInWifi = intReader.get();
    intReader.readFields(dataInput);
    stayInStore = intReader.get();


    // Finally the three double-valued metrics, again via one reused reader.
    DoubleWritable doubleWritable = new DoubleWritable();
    doubleWritable.readFields(dataInput);
    jumpRate = doubleWritable.get();
    doubleWritable.readFields(dataInput);
    deepVisit = doubleWritable.get();
    doubleWritable.readFields(dataInput);
    inStoreRate = doubleWritable.get();

}
 
開發者ID:cuiods,項目名稱:WIFIProbe,代碼行數:39,代碼來源:CustomerFlowElement.java

示例5: reduce

import org.apache.hadoop.io.DoubleWritable; //導入方法依賴的package包/類
/**
 * Sums the partial dot-product contributions for a user:movie pair.
 */
@Override
public void reduce(Text key, Iterable<DoubleWritable> values, Context context)
        throws IOException, InterruptedException {
    //key = user:movieA
    //value = <subSum, subSub>
    // BUGFIX: accumulate in a double. The original declared `int sum`, so the
    // compound assignment truncated the fractional part of every incoming
    // DoubleWritable even though the total is emitted as a DoubleWritable.
    double sum = 0;
    for (DoubleWritable value: values) {
        sum += value.get();
    }

    context.write(key, new DoubleWritable(sum));
}
 
開發者ID:yogykwan,項目名稱:mapreduce-samples,代碼行數:13,代碼來源:Sum.java

示例6: reduce

import org.apache.hadoop.io.DoubleWritable; //導入方法依賴的package包/類
/**
 * Sums the partial values for a key and rounds the result to four
 * decimal places before emitting it.
 */
@Override
public void reduce(Text key, Iterable<DoubleWritable> values, Context context)
        throws IOException, InterruptedException {

    double total = 0;
    for (DoubleWritable value : values) {
        total += value.get();
    }
    // Round to 4 decimal places by formatting and re-parsing.
    // NOTE(review): DecimalFormat uses the default locale; in locales whose
    // decimal separator is a comma, Double.valueOf would throw
    // NumberFormatException here — confirm this job always runs under a
    // dot-decimal locale.
    DecimalFormat df = new DecimalFormat("#.0000");
    total = Double.valueOf(df.format(total));
    context.write(key, new DoubleWritable(total));
}
 
開發者ID:yogykwan,項目名稱:mapreduce-samples,代碼行數:13,代碼來源:UnitSum.java

示例7: reduce

import org.apache.hadoop.io.DoubleWritable; //導入方法依賴的package包/類
/**
 * Totals all order amounts for a key and emits one OrderWritable summary.
 */
@Override
protected void reduce(LongWritable key, Iterable<DoubleWritable> amounts, Context context)
        throws IOException, InterruptedException {
    // keeping only the core logic here.
    double total = 0.0;
    for (DoubleWritable amount : amounts) {
        total += amount.get();
    }
    // All summaries share the NullWritable key; the order id travels in the value.
    context.write(NullWritable.get(), new OrderWritable(key.get(), total));
}
 
開發者ID:mravi,項目名稱:pro-phoenix,代碼行數:11,代碼來源:OrderStatsApp.java

示例8: reduce

import org.apache.hadoop.io.DoubleWritable; //導入方法依賴的package包/類
/**
 * Adds up the partial squared-norm contributions and emits the total.
 */
@Override
public void reduce(NullWritable n, Iterable<DoubleWritable> values,
    Context context) throws IOException, InterruptedException {
  double sumOfSquares = 0;
  for (DoubleWritable partial : values) {
    sumOfSquares += partial.get();
  }
  context.write(n, new DoubleWritable(sumOfSquares));
}
 
開發者ID:SiddharthMalhotra,項目名稱:sPCA,代碼行數:9,代碼來源:Norm2Job.java

示例9: getValue

import org.apache.hadoop.io.DoubleWritable; //導入方法依賴的package包/類
/**
 * Looks up the value stored for {@code url} in the shared map-file reader.
 *
 * @param url the key to look up
 * @return the stored value, or 0 when the lookup fails or finds nothing
 * @throws IOException declared for caller compatibility; lookup failures
 *         are handled internally and reported as 0
 */
public static double getValue(String url) throws IOException{
  
  DoubleWritable value = new DoubleWritable(0);

  // BUGFIX: the original placed `return` statements inside a finally block,
  // which silently discarded any exception thrown by reader.get(); its
  // `value == null` check was also dead code (value was just constructed).
  try {
    reader.get(new Text(url), value);
  }
  catch (IOException e) {
    // Lookup failed mid-read; fall back to the default of 0, matching the
    // original code's effective behavior of never propagating the exception.
    return 0;
  }
  return value.get();
}
 
開發者ID:ifuding,項目名稱:search-1047,代碼行數:15,代碼來源:MapFileRead.java

示例10: reduce

import org.apache.hadoop.io.DoubleWritable; //導入方法依賴的package包/類
/**
 * Combines the incoming rank contributions for a page and applies the
 * damping factor to produce its updated PageRank.
 */
public void reduce (Text key, Iterable<DoubleWritable> values, Context context) throws IOException, InterruptedException {
  final double dampFactor = 0.85;
  double contribution = 0;
  for (DoubleWritable value : values) {
    contribution += value.get();
  }
  // PageRank update: (1 - d) + d * sum(contributions).
  double rank = 1 - dampFactor + dampFactor * contribution;
  context.write(key, new DoubleWritable(rank));
}
 
開發者ID:ifuding,項目名稱:search-1047,代碼行數:10,代碼來源:PageRank.java

示例11: reduce

import org.apache.hadoop.io.DoubleWritable; //導入方法依賴的package包/類
/**
 * Computes the final ranking weight for a URL from its dot product with
 * the query pattern, its vector modulus, and its PageRank.
 */
public void reduce (Text key, Iterable<DoubleWritable> values, Context context) throws IOException, InterruptedException {
  double dotProduct = 0;
  for (DoubleWritable value : values) {
    dotProduct += value.get();
  }
  String url = key.toString();
  // Cosine distance: dot product normalized by the URL vector's modulus.
  double cosDistance = dotProduct / (urlModulus.getValue(url));
  double pageRank = pageRanks.getValue(url);

  // Blend cosine similarity and PageRank into one ranking weight.
  double urlWeight = Math.pow(cosDistance, pageRank) + pageRank * 10000;

  context.write(key, new DoubleWritable(urlWeight));
}
 
開發者ID:ifuding,項目名稱:search-1047,代碼行數:14,代碼來源:TermsSearch.java

示例12: reduce

import org.apache.hadoop.io.DoubleWritable; //導入方法依賴的package包/類
/**
 * Emits the arithmetic mean of all stock prices observed for the key.
 */
public void reduce(Text key, Iterable<DoubleWritable> values,
                   Context context)
    throws IOException, InterruptedException {

  double sum = 0;
  int count = 0;
  for (DoubleWritable price : values) {
    sum += price.get();
    count++;
  }
  // outValue is a reused instance field (standard Hadoop writable reuse).
  outValue.set(sum / (double) count);
  context.write(key, outValue);
}
 
開發者ID:Hanmourang,項目名稱:hiped2,代碼行數:14,代碼來源:SimpleMovingAverage.java

示例13: run

import org.apache.hadoop.io.DoubleWritable; //導入方法依賴的package包/類
/**
 * Drains the reducer's entire input, accumulating the sum of all keys and
 * the sum of all values, then writes a single (keySum, valueSum) pair.
 */
@Override
public void run(Context context) throws IOException,
    InterruptedException {
  double totalKeys = 0.0;
  double totalValues = 0.0;
  while (context.nextKey()) {
    totalKeys += context.getCurrentKey().get();
    for (DoubleWritable value : context.getValues()) {
      totalValues += value.get();
    }
  }
  // outKey/outValue are reused writable instance fields.
  outKey.set(totalKeys);
  outValue.set(totalValues);
  context.write(outKey, outValue);
}
 
開發者ID:saradelrio,項目名稱:Chi-FRBCS-BigDataCS,代碼行數:16,代碼來源:CVB0Driver.java

示例14: readNextKeyValuePairs

import org.apache.hadoop.io.DoubleWritable; //導入方法依賴的package包/類
/**
 * Reads the next (key, value) pair from the sorted stream, transparently
 * injecting the implicit zero entry (with its count numZeros) at the point
 * where keys become non-negative, since zeros are omitted from the input.
 *
 * @param readKey   output: the key read (or 0 for the injected entry)
 * @param readValue output: the count read (or numZeros for the injected entry)
 * @return true if a pair was produced, false when the stream is exhausted
 * @throws IOException on read errors other than end-of-stream
 */
public boolean readNextKeyValuePairs(DoubleWritable readKey, IntWritable readValue)
	throws IOException 
{
	boolean ret = true;
	
	try {
		// If the previous call injected the zero entry, the real pair it
		// displaced was stashed in keyAfterZero/valueAfterZero; replay it now.
		if(contain0s && justFound0)
		{
			readKey.set(keyAfterZero.get());
			readValue.set(valueAfterZero.get());
			contain0s=false;
		}else
		{
			readKey.readFields(currentStream);
			readValue.readFields(currentStream);
		}
	} catch(EOFException e) {
		// case in which zero is the maximum value in the matrix. 
		// The zero value from the last entry is not present in the input sorted matrix, but needs to be accounted for.
		if (contain0s && !justFound0 ) {
			justFound0=true;
			readKey.set(0);
			readValue.set((int)numZeros);
		}
		else {
			ret = false;
		}
	}
	
	// First non-negative key seen while zeros are still pending: stash the
	// pair just read and emit the zero entry in its place this call.
	if(contain0s && !justFound0 && readKey.get()>=0)
	{
		justFound0=true;
		keyAfterZero.set(readKey.get());
		valueAfterZero.set(readValue.get());
		readKey.set(0);
		readValue.set((int)numZeros);
	}
	
	return ret;
}
 
開發者ID:apache,項目名稱:systemml,代碼行數:41,代碼來源:ReadWithZeros.java

示例15: pickValueWeight

import org.apache.hadoop.io.DoubleWritable; //導入方法依賴的package包/類
/**
 * Picks the value at quantile p from a distributed sorted-matrix sample laid
 * out across reducer partition files under {@code dir}.
 *
 * @param dir      directory holding one sorted partition file per reducer
 * @param metadata per-reducer item counts plus zero-entry bookkeeping
 * @param p        quantile in [0, 1] to locate
 * @param average  if true (and total count is even) average the two middle values
 * @return {value, weight (count) or -1 if averaged, cumulative weight or -1 if averaged}
 * @throws IOException on filesystem or read errors
 */
public static double[] pickValueWeight(String dir, MetaDataNumItemsByEachReducer metadata, double p, boolean average) 
throws IOException
{
	// Build cumulative ranges: ranges[i] = total items in partitions 0..i.
	long[] counts=metadata.getNumItemsArray();
	long[] ranges=new long[counts.length];
	ranges[0]=counts[0];
	for(int i=1; i<counts.length; i++)
		ranges[i]=ranges[i-1]+counts[i];
	
	long total=ranges[ranges.length-1];
	
	// do averaging only if it is asked for; and sum_wt is even
	average = average && (total%2 == 0);

	// Locate the partition containing the pos-th item (1-based global rank).
	int currentPart=0;
	double cum_weight = 0;
	long pos=(long)Math.ceil(total*p);
	while(ranges[currentPart]<pos) {
		currentPart++;
		// NOTE(review): this adds the cumulative range of the NEW partition,
		// not counts[currentPart]; looks suspicious — confirm cum_weight is
		// meant to be a cumulative-of-cumulative here.
		cum_weight += ranges[currentPart];
	}
	// Offset of the target item within its partition (0-based).
	int offset;
	if(currentPart>0)
		offset=(int)(pos-ranges[currentPart-1]-1);
	else
		offset=(int)pos-1;
	
	// Find the partition's file: names are expected to end with the part index.
	Path path=new Path(dir);
	FileSystem fs=IOUtilFunctions.getFileSystem(path);
	FileStatus[] files=fs.listStatus(path);
	Path fileToRead=null;
	for(FileStatus file: files)
		if(file.getPath().toString().endsWith(Integer.toString(currentPart)))
		{
			fileToRead=file.getPath();
			break;
		}
	
	if(fileToRead==null)
		throw new RuntimeException("cannot read partition "+currentPart);
	
	int buffsz = 64 * 1024;
	DoubleWritable readKey=new DoubleWritable();
    IntWritable readValue=new IntWritable();
    FSDataInputStream currentStream = null;
	double ret = -1;
    try {
		currentStream = fs.open(fileToRead, buffsz);
	    
		// If this partition hosts the implicit zero entries, let the reader
		// inject them at the right position in the sorted stream.
		boolean contain0s=false;
		long numZeros=0;
		if(currentPart==metadata.getPartitionOfZero())
		{
			contain0s=true;
			numZeros=metadata.getNumberOfZero();
		}
	    ReadWithZeros reader=new ReadWithZeros(currentStream, contain0s, numZeros);

	    // Each pair is (value, count); advance until the run covering offset.
	    int numRead=0;
	    while(numRead<=offset)
		{
	    	reader.readNextKeyValuePairs(readKey, readValue);
			numRead+=readValue.get();
			cum_weight += readValue.get();
		}
	    
	    ret = readKey.get();
	    // Even-total averaging: if the target run ends exactly at offset+1,
	    // the next distinct value is the other "middle" element — average them.
	    if(average) {
	    	if(numRead<=offset+1) {
	    		reader.readNextKeyValuePairs(readKey, readValue);
				cum_weight += readValue.get();
				ret = (ret+readKey.get())/2;
	    	}
	    }
	}
	finally {
		IOUtilFunctions.closeSilently(currentStream);
	}
    return new double[] {ret, (average ? -1 : readValue.get()), (average ? -1 : cum_weight)};
}
 
開發者ID:apache,項目名稱:systemml,代碼行數:81,代碼來源:MapReduceTool.java


注:本文中的org.apache.hadoop.io.DoubleWritable.get方法示例由純淨天空整理自Github/MSDocs等開源代碼及文檔管理平台,相關代碼片段篩選自各路編程大神貢獻的開源項目,源碼版權歸原作者所有,傳播和使用請參考對應項目的License;未經允許,請勿轉載。