

Java IntWritable.get Method Code Examples

This article collects typical code examples for the Java method org.apache.hadoop.io.IntWritable.get. If you have been wondering how IntWritable.get is used in practice, or are looking for real examples of it, the curated samples below may help. You can also browse further usage examples of the containing class, org.apache.hadoop.io.IntWritable.


The sections below present 15 code examples of the IntWritable.get method, sorted by popularity by default.
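
Before the examples, here is a minimal standalone sketch (not taken from any of the projects listed below; the class name IntWritableGetDemo is made up for illustration) showing the basic pattern the examples rely on: IntWritable is a mutable Writable wrapper around a Java int, set(int) stores a value, and get() returns the wrapped primitive.

import org.apache.hadoop.io.IntWritable;

public class IntWritableGetDemo {
  public static void main(String[] args) {
    IntWritable count = new IntWritable(41); // construct with an initial value
    count.set(count.get() + 1);              // get() returns the wrapped int, set() replaces it
    System.out.println(count.get());         // prints 42
  }
}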

Example 1: reduce

import org.apache.hadoop.io.IntWritable; // import the package/class this method depends on
public void reduce(IntWritable key, Iterable<IntWritable> values, 
    Context context) throws IOException, InterruptedException {

  int errors = 0;

  MarkableIterator<IntWritable> mitr = 
    new MarkableIterator<IntWritable>(values.iterator());

  switch (key.get()) {
  case 0:
    errors += test0(key, mitr);
    break;
  case 1:
    errors += test1(key, mitr);
    break;
  case 2:
    errors += test2(key, mitr);
    break;
  case 3:
    errors += test3(key, mitr);
    break;
  default:
    break;
  }
  context.write(key, new IntWritable(errors));
}
 
Developer: naver | Project: hadoop | Lines: 27 | Source: TestValueIterReset.java

Example 2: reduce

import org.apache.hadoop.io.IntWritable; // import the package/class this method depends on
public void reduce(Text key, Iterable<IntWritable> value, Context context)
    throws IOException, InterruptedException {
  int sum = 0;
  for (IntWritable val : value) {
    if (val.get() == 0) {
      sum = 0;
      break;
    } else if (val.get() == 1) {
      sum += 1;
    }
  }
  if (sum != 0) {
    context.write(key, new IntWritable(sum));
  }
}
 
Developer: dhruvmalik007 | Project: Deep_learning_using_Java | Lines: 17 | Source: Recommendation_program.java

Example 3: reduce

import org.apache.hadoop.io.IntWritable; // import the package/class this method depends on
public void reduce(BytesWritable key, Iterator<IntWritable> values,
                   OutputCollector<BytesWritable, IntWritable> output,
                   Reporter reporter) throws IOException {
  int ones = 0;
  int twos = 0;
  while (values.hasNext()) {
    IntWritable count = values.next(); 
    if (count.equals(sortInput)) {
      ++ones;
    } else if (count.equals(sortOutput)) {
      ++twos;
    } else {
      throw new IOException("Invalid 'value' of " + count.get() + 
                            " for (key,value): " + key.toString());
    }
  }
  
  // Check to ensure there are equal no. of ones and twos
  if (ones != twos) {
    throw new IOException("Illegal ('one', 'two'): (" + ones + ", " + twos +
                          ") for (key, value): " + key.toString());
  }
}
 
Developer: naver | Project: hadoop | Lines: 24 | Source: SortValidator.java

Example 4: reduce

import org.apache.hadoop.io.IntWritable; // import the package/class this method depends on
public void reduce(IntWritable key, Iterator<Text> values,
    OutputCollector<Text, Text> out,
    Reporter reporter) throws IOException {
  keyVal = key.get();
  while(values.hasNext()) {
    Text value = values.next();
    String towrite = value.toString() + "\n";
    indexStream.write(towrite.getBytes(Charsets.UTF_8));
    written++;
    if (written > numIndexes -1) {
      // every 1000 indexes we report status
      reporter.setStatus("Creating index for archives");
      reporter.progress();
      endIndex = keyVal;
      String masterWrite = startIndex + " " + endIndex + " " + startPos 
                          +  " " + indexStream.getPos() + " \n" ;
      outStream.write(masterWrite.getBytes(Charsets.UTF_8));
      startPos = indexStream.getPos();
      startIndex = endIndex;
      written = 0;
    }
  }
}
 
Developer: naver | Project: hadoop | Lines: 24 | Source: HadoopArchives.java

Example 5: map

import org.apache.hadoop.io.IntWritable; // import the package/class this method depends on
public void map(IntWritable key, IntWritable val, Context context)
    throws IOException, InterruptedException {
  int k = key.get();
  final int vali = val.get();
  final String kvstr = "Unexpected tuple: " + stringify(key, val);
  if (0 == k % (srcs * srcs)) {
    assertTrue(kvstr, vali == k * 10 / srcs + srcs - 1);
  } else {
    final int i = k % srcs;
    assertTrue(kvstr, srcs * (vali - i) == 10 * (k - i));
  }
  context.write(key, one);
  //If the user modifies the key or any of the values in the tuple, it
  // should not affect the rest of the join.
  key.set(-1);
  val.set(0);
}
 
Developer: naver | Project: hadoop | Lines: 18 | Source: TestJoinDatamerge.java

Example 6: reduce

import org.apache.hadoop.io.IntWritable; // import the package/class this method depends on
@Override
public void reduce(Text key, Iterable<IntWritable> values, Context context)
        throws IOException, InterruptedException {

    int sum = 0;
    for (IntWritable value: values) {
        sum += value.get();
    }

    context.write(key, new IntWritable(sum));
}
 
Developer: yogykwan | Project: mapreduce-samples | Lines: 12 | Source: SentimentAnalysis.java

Example 7: reduce

import org.apache.hadoop.io.IntWritable; // import the package/class this method depends on
@Override
public void reduce(Text key, Iterable<IntWritable> pageCnts,
        Context context
) throws IOException, InterruptedException {

    for (IntWritable cnt : pageCnts) {
        sum += cnt.get();
    }
    count += 1;
    average = sum / count;
    finalAvg.set(average);
    context.write(new Text("Average Page Count = "), finalAvg);
}
 
Developer: PacktPublishing | Project: Java-Data-Science-Made-Easy | Lines: 14 | Source: AveragePageCount.java

Example 8: map

import org.apache.hadoop.io.IntWritable; // import the package/class this method depends on
public void map(IntWritable key, IntWritable val,
    OutputCollector<IntWritable, IntWritable> out, Reporter reporter)
    throws IOException {
  int k = key.get();
  final int vali = val.get();
  final String kvstr = "Unexpected tuple: " + stringify(key, val);
  if (0 == k % (srcs * srcs)) {
    assertTrue(kvstr, vali == k * 10 / srcs + srcs - 1);
  } else {
    final int i = k % srcs;
    assertTrue(kvstr, srcs * (vali - i) == 10 * (k - i));
  }
  out.collect(key, one);
}
 
Developer: naver | Project: hadoop | Lines: 15 | Source: TestDatamerge.java

Example 9: reduce

import org.apache.hadoop.io.IntWritable; // import the package/class this method depends on
/**
 * Sums all the individual values within the iterator and writes them to the
 * same key.
 * 
 * @param key
 *          This will be a length of a word that was read.
 * @param values
 *          This will be an iterator of all the values associated with that
 *          key.
 */
public void reduce(IntWritable key, Iterable<IntWritable> values,
    Context context) throws IOException, InterruptedException {

  int sum = 0;
  for (IntWritable value : values) {
    sum += value.get();
  }
  val.set(sum);
  context.write(key, val);
}
 
Developer: naver | Project: hadoop | Lines: 21 | Source: WordMedian.java

Example 10: reduce

import org.apache.hadoop.io.IntWritable; // import the package/class this method depends on
public void reduce(IntWritable key, Iterable<IntWritable> it,
    Context context) throws IOException, InterruptedException {
  int keyint = key.get();
  int count = 0;
  for (IntWritable iw : it) {
    count++;
  }
  context.write(new IntWritable(keyint), new IntWritable(count));
}
 
Developer: naver | Project: hadoop | Lines: 10 | Source: TestMapReduce.java

Example 11: readFields

import org.apache.hadoop.io.IntWritable; // import the package/class this method depends on
public void readFields(DataInput dataInput) throws IOException {
    Text text = new Text();
    text.readFields(dataInput);
    wifiProb = text.toString();

    IntWritable intReader = new IntWritable();

    intReader.readFields(dataInput);
    inNoOutWifi = intReader.get();
    intReader.readFields(dataInput);
    inNoOutStore = intReader.get();

    intReader.readFields(dataInput);
    outNoInWifi = intReader.get();
    intReader.readFields(dataInput);
    outNoInStore = intReader.get();


    intReader.readFields(dataInput);
    inAndOutWifi = intReader.get();
    intReader.readFields(dataInput);
    inAndOutStore = intReader.get();

    intReader.readFields(dataInput);
    stayInWifi = intReader.get();
    intReader.readFields(dataInput);
    stayInStore = intReader.get();


    DoubleWritable doubleWritable = new DoubleWritable();
    doubleWritable.readFields(dataInput);
    jumpRate = doubleWritable.get();
    doubleWritable.readFields(dataInput);
    deepVisit = doubleWritable.get();
    doubleWritable.readFields(dataInput);
    inStoreRate = doubleWritable.get();

}
 
Developer: cuiods | Project: WIFIProbe | Lines: 39 | Source: CustomerFlowElement.java

Example 12: reduce

import org.apache.hadoop.io.IntWritable; // import the package/class this method depends on
public void reduce(Text key, Iterable<IntWritable> values, Context context)
    throws IOException, InterruptedException {
  context.getCounter("MyCounterGroup", "REDUCE_INPUT_GROUPS").increment(1);
  int sum = 0;
  for (IntWritable val : values) {
    sum += val.get();
  }
  result.set(sum);
  context.write(key, result);
  context.getCounter("MyCounterGroup", "REDUCE_OUTPUT_RECORDS")
      .increment(1);
}
 
Developer: naver | Project: hadoop | Lines: 13 | Source: TestMiniMRClientCluster.java

Example 13: reduce

import org.apache.hadoop.io.IntWritable; // import the package/class this method depends on
public void reduce(IntWritable key, Iterator<IntWritable> it,
                   OutputCollector<IntWritable, IntWritable> out,
                   Reporter reporter) throws IOException {
  int keyint = key.get();
  int total = 0;
  while (it.hasNext()) {
    total += it.next().get();
  }
  out.collect(new IntWritable(keyint), new IntWritable(total));
}
 
Developer: naver | Project: hadoop | Lines: 11 | Source: TestMapRed.java

Example 14: reduce

import org.apache.hadoop.io.IntWritable; // import the package/class this method depends on
public void reduce(Key key, Iterable<IntWritable> values, 
                   Context context) throws IOException, InterruptedException {
  int sum = 0;
  for (IntWritable val : values) {
    sum += val.get();
  }
  result.set(sum);
  context.write(key, result);
}
 
Developer: naver | Project: hadoop | Lines: 10 | Source: IntSumReducer.java

Example 15: reduce

import org.apache.hadoop.io.IntWritable; // import the package/class this method depends on
public void reduce(Text key, Iterable<IntWritable> values, Context context)
		throws IOException, InterruptedException {
	int sum = 0;
	for (IntWritable v : values) {
		sum += v.get();
	}
	context.write(key, new IntWritable(sum));
}
 
Developer: yogykwan | Project: mapreduce-samples | Lines: 9 | Source: NGramLibraryBuilder.java


Note: The org.apache.hadoop.io.IntWritable.get examples in this article were compiled by 純淨天空 from GitHub, MSDocs, and other open-source code and documentation platforms. The snippets are selected from open-source projects contributed by their respective developers; copyright in the source code remains with the original authors. Please follow each project's license when distributing or using the code, and do not reproduce this article without permission.