

Java IntWritable.get Method Code Examples

This article collects typical usage examples of the Java method org.apache.hadoop.io.IntWritable.get. If you are wondering what IntWritable.get does, how to call it, or where to find real-world examples, the curated code samples below may help. You can also explore further usage examples of the enclosing class, org.apache.hadoop.io.IntWritable.


Below are 15 code examples of the IntWritable.get method, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Java code samples.
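For orientation before the examples: IntWritable is Hadoop's mutable, serializable wrapper around a primitive int, and get() simply returns the wrapped value while set() replaces it. The following is a minimal, self-contained sketch of that round trip, written for this article rather than taken from any of the projects below:

import org.apache.hadoop.io.IntWritable;

public class IntWritableGetDemo {
  public static void main(String[] args) {
    IntWritable count = new IntWritable(41); // wrap a primitive int
    int plain = count.get();                 // unwrap it again
    count.set(plain + 1);                    // mutate the wrapper in place
    System.out.println(count.get());         // prints 42
  }
}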

Example 1: reduce

import org.apache.hadoop.io.IntWritable; // import the package/class this method depends on
public void reduce(IntWritable key, Iterable<IntWritable> values, 
    Context context) throws IOException, InterruptedException {

  int errors = 0;

  MarkableIterator<IntWritable> mitr = 
    new MarkableIterator<IntWritable>(values.iterator());

  switch (key.get()) {
  case 0:
    errors += test0(key, mitr);
    break;
  case 1:
    errors += test1(key, mitr);
    break;
  case 2:
    errors += test2(key, mitr);
    break;
  case 3:
    errors += test3(key, mitr);
    break;
  default:
    break;
  }
  context.write(key, new IntWritable(errors));
}
 
Developer: naver, Project: hadoop, Lines: 27, Source: TestValueIterReset.java
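Note that test0 through test3 in Example 1 are helper methods of the test class and are not shown here. As a rough, hypothetical sketch of what such a helper might do with MarkableIterator (the method name, signature, and checks below are assumptions, not the actual Hadoop test code):

// Hypothetical helper: read one value, rewind with reset(), and verify the replayed
// value matches. MarkableIterator.mark()/reset() allow re-reading reduce values.
private int test0(IntWritable key, MarkableIterator<IntWritable> mitr)
    throws IOException {
  int errors = 0;
  mitr.mark();                                         // remember the current position
  int first = mitr.hasNext() ? mitr.next().get() : -1;
  mitr.reset();                                        // rewind to the marked position
  int replayed = mitr.hasNext() ? mitr.next().get() : -1;
  if (first != replayed) {
    errors++;                                          // replay should yield the same value
  }
  return errors;
}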

Example 2: reduce

import org.apache.hadoop.io.IntWritable; // import the package/class this method depends on
public void reduce(Text key, Iterable<IntWritable> value, Context context)
    throws IOException, InterruptedException {
  int sum = 0;
  for (IntWritable val : value) {
    if (val.get() == 0) {
      sum = 0;
      break;
    } else if (val.get() == 1) {
      sum += 1;
    }
  }
  if (sum != 0) {
    context.write(key, new IntWritable(sum));
  }
}
 
Developer: dhruvmalik007, Project: Deep_learning_using_Java, Lines: 17, Source: Recommendation_program.java

Example 3: reduce

import org.apache.hadoop.io.IntWritable; // import the package/class this method depends on
public void reduce(BytesWritable key, Iterator<IntWritable> values,
                   OutputCollector<BytesWritable, IntWritable> output,
                   Reporter reporter) throws IOException {
  int ones = 0;
  int twos = 0;
  while (values.hasNext()) {
    IntWritable count = values.next(); 
    if (count.equals(sortInput)) {
      ++ones;
    } else if (count.equals(sortOutput)) {
      ++twos;
    } else {
      throw new IOException("Invalid 'value' of " + count.get() + 
                            " for (key,value): " + key.toString());
    }
  }
  
  // Check to ensure there are equal no. of ones and twos
  if (ones != twos) {
    throw new IOException("Illegal ('one', 'two'): (" + ones + ", " + twos +
                          ") for (key, value): " + key.toString());
  }
}
 
Developer: naver, Project: hadoop, Lines: 24, Source: SortValidator.java

Example 4: reduce

import org.apache.hadoop.io.IntWritable; // import the package/class this method depends on
public void reduce(IntWritable key, Iterator<Text> values,
    OutputCollector<Text, Text> out,
    Reporter reporter) throws IOException {
  keyVal = key.get();
  while(values.hasNext()) {
    Text value = values.next();
    String towrite = value.toString() + "\n";
    indexStream.write(towrite.getBytes(Charsets.UTF_8));
    written++;
    if (written > numIndexes -1) {
      // every 1000 indexes we report status
      reporter.setStatus("Creating index for archives");
      reporter.progress();
      endIndex = keyVal;
      String masterWrite = startIndex + " " + endIndex + " " + startPos 
                          +  " " + indexStream.getPos() + " \n" ;
      outStream.write(masterWrite.getBytes(Charsets.UTF_8));
      startPos = indexStream.getPos();
      startIndex = endIndex;
      written = 0;
    }
  }
}
 
Developer: naver, Project: hadoop, Lines: 24, Source: HadoopArchives.java

Example 5: map

import org.apache.hadoop.io.IntWritable; // import the package/class this method depends on
public void map(IntWritable key, IntWritable val, Context context)
    throws IOException, InterruptedException {
  int k = key.get();
  final int vali = val.get();
  final String kvstr = "Unexpected tuple: " + stringify(key, val);
  if (0 == k % (srcs * srcs)) {
    assertTrue(kvstr, vali == k * 10 / srcs + srcs - 1);
  } else {
    final int i = k % srcs;
    assertTrue(kvstr, srcs * (vali - i) == 10 * (k - i));
  }
  context.write(key, one);
  //If the user modifies the key or any of the values in the tuple, it
  // should not affect the rest of the join.
  key.set(-1);
  val.set(0);
}
 
Developer: naver, Project: hadoop, Lines: 18, Source: TestJoinDatamerge.java

Example 6: reduce

import org.apache.hadoop.io.IntWritable; // import the package/class this method depends on
@Override
public void reduce(Text key, Iterable<IntWritable> values, Context context)
        throws IOException, InterruptedException {

    int sum = 0;
    for (IntWritable value: values) {
        sum += value.get();
    }

    context.write(key, new IntWritable(sum));
}
 
Developer: yogykwan, Project: mapreduce-samples, Lines: 12, Source: SentimentAnalysis.java

Example 7: reduce

import org.apache.hadoop.io.IntWritable; // import the package/class this method depends on
@Override
public void reduce(Text key, Iterable<IntWritable> pageCnts,
        Context context
) throws IOException, InterruptedException {

    // sum, count, average and finalAvg are fields of the reducer class
    // (declared outside this snippet), so the running average spans all keys.
    for (IntWritable cnt : pageCnts) {
        sum += cnt.get();
    }
    count += 1;
    average = sum / count;
    finalAvg.set(average);
    context.write(new Text("Average Page Count = "), finalAvg);
}
 
Developer: PacktPublishing, Project: Java-Data-Science-Made-Easy, Lines: 14, Source: AveragePageCount.java

Example 8: map

import org.apache.hadoop.io.IntWritable; // import the package/class this method depends on
public void map(IntWritable key, IntWritable val,
    OutputCollector<IntWritable, IntWritable> out, Reporter reporter)
    throws IOException {
  int k = key.get();
  final int vali = val.get();
  final String kvstr = "Unexpected tuple: " + stringify(key, val);
  if (0 == k % (srcs * srcs)) {
    assertTrue(kvstr, vali == k * 10 / srcs + srcs - 1);
  } else {
    final int i = k % srcs;
    assertTrue(kvstr, srcs * (vali - i) == 10 * (k - i));
  }
  out.collect(key, one);
}
 
Developer: naver, Project: hadoop, Lines: 15, Source: TestDatamerge.java

Example 9: reduce

import org.apache.hadoop.io.IntWritable; // import the package/class this method depends on
/**
 * Sums all the individual values within the iterator and writes them to the
 * same key.
 * 
 * @param key
 *          This will be a length of a word that was read.
 * @param values
 *          This will be an iterator of all the values associated with that
 *          key.
 */
public void reduce(IntWritable key, Iterable<IntWritable> values,
    Context context) throws IOException, InterruptedException {

  int sum = 0;
  for (IntWritable value : values) {
    sum += value.get();
  }
  val.set(sum);
  context.write(key, val);
}
 
Developer: naver, Project: hadoop, Lines: 21, Source: WordMedian.java

Example 10: reduce

import org.apache.hadoop.io.IntWritable; // import the package/class this method depends on
public void reduce(IntWritable key, Iterable<IntWritable> it,
    Context context) throws IOException, InterruptedException {
  int keyint = key.get();
  int count = 0;
  for (IntWritable iw : it) { // the value itself is unused; we only count occurrences
    count++;
  }
  context.write(new IntWritable(keyint), new IntWritable(count));
}
 
Developer: naver, Project: hadoop, Lines: 10, Source: TestMapReduce.java

Example 11: readFields

import org.apache.hadoop.io.IntWritable; // import the package/class this method depends on
public void readFields(DataInput dataInput) throws IOException {
    Text text = new Text();
    text.readFields(dataInput);
    wifiProb = text.toString();

    IntWritable intReader = new IntWritable();

    intReader.readFields(dataInput);
    inNoOutWifi = intReader.get();
    intReader.readFields(dataInput);
    inNoOutStore = intReader.get();

    intReader.readFields(dataInput);
    outNoInWifi = intReader.get();
    intReader.readFields(dataInput);
    outNoInStore = intReader.get();


    intReader.readFields(dataInput);
    inAndOutWifi = intReader.get();
    intReader.readFields(dataInput);
    inAndOutStore = intReader.get();

    intReader.readFields(dataInput);
    stayInWifi = intReader.get();
    intReader.readFields(dataInput);
    stayInStore = intReader.get();


    DoubleWritable doubleWritable = new DoubleWritable();
    doubleWritable.readFields(dataInput);
    jumpRate = doubleWritable.get();
    doubleWritable.readFields(dataInput);
    deepVisit = doubleWritable.get();
    doubleWritable.readFields(dataInput);
    inStoreRate = doubleWritable.get();

}
 
Developer: cuiods, Project: WIFIProbe, Lines: 39, Source: CustomerFlowElement.java
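For completeness: the readFields method above deserializes its fields in a fixed order, and the Writable contract requires a matching write(DataOutput) that emits them in exactly the same order. A hedged sketch of what that counterpart could look like, reusing the field names from the snippet above and assuming the same imports plus java.io.DataOutput (the actual WIFIProbe implementation may differ):

public void write(DataOutput dataOutput) throws IOException {
    // Must mirror the field order used in readFields above.
    new Text(wifiProb).write(dataOutput);

    IntWritable intWriter = new IntWritable();
    intWriter.set(inNoOutWifi);   intWriter.write(dataOutput);
    intWriter.set(inNoOutStore);  intWriter.write(dataOutput);
    intWriter.set(outNoInWifi);   intWriter.write(dataOutput);
    intWriter.set(outNoInStore);  intWriter.write(dataOutput);
    intWriter.set(inAndOutWifi);  intWriter.write(dataOutput);
    intWriter.set(inAndOutStore); intWriter.write(dataOutput);
    intWriter.set(stayInWifi);    intWriter.write(dataOutput);
    intWriter.set(stayInStore);   intWriter.write(dataOutput);

    DoubleWritable doubleWriter = new DoubleWritable();
    doubleWriter.set(jumpRate);    doubleWriter.write(dataOutput);
    doubleWriter.set(deepVisit);   doubleWriter.write(dataOutput);
    doubleWriter.set(inStoreRate); doubleWriter.write(dataOutput);
}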

Example 12: reduce

import org.apache.hadoop.io.IntWritable; // import the package/class this method depends on
public void reduce(Text key, Iterable<IntWritable> values, Context context)
    throws IOException, InterruptedException {
  context.getCounter("MyCounterGroup", "REDUCE_INPUT_GROUPS").increment(1);
  int sum = 0;
  for (IntWritable val : values) {
    sum += val.get();
  }
  result.set(sum);
  context.write(key, result);
  context.getCounter("MyCounterGroup", "REDUCE_OUTPUT_RECORDS")
      .increment(1);
}
 
Developer: naver, Project: hadoop, Lines: 13, Source: TestMiniMRClientCluster.java

Example 13: reduce

import org.apache.hadoop.io.IntWritable; // import the package/class this method depends on
public void reduce(IntWritable key, Iterator<IntWritable> it,
                   OutputCollector<IntWritable, IntWritable> out,
                   Reporter reporter) throws IOException {
  int keyint = key.get();
  int total = 0;
  while (it.hasNext()) {
    total += it.next().get();
  }
  out.collect(new IntWritable(keyint), new IntWritable(total));
}
 
Developer: naver, Project: hadoop, Lines: 11, Source: TestMapRed.java

Example 14: reduce

import org.apache.hadoop.io.IntWritable; // import the package/class this method depends on
public void reduce(Key key, Iterable<IntWritable> values, 
                   Context context) throws IOException, InterruptedException {
  int sum = 0;
  for (IntWritable val : values) {
    sum += val.get();
  }
  result.set(sum);
  context.write(key, result);
}
 
Developer: naver, Project: hadoop, Lines: 10, Source: IntSumReducer.java

Example 15: reduce

import org.apache.hadoop.io.IntWritable; // import the package/class this method depends on
public void reduce(Text key, Iterable<IntWritable> values, Context context)
		throws IOException, InterruptedException {
	int sum = 0;
	for (IntWritable v : values) {
		sum += v.get();
	}
	context.write(key, new IntWritable(sum));
}
 
Developer: yogykwan, Project: mapreduce-samples, Lines: 9, Source: NGramLibraryBuilder.java


Note: The org.apache.hadoop.io.IntWritable.get method examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets were selected from open-source projects contributed by their respective authors, and copyright of the source code remains with the original authors. Please consult the corresponding project's license before distributing or using the code; do not reproduce this article without permission.