This article collects typical usage examples of the Java method org.apache.hadoop.io.IntWritable.get. If you are unsure what IntWritable.get does, how to call it, or what real-world uses look like, the curated code samples below should help. You can also explore the enclosing class, org.apache.hadoop.io.IntWritable, for more context.
The sections below show 15 code examples of the IntWritable.get method, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Java code examples.
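Before diving into the examples: IntWritable is Hadoop's mutable, serializable wrapper around a primitive int, and get() simply returns the wrapped value. A minimal standalone sketch (the class name here is illustrative, not from the examples below):

import org.apache.hadoop.io.IntWritable;

public class IntWritableGetDemo {
  public static void main(String[] args) {
    IntWritable iw = new IntWritable(42); // wrap a primitive int
    int value = iw.get();                 // get() unwraps it again
    System.out.println(value);            // prints 42
    iw.set(7);                            // IntWritable is mutable
    System.out.println(iw.get());         // prints 7
  }
}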
Example 1: reduce
import org.apache.hadoop.io.IntWritable; // import the package/class this method depends on
public void reduce(IntWritable key, Iterable<IntWritable> values,
    Context context) throws IOException, InterruptedException {
  int errors = 0;
  MarkableIterator<IntWritable> mitr =
      new MarkableIterator<IntWritable>(values.iterator());
  switch (key.get()) {
    case 0:
      errors += test0(key, mitr);
      break;
    case 1:
      errors += test1(key, mitr);
      break;
    case 2:
      errors += test2(key, mitr);
      break;
    case 3:
      errors += test3(key, mitr);
      break;
    default:
      break;
  }
  context.write(key, new IntWritable(errors));
}
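The test0 through test3 helpers are not shown in this snippet. A MarkableIterator (org.apache.hadoop.mapreduce.MarkableIterator) lets a reducer mark a position in the value stream and later reset to replay it. A purely hypothetical sketch of what one such helper might look like:

private int test0(IntWritable key, MarkableIterator<IntWritable> mitr)
    throws IOException {
  int errors = 0;
  mitr.mark();                          // remember the current position
  List<Integer> firstPass = new ArrayList<Integer>();
  while (mitr.hasNext()) {
    firstPass.add(mitr.next().get());   // IntWritable.get extracts the int
  }
  mitr.reset();                         // rewind to the mark
  for (int expected : firstPass) {
    if (mitr.next().get() != expected) {
      errors++;                         // replayed values must match
    }
  }
  return errors;
}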
Example 2: reduce
import org.apache.hadoop.io.IntWritable; // import the package/class this method depends on
public void reduce(Text key, Iterable<IntWritable> value, Context context)
    throws IOException, InterruptedException {
  int sum = 0;
  for (IntWritable val : value) {
    if (val.get() == 0) {
      // a single 0 vetoes the whole key
      sum = 0;
      break;
    } else if (val.get() == 1) {
      sum += 1;
    }
  }
  if (sum != 0) {
    context.write(key, new IntWritable(sum));
  }
}
Example 3: reduce
import org.apache.hadoop.io.IntWritable; // import the package/class this method depends on
public void reduce(BytesWritable key, Iterator<IntWritable> values,
    OutputCollector<BytesWritable, IntWritable> output,
    Reporter reporter) throws IOException {
  int ones = 0;
  int twos = 0;
  while (values.hasNext()) {
    IntWritable count = values.next();
    if (count.equals(sortInput)) {
      ++ones;
    } else if (count.equals(sortOutput)) {
      ++twos;
    } else {
      throw new IOException("Invalid 'value' of " + count.get() +
          " for (key,value): " + key.toString());
    }
  }
  // Check to ensure there are equal no. of ones and twos
  if (ones != twos) {
    throw new IOException("Illegal ('one', 'two'): (" + ones + ", " + twos +
        ") for (key, value): " + key.toString());
  }
}
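The sortInput and sortOutput fields are not shown in the snippet; given the ones/twos counters, they are presumably sentinel IntWritable constants along the lines of (an assumption, not the original source):

private static final IntWritable sortInput = new IntWritable(1);  // tags records from the sort input
private static final IntWritable sortOutput = new IntWritable(2); // tags records from the sort output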
Example 4: reduce
import org.apache.hadoop.io.IntWritable; // import the package/class this method depends on
public void reduce(IntWritable key, Iterator<Text> values,
    OutputCollector<Text, Text> out,
    Reporter reporter) throws IOException {
  keyVal = key.get();
  while (values.hasNext()) {
    Text value = values.next();
    String towrite = value.toString() + "\n";
    indexStream.write(towrite.getBytes(Charsets.UTF_8));
    written++;
    if (written > numIndexes - 1) {
      // every 1000 indexes we report status
      reporter.setStatus("Creating index for archives");
      reporter.progress();
      endIndex = keyVal;
      String masterWrite = startIndex + " " + endIndex + " " + startPos
          + " " + indexStream.getPos() + " \n";
      outStream.write(masterWrite.getBytes(Charsets.UTF_8));
      startPos = indexStream.getPos();
      startIndex = endIndex;
      written = 0;
    }
  }
}
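This reducer depends on instance state that the snippet omits. Based on the names used, the surrounding class presumably declares something like the following (an assumption, not the original source):

private long keyVal, startIndex, endIndex, startPos;
private long written, numIndexes;                  // e.g. numIndexes = 1000, per the comment
private FSDataOutputStream indexStream, outStream; // provide getPos() and write(byte[])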
Example 5: map
import org.apache.hadoop.io.IntWritable; // import the package/class this method depends on
public void map(IntWritable key, IntWritable val, Context context)
    throws IOException, InterruptedException {
  int k = key.get();
  final int vali = val.get();
  final String kvstr = "Unexpected tuple: " + stringify(key, val);
  if (0 == k % (srcs * srcs)) {
    assertTrue(kvstr, vali == k * 10 / srcs + srcs - 1);
  } else {
    final int i = k % srcs;
    assertTrue(kvstr, srcs * (vali - i) == 10 * (k - i));
  }
  context.write(key, one);
  // If the user modifies the key or any of the values in the tuple, it
  // should not affect the rest of the join.
  key.set(-1);
  val.set(0);
}
Example 6: reduce
import org.apache.hadoop.io.IntWritable; // import the package/class this method depends on
@Override
public void reduce(Text key, Iterable<IntWritable> values, Context context)
    throws IOException, InterruptedException {
  int sum = 0;
  for (IntWritable value : values) {
    sum += value.get();
  }
  context.write(key, new IntWritable(sum));
}
Example 7: reduce
import org.apache.hadoop.io.IntWritable; // import the package/class this method depends on
@Override
public void reduce(Text key, Iterable<IntWritable> pageCnts,
    Context context) throws IOException, InterruptedException {
  // sum, count, average, and finalAvg are instance fields of the reducer,
  // so they accumulate across calls to reduce().
  for (IntWritable cnt : pageCnts) {
    sum += cnt.get();
  }
  count += 1;
  average = sum / count;
  finalAvg.set(average);
  context.write(new Text("Average Page Count = "), finalAvg);
}
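Because those fields persist across calls, this reducer writes a running average after every key. A common variant, sketched here under the same assumptions about the fields, defers the write to cleanup() so only the final average is emitted once:

@Override
protected void cleanup(Context context)
    throws IOException, InterruptedException {
  if (count > 0) {
    finalAvg.set(sum / count); // beware integer division if these are ints
    context.write(new Text("Average Page Count = "), finalAvg);
  }
}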
Example 8: map
import org.apache.hadoop.io.IntWritable; // import the package/class this method depends on
public void map(IntWritable key, IntWritable val,
    OutputCollector<IntWritable, IntWritable> out, Reporter reporter)
    throws IOException {
  int k = key.get();
  final int vali = val.get();
  final String kvstr = "Unexpected tuple: " + stringify(key, val);
  if (0 == k % (srcs * srcs)) {
    assertTrue(kvstr, vali == k * 10 / srcs + srcs - 1);
  } else {
    final int i = k % srcs;
    assertTrue(kvstr, srcs * (vali - i) == 10 * (k - i));
  }
  out.collect(key, one);
}
Example 9: reduce
import org.apache.hadoop.io.IntWritable; // import the package/class this method depends on
/**
 * Sums all the individual values within the iterator and writes them to the
 * same key.
 *
 * @param key
 *          This will be a length of a word that was read.
 * @param values
 *          This will be an iterator of all the values associated with that
 *          key.
 */
public void reduce(IntWritable key, Iterable<IntWritable> values,
    Context context) throws IOException, InterruptedException {
  int sum = 0;
  for (IntWritable value : values) {
    sum += value.get();
  }
  val.set(sum);
  context.write(key, val);
}
Example 10: reduce
import org.apache.hadoop.io.IntWritable; // import the package/class this method depends on
public void reduce(IntWritable key, Iterable<IntWritable> it,
    Context context) throws IOException, InterruptedException {
  int keyint = key.get();
  int count = 0;
  for (IntWritable iw : it) {
    count++; // only the number of values matters, not their contents
  }
  context.write(new IntWritable(keyint), new IntWritable(count));
}
Example 11: readFields
import org.apache.hadoop.io.IntWritable; // import the package/class this method depends on
public void readFields(DataInput dataInput) throws IOException {
  // Fields must be read back in exactly the order they were written.
  Text text = new Text();
  text.readFields(dataInput);
  wifiProb = text.toString();
  IntWritable intReader = new IntWritable();
  intReader.readFields(dataInput);
  inNoOutWifi = intReader.get();
  intReader.readFields(dataInput);
  inNoOutStore = intReader.get();
  intReader.readFields(dataInput);
  outNoInWifi = intReader.get();
  intReader.readFields(dataInput);
  outNoInStore = intReader.get();
  intReader.readFields(dataInput);
  inAndOutWifi = intReader.get();
  intReader.readFields(dataInput);
  inAndOutStore = intReader.get();
  intReader.readFields(dataInput);
  stayInWifi = intReader.get();
  intReader.readFields(dataInput);
  stayInStore = intReader.get();
  DoubleWritable doubleWritable = new DoubleWritable();
  doubleWritable.readFields(dataInput);
  jumpRate = doubleWritable.get();
  doubleWritable.readFields(dataInput);
  deepVisit = doubleWritable.get();
  doubleWritable.readFields(dataInput);
  inStoreRate = doubleWritable.get();
}
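A Writable also needs a matching write(DataOutput) method, and its field order must mirror readFields exactly. A sketch of the counterpart, assuming the same fields as above (the original class is not shown):

public void write(DataOutput dataOutput) throws IOException {
  // Write fields in the same order readFields consumes them.
  new Text(wifiProb).write(dataOutput);
  new IntWritable(inNoOutWifi).write(dataOutput);
  new IntWritable(inNoOutStore).write(dataOutput);
  new IntWritable(outNoInWifi).write(dataOutput);
  new IntWritable(outNoInStore).write(dataOutput);
  new IntWritable(inAndOutWifi).write(dataOutput);
  new IntWritable(inAndOutStore).write(dataOutput);
  new IntWritable(stayInWifi).write(dataOutput);
  new IntWritable(stayInStore).write(dataOutput);
  new DoubleWritable(jumpRate).write(dataOutput);
  new DoubleWritable(deepVisit).write(dataOutput);
  new DoubleWritable(inStoreRate).write(dataOutput);
}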
Example 12: reduce
import org.apache.hadoop.io.IntWritable; // import the package/class this method depends on
public void reduce(Text key, Iterable<IntWritable> values, Context context)
    throws IOException, InterruptedException {
  context.getCounter("MyCounterGroup", "REDUCE_INPUT_GROUPS").increment(1);
  int sum = 0;
  for (IntWritable val : values) {
    sum += val.get();
  }
  result.set(sum);
  context.write(key, result);
  context.getCounter("MyCounterGroup", "REDUCE_OUTPUT_RECORDS")
      .increment(1);
}
Example 13: reduce
import org.apache.hadoop.io.IntWritable; // import the package/class this method depends on
public void reduce(IntWritable key, Iterator<IntWritable> it,
    OutputCollector<IntWritable, IntWritable> out,
    Reporter reporter) throws IOException {
  int keyint = key.get();
  int total = 0;
  while (it.hasNext()) {
    total += it.next().get();
  }
  out.collect(new IntWritable(keyint), new IntWritable(total));
}
Example 14: reduce
import org.apache.hadoop.io.IntWritable; // import the package/class this method depends on
public void reduce(Key key, Iterable<IntWritable> values,
    Context context) throws IOException, InterruptedException {
  int sum = 0;
  for (IntWritable val : values) {
    sum += val.get();
  }
  result.set(sum);
  context.write(key, result);
}
Example 15: reduce
import org.apache.hadoop.io.IntWritable; // import the package/class this method depends on
public void reduce(Text key, Iterable<IntWritable> values, Context context)
    throws IOException, InterruptedException {
  int sum = 0;
  for (IntWritable v : values) {
    sum += v.get();
  }
  context.write(key, new IntWritable(sum));
}