This article collects typical usage examples of the Java class org.apache.hadoop.io.BooleanWritable. If you are unsure what BooleanWritable is for or how to use it, the curated class code examples below may help.
BooleanWritable belongs to the org.apache.hadoop.io package. Twelve code examples of the class are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your votes help the system recommend better Java code examples.
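Before the examples, here is a minimal, self-contained sketch of the class's basic API (not taken from the examples below; the demo class name is ours): BooleanWritable is a WritableComparable wrapper around a primitive boolean, exposing get() and set() accessors.

import org.apache.hadoop.io.BooleanWritable;

public class BooleanWritableDemo {
    public static void main(String[] args) {
        // Wrap a primitive boolean so Hadoop's serialization machinery can handle it.
        BooleanWritable flag = new BooleanWritable(true);
        System.out.println(flag.get());  // true
        flag.set(false);                 // Writables are mutable and meant to be reused
        // BooleanWritable implements WritableComparable; false sorts before true,
        // so this prints a negative number.
        System.out.println(flag.compareTo(new BooleanWritable(true)));
    }
}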
Example 1: readFields
import org.apache.hadoop.io.BooleanWritable; // import the required package/class
/**
 * Read (i.e., deserialize) an employee
 */
@Override
public void readFields(DataInput in) throws IOException {
    name = new Text();
    name.readFields(in);
    address = new Text();
    address.readFields(in);
    company = new Text();
    company.readFields(in);
    salary = new DoubleWritable();
    salary.readFields(in);
    department = new Text();
    department.readFields(in);
    isManager = new BooleanWritable();
    isManager.readFields(in);
}
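The Writable contract is symmetric: a type that implements readFields(DataInput) must also implement write(DataOutput), emitting the fields in exactly the order readFields consumes them. As a complement to Example 1, a minimal sketch of the matching write method (the field names come from the example above; the enclosing employee class is not shown in the source):

@Override
public void write(DataOutput out) throws IOException {
    // Serialize fields in the same order readFields() reads them back.
    name.write(out);
    address.write(out);
    company.write(out);
    salary.write(out);
    department.write(out);
    isManager.write(out);
}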
Example 2: makeRandomWritables
import org.apache.hadoop.io.BooleanWritable; // import the required package/class
private Writable[] makeRandomWritables() {
    Random r = new Random();
    Writable[] writs = {
        new BooleanWritable(r.nextBoolean()),
        new FloatWritable(r.nextFloat()),
        new FloatWritable(r.nextFloat()),
        new IntWritable(r.nextInt()),
        new LongWritable(r.nextLong()),
        new BytesWritable("dingo".getBytes()),
        new LongWritable(r.nextLong()),
        new IntWritable(r.nextInt()),
        new BytesWritable("yak".getBytes()),
        new IntWritable(r.nextInt())
    };
    return writs;
}
Example 3: testIterable
import org.apache.hadoop.io.BooleanWritable; // import the required package/class
public void testIterable() throws Exception {
    Random r = new Random();
    Writable[] writs = {
        new BooleanWritable(r.nextBoolean()),
        new FloatWritable(r.nextFloat()),
        new FloatWritable(r.nextFloat()),
        new IntWritable(r.nextInt()),
        new LongWritable(r.nextLong()),
        new BytesWritable("dingo".getBytes()),
        new LongWritable(r.nextLong()),
        new IntWritable(r.nextInt()),
        new BytesWritable("yak".getBytes()),
        new IntWritable(r.nextInt())
    };
    TupleWritable t = new TupleWritable(writs);
    for (int i = 0; i < 6; ++i) {
        t.setWritten(i);
    }
    verifIter(writs, t, 0);
}
Example 4: testNestedIterable
import org.apache.hadoop.io.BooleanWritable; // import the required package/class
public void testNestedIterable() throws Exception {
    Random r = new Random();
    Writable[] writs = {
        new BooleanWritable(r.nextBoolean()),
        new FloatWritable(r.nextFloat()),
        new FloatWritable(r.nextFloat()),
        new IntWritable(r.nextInt()),
        new LongWritable(r.nextLong()),
        new BytesWritable("dingo".getBytes()),
        new LongWritable(r.nextLong()),
        new IntWritable(r.nextInt()),
        new BytesWritable("yak".getBytes()),
        new IntWritable(r.nextInt())
    };
    TupleWritable sTuple = makeTuple(writs);
    assertTrue("Bad count", writs.length == verifIter(writs, sTuple, 0));
}
Example 5: testWritable
import org.apache.hadoop.io.BooleanWritable; // import the required package/class
public void testWritable() throws Exception {
    Random r = new Random();
    Writable[] writs = {
        new BooleanWritable(r.nextBoolean()),
        new FloatWritable(r.nextFloat()),
        new FloatWritable(r.nextFloat()),
        new IntWritable(r.nextInt()),
        new LongWritable(r.nextLong()),
        new BytesWritable("dingo".getBytes()),
        new LongWritable(r.nextLong()),
        new IntWritable(r.nextInt()),
        new BytesWritable("yak".getBytes()),
        new IntWritable(r.nextInt())
    };
    TupleWritable sTuple = makeTuple(writs);
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    sTuple.write(new DataOutputStream(out));
    ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray());
    TupleWritable dTuple = new TupleWritable();
    dTuple.readFields(new DataInputStream(in));
    assertTrue("Failed to write/read tuple", sTuple.equals(dTuple));
}
Example 6: init
import org.apache.hadoop.io.BooleanWritable; // import the required package/class
@Override
public void init() throws IOException {
    registerKey(NullWritable.class.getName(), NullWritableSerializer.class);
    registerKey(Text.class.getName(), TextSerializer.class);
    registerKey(LongWritable.class.getName(), LongWritableSerializer.class);
    registerKey(IntWritable.class.getName(), IntWritableSerializer.class);
    registerKey(Writable.class.getName(), DefaultSerializer.class);
    registerKey(BytesWritable.class.getName(), BytesWritableSerializer.class);
    registerKey(BooleanWritable.class.getName(), BoolWritableSerializer.class);
    registerKey(ByteWritable.class.getName(), ByteWritableSerializer.class);
    registerKey(FloatWritable.class.getName(), FloatWritableSerializer.class);
    registerKey(DoubleWritable.class.getName(), DoubleWritableSerializer.class);
    registerKey(VIntWritable.class.getName(), VIntWritableSerializer.class);
    registerKey(VLongWritable.class.getName(), VLongWritableSerializer.class);
    LOG.info("Hadoop platform inited");
}
Example 7: terminatePartial
import org.apache.hadoop.io.BooleanWritable; // import the required package/class
@Override
public Object terminatePartial(@SuppressWarnings("deprecation") AggregationBuffer agg)
        throws HiveException {
    QueueAggregationBuffer myagg = (QueueAggregationBuffer) agg;
    Pair<List<Object>, List<Object>> tuples = myagg.drainQueue();
    if (tuples == null) {
        return null;
    }
    List<Object> keyList = tuples.getKey();
    List<Object> valueList = tuples.getValue();
    Object[] partialResult = new Object[4];
    partialResult[0] = valueList;
    partialResult[1] = keyList;
    partialResult[2] = new IntWritable(myagg.size);
    partialResult[3] = new BooleanWritable(myagg.reverseOrder);
    return partialResult;
}
Example 8: testSimpleConsumerWithEmptySequenceFile
import org.apache.hadoop.io.BooleanWritable; // import the required package/class
@Test
public void testSimpleConsumerWithEmptySequenceFile() throws Exception {
    if (!canTest()) {
        return;
    }
    final Path file = new Path(new File("target/test/test-camel-sequence-file").getAbsolutePath());
    Configuration conf = new Configuration();
    SequenceFile.Writer writer = createWriter(conf, file, NullWritable.class, BooleanWritable.class);
    writer.sync();
    writer.close();
    MockEndpoint resultEndpoint = context.getEndpoint("mock:result", MockEndpoint.class);
    resultEndpoint.expectedMessageCount(0);
    context.addRoutes(new RouteBuilder() {
        public void configure() {
            from("hdfs2:localhost/" + file.toUri() + "?fileSystemType=LOCAL&fileType=SEQUENCE_FILE&chunkSize=4096&initialDelay=0").to("mock:result");
        }
    });
    context.start();
    resultEndpoint.assertIsSatisfied();
}
Example 9: testWriteBoolean
import org.apache.hadoop.io.BooleanWritable; // import the required package/class
@Test
public void testWriteBoolean() throws Exception {
    if (!canTest()) {
        return;
    }
    Boolean aBoolean = true;
    template.sendBody("direct:write_boolean", aBoolean);
    Configuration conf = new Configuration();
    Path file1 = new Path("file:///" + TEMP_DIR.toUri() + "/test-camel-boolean");
    SequenceFile.Reader reader = new SequenceFile.Reader(conf, SequenceFile.Reader.file(file1));
    Writable key = (Writable) ReflectionUtils.newInstance(reader.getKeyClass(), conf);
    Writable value = (Writable) ReflectionUtils.newInstance(reader.getValueClass(), conf);
    reader.next(key, value);
    Boolean rBoolean = ((BooleanWritable) value).get();
    assertEquals(rBoolean, aBoolean);
    IOHelper.close(reader);
}
Example 10: testSimpleConsumerWithEmptySequenceFile
import org.apache.hadoop.io.BooleanWritable; // import the required package/class
@Test
public void testSimpleConsumerWithEmptySequenceFile() throws Exception {
    if (!canTest()) {
        return;
    }
    final Path file = new Path(new File("target/test/test-camel-sequence-file").getAbsolutePath());
    Configuration conf = new Configuration();
    FileSystem fs1 = FileSystem.get(file.toUri(), conf);
    SequenceFile.Writer writer = createWriter(fs1, conf, file, NullWritable.class, BooleanWritable.class);
    writer.sync();
    writer.close();
    MockEndpoint resultEndpoint = context.getEndpoint("mock:result", MockEndpoint.class);
    resultEndpoint.expectedMessageCount(0);
    context.addRoutes(new RouteBuilder() {
        public void configure() {
            from("hdfs:localhost/" + file.toUri() + "?fileSystemType=LOCAL&fileType=SEQUENCE_FILE&chunkSize=4096&initialDelay=0").to("mock:result");
        }
    });
    context.start();
    resultEndpoint.assertIsSatisfied();
}
Example 11: testWriteBoolean
import org.apache.hadoop.io.BooleanWritable; // import the required package/class
@Test
public void testWriteBoolean() throws Exception {
    if (!canTest()) {
        return;
    }
    Boolean aBoolean = true;
    template.sendBody("direct:write_boolean", aBoolean);
    Configuration conf = new Configuration();
    Path file1 = new Path("file:///" + TEMP_DIR.toUri() + "/test-camel-boolean");
    FileSystem fs1 = FileSystem.get(file1.toUri(), conf);
    SequenceFile.Reader reader = new SequenceFile.Reader(fs1, file1, conf);
    Writable key = (Writable) ReflectionUtils.newInstance(reader.getKeyClass(), conf);
    Writable value = (Writable) ReflectionUtils.newInstance(reader.getValueClass(), conf);
    reader.next(key, value);
    Boolean rBoolean = ((BooleanWritable) value).get();
    assertEquals(rBoolean, aBoolean);
    IOHelper.close(reader);
}
Example 12: reduce
import org.apache.hadoop.io.BooleanWritable; // import the required package/class
@Override
protected void reduce(BytesWritable wordtimeb, Iterable<BooleanWritable> wordBools, Reducer<BytesWritable, BooleanWritable, LongWritable, BytesWritable>.Context context) throws IOException, InterruptedException {
    ReadWritableStringLong wordtime = IOUtils.deserialize(wordtimeb.getBytes(), ReadWritableStringLong.class);
    long time = wordtime.secondObject();
    boolean seenInPresent = false;
    boolean seenInPast = false;
    for (BooleanWritable isfrompast : wordBools) {
        boolean frompast = isfrompast.get();
        seenInPresent |= !frompast;
        seenInPast |= frompast;
        if (seenInPast && seenInPresent) {
            // then we've seen all the ones from this time if we were to see them, so we can break early. MASSIVE SAVINGS HERE
            break;
        }
    }
    ReadWritableBooleanBoolean intersectionUnion = new ReadWritableBooleanBoolean(seenInPast && seenInPresent, seenInPast || seenInPresent);
    context.write(new LongWritable(time), new BytesWritable(IOUtils.serialize(intersectionUnion)));
}