

Java BooleanWritable Class Code Examples

This article collects typical usage examples of org.apache.hadoop.io.BooleanWritable in Java. If you are wondering how the BooleanWritable class works, how to use it, or what it looks like in real code, the selected examples below may help.


The BooleanWritable class belongs to the org.apache.hadoop.io package. A total of 12 code examples of the class are shown below, sorted by popularity by default.
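Before the collected examples, here is a minimal, self-contained sketch (not drawn from any of the projects below) of the basic BooleanWritable API: construction, get/set, and a serialization round trip through a DataOutput/DataInput pair, which is the same write/readFields contract the examples rely on.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

import org.apache.hadoop.io.BooleanWritable;

public class BooleanWritableDemo {
    public static void main(String[] args) throws IOException {
        // Wrap a primitive boolean in a Writable.
        BooleanWritable flag = new BooleanWritable(true);
        System.out.println(flag.get()); // prints: true
        flag.set(false);

        // Serialize the value the way Hadoop does between tasks.
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        flag.write(new DataOutputStream(bytes));

        // Deserialize into a fresh instance and confirm the round trip.
        BooleanWritable copy = new BooleanWritable();
        copy.readFields(new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())));
        System.out.println(copy.get()); // prints: false
    }
}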

Example 1: readFields

import org.apache.hadoop.io.BooleanWritable; // import the required package/class
/**
 * Read (say, deserialize) an employee
 */
@Override
public void readFields(DataInput in) throws IOException {
	name = new Text();
	name.readFields(in);
	address = new Text();
	address.readFields(in);
	company = new Text();
	company.readFields(in);
	salary = new DoubleWritable();
	salary.readFields(in);
	department = new Text();
	department.readFields(in);
	isManager = new BooleanWritable();
	isManager.readFields(in);
}
 
Developer ID: amritbhat786, Project: DocIT, Lines: 19, Source: Employee.java

Example 2: makeRandomWritables

import org.apache.hadoop.io.BooleanWritable; // import the required package/class
private Writable[] makeRandomWritables() {
  Random r = new Random();
  Writable[] writs = {
    new BooleanWritable(r.nextBoolean()),
    new FloatWritable(r.nextFloat()),
    new FloatWritable(r.nextFloat()),
    new IntWritable(r.nextInt()),
    new LongWritable(r.nextLong()),
    new BytesWritable("dingo".getBytes()),
    new LongWritable(r.nextLong()),
    new IntWritable(r.nextInt()),
    new BytesWritable("yak".getBytes()),
    new IntWritable(r.nextInt())
  };
  return writs;
}
 
Developer ID: naver, Project: hadoop, Lines: 17, Source: TestTupleWritable.java

Example 3: testIterable

import org.apache.hadoop.io.BooleanWritable; // import the required package/class
public void testIterable() throws Exception {
  Random r = new Random();
  Writable[] writs = {
    new BooleanWritable(r.nextBoolean()),
    new FloatWritable(r.nextFloat()),
    new FloatWritable(r.nextFloat()),
    new IntWritable(r.nextInt()),
    new LongWritable(r.nextLong()),
    new BytesWritable("dingo".getBytes()),
    new LongWritable(r.nextLong()),
    new IntWritable(r.nextInt()),
    new BytesWritable("yak".getBytes()),
    new IntWritable(r.nextInt())
  };
  TupleWritable t = new TupleWritable(writs);
  for (int i = 0; i < 6; ++i) {
    t.setWritten(i);
  }
  verifIter(writs, t, 0);
}
 
Developer ID: naver, Project: hadoop, Lines: 21, Source: TestTupleWritable.java

Example 4: testNestedIterable

import org.apache.hadoop.io.BooleanWritable; // import the required package/class
public void testNestedIterable() throws Exception {
  Random r = new Random();
  Writable[] writs = {
    new BooleanWritable(r.nextBoolean()),
    new FloatWritable(r.nextFloat()),
    new FloatWritable(r.nextFloat()),
    new IntWritable(r.nextInt()),
    new LongWritable(r.nextLong()),
    new BytesWritable("dingo".getBytes()),
    new LongWritable(r.nextLong()),
    new IntWritable(r.nextInt()),
    new BytesWritable("yak".getBytes()),
    new IntWritable(r.nextInt())
  };
  TupleWritable sTuple = makeTuple(writs);
  assertTrue("Bad count", writs.length == verifIter(writs, sTuple, 0));
}
 
Developer ID: naver, Project: hadoop, Lines: 18, Source: TestTupleWritable.java

Example 5: testWritable

import org.apache.hadoop.io.BooleanWritable; // import the required package/class
public void testWritable() throws Exception {
  Random r = new Random();
  Writable[] writs = {
    new BooleanWritable(r.nextBoolean()),
    new FloatWritable(r.nextFloat()),
    new FloatWritable(r.nextFloat()),
    new IntWritable(r.nextInt()),
    new LongWritable(r.nextLong()),
    new BytesWritable("dingo".getBytes()),
    new LongWritable(r.nextLong()),
    new IntWritable(r.nextInt()),
    new BytesWritable("yak".getBytes()),
    new IntWritable(r.nextInt())
  };
  TupleWritable sTuple = makeTuple(writs);
  ByteArrayOutputStream out = new ByteArrayOutputStream();
  sTuple.write(new DataOutputStream(out));
  ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray());
  TupleWritable dTuple = new TupleWritable();
  dTuple.readFields(new DataInputStream(in));
  assertTrue("Failed to write/read tuple", sTuple.equals(dTuple));
}
 
Developer ID: naver, Project: hadoop, Lines: 23, Source: TestTupleWritable.java

Example 6: init

import org.apache.hadoop.io.BooleanWritable; // import the required package/class
@Override
public void init() throws IOException {
  registerKey(NullWritable.class.getName(), NullWritableSerializer.class);
  registerKey(Text.class.getName(), TextSerializer.class);
  registerKey(LongWritable.class.getName(), LongWritableSerializer.class);
  registerKey(IntWritable.class.getName(), IntWritableSerializer.class);
  registerKey(Writable.class.getName(), DefaultSerializer.class);
  registerKey(BytesWritable.class.getName(), BytesWritableSerializer.class);
  registerKey(BooleanWritable.class.getName(), BoolWritableSerializer.class);
  registerKey(ByteWritable.class.getName(), ByteWritableSerializer.class);
  registerKey(FloatWritable.class.getName(), FloatWritableSerializer.class);
  registerKey(DoubleWritable.class.getName(), DoubleWritableSerializer.class);
  registerKey(VIntWritable.class.getName(), VIntWritableSerializer.class);
  registerKey(VLongWritable.class.getName(), VLongWritableSerializer.class);

  LOG.info("Hadoop platform inited");
}
 
Developer ID: aliyun-beta, Project: aliyun-oss-hadoop-fs, Lines: 18, Source: HadoopPlatform.java

Example 7: terminatePartial

import org.apache.hadoop.io.BooleanWritable; // import the required package/class
@Override
public Object terminatePartial(@SuppressWarnings("deprecation") AggregationBuffer agg)
        throws HiveException {
    QueueAggregationBuffer myagg = (QueueAggregationBuffer) agg;

    Pair<List<Object>, List<Object>> tuples = myagg.drainQueue();
    if (tuples == null) {
        return null;
    }
    List<Object> keyList = tuples.getKey();
    List<Object> valueList = tuples.getValue();

    Object[] partialResult = new Object[4];
    partialResult[0] = valueList;
    partialResult[1] = keyList;
    partialResult[2] = new IntWritable(myagg.size);
    partialResult[3] = new BooleanWritable(myagg.reverseOrder);
    return partialResult;
}
 
Developer ID: apache, Project: incubator-hivemall, Lines: 20, Source: UDAFToOrderedList.java

Example 8: testSimpleConsumerWithEmptySequenceFile

import org.apache.hadoop.io.BooleanWritable; // import the required package/class
@Test
public void testSimpleConsumerWithEmptySequenceFile() throws Exception {
    if (!canTest()) {
        return;
    }

    final Path file = new Path(new File("target/test/test-camel-sequence-file").getAbsolutePath());
    Configuration conf = new Configuration();
    SequenceFile.Writer writer = createWriter(conf, file, NullWritable.class, BooleanWritable.class);
    writer.sync();
    writer.close();

    MockEndpoint resultEndpoint = context.getEndpoint("mock:result", MockEndpoint.class);
    resultEndpoint.expectedMessageCount(0);

    context.addRoutes(new RouteBuilder() {
        public void configure() {
            from("hdfs2:localhost/" + file.toUri() + "?fileSystemType=LOCAL&fileType=SEQUENCE_FILE&chunkSize=4096&initialDelay=0").to("mock:result");
        }
    });
    context.start();

    resultEndpoint.assertIsSatisfied();
}
 
Developer ID: HydAu, Project: Camel, Lines: 25, Source: HdfsConsumerTest.java

Example 9: testWriteBoolean

import org.apache.hadoop.io.BooleanWritable; // import the required package/class
@Test
public void testWriteBoolean() throws Exception {
    if (!canTest()) {
        return;
    }
    Boolean aBoolean = true;
    template.sendBody("direct:write_boolean", aBoolean);

    Configuration conf = new Configuration();
    Path file1 = new Path("file:///" + TEMP_DIR.toUri() + "/test-camel-boolean");
    SequenceFile.Reader reader = new SequenceFile.Reader(conf, SequenceFile.Reader.file(file1));
    Writable key = (Writable) ReflectionUtils.newInstance(reader.getKeyClass(), conf);
    Writable value = (Writable) ReflectionUtils.newInstance(reader.getValueClass(), conf);
    reader.next(key, value);
    Boolean rBoolean = ((BooleanWritable) value).get();
    assertEquals(rBoolean, aBoolean);

    IOHelper.close(reader);
}
 
Developer ID: HydAu, Project: Camel, Lines: 20, Source: HdfsProducerTest.java

Example 10: testSimpleConsumerWithEmptySequenceFile

import org.apache.hadoop.io.BooleanWritable; // import the required package/class
@Test
public void testSimpleConsumerWithEmptySequenceFile() throws Exception {
    if (!canTest()) {
        return;
    }

    final Path file = new Path(new File("target/test/test-camel-sequence-file").getAbsolutePath());
    Configuration conf = new Configuration();
    FileSystem fs1 = FileSystem.get(file.toUri(), conf);
    SequenceFile.Writer writer = createWriter(fs1, conf, file, NullWritable.class, BooleanWritable.class);
    writer.sync();
    writer.close();

    MockEndpoint resultEndpoint = context.getEndpoint("mock:result", MockEndpoint.class);
    resultEndpoint.expectedMessageCount(0);

    context.addRoutes(new RouteBuilder() {
        public void configure() {
            from("hdfs:localhost/" + file.toUri() + "?fileSystemType=LOCAL&fileType=SEQUENCE_FILE&chunkSize=4096&initialDelay=0").to("mock:result");
        }
    });
    context.start();

    resultEndpoint.assertIsSatisfied();
}
 
Developer ID: HydAu, Project: Camel, Lines: 26, Source: HdfsConsumerTest.java

Example 11: testWriteBoolean

import org.apache.hadoop.io.BooleanWritable; // import the required package/class
@Test
public void testWriteBoolean() throws Exception {
    if (!canTest()) {
        return;
    }
    Boolean aBoolean = true;
    template.sendBody("direct:write_boolean", aBoolean);

    Configuration conf = new Configuration();
    Path file1 = new Path("file:///" + TEMP_DIR.toUri() + "/test-camel-boolean");
    FileSystem fs1 = FileSystem.get(file1.toUri(), conf);
    SequenceFile.Reader reader = new SequenceFile.Reader(fs1, file1, conf);
    Writable key = (Writable) ReflectionUtils.newInstance(reader.getKeyClass(), conf);
    Writable value = (Writable) ReflectionUtils.newInstance(reader.getValueClass(), conf);
    reader.next(key, value);
    Boolean rBoolean = ((BooleanWritable) value).get();
    assertEquals(rBoolean, aBoolean);

    IOHelper.close(reader);
}
 
Developer ID: HydAu, Project: Camel, Lines: 21, Source: HdfsProducerTest.java

Example 12: reduce

import org.apache.hadoop.io.BooleanWritable; // import the required package/class
@Override
protected void reduce(BytesWritable wordtimeb, Iterable<BooleanWritable> wordBools, Reducer<BytesWritable,BooleanWritable,LongWritable,BytesWritable>.Context context) throws IOException ,InterruptedException {
	ReadWritableStringLong wordtime = IOUtils.deserialize(wordtimeb.getBytes(), ReadWritableStringLong.class);
	long time = wordtime.secondObject();
	boolean seenInPresent = false;
	boolean seenInPast = false;
	for (BooleanWritable isfrompast: wordBools) {
		boolean frompast = isfrompast.get();
		seenInPresent |= !frompast;
		seenInPast |= frompast;
		if(seenInPast && seenInPresent){
			// then we've seen all the ones from this time if we were to see them, so we can break early. MASSIVE SAVINGS HERE
			break;
		}
	}
	ReadWritableBooleanBoolean intersectionUnion = new ReadWritableBooleanBoolean(seenInPast && seenInPresent,seenInPast || seenInPresent);
	context.write(new LongWritable(time), new BytesWritable(IOUtils.serialize(intersectionUnion)));
}
 
Developer ID: openimaj, Project: openimaj, Lines: 19, Source: CumulativeTimeWord.java


Note: The org.apache.hadoop.io.BooleanWritable class examples in this article were compiled by 純淨天空 from GitHub, MSDocs, and other open-source code and documentation platforms. The snippets were selected from open-source projects contributed by various developers; copyright in the source code remains with the original authors, and distribution and use are governed by the corresponding projects' licenses. Do not reproduce without permission.