This article collects typical usage examples of the ByteWritable.set method from the Java class org.apache.hadoop.io.ByteWritable. If you are wondering what ByteWritable.set does, how to use it, or where to find example code, the curated method samples below may help. You can also explore further usage examples of the enclosing class org.apache.hadoop.io.ByteWritable.
Below are 4 code examples of ByteWritable.set, sorted by popularity by default. You can upvote the examples you like or find useful; your votes help the system recommend better Java code samples.
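Before the collected examples, here is a minimal, self-contained sketch (not taken from the examples below) showing what ByteWritable.set does: it stores a single Java byte inside the writable, which get() later returns.

import org.apache.hadoop.io.ByteWritable;

public class ByteWritableSetSketch {
    public static void main(String[] args) {
        ByteWritable writable = new ByteWritable();
        writable.set((byte) 42);             // store a byte value
        System.out.println(writable.get());  // prints 42
    }
}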
Example 1: create
import org.apache.hadoop.io.ByteWritable; // import the package/class the method depends on
@Override
public Writable create(Object value, TypeConverter typeConverter, Holder<Integer> size) {
    size.value = SIZE;
    ByteWritable writable = new ByteWritable();
    writable.set(typeConverter.convertTo(Byte.class, value));
    return writable;
}
Example 2: testReadByte
import org.apache.hadoop.io.ByteWritable; // import the package/class the method depends on
@Test
public void testReadByte() throws Exception {
    if (!canTest()) {
        return;
    }

    final Path file = new Path(new File("target/test/test-camel-byte").getAbsolutePath());
    Configuration conf = new Configuration();
    SequenceFile.Writer writer = createWriter(conf, file, NullWritable.class, ByteWritable.class);
    NullWritable keyWritable = NullWritable.get();
    ByteWritable valueWritable = new ByteWritable();
    byte value = 3;
    valueWritable.set(value);
    writer.append(keyWritable, valueWritable);
    writer.sync();
    writer.close();

    MockEndpoint resultEndpoint = context.getEndpoint("mock:result", MockEndpoint.class);
    resultEndpoint.expectedMessageCount(1);
    resultEndpoint.message(0).body(byte.class).isEqualTo(3);

    context.addRoutes(new RouteBuilder() {
        public void configure() {
            from("hdfs2:localhost/" + file.toUri() + "?fileSystemType=LOCAL&fileType=SEQUENCE_FILE&initialDelay=0").to("mock:result");
        }
    });
    context.start();

    resultEndpoint.assertIsSatisfied();
}
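As a cross-check of what the test above writes before the Camel route consumes it, here is a hedged sketch (none of this code appears in the example; it assumes a Hadoop 2.x client on the local filesystem) that reads the SequenceFile back directly and prints the stored byte.

import java.io.File;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.ByteWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.SequenceFile;

public class ReadByteBack {
    public static void main(String[] args) throws Exception {
        Path file = new Path(new File("target/test/test-camel-byte").getAbsolutePath());
        Configuration conf = new Configuration();
        try (SequenceFile.Reader reader =
                 new SequenceFile.Reader(conf, SequenceFile.Reader.file(file))) {
            NullWritable key = NullWritable.get();
            ByteWritable value = new ByteWritable();
            while (reader.next(key, value)) {
                System.out.println(value.get()); // prints 3 for the file written above
            }
        }
    }
}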
Example 3: testReadByte
import org.apache.hadoop.io.ByteWritable; // import the package/class the method depends on
@Test
public void testReadByte() throws Exception {
    if (!canTest()) {
        return;
    }

    final Path file = new Path(new File("target/test/test-camel-byte").getAbsolutePath());
    Configuration conf = new Configuration();
    FileSystem fs1 = FileSystem.get(file.toUri(), conf);
    SequenceFile.Writer writer = createWriter(fs1, conf, file, NullWritable.class, ByteWritable.class);
    NullWritable keyWritable = NullWritable.get();
    ByteWritable valueWritable = new ByteWritable();
    byte value = 3;
    valueWritable.set(value);
    writer.append(keyWritable, valueWritable);
    writer.sync();
    writer.close();

    MockEndpoint resultEndpoint = context.getEndpoint("mock:result", MockEndpoint.class);
    resultEndpoint.expectedMessageCount(1);
    resultEndpoint.message(0).body(byte.class).isEqualTo(3);

    context.addRoutes(new RouteBuilder() {
        public void configure() {
            from("hdfs:localhost/" + file.toUri() + "?fileSystemType=LOCAL&fileType=SEQUENCE_FILE&initialDelay=0").to("mock:result");
        }
    });
    context.start();

    resultEndpoint.assertIsSatisfied();
}
Example 4: wrap
import org.apache.hadoop.io.ByteWritable; // import the package/class the method depends on
@Override
public void wrap(Byte javaValue, ByteWritable writableValue) {
    writableValue.set(javaValue);
}
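A short usage sketch for this converter style (the ByteConverter class name and the calling code are hypothetical, not from the example): the caller supplies an already-allocated ByteWritable and wrap only mutates it via set, the usual Hadoop pattern of reusing Writable instances instead of allocating a new one per record.

import org.apache.hadoop.io.ByteWritable;

public class WrapSketch {
    // Hypothetical converter holding a wrap method like the one shown above.
    static class ByteConverter {
        public void wrap(Byte javaValue, ByteWritable writableValue) {
            writableValue.set(javaValue); // Byte is auto-unboxed to byte
        }
    }

    public static void main(String[] args) {
        ByteConverter converter = new ByteConverter();
        ByteWritable reusable = new ByteWritable();
        for (byte b = 0; b < 3; b++) {
            converter.wrap(b, reusable);        // reuse the same writable instance
            System.out.println(reusable.get()); // prints 0, 1, 2
        }
    }
}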