This article collects typical usage examples of the Java class org.apache.hadoop.hive.serde2.io.ByteWritable. If you have been wondering what the ByteWritable class is for, how to use it in Java, or what real-world usage looks like, the curated code examples here may help.
The ByteWritable class belongs to the org.apache.hadoop.hive.serde2.io package. A total of 15 code examples of the class are shown below, sorted by popularity by default.
Example 1: getConstantIntValue
import org.apache.hadoop.hive.serde2.io.ByteWritable; // import the required package/class
public static Integer getConstantIntValue(ObjectInspector[] arguments, int i)
    throws UDFArgumentTypeException {
  Object constValue = ((ConstantObjectInspector) arguments[i]).getWritableConstantValue();
  if (constValue == null) {
    return null;
  }
  int v;
  if (constValue instanceof IntWritable) {
    v = ((IntWritable) constValue).get();
  } else if (constValue instanceof ShortWritable) {
    v = ((ShortWritable) constValue).get();
  } else if (constValue instanceof ByteWritable) {
    v = ((ByteWritable) constValue).get();
  } else {
    throw new UDFArgumentTypeException(i, "_FUNC_ only takes INT/SHORT/BYTE types as "
        + getArgOrder(i) + " argument, got " + constValue.getClass());
  }
  return v;
}
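For context, here is a minimal sketch of how a helper like getConstantIntValue might be exercised, assuming Hive 0.13+ (where this factory overload takes a PrimitiveTypeInfo); the UdfUtils holder class is hypothetical and stands in for whatever class hosts the method above:

import org.apache.hadoop.hive.serde2.io.ByteWritable;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class ConstantIntValueDemo {
  public static void main(String[] args) throws Exception {
    // Wrap a constant TINYINT (ByteWritable) in a ConstantObjectInspector.
    ObjectInspector[] arguments = new ObjectInspector[] {
        PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
            TypeInfoFactory.byteTypeInfo, new ByteWritable((byte) 7))
    };
    // UdfUtils is a hypothetical holder for the getConstantIntValue method shown above.
    // The ByteWritable branch widens the byte to an int, so this prints 7; a STRING
    // constant at the same position would throw UDFArgumentTypeException instead.
    System.out.println(UdfUtils.getConstantIntValue(arguments, 0));
  }
}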
Example 2: writePrimitive
import org.apache.hadoop.hive.serde2.io.ByteWritable; // import the required package/class
private void writePrimitive(final Writable value) {
  if (value == null) {
    return;
  }
  if (value instanceof DoubleWritable) {
    recordConsumer.addDouble(((DoubleWritable) value).get());
  } else if (value instanceof BooleanWritable) {
    recordConsumer.addBoolean(((BooleanWritable) value).get());
  } else if (value instanceof FloatWritable) {
    recordConsumer.addFloat(((FloatWritable) value).get());
  } else if (value instanceof IntWritable) {
    recordConsumer.addInteger(((IntWritable) value).get());
  } else if (value instanceof LongWritable) {
    recordConsumer.addLong(((LongWritable) value).get());
  } else if (value instanceof ShortWritable) {
    recordConsumer.addInteger(((ShortWritable) value).get());
  } else if (value instanceof ByteWritable) {
    recordConsumer.addInteger(((ByteWritable) value).get());
  } else if (value instanceof BigDecimalWritable) {
    throw new UnsupportedOperationException("BigDecimal writing not implemented");
  } else if (value instanceof BinaryWritable) {
    recordConsumer.addBinary(((BinaryWritable) value).getBinary());
  } else {
    throw new IllegalArgumentException("Unknown value type: " + value + " " + value.getClass());
  }
}
Example 3: evaluate
import org.apache.hadoop.hive.serde2.io.ByteWritable; // import the required package/class
public IntWritable evaluate(ByteWritable a, IntWritable b) {
  if (a == null || b == null) {
    return null;
  }
  intWritable.set(a.get() >> b.get());
  return intWritable;
}
Example 4: evaluate
import org.apache.hadoop.hive.serde2.io.ByteWritable; // import the required package/class
public IntWritable evaluate(ByteWritable a, IntWritable b) {
  if (a == null || b == null) {
    return null;
  }
  intWritable.set(a.get() << b.get());
  return intWritable;
}
Example 5: evaluate
import org.apache.hadoop.hive.serde2.io.ByteWritable; // import the required package/class
public IntWritable evaluate(ByteWritable a, IntWritable b) {
  if (a == null || b == null) {
    return null;
  }
  intWritable.set(a.get() >>> b.get());
  return intWritable;
}
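Examples 3 through 5 are the evaluate bodies of simple bit-shift Hive UDFs; the reusable IntWritable they set is a common pattern for avoiding a per-row allocation. Below is a minimal sketch of the surrounding class for the right-shift variant (Example 3), assuming the old-style org.apache.hadoop.hive.ql.exec.UDF base class; the class name is made up for illustration:

import org.apache.hadoop.hive.ql.exec.UDF;
import org.apache.hadoop.hive.serde2.io.ByteWritable;
import org.apache.hadoop.io.IntWritable;

// Hypothetical class name; shows how an evaluate(ByteWritable, IntWritable) method
// like Example 3 typically lives inside a simple (old-style) Hive UDF.
public class UDFByteShiftRightDemo extends UDF {
  // Reused across rows so evaluate does not allocate a new Writable per call.
  private final IntWritable intWritable = new IntWritable();

  public IntWritable evaluate(ByteWritable a, IntWritable b) {
    if (a == null || b == null) {
      return null;
    }
    intWritable.set(a.get() >> b.get());
    return intWritable;
  }
}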
Example 6: write
import org.apache.hadoop.hive.serde2.io.ByteWritable; // import the required package/class
@Override
public Result write(Writable writable, Generator generator) {
  if (writable instanceof ByteWritable) {
    generator.writeNumber(((ByteWritable) writable).get());
  }
  else if (writable instanceof DoubleWritable) {
    generator.writeNumber(((DoubleWritable) writable).get());
  }
  else if (writable instanceof ShortWritable) {
    generator.writeNumber(((ShortWritable) writable).get());
  }
  // HiveDecimal - Hive 0.11+
  else if (writable != null && HiveConstants.DECIMAL_WRITABLE.equals(writable.getClass().getName())) {
    generator.writeString(writable.toString());
  }
  // pass the UNIX epoch
  else if (writable instanceof TimestampWritable) {
    long ts = ((TimestampWritable) writable).getTimestamp().getTime();
    Calendar cal = Calendar.getInstance();
    cal.setTimeInMillis(ts);
    generator.writeString(DatatypeConverter.printDateTime(cal));
  }
  // HiveDate - Hive 0.12+
  else if (writable != null && HiveConstants.DATE_WRITABLE.equals(writable.getClass().getName())) {
    generator.writeString(DateWritableWriter.toES(writable));
  }
  // HiveVarcharWritable - Hive 0.12+
  else if (writable != null && HiveConstants.VARCHAR_WRITABLE.equals(writable.getClass().getName())) {
    generator.writeString(writable.toString());
  }
  // HiveChar - Hive 0.13+
  else if (writable != null && HiveConstants.CHAR_WRITABLE.equals(writable.getClass().getName())) {
    generator.writeString(StringUtils.trim(writable.toString()));
  }
  else {
    return super.write(writable, generator);
  }
  return Result.SUCCESFUL();
}
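The TimestampWritable branch above turns the timestamp's epoch milliseconds into an ISO-8601 string via javax.xml.bind.DatatypeConverter. A standalone sketch of just that conversion, assuming Java 8 where the javax.xml.bind classes are still on the classpath:

import java.sql.Timestamp;
import java.util.Calendar;
import javax.xml.bind.DatatypeConverter;
import org.apache.hadoop.hive.serde2.io.TimestampWritable;

public class TimestampToIso8601Demo {
  public static void main(String[] args) {
    // Same steps as the TimestampWritable branch of Example 6.
    TimestampWritable tw = new TimestampWritable(new Timestamp(0L));
    Calendar cal = Calendar.getInstance();
    cal.setTimeInMillis(tw.getTimestamp().getTime());
    // Prints an ISO-8601 timestamp for the epoch, rendered in the JVM's default time zone.
    System.out.println(DatatypeConverter.printDateTime(cal));
  }
}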
Example 7: toComparable
import org.apache.hadoop.hive.serde2.io.ByteWritable; // import the required package/class
static Comparable<?> toComparable(PrimitiveCategory category, Object literal) {
  String stringLiteral;
  switch (category) {
  case STRING:
    return new Text((String) literal);
  case BOOLEAN:
    return new BooleanWritable((Boolean) literal);
  case BYTE:
    return new ByteWritable(((Long) literal).byteValue());
  case SHORT:
    return new ShortWritable(((Long) literal).shortValue());
  case INT:
    return new IntWritable(((Long) literal).intValue());
  case LONG:
    return new LongWritable((Long) literal);
  case FLOAT:
    return new FloatWritable(((Double) literal).floatValue());
  case DOUBLE:
    return new DoubleWritable((Double) literal);
  case TIMESTAMP:
    return new TimestampWritable((Timestamp) literal);
  case DATE:
    return (DateWritable) literal;
  case CHAR:
    stringLiteral = (String) literal;
    return new HiveCharWritable(new HiveChar(stringLiteral, stringLiteral.length()));
  case VARCHAR:
    stringLiteral = (String) literal;
    return new HiveVarcharWritable(new HiveVarchar(stringLiteral, stringLiteral.length()));
  case DECIMAL:
    return new HiveDecimalWritable(HiveDecimal.create((BigDecimal) literal));
  default:
    throw new IllegalArgumentException("Unsupported category: " + category);
  }
}
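Note that the integral categories (BYTE, SHORT, INT) in Example 7 expect the literal to arrive boxed as a Long, presumably because the upstream literal source boxes every integer literal that way, and then narrow it to the target width. A tiny self-contained illustration of that narrowing, outside the original class:

import org.apache.hadoop.hive.serde2.io.ByteWritable;
import org.apache.hadoop.hive.serde2.io.ShortWritable;
import org.apache.hadoop.io.IntWritable;

public class LiteralNarrowingDemo {
  public static void main(String[] args) {
    Long literal = 42L;
    // The BYTE / SHORT / INT branches of Example 7, unrolled.
    System.out.println(new ByteWritable(literal.byteValue()).get());   // 42
    System.out.println(new ShortWritable(literal.shortValue()).get()); // 42
    System.out.println(new IntWritable(literal.intValue()).get());     // 42
  }
}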
Example 8: getObjectFromWritable
import org.apache.hadoop.hive.serde2.io.ByteWritable; // import the required package/class
private Object getObjectFromWritable(Writable w) {
  if (w instanceof IntWritable) {
    // int
    return ((IntWritable) w).get();
  } else if (w instanceof ShortWritable) {
    // short
    return ((ShortWritable) w).get();
  } else if (w instanceof ByteWritable) {
    // byte
    return ((ByteWritable) w).get();
  } else if (w instanceof BooleanWritable) {
    // boolean
    return ((BooleanWritable) w).get();
  } else if (w instanceof LongWritable) {
    // long
    return ((LongWritable) w).get();
  } else if (w instanceof FloatWritable) {
    // float
    return ((FloatWritable) w).get();
  } else if (w instanceof DoubleWritable) {
    // double
    return ((DoubleWritable) w).get();
  } else if (w instanceof NullWritable) {
    // null
    return null;
  } else {
    // treat as string
    return w.toString();
  }
}
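The helper above unwraps each Writable into its boxed Java counterpart, with a String fallback. A trimmed, self-contained sketch of the same dispatch, reduced to the byte, null, and fallback cases for illustration (the demo class and method names are made up):

import org.apache.hadoop.hive.serde2.io.ByteWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;

public class WritableUnwrapDemo {
  // Same idea as Example 8, reduced to three branches.
  static Object unwrap(Writable w) {
    if (w instanceof ByteWritable) {
      return ((ByteWritable) w).get(); // auto-boxes to java.lang.Byte
    } else if (w instanceof NullWritable) {
      return null;
    } else {
      return w.toString(); // fallback: treat as string
    }
  }

  public static void main(String[] args) {
    Object o = unwrap(new ByteWritable((byte) 5));
    System.out.println(o + " (" + o.getClass().getSimpleName() + ")"); // 5 (Byte)
    System.out.println(unwrap(NullWritable.get()));                    // null
    System.out.println(unwrap(new Text("fallback")));                  // fallback
  }
}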
Example 9: get
import org.apache.hadoop.hive.serde2.io.ByteWritable; // import the required package/class
@Override
public byte get(Object o) {
  // Accept int writables and convert them.
  if (o instanceof IntWritable) {
    return (byte) ((IntWritable) o).get();
  }
  return ((ByteWritable) o).get();
}
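This get is lenient: it also accepts an IntWritable and narrows it with a plain (byte) cast, so out-of-range values wrap silently rather than fail. A quick standalone check of what that narrowing does:

import org.apache.hadoop.hive.serde2.io.ByteWritable;
import org.apache.hadoop.io.IntWritable;

public class LenientByteGetDemo {
  public static void main(String[] args) {
    // The IntWritable path of Example 9: a plain (byte) cast, so 300 wraps to 44.
    System.out.println((byte) new IntWritable(300).get()); // 44
    // The normal path simply unwraps the ByteWritable.
    System.out.println(new ByteWritable((byte) 7).get());  // 7
  }
}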
Example 10: createPrimitive
import org.apache.hadoop.hive.serde2.io.ByteWritable; // import the required package/class
private Writable createPrimitive(final Object obj, final PrimitiveObjectInspector inspector)
    throws SerDeException {
  if (obj == null) {
    return null;
  }
  switch (inspector.getPrimitiveCategory()) {
  case VOID:
    return null;
  case BOOLEAN:
    return new BooleanWritable(((BooleanObjectInspector) inspector).get(obj) ? Boolean.TRUE : Boolean.FALSE);
  case BYTE:
    return new ByteWritable((byte) ((ByteObjectInspector) inspector).get(obj));
  case DOUBLE:
    return new DoubleWritable(((DoubleObjectInspector) inspector).get(obj));
  case FLOAT:
    return new FloatWritable(((FloatObjectInspector) inspector).get(obj));
  case INT:
    return new IntWritable(((IntObjectInspector) inspector).get(obj));
  case LONG:
    return new LongWritable(((LongObjectInspector) inspector).get(obj));
  case SHORT:
    return new ShortWritable((short) ((ShortObjectInspector) inspector).get(obj));
  case STRING:
    return new BinaryWritable(Binary.fromString(((StringObjectInspector) inspector).getPrimitiveJavaObject(obj)));
  default:
    throw new SerDeException("Unknown primitive : " + inspector.getPrimitiveCategory());
  }
}
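For the BYTE branch above, the object inspector is what extracts the primitive byte from whatever row object the SerDe sees; the result is then re-wrapped in a fresh ByteWritable. A minimal sketch of that round trip, using Hive's standard writable byte inspector (the demo class name is made up):

import org.apache.hadoop.hive.serde2.io.ByteWritable;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.ByteObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class CreatePrimitiveByteDemo {
  public static void main(String[] args) {
    // writableByteObjectInspector knows how to pull a byte out of a ByteWritable row object.
    ByteObjectInspector inspector = PrimitiveObjectInspectorFactory.writableByteObjectInspector;
    Object rowObject = new ByteWritable((byte) 42);
    // Mirrors the BYTE case of Example 10: inspect, then re-wrap.
    ByteWritable result = new ByteWritable(inspector.get(rowObject));
    System.out.println(result.get()); // 42
  }
}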
Example 11: TestCopyByte
import org.apache.hadoop.hive.serde2.io.ByteWritable; // import the required package/class
/**
 * Tests that after copying a lazy byte object, calling materialize on both the original and
 * the copy does not advance the tree reader twice.
 * @throws Exception
 */
@Test
public void TestCopyByte() throws Exception {
  ReaderWriterProfiler.setProfilerOptions(null);
  OrcLazyByte lazyByte = new OrcLazyByte(new LazyByteTreeReader(0, 0) {
    int nextCalls = 0;

    @Override
    public Object next(Object previous) throws IOException {
      if (nextCalls == 0) {
        nextCalls++;
        return new ByteWritable((byte) 1);
      }
      throw new IOException("next should only be called once");
    }

    @Override
    protected boolean seekToRow(long currentRow) throws IOException {
      return true;
    }
  });
  ByteObjectInspector byteOI = (ByteObjectInspector)
      OrcLazyObjectInspectorUtils.createLazyObjectInspector(TypeInfoFactory.byteTypeInfo);
  OrcLazyByte lazyByte2 = (OrcLazyByte) byteOI.copyObject(lazyByte);
  Assert.assertEquals(1, ((ByteWritable) lazyByte.materialize()).get());
  Assert.assertEquals(1, ((ByteWritable) lazyByte2.materialize()).get());
}
Example 12: byteType
import org.apache.hadoop.hive.serde2.io.ByteWritable; // import the required package/class
@Override
protected Class<? extends Writable> byteType() {
  return ByteWritable.class;
}
Example 13: processByte
import org.apache.hadoop.hive.serde2.io.ByteWritable; // import the required package/class
@Override
protected Object processByte(Byte value) {
  return new ByteWritable(value);
}
Example 14: testByte
import org.apache.hadoop.hive.serde2.io.ByteWritable; // import the required package/class
@Test
public void testByte() {
  // byte is not recognized by the schema
  assertEquals("127", hiveTypeToJson(new MyHiveType(new ByteWritable(Byte.MAX_VALUE), byteTypeInfo)));
}
Example 15: toWritableObjectInternal
import org.apache.hadoop.hive.serde2.io.ByteWritable; // import the required package/class
@Override
protected Object toWritableObjectInternal(Object value) throws UnexpectedTypeException {
  return new ByteWritable((Byte) value);
}