This article collects typical usage examples of the Java class org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector. If you are wondering what LongObjectInspector is for, how to use it, or want to see it in real code, the curated class examples below should help.
LongObjectInspector belongs to the org.apache.hadoop.hive.serde2.objectinspector.primitive package. Fifteen code examples of the class are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Java code examples.
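Before the examples, here is a minimal, self-contained sketch (not taken from any of the projects below) of what a LongObjectInspector does: it unwraps a column value handed to a UDF or SerDe into a primitive long. The class name LongOiSketch is made up for illustration; only hive-serde (or hive-exec) needs to be on the classpath.

import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class LongOiSketch {
    public static void main(String[] args) {
        // javaLongObjectInspector inspects plain java.lang.Long values.
        LongObjectInspector longOI = PrimitiveObjectInspectorFactory.javaLongObjectInspector;
        Object column = 42L;                  // a value as Hive would hand it to a UDF
        long primitive = longOI.get(column);  // unwrap to a primitive long
        System.out.println(primitive);        // prints 42
    }
}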
Example 1: getFeatureType
import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector; // import the required package/class
@Nonnull
private static FeatureType getFeatureType(@Nonnull ListObjectInspector featureListOI)
        throws UDFArgumentException {
    final ObjectInspector featureOI = featureListOI.getListElementObjectInspector();
    if (featureOI instanceof StringObjectInspector) {
        return FeatureType.STRING;
    } else if (featureOI instanceof IntObjectInspector) {
        return FeatureType.INT;
    } else if (featureOI instanceof LongObjectInspector) {
        return FeatureType.LONG;
    } else {
        throw new UDFArgumentException("Feature object inspector must be one of "
                + "[StringObjectInspector, IntObjectInspector, LongObjectInspector]: "
                + featureOI.toString());
    }
}
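For reference, a hypothetical call of the helper above (assuming getFeatureType is visible to the caller): a list<bigint> column, represented by a standard list inspector over javaLongObjectInspector, resolves to FeatureType.LONG.

// Hypothetical usage; ObjectInspectorFactory and PrimitiveObjectInspectorFactory come from hive-serde.
ListObjectInspector featuresOI = ObjectInspectorFactory.getStandardListObjectInspector(
        PrimitiveObjectInspectorFactory.javaLongObjectInspector);
FeatureType type = getFeatureType(featuresOI); // -> FeatureType.LONG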
Example 2: asLongArray
import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector; // import the required package/class
@Nullable
public static long[] asLongArray(@Nullable final Object argObj,
        @Nonnull final ListObjectInspector listOI, @Nonnull LongObjectInspector elemOI) {
    if (argObj == null) {
        return null;
    }
    final int length = listOI.getListLength(argObj);
    final long[] ary = new long[length];
    for (int i = 0; i < length; i++) {
        Object o = listOI.getListElement(argObj, i);
        if (o == null) {
            continue;
        }
        ary[i] = elemOI.get(o);
    }
    return ary;
}
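A hypothetical way to exercise asLongArray (assuming the method is reachable from the calling code): build a standard list inspector over javaLongObjectInspector and pass a plain java.util.List; null elements are left at the array default of 0.

// Hypothetical usage; assumes asLongArray(...) and the Hive inspector factories are in scope.
ListObjectInspector listOI = ObjectInspectorFactory.getStandardListObjectInspector(
        PrimitiveObjectInspectorFactory.javaLongObjectInspector);
LongObjectInspector elemOI = (LongObjectInspector) listOI.getListElementObjectInspector();
long[] values = asLongArray(Arrays.asList(1L, 2L, null, 3L), listOI, elemOI);
// values == {1L, 2L, 0L, 3L}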
Example 3: createPrimitive
import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector; // import the required package/class
private static Writable createPrimitive(Object obj, PrimitiveObjectInspector inspector)
        throws SerDeException {
    if (obj == null) {
        return null;
    }
    switch (inspector.getPrimitiveCategory()) {
        case DOUBLE:
            return new DoubleWritable(((DoubleObjectInspector) inspector).get(obj));
        case FLOAT:
            return new FloatWritable(((FloatObjectInspector) inspector).get(obj));
        case INT:
            return new IntWritable(((IntObjectInspector) inspector).get(obj));
        case LONG:
            return new LongWritable(((LongObjectInspector) inspector).get(obj));
        case STRING:
            return new Text(((StringObjectInspector) inspector).getPrimitiveJavaObject(obj));
        case DATE:
            return ((DateObjectInspector) inspector).getPrimitiveWritableObject(obj);
        case TIMESTAMP:
            return ((TimestampObjectInspector) inspector).getPrimitiveWritableObject(obj);
        default:
            throw new SerDeException("Can't serialize primitive : " + inspector.getPrimitiveCategory());
    }
}
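The following hypothetical calls illustrate the mapping above (assuming createPrimitive is reachable; the inspectors come from PrimitiveObjectInspectorFactory):

// Hypothetical usage; like the method itself, the calling code must handle SerDeException.
Writable lw = createPrimitive(123L, PrimitiveObjectInspectorFactory.javaLongObjectInspector);    // LongWritable(123)
Writable sw = createPrimitive("abc", PrimitiveObjectInspectorFactory.javaStringObjectInspector); // Text("abc")
Writable nw = createPrimitive(null, PrimitiveObjectInspectorFactory.javaIntObjectInspector);     // null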
Example 4: initialize
import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector; // import the required package/class
@Override
public ObjectInspector initialize(ObjectInspector[] arguments)
        throws UDFArgumentException {
    Preconditions.checkPositionIndex(0, arguments.length);
    if (!(arguments[0] instanceof LongObjectInspector)) {
        throw new IllegalArgumentException(
                "Input to IsoStartDateOfWeek UDF must be a bigint. Given "
                        + arguments[0].getTypeName());
    }
    timestampOi = (LongObjectInspector) arguments[0];
    YYYYMMDD = org.joda.time.format.DateTimeFormat.forPattern("YYYY-MM-dd");
    mdt = new MutableDateTime(0, DateTimeZone.UTC);
    return PrimitiveObjectInspectorFactory.javaIntObjectInspector;
}
Example 5: test2
import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector; // import the required package/class
@Test
public void test2() throws Exception {
    IsoWeekOfWeekYear udf = new IsoWeekOfWeekYear();
    LongObjectInspector soi = PrimitiveObjectInspectorFactory.javaLongObjectInspector;
    udf.initialize(new ObjectInspector[]{soi});
    long l = 1388358783962L;
    int output = (Integer) udf.evaluate(new GenericUDF.DeferredObject[]{new GenericUDF.DeferredJavaObject(l)});
    Assert.assertEquals(52, output);
    l = 1388359114590L;
    output = (Integer) udf.evaluate(new GenericUDF.DeferredObject[]{new GenericUDF.DeferredJavaObject(l)});
    Assert.assertEquals(52, output);
}
Example 6: testYearBoundary1
import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector; // import the required package/class
@Test
public void testYearBoundary1() throws HiveException, IOException {
    UsWeekOfWeekYear udf = new UsWeekOfWeekYear();
    LongObjectInspector soi = PrimitiveObjectInspectorFactory.javaLongObjectInspector;
    udf.initialize(new ObjectInspector[]{soi});
    DateTimeFormatter dtf = ISODateTimeFormat.basicDateTime();
    // Sunday December 29, 2013 - Saturday January 4, 2014 are Week 1 of 2014
    DateTime dt = dtf.parseDateTime("20131229T000000.000Z");
    long l = dt.getMillis();
    int output = (Integer) udf.evaluate(new GenericUDF.DeferredObject[]{new GenericUDF.DeferredJavaObject(l)});
    Assert.assertEquals(1, output);
    dt = dtf.parseDateTime("20140104T000000.000Z");
    l = dt.getMillis();
    output = (Integer) udf.evaluate(new GenericUDF.DeferredObject[]{new GenericUDF.DeferredJavaObject(l)});
    Assert.assertEquals(1, output);
}
Example 7: testYearBoundary2
import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector; // import the required package/class
@Test
public void testYearBoundary2() throws HiveException, IOException {
    UsWeekOfWeekYear udf = new UsWeekOfWeekYear();
    LongObjectInspector soi = PrimitiveObjectInspectorFactory.javaLongObjectInspector;
    udf.initialize(new ObjectInspector[]{soi});
    DateTimeFormatter dtf = ISODateTimeFormat.basicDateTime();
    // Sunday December 28, 2014 - Saturday January 3, 2015 are Week 53 of 2014
    DateTime dt = dtf.parseDateTime("20141229T000000.000Z");
    long l = dt.getMillis();
    int output = (Integer) udf.evaluate(new GenericUDF.DeferredObject[]{new GenericUDF.DeferredJavaObject(l)});
    Assert.assertEquals(53, output);
    dt = dtf.parseDateTime("20150103T000000.000Z");
    l = dt.getMillis();
    output = (Integer) udf.evaluate(new GenericUDF.DeferredObject[]{new GenericUDF.DeferredJavaObject(l)});
    Assert.assertEquals(53, output);
}
Example 8: testYearBoundary1
import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector; // import the required package/class
@Test
public void testYearBoundary1() throws HiveException, IOException {
    UsWeekYear udf = new UsWeekYear();
    LongObjectInspector soi = PrimitiveObjectInspectorFactory.javaLongObjectInspector;
    udf.initialize(new ObjectInspector[]{soi});
    DateTimeFormatter dtf = ISODateTimeFormat.basicDateTime();
    // Sunday December 29, 2013 - Saturday January 4, 2014 are Week 1 of 2014
    DateTime dt = dtf.parseDateTime("20131229T000000.000Z");
    long l = dt.getMillis();
    int output = (Integer) udf.evaluate(new GenericUDF.DeferredObject[]{new GenericUDF.DeferredJavaObject(l)});
    Assert.assertEquals(2014, output);
    dt = dtf.parseDateTime("20140104T000000.000Z");
    l = dt.getMillis();
    output = (Integer) udf.evaluate(new GenericUDF.DeferredObject[]{new GenericUDF.DeferredJavaObject(l)});
    Assert.assertEquals(2014, output);
    dt = dtf.parseDateTime("20131228T000000.000Z");
    l = dt.getMillis();
    output = (Integer) udf.evaluate(new GenericUDF.DeferredObject[]{new GenericUDF.DeferredJavaObject(l)});
    Assert.assertEquals(2013, output);
}
Example 9: testYearBoundary2
import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector; // import the required package/class
@Test
public void testYearBoundary2() throws HiveException, IOException {
    UsWeekYear udf = new UsWeekYear();
    LongObjectInspector soi = PrimitiveObjectInspectorFactory.javaLongObjectInspector;
    udf.initialize(new ObjectInspector[]{soi});
    DateTimeFormatter dtf = ISODateTimeFormat.basicDateTime();
    // Sunday December 28, 2014 - Saturday January 3, 2015 are Week 53 of 2014
    DateTime dt = dtf.parseDateTime("20141229T000000.000Z");
    long l = dt.getMillis();
    int output = (Integer) udf.evaluate(new GenericUDF.DeferredObject[]{new GenericUDF.DeferredJavaObject(l)});
    Assert.assertEquals(2014, output);
    dt = dtf.parseDateTime("20150103T000000.000Z");
    l = dt.getMillis();
    output = (Integer) udf.evaluate(new GenericUDF.DeferredObject[]{new GenericUDF.DeferredJavaObject(l)});
    Assert.assertEquals(2014, output);
}
Example 10: test2
import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector; // import the required package/class
@Test
public void test2() throws Exception {
    IsoWeekYear udf = new IsoWeekYear();
    LongObjectInspector soi = PrimitiveObjectInspectorFactory.javaLongObjectInspector;
    udf.initialize(new ObjectInspector[]{soi});
    long l = 1388358783962L;
    int output = (Integer) udf.evaluate(new GenericUDF.DeferredObject[]{new GenericUDF.DeferredJavaObject(l)});
    Assert.assertEquals(2013, output);
    l = 1388359114590L;
    output = (Integer) udf.evaluate(new GenericUDF.DeferredObject[]{new GenericUDF.DeferredJavaObject(l)});
    Assert.assertEquals(2013, output);
}
Example 11: get
import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector; // import the required package/class
public static IHivePrimitiveConverter get( final ObjectInspector objectInspector ){
    switch( objectInspector.getCategory() ){
        case PRIMITIVE:
            PrimitiveObjectInspector primitiveInspector = (PrimitiveObjectInspector)objectInspector;
            switch( primitiveInspector.getPrimitiveCategory() ){
                case BINARY:
                    return new HiveBytesPrimitiveConverter( (BinaryObjectInspector)objectInspector );
                case BOOLEAN:
                    return new HiveBooleanPrimitiveConverter( (BooleanObjectInspector)objectInspector );
                case BYTE:
                    return new HiveBytePrimitiveConverter( (ByteObjectInspector)objectInspector );
                case DOUBLE:
                    return new HiveDoublePrimitiveConverter( (DoubleObjectInspector)objectInspector );
                case FLOAT:
                    return new HiveFloatPrimitiveConverter( (FloatObjectInspector)objectInspector );
                case INT:
                    return new HiveIntegerPrimitiveConverter( (IntObjectInspector)objectInspector );
                case LONG:
                    return new HiveLongPrimitiveConverter( (LongObjectInspector)objectInspector );
                case SHORT:
                    return new HiveShortPrimitiveConverter( (ShortObjectInspector)objectInspector );
                case STRING:
                    return new HiveStringPrimitiveConverter( (StringObjectInspector)objectInspector );
                case DATE:
                case TIMESTAMP:
                case VOID:
                case UNKNOWN:
                default:
                    return new HiveDefaultPrimitiveConverter();
            }
        default :
            return new HiveDefaultPrimitiveConverter();
    }
}
Example 12: getArg
import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector; // import the required package/class
private long getArg(int i, Object[] args) {
    if (inputOIs[i] instanceof LongObjectInspector) {
        return (Long) ((LongObjectInspector) inputOIs[i])
                .getPrimitiveJavaObject(args[i]);
    } else {
        return (Integer) ((IntObjectInspector) inputOIs[i])
                .getPrimitiveJavaObject(args[i]);
    }
}
Example 13: getNumber
import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector; // import the required package/class
public String getNumber(Object data, ObjectInspector objectInspector) {
    if (objectInspector instanceof DoubleObjectInspector) {
        return Double.toString(((DoubleObjectInspector) objectInspector).get(data));
    } else if (objectInspector instanceof LongObjectInspector) {
        return Long.toString(((LongObjectInspector) objectInspector).get(data));
    } else {
        throw new RuntimeException("Unknown object inspector type: " + objectInspector.getCategory()
                + " Type name: " + objectInspector.getTypeName());
    }
}
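Hypothetical calls of getNumber, where writer stands for an instance of the declaring class (not shown in the snippet):

// Hypothetical usage; any other inspector type falls into the RuntimeException branch.
String d = writer.getNumber(3.5d, PrimitiveObjectInspectorFactory.javaDoubleObjectInspector); // "3.5"
String l = writer.getNumber(42L, PrimitiveObjectInspectorFactory.javaLongObjectInspector);    // "42"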
Example 14: asLongOI
import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector; // import the required package/class
@Nonnull
public static LongObjectInspector asLongOI(@Nonnull final ObjectInspector argOI)
        throws UDFArgumentException {
    if (!BIGINT_TYPE_NAME.equals(argOI.getTypeName())) {
        throw new UDFArgumentException("Argument type must be BIGINT: " + argOI.getTypeName());
    }
    return (LongObjectInspector) argOI;
}
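Hypothetical usage of asLongOI: a writable long inspector reports the Hive type name "bigint" and passes the check, while any other primitive inspector makes the method throw.

// Hypothetical usage; assumes asLongOI(...) is in scope.
LongObjectInspector ok = asLongOI(PrimitiveObjectInspectorFactory.writableLongObjectInspector); // type name "bigint"
asLongOI(PrimitiveObjectInspectorFactory.javaIntObjectInspector); // throws UDFArgumentException for type name "int"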
Example 15: init
import org.apache.hadoop.hive.serde2.objectinspector.primitive.LongObjectInspector; // import the required package/class
@Override
public ObjectInspector init(Mode mode, ObjectInspector[] parameters) throws HiveException {
    super.init(mode, parameters);
    // Setup the list and element object inspectors.
    listObjectInspector = (ListObjectInspector) parameters[0];
    longObjectInspector = (LongObjectInspector) listObjectInspector.getListElementObjectInspector();
    // Will return a list of longs
    return ObjectInspectorFactory.getStandardListObjectInspector(PrimitiveObjectInspectorFactory.javaLongObjectInspector);
}