This article compiles typical usage examples of the Java class org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector. If you have been wondering what exactly StringObjectInspector does, how to use it, or where to find working examples, the hand-picked code samples below should help.
The StringObjectInspector class belongs to the org.apache.hadoop.hive.serde2.objectinspector.primitive package. Fifteen code examples are presented below, sorted by popularity by default. You can upvote the examples you like or find useful; your votes help the system recommend better Java examples.
Example 1: getFeatureType
import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector; // import the required package/class
@Nonnull
private static FeatureType getFeatureType(@Nonnull ListObjectInspector featureListOI)
throws UDFArgumentException {
final ObjectInspector featureOI = featureListOI.getListElementObjectInspector();
if (featureOI instanceof StringObjectInspector) {
return FeatureType.STRING;
} else if (featureOI instanceof IntObjectInspector) {
return FeatureType.INT;
} else if (featureOI instanceof LongObjectInspector) {
return FeatureType.LONG;
} else {
throw new UDFArgumentException("Feature object inspector must be one of "
+ "[StringObjectInspector, IntObjectInspector, LongObjectInspector]: "
+ featureOI.toString());
}
}
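To make the branching concrete, here is a minimal standalone sketch using only standard Hive inspector factories (FeatureType itself is the project's own enum, so the expected outcomes are noted in comments rather than asserted):
import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class FeatureTypeSketch {
    public static void main(String[] args) {
        // Element inspector is a StringObjectInspector, so getFeatureType(...)
        // would take the first branch and return FeatureType.STRING.
        ListObjectInspector stringListOI = ObjectInspectorFactory.getStandardListObjectInspector(
                PrimitiveObjectInspectorFactory.javaStringObjectInspector);
        System.out.println(stringListOI.getTypeName()); // array<string>

        // A double element inspector matches none of the three branches,
        // so getFeatureType(...) would throw UDFArgumentException.
        ListObjectInspector doubleListOI = ObjectInspectorFactory.getStandardListObjectInspector(
                PrimitiveObjectInspectorFactory.javaDoubleObjectInspector);
        System.out.println(doubleListOI.getTypeName()); // array<double>
    }
}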
Example 2: createPrimitive
import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector; // import the required package/class
private static Writable createPrimitive(Object obj, PrimitiveObjectInspector inspector)
throws SerDeException {
if (obj == null) {
return null;
}
switch (inspector.getPrimitiveCategory()) {
case DOUBLE:
return new DoubleWritable(((DoubleObjectInspector) inspector).get(obj));
case FLOAT:
return new FloatWritable(((FloatObjectInspector) inspector).get(obj));
case INT:
return new IntWritable(((IntObjectInspector) inspector).get(obj));
case LONG:
return new LongWritable(((LongObjectInspector) inspector).get(obj));
case STRING:
return new Text(((StringObjectInspector) inspector).getPrimitiveJavaObject(obj));
case DATE:
return ((DateObjectInspector) inspector).getPrimitiveWritableObject(obj);
case TIMESTAMP:
return ((TimestampObjectInspector) inspector).getPrimitiveWritableObject(obj);
default:
throw new SerDeException("Can't serialize primitive : " + inspector.getPrimitiveCategory());
}
}
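A minimal sketch of what the STRING branch does, using only stock Hive and Hadoop classes:
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector;
import org.apache.hadoop.io.Text;

public class StringBranchSketch {
    public static void main(String[] args) {
        StringObjectInspector soi = PrimitiveObjectInspectorFactory.javaStringObjectInspector;
        // As in the STRING case above: extract the java.lang.String and wrap it in a Text writable.
        Text out = new Text(soi.getPrimitiveJavaObject("hello"));
        System.out.println(out); // hello
    }
}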
Example 3: initialize
import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector; // import the required package/class
@Override
public ObjectInspector initialize(ObjectInspector[] arguments)
throws UDFArgumentException {
if (arguments.length != 1
|| !arguments[0].getCategory().equals(Category.PRIMITIVE)
|| ((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory() != PrimitiveCategory.STRING) {
throw new UDFArgumentException("Usage: json_split(jsonstring)");
}
stringInspector = (StringObjectInspector) arguments[0];
ArrayList<String> outputColumns = new ArrayList<String>();
outputColumns.add("row_id");
outputColumns.add("json_string");
ArrayList<ObjectInspector> outputTypes = new ArrayList<ObjectInspector>();
outputTypes.add(PrimitiveObjectInspectorFactory.javaIntObjectInspector);
outputTypes.add(PrimitiveObjectInspectorFactory.javaStringObjectInspector);
return ObjectInspectorFactory.getStandardListObjectInspector(
ObjectInspectorFactory.getStandardStructObjectInspector(outputColumns, outputTypes));
}
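The inspector returned above describes array<struct<row_id:int,json_string:string>>. A standalone check of that shape, built from the same Hive factory calls:
import java.util.Arrays;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class JsonSplitOutputSketch {
    public static void main(String[] args) {
        ObjectInspector structOI = ObjectInspectorFactory.getStandardStructObjectInspector(
                Arrays.asList("row_id", "json_string"),
                Arrays.asList(
                        (ObjectInspector) PrimitiveObjectInspectorFactory.javaIntObjectInspector,
                        PrimitiveObjectInspectorFactory.javaStringObjectInspector));
        ObjectInspector listOI = ObjectInspectorFactory.getStandardListObjectInspector(structOI);
        System.out.println(listOI.getTypeName()); // array<struct<row_id:int,json_string:string>>
    }
}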
Example 4: initialize
import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector; // import the required package/class
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
dateOi = arguments[0];
if (!(dateOi instanceof StringObjectInspector || dateOi instanceof TimestampObjectInspector)) {
throw new IllegalArgumentException("Can only call this UDF on types 'string' and 'timestamp'. " +
"Given " + dateOi.getTypeName());
}
hasSecondArg = arguments.length > 1;
if (hasSecondArg) {
ObjectInspector oi = arguments[1];
if (oi instanceof WritableConstantStringObjectInspector) {
constSecondArg = ((WritableConstantStringObjectInspector) oi).getWritableConstantValue().toString();
} else if (oi instanceof StringObjectInspector) {
formatOi = (StringObjectInspector) oi;
} else {
throw new IllegalArgumentException("Second argument must be of String type");
}
}
return PrimitiveObjectInspectorFactory.javaLongObjectInspector;
}
Example 5: testTwoArgStringInput
import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector; // import the required package/class
@Test
public void testTwoArgStringInput() throws HiveException {
ToEpoch udf = new ToEpoch();
StringObjectInspector soi = PrimitiveObjectInspectorFactory.javaStringObjectInspector;
ObjectInspector i =
PrimitiveObjectInspectorFactory.getPrimitiveWritableConstantObjectInspector(
PrimitiveObjectInspector.PrimitiveCategory.STRING,
new Text("yyyy/MM/dd HH:mm:ss.SSSZ"));
udf.initialize(new ObjectInspector[]{soi, i});
Text date = new Text("2014/04/14 13:57:33.809-0700");
long output = (Long) udf.evaluate(new GenericUDF.DeferredObject[]{new GenericUDF.DeferredJavaObject(date)});
Assert.assertEquals(1397509053809L, output);
}
Example 6: write
import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector; // import the required package/class
@Override
void write(Object obj) throws IOException {
if (obj != null) {
Text val = ((StringObjectInspector) inspector).getPrimitiveWritableObject(obj);
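// Defensive copy: the inspector may reuse the returned Text instance across
// rows, so the buffered value is copied before the next row can overwrite it.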
buffer[bufferIndex++] = new Text(val);
setRawDataSize(val.getLength());
// Increment the memory estimate by the buffered bytes
memoryEstimate.incrementTotalMemory(val.getLength());
bufferedBytes += val.getLength();
} else {
buffer[bufferIndex++] = null;
setRawDataSize(RawDatasizeConst.NULL_SIZE);
}
if (bufferIndex == buffer.length) {
flush();
}
}
Example 7: testCaseInsensitiveFieldsStruct
import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector; // import the required package/class
/**
* Tests that fields can be accessed from the OrcStructObjectInspector in a case
* insensitive manner.
* @throws Exception
*/
@Test
public void testCaseInsensitiveFieldsStruct() throws Exception {
OrcStruct struct = new OrcStruct(Lists.newArrayList(FIELD_0));
struct.setFieldValue(0, new Text("a"));
// Test control case (cases match)
StructField field = NON_LAZY_STRUCT_OI.getStructFieldRef(FIELD_0);
Assert.assertEquals("a",
((StringObjectInspector) field.getFieldObjectInspector()).getPrimitiveJavaObject(
NON_LAZY_STRUCT_OI.getStructFieldData(struct, field)));
// Test upper case
field = NON_LAZY_STRUCT_OI.getStructFieldRef(FIELD_0.toUpperCase());
Assert.assertEquals("a",
((StringObjectInspector) field.getFieldObjectInspector()).getPrimitiveJavaObject(
NON_LAZY_STRUCT_OI.getStructFieldData(struct, field)));
// Test lower case (even if the value of FIELD_0 changes in the future, at least
// one of the upper-case and lower-case variants will differ from the actual case)
field = NON_LAZY_STRUCT_OI.getStructFieldRef(FIELD_0.toLowerCase());
Assert.assertEquals("a",
((StringObjectInspector) field.getFieldObjectInspector()).getPrimitiveJavaObject(
NON_LAZY_STRUCT_OI.getStructFieldData(struct, field)));
}
Example 8: setSafeValue
import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector; // import the required package/class
@Override
public void setSafeValue(ObjectInspector oi, Object hiveFieldValue, ValueVector outputVV, int outputIndex) {
final Text value = ((StringObjectInspector)oi).getPrimitiveWritableObject(hiveFieldValue);
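// Note: Text.getBytes() exposes the backing array, which can be longer than
// the valid data, so the copy below is bounded by getLength().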
final byte[] valueBytes = value.getBytes();
final int len = value.getLength();
((NullableVarCharVector) outputVV).getMutator().setSafe(outputIndex, valueBytes, 0, len);
}
Example 9: get
import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector; // import the required package/class
public static IHivePrimitiveConverter get( final ObjectInspector objectInspector ){
switch( objectInspector.getCategory() ){
case PRIMITIVE:
PrimitiveObjectInspector primitiveInspector = (PrimitiveObjectInspector)objectInspector;
switch( primitiveInspector.getPrimitiveCategory() ){
case BINARY:
return new HiveBytesPrimitiveConverter( (BinaryObjectInspector)objectInspector );
case BOOLEAN:
return new HiveBooleanPrimitiveConverter( (BooleanObjectInspector)objectInspector );
case BYTE:
return new HiveBytePrimitiveConverter( (ByteObjectInspector)objectInspector );
case DOUBLE:
return new HiveDoublePrimitiveConverter( (DoubleObjectInspector)objectInspector );
case FLOAT:
return new HiveFloatPrimitiveConverter( (FloatObjectInspector)objectInspector );
case INT:
return new HiveIntegerPrimitiveConverter( (IntObjectInspector)objectInspector );
case LONG:
return new HiveLongPrimitiveConverter( (LongObjectInspector)objectInspector );
case SHORT:
return new HiveShortPrimitiveConverter( (ShortObjectInspector)objectInspector );
case STRING:
return new HiveStringPrimitiveConverter( (StringObjectInspector)objectInspector );
case DATE:
case TIMESTAMP:
case VOID:
case UNKNOWN:
default:
return new HiveDefaultPrimitiveConverter();
}
default :
return new HiveDefaultPrimitiveConverter();
}
}
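A hedged usage sketch; IHivePrimitiveConverter and the concrete Hive*PrimitiveConverter classes are the project's own types, inferred from the snippet above:
// A string inspector falls into the STRING case and yields a HiveStringPrimitiveConverter.
IHivePrimitiveConverter stringConverter =
    get( PrimitiveObjectInspectorFactory.javaStringObjectInspector );
// DATE, TIMESTAMP, VOID and UNKNOWN all fall through to the default converter.
IHivePrimitiveConverter fallback =
    get( PrimitiveObjectInspectorFactory.javaDateObjectInspector );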
Example 10: initialize
import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector; // import the required package/class
@Override
public ObjectInspector initialize(ObjectInspector[] args) throws UDFArgumentException {
// ================================
// Check the input
assert (args.length == 1); // This UDF accepts one argument
// The first argument must be a string
ObjectInspector inputOI = args[0];
if (!(inputOI instanceof StringObjectInspector)) {
throw new UDFArgumentException("The argument must be a string");
}
useragentOI = (StringObjectInspector) inputOI;
// ================================
// Initialize the parser
constructAnalyzer();
// ================================
// Define the output
// https://stackoverflow.com/questions/26026027/how-to-return-struct-from-hive-udf
// Define the field names for the struct<> and their types
List<ObjectInspector> fieldObjectInspectors = new ArrayList<>(fieldNames.size());
for (String ignored : fieldNames) {
fieldObjectInspectors.add(PrimitiveObjectInspectorFactory.writableStringObjectInspector);
}
return ObjectInspectorFactory.getStandardStructObjectInspector(fieldNames, fieldObjectInspectors);
}
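For illustration, a standalone sketch of the struct inspector this method builds; the field names here are hypothetical (the real fieldNames list comes from the user-agent analyzer):
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class StructOutputSketch {
    public static void main(String[] args) {
        List<String> fieldNames = Arrays.asList("device_class", "agent_name"); // hypothetical fields
        List<ObjectInspector> fieldOIs = new ArrayList<>();
        for (String ignored : fieldNames) {
            fieldOIs.add(PrimitiveObjectInspectorFactory.writableStringObjectInspector);
        }
        ObjectInspector structOI =
                ObjectInspectorFactory.getStandardStructObjectInspector(fieldNames, fieldOIs);
        System.out.println(structOI.getTypeName()); // struct<device_class:string,agent_name:string>
    }
}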
Example 11: BitcoinTransactionHashUDFInvalidArguments
import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector; // import the required package/class
@Test
public void BitcoinTransactionHashUDFInvalidArguments() throws HiveException {
final BitcoinTransactionHashUDF bthUDF = new BitcoinTransactionHashUDF();
UDFArgumentLengthException exNull = assertThrows(UDFArgumentLengthException.class, ()->bthUDF.initialize(null), "Exception is thrown in case of null parameter");
UDFArgumentLengthException exLen = assertThrows(UDFArgumentLengthException.class, ()->bthUDF.initialize(new ObjectInspector[2]), "Exception is thrown in case of invalid length parameter");
StringObjectInspector[] testStringOI = new StringObjectInspector[1];
testStringOI[0] = PrimitiveObjectInspectorFactory.javaStringObjectInspector;
UDFArgumentException wrongType = assertThrows(UDFArgumentException.class, ()->bthUDF.initialize(testStringOI), "Exception is thrown in case of invalid type of parameter");
}
Example 12: BitcoinTransactionHashSegwitUDFInvalidArguments
import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector; // import the required package/class
@Test
public void BitcoinTransactionHashSegwitUDFInvalidArguments() throws HiveException {
BitcoinTransactionHashSegwitUDF bthUDF = new BitcoinTransactionHashSegwitUDF();
UDFArgumentLengthException exNull = assertThrows(UDFArgumentLengthException.class, ()->bthUDF.initialize(null), "Exception is thrown in case of null parameter");
UDFArgumentLengthException exLen = assertThrows(UDFArgumentLengthException.class, ()->bthUDF.initialize(new ObjectInspector[2]), "Exception is thrown in case of invalid length parameter");
StringObjectInspector[] testStringOI = new StringObjectInspector[1];
testStringOI[0] = PrimitiveObjectInspectorFactory.javaStringObjectInspector;
UDFArgumentException wrongType = assertThrows(UDFArgumentException.class, ()->bthUDF.initialize(testStringOI), "Exception is thrown in case of invalid type of parameter");
}
Example 13: asStringOI
import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector; // import the required package/class
@Nonnull
public static StringObjectInspector asStringOI(@Nonnull final ObjectInspector argOI)
throws UDFArgumentException {
if (!STRING_TYPE_NAME.equals(argOI.getTypeName())) {
throw new UDFArgumentException("Argument type must be String: " + argOI.getTypeName());
}
return (StringObjectInspector) argOI;
}
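A short sketch of both outcomes, assuming the helper above is in scope and STRING_TYPE_NAME is Hive's standard "string" type name:
// A java string inspector reports type name "string", so the cast succeeds:
StringObjectInspector ok = asStringOI(PrimitiveObjectInspectorFactory.javaStringObjectInspector);
// An int inspector reports "int", so the call below would throw UDFArgumentException:
// asStringOI(PrimitiveObjectInspectorFactory.javaIntObjectInspector);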
Example 14: init
import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector; // import the required package/class
@Override
public ObjectInspector init(Mode mode, ObjectInspector[] parameters)
throws HiveException {
assert (parameters.length == 2);
super.init(mode, parameters);
// init input
if (mode == Mode.PARTIAL1 || mode == Mode.COMPLETE) {
inputOI = (StringObjectInspector) parameters[0];
nOI = (StringObjectInspector) parameters[1];
} else {
sOI = (StructObjectInspector) parameters[0];
sumField = sOI.getStructFieldRef("sum");
nField = sOI.getStructFieldRef("n");
sumFieldOI = (StringObjectInspector) sumField.getFieldObjectInspector();
nFieldOI = (StringObjectInspector) nField.getFieldObjectInspector();
}
// init output
if (mode == Mode.PARTIAL1 || mode == Mode.PARTIAL2) {
// The output of a partial aggregation is a struct containing
// a "string" sum and a "string" public key n.
ArrayList<ObjectInspector> foi = new ArrayList<ObjectInspector>();
foi.add(PrimitiveObjectInspectorFactory.writableStringObjectInspector);
foi.add(PrimitiveObjectInspectorFactory.writableStringObjectInspector);
ArrayList<String> fname = new ArrayList<String>();
fname.add("sum");
fname.add("n");
partialResult = new Object[2];
partialResult[0] = new Text("0");
partialResult[1] = new Text();
return ObjectInspectorFactory.getStandardStructObjectInspector(fname, foi);
} else {
return PrimitiveObjectInspectorFactory.writableStringObjectInspector;
}
}
Example 15: checkParameterOI
import org.apache.hadoop.hive.serde2.objectinspector.primitive.StringObjectInspector; // import the required package/class
/**
* Checks if hive function call is valid, and defines stringOI attribute.
* @param argOIs The hive function arguments
* @throws UDFArgumentException If function call is invalid
*/
protected void checkParameterOI(ObjectInspector[] argOIs) throws UDFArgumentException {
if (argOIs.length != 1) {
throw new UDFArgumentException("HiveImporter UDTF takes 1 argument: STRING");
}
ObjectInspector arg1 = argOIs[0]; //First parameter, corresponding to OSM XML file path
if (!(arg1 instanceof StringObjectInspector)) {
throw new UDFArgumentException("HiveImporter UDTF takes 1 argument: STRING");
}
this.stringOI = (StringObjectInspector) arg1;
}
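A short sketch of the contract this check enforces, assuming the method above is in scope:
// Exactly one string argument: accepted, and stringOI is set for later use.
ObjectInspector[] good = { PrimitiveObjectInspectorFactory.javaStringObjectInspector };
checkParameterOI(good);
// Wrong arity or a non-string argument: rejected.
ObjectInspector[] bad = { PrimitiveObjectInspectorFactory.javaIntObjectInspector };
// checkParameterOI(bad); // throws UDFArgumentException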