本文整理汇总了Java中org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils.getPrimitiveGrouping方法的典型用法代码示例。如果您正苦于以下问题:Java PrimitiveObjectInspectorUtils.getPrimitiveGrouping方法的具体用法?Java PrimitiveObjectInspectorUtils.getPrimitiveGrouping怎么用?Java PrimitiveObjectInspectorUtils.getPrimitiveGrouping使用的例子?那么,这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils的用法示例。
在下文中一共展示了PrimitiveObjectInspectorUtils.getPrimitiveGrouping方法的7个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: checkArgGroups
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils; //导入方法依赖的package包/类
/**
 * Verifies that argument {@code i} belongs to one of the allowed primitive groupings.
 * On success, records the argument's primitive category into {@code inputTypes[i]};
 * otherwise throws with a message listing every acceptable grouping.
 *
 * @param arguments  the UDF argument inspectors; slot {@code i} must be primitive
 * @param i          index of the argument being validated
 * @param inputTypes out-array receiving the resolved category on success
 * @param grps       the groupings this argument may belong to
 * @throws UDFArgumentTypeException if the argument matches none of {@code grps}
 */
public static void checkArgGroups(ObjectInspector[] arguments, int i,
    PrimitiveCategory[] inputTypes, PrimitiveGrouping... grps)
    throws UDFArgumentTypeException {
  PrimitiveCategory actual = ((PrimitiveObjectInspector) arguments[i]).getPrimitiveCategory();
  PrimitiveGrouping actualGroup = PrimitiveObjectInspectorUtils.getPrimitiveGrouping(actual);
  for (PrimitiveGrouping allowed : grps) {
    if (actualGroup == allowed) {
      inputTypes[i] = actual;
      return;
    }
  }
  // No grouping matched: build an error listing all acceptable groups.
  StringBuilder msg = new StringBuilder("_FUNC_ only takes ");
  for (int j = 0; j < grps.length; j++) {
    if (j > 0) {
      msg.append(", ");
    }
    msg.append(grps[j]);
  }
  msg.append(" types as ").append(getArgOrder(i)).append(" argument, got ").append(actual);
  throw new UDFArgumentTypeException(i, msg.toString());
}
示例2: initialize
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils; //导入方法依赖的package包/类
// Resolves the SHA digest variant at initialization time.
// Arg 0: the value to hash (string or binary); arg 1: constant bit length.
// Returns a writable string inspector for the hex-encoded digest.
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
// Exactly two primitive arguments are required.
checkArgsSize(arguments, 2, 2);
checkArgPrimitive(arguments, 0);
checkArgPrimitive(arguments, 1);
// the function should support both string and binary input types
checkArgGroups(arguments, 0, inputTypes, STRING_GROUP, BINARY_GROUP);
checkArgGroups(arguments, 1, inputTypes, NUMERIC_GROUP);
// Pick the converter matching the first argument's group; isStr tells
// evaluate() which converter output to expect at run time.
if (PrimitiveObjectInspectorUtils.getPrimitiveGrouping(inputTypes[0]) == STRING_GROUP) {
obtainStringConverter(arguments, 0, inputTypes, converters);
isStr = true;
} else {
obtainBinaryConverter(arguments, 0, inputTypes, converters);
isStr = false;
}
// The bit-length argument must be a constant so the MessageDigest can be
// instantiated once here rather than per row.
if (arguments[1] instanceof ConstantObjectInspector) {
Integer lenObj = getConstantIntValue(arguments, 1);
if (lenObj != null) {
int len = lenObj.intValue();
// Length 0 is treated as an alias for SHA-256.
if (len == 0) {
len = 256;
}
try {
digest = MessageDigest.getInstance("SHA-" + len);
} catch (NoSuchAlgorithmException e) {
// ignore
// NOTE(review): digest stays null for unsupported lengths; presumably
// evaluate() then yields null instead of failing — confirm against caller.
}
}
} else {
throw new UDFArgumentTypeException(1, getFuncName() + " only takes constant as "
+ getArgOrder(1) + " argument");
}
ObjectInspector outputOI = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
return outputOI;
}
示例3: initialize
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils; //导入方法依赖的package包/类
/**
 * Initializes TRUNC. With two arguments the function either truncates a
 * date-like first argument to the unit named by a string second argument, or
 * truncates a numeric first argument to the scale given by a numeric second
 * argument. With a single numeric argument it delegates to the numeric form.
 *
 * @param arguments one or two primitive object inspectors
 * @return the output inspector produced by initializeDate/initializeNumber
 * @throws UDFArgumentException if the arity or argument types are unsupported
 */
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
  if (arguments.length == 2) {
    inputType1 = ((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory();
    inputType2 = ((PrimitiveObjectInspector) arguments[1]).getPrimitiveCategory();
    // Date form: (date-or-string, string unit). Numeric form: (number, number).
    if ((PrimitiveObjectInspectorUtils
        .getPrimitiveGrouping(inputType1) == PrimitiveGrouping.DATE_GROUP
        || PrimitiveObjectInspectorUtils
            .getPrimitiveGrouping(inputType1) == PrimitiveGrouping.STRING_GROUP)
        && PrimitiveObjectInspectorUtils
            .getPrimitiveGrouping(inputType2) == PrimitiveGrouping.STRING_GROUP) {
      dateTypeArg = true;
      return initializeDate(arguments);
    } else if (PrimitiveObjectInspectorUtils
        .getPrimitiveGrouping(inputType1) == PrimitiveGrouping.NUMERIC_GROUP
        && PrimitiveObjectInspectorUtils
            .getPrimitiveGrouping(inputType2) == PrimitiveGrouping.NUMERIC_GROUP) {
      dateTypeArg = false;
      return initializeNumber(arguments);
    }
    throw new UDFArgumentException("Got wrong argument types : first argument type : "
        + arguments[0].getTypeName() + ", second argument type : " + arguments[1].getTypeName());
  } else if (arguments.length == 1) {
    inputType1 = ((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory();
    if (PrimitiveObjectInspectorUtils
        .getPrimitiveGrouping(inputType1) == PrimitiveGrouping.NUMERIC_GROUP) {
      dateTypeArg = false;
      return initializeNumber(arguments);
    } else {
      // BUG FIX: the original read arguments[1] here; with only one argument
      // that index is out of bounds and raised ArrayIndexOutOfBoundsException
      // instead of the intended type error.
      throw new UDFArgumentException(
          "Only primitive type arguments are accepted, when arguments length is one, got "
              + arguments[0].getTypeName());
    }
  }
  throw new UDFArgumentException("TRUNC requires one or two argument, got " + arguments.length);
}
示例4: checkIfStringGroup
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils; //导入方法依赖的package包/类
/**
 * Ensures argument {@code i} is a string-group (or void) primitive, recording
 * its category into {@code inputTypes[i]} as a side effect.
 *
 * @param arguments the UDF argument inspectors; slot {@code i} must be primitive
 * @param i         index of the argument being validated
 * @param argOrder  human-readable position ("first", "second", ...) for the error
 * @throws UDFArgumentTypeException if the argument is neither STRING_GROUP nor VOID_GROUP
 */
protected void checkIfStringGroup(ObjectInspector[] arguments, int i, String argOrder)
    throws UDFArgumentTypeException {
  PrimitiveCategory category = ((PrimitiveObjectInspector) arguments[i]).getPrimitiveCategory();
  inputTypes[i] = category;
  PrimitiveGrouping group = PrimitiveObjectInspectorUtils.getPrimitiveGrouping(category);
  boolean acceptable = group == PrimitiveGrouping.STRING_GROUP
      || group == PrimitiveGrouping.VOID_GROUP;
  if (!acceptable) {
    throw new UDFArgumentTypeException(i, getFuncName() + " only takes STRING_GROUP types as "
        + argOrder + " argument, got " + inputTypes[i]);
  }
}
示例5: initialize
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils; //导入方法依赖的package包/类
// Initializes an AES encrypt/decrypt UDF. Arg 0 is the payload (string or
// binary, subject to canParam0BeStr()); arg 1 is the key (string or binary,
// constant or per-row). Returns a writable binary inspector.
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
checkArgsSize(arguments, 2, 2);
checkArgPrimitive(arguments, 0);
checkArgPrimitive(arguments, 1);
// the function should support both string and binary input types
if (canParam0BeStr()) {
checkArgGroups(arguments, 0, inputTypes, STRING_GROUP, BINARY_GROUP);
} else {
checkArgGroups(arguments, 0, inputTypes, BINARY_GROUP);
}
checkArgGroups(arguments, 1, inputTypes, STRING_GROUP, BINARY_GROUP);
// Note: the comparison result is assigned to the isStr0 field inside the
// condition; evaluate() uses it to pick the payload converter.
if (isStr0 = PrimitiveObjectInspectorUtils.getPrimitiveGrouping(inputTypes[0]) == STRING_GROUP) {
obtainStringConverter(arguments, 0, inputTypes, converters);
} else {
obtainBinaryConverter(arguments, 0, inputTypes, converters);
}
// A constant key lets us derive the SecretKey once here; otherwise a
// converter is registered and the key is resolved per row.
isKeyConstant = arguments[1] instanceof ConstantObjectInspector;
byte[] key = null;
int keyLength = 0;
// isStr1 (assigned in-condition) records whether the key arg is string-typed.
if (isStr1 = PrimitiveObjectInspectorUtils.getPrimitiveGrouping(inputTypes[1]) == STRING_GROUP) {
if (isKeyConstant) {
String keyStr = getConstantStringValue(arguments, 1);
if (keyStr != null) {
key = keyStr.getBytes();
keyLength = key.length;
}
} else {
obtainStringConverter(arguments, 1, inputTypes, converters);
}
} else {
if (isKeyConstant) {
BytesWritable keyWr = getConstantBytesValue(arguments, 1);
if (keyWr != null) {
key = keyWr.getBytes();
// BytesWritable's backing array may be larger than the data; use getLength().
keyLength = keyWr.getLength();
}
} else {
obtainBinaryConverter(arguments, 1, inputTypes, converters);
}
}
if (key != null) {
secretKey = getSecretKey(key, keyLength);
}
try {
// NOTE(review): "AES" with no mode/padding defaults to AES/ECB/PKCS5Padding
// per the JCA docs; ECB is weak for structured data — confirm this is the
// intended (compatibility-driven) behavior.
cipher = Cipher.getInstance("AES");
} catch (NoSuchPaddingException | NoSuchAlgorithmException e) {
throw new RuntimeException(e);
}
ObjectInspector outputOI = PrimitiveObjectInspectorFactory.writableBinaryObjectInspector;
return outputOI;
}
示例6: initializeDate
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils; //导入方法依赖的package包/类
/**
 * Sets up the date form of TRUNC: validates both arguments are primitive,
 * installs the converter matching the first argument's category
 * (string/timestamp/date), and resolves the format argument — eagerly when it
 * is a constant, otherwise via a per-row text converter.
 *
 * @param arguments exactly two primitive inspectors: a date-like value and a string unit
 * @return a writable string inspector for the truncated date
 * @throws UDFArgumentLengthException if not exactly two arguments
 * @throws UDFArgumentTypeException   if either argument has an unsupported type
 */
private ObjectInspector initializeDate(ObjectInspector[] arguments)
    throws UDFArgumentLengthException, UDFArgumentTypeException {
  if (arguments.length != 2) {
    // FIX: message grammar ("2 argument" -> "2 arguments").
    throw new UDFArgumentLengthException("trunc() requires 2 arguments, got " + arguments.length);
  }
  if (arguments[0].getCategory() != ObjectInspector.Category.PRIMITIVE) {
    // FIX: message grammar ("is passed. as first arguments").
    throw new UDFArgumentTypeException(0, "Only primitive type arguments are accepted but "
        + arguments[0].getTypeName() + " is passed as first argument");
  }
  if (arguments[1].getCategory() != ObjectInspector.Category.PRIMITIVE) {
    throw new UDFArgumentTypeException(1, "Only primitive type arguments are accepted but "
        + arguments[1].getTypeName() + " is passed as second argument");
  }
  ObjectInspector outputOI = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
  inputType1 = ((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory();
  switch (inputType1) {
  case STRING:
  case VARCHAR:
  case CHAR:
  case VOID:
    // All string-like (and void) inputs are normalized to STRING and read
    // through a text converter.
    inputType1 = PrimitiveCategory.STRING;
    textConverter1 = ObjectInspectorConverters.getConverter(arguments[0],
        PrimitiveObjectInspectorFactory.writableStringObjectInspector);
    break;
  case TIMESTAMP:
    timestampConverter = new TimestampConverter((PrimitiveObjectInspector) arguments[0],
        PrimitiveObjectInspectorFactory.writableTimestampObjectInspector);
    break;
  case DATE:
    dateWritableConverter = ObjectInspectorConverters.getConverter(arguments[0],
        PrimitiveObjectInspectorFactory.writableDateObjectInspector);
    break;
  default:
    throw new UDFArgumentTypeException(0,
        "TRUNC() only takes STRING/TIMESTAMP/DATEWRITABLE types as first argument, got "
            + inputType1);
  }
  inputType2 = ((PrimitiveObjectInspector) arguments[1]).getPrimitiveCategory();
  if (PrimitiveObjectInspectorUtils
      .getPrimitiveGrouping(inputType2) != PrimitiveGrouping.STRING_GROUP
      && PrimitiveObjectInspectorUtils
          .getPrimitiveGrouping(inputType2) != PrimitiveGrouping.VOID_GROUP) {
    // FIX: error message named the wrong function ("trunk()" -> "trunc()").
    throw new UDFArgumentTypeException(1,
        "trunc() only takes STRING/CHAR/VARCHAR types as second argument, got " + inputType2);
  }
  inputType2 = PrimitiveCategory.STRING;
  if (arguments[1] instanceof ConstantObjectInspector) {
    // Constant format: capture it once; null constants yield a null fmtInput.
    Object obj = ((ConstantObjectInspector) arguments[1]).getWritableConstantValue();
    fmtInput = obj != null ? obj.toString() : null;
  } else {
    textConverter2 = ObjectInspectorConverters.getConverter(arguments[1],
        PrimitiveObjectInspectorFactory.writableStringObjectInspector);
  }
  return outputOI;
}
示例7: initialize
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils; //导入方法依赖的package包/类
/**
 * Initializes NULLIF(a, b): validates that the two expressions are
 * category-compatible and derives the return inspector from the first
 * argument via the common ReturnObjectInspectorResolver.
 *
 * @param arguments exactly two object inspectors
 * @return the resolved return inspector (the first argument's type)
 * @throws UDFArgumentException if the argument categories/types disagree
 */
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
  argumentOIs = arguments;
  checkArgsSize(arguments, 2, 2);
  returnOIResolver = new GenericUDFUtils.ReturnObjectInspectorResolver(true);
  // Only the first argument drives the return type.
  returnOIResolver.update(arguments[0]);
  if (arguments[0] instanceof PrimitiveObjectInspector) {
    PrimitiveGrouping firstGroup = PrimitiveObjectInspectorUtils
        .getPrimitiveGrouping(((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory());
    PrimitiveGrouping secondGroup = PrimitiveObjectInspectorUtils
        .getPrimitiveGrouping(((PrimitiveObjectInspector) arguments[1]).getPrimitiveCategory());
    if (firstGroup == PrimitiveGrouping.VOID_GROUP) {
      throw new UDFArgumentTypeException(0,
          "NULLIF may not accept types belonging to " + firstGroup + " as first argument");
    }
    // A void second argument is always acceptable; otherwise groups must match.
    if (secondGroup != PrimitiveGrouping.VOID_GROUP && firstGroup != secondGroup) {
      throw new UDFArgumentTypeException(1,
          "The expressions after NULLIF should belong to the same category: \"" + firstGroup
              + "\" is expected but \"" + secondGroup + "\" is found");
    }
  } else {
    // Non-primitive arguments must share the exact type name.
    String typeName0 = arguments[0].getTypeName();
    String typeName1 = arguments[1].getTypeName();
    if (!typeName0.equals(typeName1)) {
      throw new UDFArgumentTypeException(1,
          "The expressions after NULLIF should all have the same type: \"" + typeName0
              + "\" is expected but \"" + typeName1 + "\" is found");
    }
  }
  return returnOIResolver.get();
}