This article collects typical usage examples of the Java class org.apache.hadoop.hive.ql.exec.UDFArgumentException. If you are unsure what UDFArgumentException is for or how to use it, the selected code examples below should help.
The UDFArgumentException class belongs to the org.apache.hadoop.hive.ql.exec package. Fifteen code examples of the class are shown below, sorted by popularity by default.
Example 1: initialize
import org.apache.hadoop.hive.ql.exec.UDFArgumentException; // import the required package/class
private StructObjectInspector initialize(final ObjectInspector jsonArg, final ObjectInspector jqArg,
        final List<ObjectInspector> nameAndTypeArgs) throws UDFArgumentException {
    this.in = Arguments.asString(jsonArg, "JSON");
    try {
        this.jq = JsonQuery.compile(Arguments.asConstantNonNullString(jqArg, "JQ"));
    } catch (final JsonQueryException e) {
        throw new UDFArgumentException("JQ is invalid: " + e.getMessage());
    }
    this.marshaller = ResultObjectMarshallers.create(Arguments.asConstantNonNullStrings(nameAndTypeArgs, "TYPE or NAME:TYPE"));
    this.scope = new Scope();
    this.mapper = new ObjectMapper(new JsonFactory().enable(Feature.ALLOW_UNQUOTED_CONTROL_CHARS));
    return marshaller.objectInspector();
}
Example 2: initialize
import org.apache.hadoop.hive.ql.exec.UDFArgumentException; // import the required package/class
public StructObjectInspector initialize(ObjectInspector[] arg0) throws UDFArgumentException {
    if (arg0.length != 1) {
        throw new UDFArgumentLengthException("ExplodeMap takes only one argument");
    }
    if (arg0[0].getCategory() != ObjectInspector.Category.PRIMITIVE) {
        throw new UDFArgumentException("ExplodeMap takes string as a parameter");
    }
    ArrayList<String> fieldNames = new ArrayList<String>();
    ArrayList<ObjectInspector> fieldOIs = new ArrayList<ObjectInspector>();
    fieldNames.add("col1");
    fieldOIs.add(PrimitiveObjectInspectorFactory.javaStringObjectInspector);
    fieldNames.add("col2");
    fieldOIs.add(PrimitiveObjectInspectorFactory.javaStringObjectInspector);
    return ObjectInspectorFactory.getStandardStructObjectInspector(fieldNames, fieldOIs);
}
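The initialize() above only declares the two output columns; the actual splitting happens in the UDTF's process() method, which is not part of this excerpt. A minimal sketch of what that companion method typically looks like for the explode-map pattern, assuming the input is a single "k1:v1;k2:v2;..." string:

@Override
public void process(Object[] args) throws HiveException {
    // args[0] is the single string argument accepted by initialize() above
    if (args[0] == null) {
        return;
    }
    String input = args[0].toString();
    for (String entry : input.split(";")) {
        String[] kv = entry.split(":", 2);           // "key:value" -> {key, value}
        if (kv.length == 2) {
            forward(new Object[] { kv[0], kv[1] });  // emit one output row per map entry
        }
    }
}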
Example 3: initialize
import org.apache.hadoop.hive.ql.exec.UDFArgumentException; // import the required package/class
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    if (arguments.length != 1) {
        throw new UDFArgumentLengthException(String.format("%s needs 1 argument, got %d", udfName, arguments.length));
    }
    if (arguments[0].getCategory() != Category.PRIMITIVE ||
            ((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory() != inputType) {
        String actual = arguments[0].getCategory() + (arguments[0].getCategory() == Category.PRIMITIVE ?
                "[" + ((PrimitiveObjectInspector) arguments[0]).getPrimitiveCategory() + "]" : "");
        throw new UDFArgumentException(
                String.format("%s only takes primitive type %s, got %s", udfName, inputType, actual));
    }
    argumentOI = arguments[0];
    return PrimitiveObjectInspectorFactory.getPrimitiveJavaObjectInspector(outputType);
}
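The fields udfName, inputType, outputType and argumentOI suggest this initialize() lives in an abstract base class for single-argument primitive UDFs. A hypothetical concrete subclass (not from the original project; the base class name and its protected fields are assumptions) might reuse the type check above and do its work in evaluate() like this:

// Hypothetical subclass that upper-cases a STRING argument. Assumes the base class
// GenericUDFTypedBase exposes udfName, inputType, outputType and argumentOI as protected fields.
// Uses StringObjectInspector from org.apache.hadoop.hive.serde2.objectinspector.primitive.
public class GenericUDFUpper extends GenericUDFTypedBase {
    public GenericUDFUpper() {
        this.udfName = "my_upper";
        this.inputType = PrimitiveCategory.STRING;
        this.outputType = PrimitiveCategory.STRING;
    }

    @Override
    public Object evaluate(DeferredObject[] arguments) throws HiveException {
        Object arg = arguments[0].get();
        if (arg == null) {
            return null;
        }
        // argumentOI was captured in initialize(); for STRING input it is a StringObjectInspector
        String value = ((StringObjectInspector) argumentOI).getPrimitiveJavaObject(arg);
        // a plain Java String matches the Java object inspector returned by initialize()
        return value == null ? null : value.toUpperCase();
    }

    @Override
    public String getDisplayString(String[] children) {
        return "my_upper(" + children[0] + ")";
    }
}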
Example 4: initialize
import org.apache.hadoop.hive.ql.exec.UDFArgumentException; // import the required package/class
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    checkArgsSize(arguments, 2, 2);
    checkArgPrimitive(arguments, 0);
    checkArgPrimitive(arguments, 1);
    checkArgGroups(arguments, 0, inputTypes, STRING_GROUP, DATE_GROUP, VOID_GROUP);
    checkArgGroups(arguments, 1, inputTypes, STRING_GROUP, VOID_GROUP);
    obtainDateConverter(arguments, 0, inputTypes, converters);
    obtainStringConverter(arguments, 1, inputTypes, converters);
    if (arguments[1] instanceof ConstantObjectInspector) {
        String dayOfWeek = getConstantStringValue(arguments, 1);
        isDayOfWeekConst = true;
        dayOfWeekIntConst = getIntDayOfWeek(dayOfWeek);
    }
    ObjectInspector outputOI = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
    return outputOI;
}
Example 5: getIntDayOfWeek
import org.apache.hadoop.hive.ql.exec.UDFArgumentException; // import the required package/class
protected int getIntDayOfWeek(String dayOfWeek) throws UDFArgumentException {
    if (dayOfWeek == null) {
        return -1;
    }
    if (MON.matches(dayOfWeek)) {
        return Calendar.MONDAY;
    }
    if (TUE.matches(dayOfWeek)) {
        return Calendar.TUESDAY;
    }
    if (WED.matches(dayOfWeek)) {
        return Calendar.WEDNESDAY;
    }
    if (THU.matches(dayOfWeek)) {
        return Calendar.THURSDAY;
    }
    if (FRI.matches(dayOfWeek)) {
        return Calendar.FRIDAY;
    }
    if (SAT.matches(dayOfWeek)) {
        return Calendar.SATURDAY;
    }
    if (SUN.matches(dayOfWeek)) {
        return Calendar.SUNDAY;
    }
    return -1;
}
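The MON, TUE, ... constants are not shown in this excerpt. One plausible way to define them (an illustrative sketch, not the original definition) is an enum whose matches() accepts the two-letter, three-letter and full English names of the day:

// Illustrative only: a day-of-week enum whose matches() accepts "mo"/"mon"/"monday" and so on.
enum DayOfWeek {
    MON("monday"), TUE("tuesday"), WED("wednesday"), THU("thursday"),
    FRI("friday"), SAT("saturday"), SUN("sunday");

    private final String fullName;

    DayOfWeek(String fullName) {
        this.fullName = fullName;
    }

    boolean matches(String name) {
        String s = name.trim().toLowerCase();
        return s.equals(fullName)                      // "monday"
                || s.equals(fullName.substring(0, 3))  // "mon"
                || s.equals(fullName.substring(0, 2)); // "mo"
    }
}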
Example 6: initialize
import org.apache.hadoop.hive.ql.exec.UDFArgumentException; // import the required package/class
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    checkArgsSize(arguments, 2, 2);
    checkArgPrimitive(arguments, 0);
    checkArgPrimitive(arguments, 1);
    checkArgGroups(arguments, 0, inputTypes, STRING_GROUP, VOID_GROUP);
    checkArgGroups(arguments, 1, inputTypes, STRING_GROUP, VOID_GROUP);
    obtainStringConverter(arguments, 0, inputTypes, converters);
    obtainStringConverter(arguments, 1, inputTypes, converters);
    ObjectInspector outputOI = PrimitiveObjectInspectorFactory.writableIntObjectInspector;
    return outputOI;
}
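The converters filled in by obtainStringConverter() are consumed later in evaluate(). A rough sketch of that pattern follows; the real computation of this UDF is not shown in the excerpt, so the returned value below is only a stand-in (IntWritable is org.apache.hadoop.io.IntWritable):

@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
    // Normalize both arguments to strings via the converters prepared in initialize()
    Object arg0 = converters[0].convert(arguments[0].get());
    Object arg1 = converters[1].convert(arguments[1].get());
    if (arg0 == null || arg1 == null) {
        return null;
    }
    String s0 = arg0.toString();
    String s1 = arg1.toString();
    // Stand-in computation; the actual UDF derives some integer from the two strings
    return new IntWritable(s0.compareTo(s1));
}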
Example 7: initialize
import org.apache.hadoop.hive.ql.exec.UDFArgumentException; // import the required package/class
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    checkArgsSize(arguments, 2, 2);
    checkArgPrimitive(arguments, 0);
    checkArgPrimitive(arguments, 1);
    checkArgGroups(arguments, 0, inputTypes, STRING_GROUP, DATE_GROUP, VOID_GROUP);
    checkArgGroups(arguments, 1, inputTypes, NUMERIC_GROUP, VOID_GROUP);
    obtainDateConverter(arguments, 0, inputTypes, converters);
    obtainIntConverter(arguments, 1, inputTypes, converters);
    if (arguments[1] instanceof ConstantObjectInspector) {
        numMonthsConst = getConstantIntValue(arguments, 1);
        isNumMonthsConst = true;
    }
    ObjectInspector outputOI = PrimitiveObjectInspectorFactory.writableStringObjectInspector;
    return outputOI;
}
Example 8: evalDate
import org.apache.hadoop.hive.ql.exec.UDFArgumentException; // import the required package/class
private Calendar evalDate(Date d) throws UDFArgumentException {
    calendar.setTime(d);
    if ("MONTH".equals(fmtInput) || "MON".equals(fmtInput) || "MM".equals(fmtInput)) {
        calendar.set(Calendar.DAY_OF_MONTH, 1);
        return calendar;
    } else if ("QUARTER".equals(fmtInput) || "Q".equals(fmtInput)) {
        int month = calendar.get(Calendar.MONTH);
        int quarter = month / 3;
        int monthToSet = quarter * 3;
        calendar.set(Calendar.MONTH, monthToSet);
        calendar.set(Calendar.DAY_OF_MONTH, 1);
        return calendar;
    } else if ("YEAR".equals(fmtInput) || "YYYY".equals(fmtInput) || "YY".equals(fmtInput)) {
        calendar.set(Calendar.MONTH, 0);
        calendar.set(Calendar.DAY_OF_MONTH, 1);
        return calendar;
    } else {
        return null;
    }
}
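To make the quarter branch concrete: Calendar.MONTH is zero-based, so for 2017-08-15 the month is 7, 7 / 3 = 2, and 2 * 3 = 6, i.e. the date is truncated to 2017-07-01. A self-contained, JDK-only illustration of the same arithmetic (not part of the original UDF):

import java.util.Calendar;
import java.util.GregorianCalendar;

public class TruncToQuarterDemo {
    public static void main(String[] args) {
        Calendar calendar = new GregorianCalendar(2017, Calendar.AUGUST, 15); // 2017-08-15
        int month = calendar.get(Calendar.MONTH);      // 7 (zero-based August)
        int quarter = month / 3;                       // 2 (zero-based third quarter)
        calendar.set(Calendar.MONTH, quarter * 3);     // 6 -> July
        calendar.set(Calendar.DAY_OF_MONTH, 1);
        System.out.printf("%1$tY-%1$tm-%1$td%n", calendar); // prints 2017-07-01
    }
}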
Example 9: create
import org.apache.hadoop.hive.ql.exec.UDFArgumentException; // import the required package/class
public static ResultObjectMarshaller create(final List<String> nameAndTypeArgs) throws UDFArgumentException {
    final List<String> columns = new ArrayList<>(nameAndTypeArgs.size());
    final List<ObjectInspector> inspectors = new ArrayList<>(nameAndTypeArgs.size());
    for (int i = 0; i < nameAndTypeArgs.size(); ++i) {
        final Pair<String, ObjectInspector> nameAndType = parseNameAndType(nameAndTypeArgs.get(i), i > 0);
        columns.add(nameAndType._1);
        inspectors.add(nameAndType._2);
    }
    if (columns.size() == 1 && columns.get(0) == null) { // _FUNC_(JSON, 'JQ', 'TYPE') form
        columns.set(0, "col1");
        return new ResultObjectMarshaller(true, ObjectInspectorFactory.getStandardStructObjectInspector(columns, inspectors));
    } else { // _FUNC_(JSON, 'JQ', 'FIELD_1:TYPE_1', ..., 'FIELD_N:TYPE_N') form
        return new ResultObjectMarshaller(false, ObjectInspectorFactory.getStandardStructObjectInspector(columns, inspectors));
    }
}
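parseNameAndType() is a project helper that is not included in this excerpt. A purely hypothetical sketch of what it might do, assuming the project's Pair type has a (name, oi) constructor and the _1/_2 fields used above, and using Hive's TypeInfoUtils (org.apache.hadoop.hive.serde2.typeinfo) to resolve type names such as "string" or "array<int>":

// Hypothetical helper: splits "name:type" (or accepts a bare "type" when the name is optional)
// and resolves the type string with Hive's TypeInfo machinery. The naive split on ':' would
// need smarter parsing for nested struct types.
private static Pair<String, ObjectInspector> parseNameAndType(final String arg, final boolean nameRequired)
        throws UDFArgumentException {
    final int colon = arg.indexOf(':');
    if (colon < 0 && nameRequired) {
        throw new UDFArgumentException("expected NAME:TYPE but got: " + arg);
    }
    final String name = colon < 0 ? null : arg.substring(0, colon);
    final String type = colon < 0 ? arg : arg.substring(colon + 1);
    final ObjectInspector oi = TypeInfoUtils.getStandardJavaObjectInspectorFromTypeInfo(
            TypeInfoUtils.getTypeInfoFromTypeString(type));
    return new Pair<>(name, oi);
}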
Example 10: initialize
import org.apache.hadoop.hive.ql.exec.UDFArgumentException; // import the required package/class
/**
 * Initialize the HiveUDF and create the object inspectors. It requires that arguments has length 1 and that the ObjectInspector of arguments[0] is a StructObjectInspector.
 *
 * @param arguments array of length 1 containing one StructObjectInspector
 *
 * @return ObjectInspector that is able to parse the result of the evaluate method of the UDF (BinaryWritable)
 *
 * @throws org.apache.hadoop.hive.ql.exec.UDFArgumentException in case the first argument is not a StructObjectInspector
 * @throws org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException in case the number of arguments is != 1
 */
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    if (arguments == null) {
        throw new UDFArgumentLengthException("bitcoinTransactionHash only takes one argument: Struct<BitcoinTransaction> ");
    }
    if (arguments.length != 1) {
        throw new UDFArgumentLengthException("bitcoinTransactionHash only takes one argument: Struct<BitcoinTransaction> ");
    }
    if (!(arguments[0] instanceof StructObjectInspector)) {
        throw new UDFArgumentException("first argument must be a Struct containing a BitcoinTransaction");
    }
    this.soi = (StructObjectInspector) arguments[0];
    // these inspectors are only used for BitcoinTransaction structs exported to other formats, such as ORC
    this.wboi = PrimitiveObjectInspectorFactory.writableBinaryObjectInspector;
    this.wioi = PrimitiveObjectInspectorFactory.writableIntObjectInspector;
    this.wloi = PrimitiveObjectInspectorFactory.writableLongObjectInspector;
    // the UDF returns the hash of a BitcoinTransaction as a byte array
    return PrimitiveObjectInspectorFactory.writableBinaryObjectInspector;
}
Example 11: initialize
import org.apache.hadoop.hive.ql.exec.UDFArgumentException; // import the required package/class
@Override
public ObjectInspector initialize(@Nonnull ObjectInspector[] argOIs) throws UDFArgumentException {
    if (argOIs.length != 1 && argOIs.length != 2) {
        throw new UDFArgumentLengthException(
                "The feature_hashing function takes 1 or 2 arguments: " + argOIs.length);
    }
    ObjectInspector argOI0 = argOIs[0];
    this._listOI = HiveUtils.isListOI(argOI0) ? (ListObjectInspector) argOI0 : null;
    if (argOIs.length == 2) {
        String opts = HiveUtils.getConstString(argOIs[1]);
        processOptions(opts);
    }
    if (_listOI == null) {
        return PrimitiveObjectInspectorFactory.writableStringObjectInspector;
    } else {
        return ObjectInspectorFactory.getStandardListObjectInspector(PrimitiveObjectInspectorFactory.writableStringObjectInspector);
    }
}
Example 12: initialize
import org.apache.hadoop.hive.ql.exec.UDFArgumentException; // import the required package/class
/**
 * Initialize the HiveUDF and create the object inspectors. It requires that arguments has length 1 and that the ObjectInspector of arguments[0] is of type org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableBinaryObjectInspector.
 *
 * @param arguments array of length 1 containing one WritableBinaryObjectInspector
 *
 * @return ObjectInspector that is able to parse the result of the evaluate method of the UDF (a list object inspector for Strings)
 *
 * @throws org.apache.hadoop.hive.ql.exec.UDFArgumentException in case the first argument is not a WritableBinaryObjectInspector
 * @throws org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException in case the number of arguments is != 1
 */
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    if (arguments == null) {
        throw new UDFArgumentLengthException("namecoinExtractField only takes one argument: Binary ");
    }
    if (arguments.length != 1) {
        throw new UDFArgumentLengthException("namecoinExtractField only takes one argument: Binary ");
    }
    if (!(arguments[0] instanceof BinaryObjectInspector)) {
        throw new UDFArgumentException("first argument must be a Binary containing a Namecoin script");
    }
    // inspector for the binary argument that holds the Namecoin script
    this.wboi = (BinaryObjectInspector) arguments[0];
    // the UDF returns the extracted Namecoin fields as a list of strings
    return ObjectInspectorFactory.getStandardListObjectInspector(PrimitiveObjectInspectorFactory.writableStringObjectInspector);
}
Example 13: initialize
import org.apache.hadoop.hive.ql.exec.UDFArgumentException; // import the required package/class
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    if (arguments.length != 1) {
        throw new UDFArgumentLengthException(
                "add_bias() takes a single argument: array<string> features");
    }
    switch (arguments[0].getCategory()) {
        case LIST:
            argumentOI = (ListObjectInspector) arguments[0];
            ObjectInspector elmOI = argumentOI.getListElementObjectInspector();
            if (elmOI.getCategory().equals(Category.PRIMITIVE)) {
                if (((PrimitiveObjectInspector) elmOI).getPrimitiveCategory() == PrimitiveCategory.STRING) {
                    break;
                }
            }
            // intentional fall-through when the list elements are not strings
        default:
            throw new UDFArgumentTypeException(0, "Type mismatch: features");
    }
    return ObjectInspectorFactory.getStandardListObjectInspector(argumentOI.getListElementObjectInspector());
}
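For context, the evaluate() side of such a UDF copies the incoming feature strings and appends a constant bias feature. A minimal sketch, assuming the list elements arrive as plain Java Strings (so the element inspector returned by initialize() accepts String values) and that the bias is encoded as the feature string "0:1.0"; the actual constant used by the project is not shown in this excerpt:

@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
    Object featureList = arguments[0].get();
    if (featureList == null) {
        return null;
    }
    List<?> features = argumentOI.getList(featureList);  // argumentOI captured in initialize()
    List<Object> withBias = new ArrayList<Object>(features.size() + 1);
    withBias.addAll(features);
    withBias.add("0:1.0"); // assumed bias encoding: feature index 0 with weight 1.0
    return withBias;
}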
Example 14: validateFeatureOI
import org.apache.hadoop.hive.ql.exec.UDFArgumentException; // import the required package/class
private static void validateFeatureOI(@Nonnull ObjectInspector argOI) throws UDFArgumentException {
    if (!HiveUtils.isPrimitiveOI(argOI)) {
        throw new UDFArgumentException("_FUNC_ expects integer type or string for `feature` but got "
                + argOI.getTypeName());
    }
    final PrimitiveObjectInspector oi = (PrimitiveObjectInspector) argOI;
    switch (oi.getPrimitiveCategory()) {
        case INT:
        case SHORT:
        case LONG:
        case BYTE:
        case STRING:
            break;
        default: {
            throw new UDFArgumentException("_FUNC_ expects integer type or string for `feature` but got "
                    + argOI.getTypeName());
        }
    }
}
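A validator like this is typically called once per argument at the top of initialize(). A minimal usage sketch (not taken from the source project):

@Override
public ObjectInspector initialize(ObjectInspector[] argOIs) throws UDFArgumentException {
    if (argOIs.length != 2) {
        throw new UDFArgumentLengthException("_FUNC_ takes exactly 2 arguments: " + argOIs.length);
    }
    validateFeatureOI(argOIs[0]); // rejects anything that is not an int-family or string feature
    validateFeatureOI(argOIs[1]);
    return PrimitiveObjectInspectorFactory.writableStringObjectInspector;
}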
Example 15: initialize
import org.apache.hadoop.hive.ql.exec.UDFArgumentException; // import the required package/class
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
    final int arglen = arguments.length;
    if (arglen < 1 || arglen > 5) {
        throw new UDFArgumentException("Invalid number of arguments for `tokenize_ja`: " + arglen);
    }
    this._mode = (arglen >= 2) ? tokenizationMode(arguments[1]) : Mode.NORMAL;
    this._stopWords = (arglen >= 3) ? stopWords(arguments[2]) : JapaneseAnalyzer.getDefaultStopSet();
    this._stopTags = (arglen >= 4) ? stopTags(arguments[3]) : JapaneseAnalyzer.getDefaultStopTags();
    this._userDict = (arglen >= 5) ? userDictionary(arguments[4]) : null;
    this._analyzer = null;
    return ObjectInspectorFactory.getStandardListObjectInspector(PrimitiveObjectInspectorFactory.writableStringObjectInspector);
}