This article collects typical usage examples of the Java class org.apache.hadoop.hive.ql.exec.FunctionRegistry. If you are wondering how to use FunctionRegistry in Java, or are simply looking for concrete examples, the selected code samples below may help.
The FunctionRegistry class belongs to the org.apache.hadoop.hive.ql.exec package. Six code examples of the class are shown below, drawn from different projects.
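Before the individual examples, here is a minimal, self-contained sketch of the lookup pattern most of them rely on: listing the registered function names and asking the registry whether a name resolves to an aggregate function. The class name FunctionRegistrySketch and the use of "count" as a probe are illustrative only, and the exceptions declared by getGenericUDAFResolver can vary with the Hive version on the classpath.

import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
import org.apache.hadoop.hive.ql.parse.SemanticException;

public class FunctionRegistrySketch {
  public static void main(String[] args) throws SemanticException {
    // Number of function names currently registered with Hive.
    System.out.println("Registered functions: " + FunctionRegistry.getFunctionNames().size());

    // A non-null GenericUDAFResolver means the name refers to an aggregate function.
    boolean isAggregate = FunctionRegistry.getGenericUDAFResolver("count") != null;
    System.out.println("count is an aggregate: " + isAggregate);
  }
}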
Example 1: isAggregateAST
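This helper decides whether an AST node represents an aggregate function call: for TOK_FUNCTION, TOK_FUNCTIONDI and TOK_FUNCTIONSTAR nodes it returns true when FunctionRegistry knows a GenericUDAFResolver for the function name.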
import org.apache.hadoop.hive.ql.exec.FunctionRegistry; // import the required package/class
public static boolean isAggregateAST(ASTNode node) {
  int exprTokenType = node.getType();
  if (exprTokenType == HiveParser.TOK_FUNCTION || exprTokenType == HiveParser.TOK_FUNCTIONDI
      || exprTokenType == HiveParser.TOK_FUNCTIONSTAR) {
    assert (node.getChildCount() != 0);
    if (node.getChild(0).getType() == HiveParser.Identifier) {
      String functionName = BaseSemanticAnalyzer.unescapeIdentifier(node.getChild(0).getText());
      try {
        if (FunctionRegistry.getGenericUDAFResolver(functionName) != null) {
          return true;
        }
      } catch (SemanticException e) {
        log.error("Error trying to find whether {} is aggregate.", getString(node), e);
        return false;
      }
    }
  }
  return false;
}
Example 2: isNonAggregateFunctionAST
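The counterpart of Example 1: the node is considered a non-aggregate (plain UDF) function call when FunctionRegistry has no GenericUDAFResolver registered for its name.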
import org.apache.hadoop.hive.ql.exec.FunctionRegistry; // import the required package/class
public static boolean isNonAggregateFunctionAST(ASTNode node) {
  int exprTokenType = node.getType();
  if (exprTokenType == HiveParser.TOK_FUNCTION || exprTokenType == HiveParser.TOK_FUNCTIONDI
      || exprTokenType == HiveParser.TOK_FUNCTIONSTAR) {
    assert (node.getChildCount() != 0);
    if (node.getChild(0).getType() == HiveParser.Identifier) {
      String functionName = BaseSemanticAnalyzer.unescapeIdentifier(node.getChild(0).getText());
      try {
        if (FunctionRegistry.getGenericUDAFResolver(functionName) == null) {
          return true;
        }
      } catch (SemanticException e) {
        log.error("Error trying to find whether {} is udf node.", getString(node), e);
        return false;
      }
    }
  }
  return false;
}
Example 3: HiveAuthzBindingHook
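This constructor of an authorization hook initializes the binding from the session's HiveConf and, at the end, calls FunctionRegistry.setupPermissionsForBuiltinUDFs with a blacklist to restrict which built-in UDFs may be used.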
import org.apache.hadoop.hive.ql.exec.FunctionRegistry; // import the required package/class
public HiveAuthzBindingHook() throws Exception {
  SessionState session = SessionState.get();
  if (session == null) {
    throw new IllegalStateException("Session has not been started");
  }
  // HACK: clear the authorizer to force Hive's Auth V2 code path
  SessionState.get().setAuthorizer(null);
  HiveConf hiveConf = session.getConf();
  if (hiveConf == null) {
    throw new IllegalStateException("Session HiveConf is null");
  }
  authzConf = loadAuthzConf(hiveConf);
  hiveAuthzBinding = new HiveAuthzBinding(hiveConf, authzConf);
  String serdeWhiteLists = authzConf.get(HiveAuthzConf.HIVE_SENTRY_SERDE_WHITELIST,
      HiveAuthzConf.HIVE_SENTRY_SERDE_WHITELIST_DEFAULT);
  serdeWhiteList = Arrays.asList(serdeWhiteLists.split(","));
  serdeURIPrivilegesEnabled = authzConf.getBoolean(HiveAuthzConf.HIVE_SENTRY_SERDE_URI_PRIVILIEGES_ENABLED,
      HiveAuthzConf.HIVE_SENTRY_SERDE_URI_PRIVILIEGES_ENABLED_DEFAULT);
  FunctionRegistry.setupPermissionsForBuiltinUDFs("", HiveAuthzConf.HIVE_UDF_BLACK_LIST);
}
Example 4: initialize
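This GenericUDF initialize method uses FunctionRegistry.getCommonClassForComparison to derive a common comparison type across all arguments, falling back to double when no common type exists, and builds converters to that type.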
import org.apache.hadoop.hive.ql.exec.FunctionRegistry; // import the required package/class
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
  if (arguments.length < 2) {
    throw new UDFArgumentLengthException(getFuncName() + " requires at least 2 arguments, got "
        + arguments.length);
  }
  if (arguments[0].getCategory() != ObjectInspector.Category.PRIMITIVE) {
    throw new UDFArgumentException(getFuncName() + " only takes primitive types, got "
        + arguments[0].getTypeName());
  }
  argumentOIs = arguments;
  converters = new Converter[arguments.length];
  TypeInfo commonInfo = TypeInfoUtils.getTypeInfoFromObjectInspector(arguments[0]);
  for (int i = 1; i < arguments.length; i++) {
    PrimitiveTypeInfo currInfo = (PrimitiveTypeInfo) TypeInfoUtils.getTypeInfoFromObjectInspector(arguments[i]);
    commonInfo = FunctionRegistry.getCommonClassForComparison(commonInfo, currInfo);
  }
  resultOI = TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(
      (commonInfo == null) ? TypeInfoFactory.doubleTypeInfo : commonInfo);
  for (int i = 0; i < arguments.length; i++) {
    converters[i] = ObjectInspectorConverters.getConverter(arguments[i], resultOI);
  }
  return resultOI;
}
Example 5: setup
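A test setup method; the leading FunctionRegistry.getFunctionNames() call presumably just forces the registry's static initialization before the Accumulo SerDe job configuration is built.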
import org.apache.hadoop.hive.ql.exec.FunctionRegistry; // import the required package/class
private void setup() {
  FunctionRegistry.getFunctionNames();
  conf = new JobConf();
  conf.set(serdeConstants.LIST_COLUMNS, "field1,rid");
  conf.set(serdeConstants.LIST_COLUMN_TYPES, "string,string");
  conf.set(AccumuloSerde.COLUMN_MAPPINGS, "cf|f1,rowID");
}
Example 6: evaluate
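This GenericUDF evaluate method looks up a UDAF at runtime via FunctionRegistry.getGenericUDAFResolver, initializes its evaluator in COMPLETE mode, and folds the elements of the input list through it.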
import org.apache.hadoop.hive.ql.exec.FunctionRegistry; // import the required package/class
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
  List<?> inputs = (List<?>) ObjectInspectorUtils.copyToStandardObject(arguments[0].get(), listOI);
  if (inputs == null) {
    return null;
  }
  if (evaluator == null) {
    Text functionName = (Text) ObjectInspectorUtils.copyToStandardObject(arguments[1].get(), stringOI);
    if (functionName == null) {
      throw new HiveException("Function name cannot be null.");
    }
    GenericUDAFResolver resolver = FunctionRegistry.getGenericUDAFResolver(functionName.toString());
    if (resolver == null) {
      throw new HiveException("Could not find function with name " + functionName.toString());
    }
    ObjectInspector[] objectInspectorArray = new ObjectInspector[1];
    objectInspectorArray[0] = elementOI;
    TypeInfo[] typeInfoArray = new TypeInfo[1];
    typeInfoArray[0] = TypeInfoUtils.getTypeInfoFromObjectInspector(elementOI);
    evaluator = resolver.getEvaluator(typeInfoArray);
    converter = ObjectInspectorConverters.getConverter(
        evaluator.init(GenericUDAFEvaluator.Mode.COMPLETE, objectInspectorArray),
        elementOI);
    buffer = evaluator.getNewAggregationBuffer();
  }
  evaluator.reset(buffer);
  for (Object input : inputs) {
    inputArray[0] = input;
    evaluator.iterate(buffer, inputArray);
  }
  return converter.convert(evaluator.terminate(buffer));
}