

Java FunctionRegistry Class Code Examples

This article collects typical usage examples of the Java class org.apache.hadoop.hive.ql.exec.FunctionRegistry. If you have been wondering what FunctionRegistry is for, or how to use it in practice, the curated examples below should help.


FunctionRegistry belongs to the org.apache.hadoop.hive.ql.exec package. Six code examples of the class are shown below, sorted by popularity by default.

Example 1: isAggregateAST

import org.apache.hadoop.hive.ql.exec.FunctionRegistry; // import the dependent package/class
public static boolean isAggregateAST(ASTNode node) {
  int exprTokenType = node.getType();
  if (exprTokenType == HiveParser.TOK_FUNCTION || exprTokenType == HiveParser.TOK_FUNCTIONDI
    || exprTokenType == HiveParser.TOK_FUNCTIONSTAR) {
    assert (node.getChildCount() != 0);
    if (node.getChild(0).getType() == HiveParser.Identifier) {
      String functionName = BaseSemanticAnalyzer.unescapeIdentifier(node.getChild(0).getText());
      try {
        if (FunctionRegistry.getGenericUDAFResolver(functionName) != null) {
          return true;
        }
      } catch (SemanticException e) {
        log.error("Error trying to find whether {} is aggregate.", getString(node), e);
        return false;
      }
    }
  }

  return false;
}
 
Developer: apache | Project: lens | Lines: 21 | Source: HQLParser.java
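
For context, the check above boils down to a single registry lookup: a non-null UDAF resolver means the identifier names an aggregate. A minimal sketch, assuming a Hive version where getGenericUDAFResolver declares SemanticException (as the catch block above suggests); the class name AggregateCheckDemo is ours:

import org.apache.hadoop.hive.ql.exec.FunctionRegistry;

public class AggregateCheckDemo {
  public static void main(String[] args) throws Exception {
    // A non-null UDAF resolver means the name denotes an aggregate function.
    System.out.println(FunctionRegistry.getGenericUDAFResolver("count") != null);  // true: count is a UDAF
    System.out.println(FunctionRegistry.getGenericUDAFResolver("concat") != null); // false: concat is a plain UDF
  }
}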

Example 2: isNonAggregateFunctionAST

import org.apache.hadoop.hive.ql.exec.FunctionRegistry; // import the dependent package/class
public static boolean isNonAggregateFunctionAST(ASTNode node) {
  int exprTokenType = node.getType();
  if (exprTokenType == HiveParser.TOK_FUNCTION || exprTokenType == HiveParser.TOK_FUNCTIONDI
    || exprTokenType == HiveParser.TOK_FUNCTIONSTAR) {
    assert (node.getChildCount() != 0);
    if (node.getChild(0).getType() == HiveParser.Identifier) {
      String functionName = BaseSemanticAnalyzer.unescapeIdentifier(node.getChild(0).getText());
      try {
        if (FunctionRegistry.getGenericUDAFResolver(functionName) == null) {
          return true;
        }
      } catch (SemanticException e) {
        log.error("Error trying to find whether {} is udf node.", getString(node), e);
        return false;
      }
    }
  }
  return false;
}
 
Developer: apache | Project: lens | Lines: 20 | Source: HQLParser.java
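
Note that this helper is the mirror image of Example 1: a recognized function identifier whose UDAF resolver lookup returns null is treated as a non-aggregate function, so the sketch after Example 1 exercises both paths (count vs. concat).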

Example 3: HiveAuthzBindingHook

import org.apache.hadoop.hive.ql.exec.FunctionRegistry; // import the dependent package/class
public HiveAuthzBindingHook() throws Exception {
  SessionState session = SessionState.get();
  if(session == null) {
    throw new IllegalStateException("Session has not been started");
  }
  // HACK: clear the session's authorizer so Hive is forced onto the AuthZ V2 path
  SessionState.get().setAuthorizer(null);

  HiveConf hiveConf = session.getConf();
  if(hiveConf == null) {
    throw new IllegalStateException("Session HiveConf is null");
  }
  authzConf = loadAuthzConf(hiveConf);
  hiveAuthzBinding = new HiveAuthzBinding(hiveConf, authzConf);

  String serdeWhiteLists = authzConf.get(HiveAuthzConf.HIVE_SENTRY_SERDE_WHITELIST,
      HiveAuthzConf.HIVE_SENTRY_SERDE_WHITELIST_DEFAULT);
  serdeWhiteList = Arrays.asList(serdeWhiteLists.split(","));
  serdeURIPrivilegesEnabled = authzConf.getBoolean(HiveAuthzConf.HIVE_SENTRY_SERDE_URI_PRIVILIEGES_ENABLED,
      HiveAuthzConf.HIVE_SENTRY_SERDE_URI_PRIVILIEGES_ENABLED_DEFAULT);

  FunctionRegistry.setupPermissionsForBuiltinUDFs("", HiveAuthzConf.HIVE_UDF_BLACK_LIST);
}
 
Developer: apache | Project: incubator-sentry | Lines: 24 | Source: HiveAuthzBindingHook.java
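
The last line delegates built-in UDF permissioning to Hive. A standalone sketch of that call, assuming Hive 1.2+ where FunctionRegistry exposes setupPermissionsForBuiltinUDFs(whiteListStr, blackListStr); the black list below is illustrative rather than Sentry's actual default:

import org.apache.hadoop.hive.ql.exec.FunctionRegistry;

public class UdfBlacklistDemo {
  public static void main(String[] args) {
    // An empty white list allows every built-in UDF that is not black-listed;
    // the black list is a comma-separated set of built-in UDF names.
    FunctionRegistry.setupPermissionsForBuiltinUDFs("", "reflect,reflect2,java_method");
  }
}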

Example 4: initialize

import org.apache.hadoop.hive.ql.exec.FunctionRegistry; // import the dependent package/class
@Override
public ObjectInspector initialize(ObjectInspector[] arguments) throws UDFArgumentException {
  if (arguments.length < 2) {
    throw new UDFArgumentLengthException(getFuncName() + " requires at least 2 arguments, got "
      + arguments.length);
  }
  if (arguments[0].getCategory() != ObjectInspector.Category.PRIMITIVE) {
    throw new UDFArgumentException(getFuncName() + " only takes primitive types, got "
      + arguments[0].getTypeName());
  }

  argumentOIs = arguments;
  converters = new Converter[arguments.length];

  TypeInfo commonInfo = TypeInfoUtils.getTypeInfoFromObjectInspector(arguments[0]);

  for (int i = 1; i < arguments.length; i++) {
    PrimitiveTypeInfo currInfo = (PrimitiveTypeInfo) TypeInfoUtils.getTypeInfoFromObjectInspector(arguments[i]);

    commonInfo = FunctionRegistry.getCommonClassForComparison(
      commonInfo, currInfo);
  }

  resultOI = TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(
    (commonInfo == null) ?
      TypeInfoFactory.doubleTypeInfo : commonInfo);

  for (int i = 0; i < arguments.length; i++) {
    converters[i] = ObjectInspectorConverters.getConverter(arguments[i], resultOI);
  }

  return resultOI;
}
 
Developer: myui | Project: hive-udf-backports | Lines: 34 | Source: GenericUDFBaseNwayCompare.java
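
The loop above folds all argument types into a single comparison type via FunctionRegistry.getCommonClassForComparison, falling back to double when no common type exists. A small sketch of that call in isolation (the class name CommonTypeDemo is ours):

import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class CommonTypeDemo {
  public static void main(String[] args) {
    // int and double values are compared as double, so double is the common type.
    TypeInfo common = FunctionRegistry.getCommonClassForComparison(
        TypeInfoFactory.intTypeInfo, TypeInfoFactory.doubleTypeInfo);
    System.out.println(common.getTypeName()); // expected: double
  }
}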

Example 5: setup

import org.apache.hadoop.hive.ql.exec.FunctionRegistry; // import the dependent package/class
private void setup() {
  FunctionRegistry.getFunctionNames();
  conf = new JobConf();
  conf.set(serdeConstants.LIST_COLUMNS, "field1,rid");
  conf.set(serdeConstants.LIST_COLUMN_TYPES, "string,string");
  conf.set(AccumuloSerde.COLUMN_MAPPINGS, "cf|f1,rowID");
}
 
Developer: bfemiano | Project: accumulo-hive-storage-manager | Lines: 8 | Source: PredicateHandlerTest.java
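
The getFunctionNames() call in setup() appears to serve only to force FunctionRegistry's static initialization (loading Hive's built-in functions) before the test runs; its return value is discarded. A sketch of what the call yields (the class name RegistryInitDemo is ours):

import java.util.Set;

import org.apache.hadoop.hive.ql.exec.FunctionRegistry;

public class RegistryInitDemo {
  public static void main(String[] args) {
    // Triggers static initialization and returns the registered function names.
    Set<String> names = FunctionRegistry.getFunctionNames();
    System.out.println(names.contains("count")); // true once built-ins are loaded
  }
}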

Example 6: evaluate

import org.apache.hadoop.hive.ql.exec.FunctionRegistry; // import the dependent package/class
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
  List<?> inputs = (List<?>)
    ObjectInspectorUtils.copyToStandardObject(arguments[0].get(), listOI);

  if (inputs == null) {
    return null;
  }

  if (evaluator == null) {
    Text functionName = 
      (Text)ObjectInspectorUtils.copyToStandardObject(arguments[1].get(), stringOI);

    if (functionName == null) {
      throw new HiveException("Function name cannot be null.");
    }

    GenericUDAFResolver resolver =
      FunctionRegistry.getGenericUDAFResolver(functionName.toString());
    if (resolver == null) {
      throw new HiveException("Could not find function with name " + 
                              functionName.toString());
    }
    
    ObjectInspector[] objectInspectorArray = new ObjectInspector[1];
    objectInspectorArray[0] = elementOI;
   
    TypeInfo[] typeInfoArray = new TypeInfo[1];
    typeInfoArray[0] = 
      TypeInfoUtils.getTypeInfoFromObjectInspector(elementOI);
    
    evaluator = resolver.getEvaluator(typeInfoArray);
    converter = ObjectInspectorConverters.getConverter(
      evaluator.init(GenericUDAFEvaluator.Mode.COMPLETE, objectInspectorArray),
      elementOI);
    buffer = evaluator.getNewAggregationBuffer();
  } 

  evaluator.reset(buffer);

  for (Object input : inputs) {
    inputArray[0] = input;
    evaluator.iterate(buffer, inputArray);
  }

  return converter.convert(evaluator.terminate(buffer));
}
 
Developer: brndnmtthws | Project: facebook-hive-udfs | Lines: 48 | Source: UDFArrayAggregate.java
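
The lazy evaluator construction above can be exercised on its own. A minimal sketch that resolves Hive's built-in sum aggregate for a single double argument, mirroring the resolver/getEvaluator steps in evaluate() (the class name ResolveUdafDemo is ours):

import org.apache.hadoop.hive.ql.exec.FunctionRegistry;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDAFResolver;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class ResolveUdafDemo {
  public static void main(String[] args) throws Exception {
    GenericUDAFResolver resolver = FunctionRegistry.getGenericUDAFResolver("sum");
    // Pick the evaluator overload that matches one double parameter.
    GenericUDAFEvaluator eval =
        resolver.getEvaluator(new TypeInfo[] { TypeInfoFactory.doubleTypeInfo });
    System.out.println(eval.getClass().getName());
  }
}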


Note: The org.apache.hadoop.hive.ql.exec.FunctionRegistry examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from community-contributed open-source projects, and copyright remains with the original authors. Please consult each project's License before using or distributing the code, and do not republish without permission.