

Java HiveParser Class Code Examples

This article collects typical usage examples of the Java class org.apache.hadoop.hive.ql.parse.HiveParser. If you are wondering what the HiveParser class is for, how it is used, or what working examples look like, the curated class examples below may help.


The HiveParser class belongs to the org.apache.hadoop.hive.ql.parse package. Fifteen code examples of the class are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the site recommend better Java code examples.
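All of the examples below traverse a parsed ASTNode and compare token types against the constants generated into HiveParser (TOK_TABNAME, TOK_QUERY, KW_AND, and so on). As a point of reference, here is a minimal, hypothetical sketch of how such a tree is usually obtained; it assumes the Hive 1.x/2.x API in which ParseDriver.parse() returns an ASTNode, and the query text and class name are illustrative only.

import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.HiveParser;
import org.apache.hadoop.hive.ql.parse.ParseDriver;
import org.apache.hadoop.hive.ql.parse.ParseException;

public class HiveParserTokenDemo {

  public static void main(String[] args) throws ParseException {
    // Hive 1.x/2.x: ParseDriver.parse() returns the root ASTNode of the statement.
    ParseDriver driver = new ParseDriver();
    ASTNode root = driver.parse("SELECT name FROM employees WHERE dept = 'sales'");
    printTree(root, "");
  }

  // Walk the tree and flag nodes whose token type matches a HiveParser constant.
  private static void printTree(ASTNode node, String indent) {
    String text = node.getToken() == null ? "nil" : node.getText();
    String marker = node.getType() == HiveParser.TOK_TABNAME ? "  <-- HiveParser.TOK_TABNAME" : "";
    System.out.println(indent + text + marker);
    for (int i = 0; i < node.getChildCount(); i++) {
      printTree((ASTNode) node.getChild(i), indent + "  ");
    }
  }
}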

Example 1: getTableNamesForUpdateDelete

import org.apache.hadoop.hive.ql.parse.HiveParser; // import the required package/class
public static void getTableNamesForUpdateDelete(
        List<String> isrcTableNames, ASTNode input) {
    if (input == null) {
        return;
    }
    if (input.getToken().getType() == HiveParser.TOK_TABNAME) {
        // For one to three children, the first child's text is recorded in every case.
        if (input.getChildCount() >= 1 && input.getChildCount() <= 3) {
            isrcTableNames.add(input.getChild(0).getText());
            return;
        }
    } else {
        int childCount = input.getChildCount();
        for (int i = 0; i < childCount; i++) {
            getTableNamesForUpdateDelete(isrcTableNames,
                    (ASTNode) input.getChild(i));
        }
    }
}
 
Developer ID: thomas-young-2013, Project: wherehowsX, Lines of code: 25, Source file: HiveSqlAnalyzer.java
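A minimal, hypothetical usage sketch for the helper above (it assumes a Hive 1.x/2.x ParseDriver whose parse() returns an ASTNode; the DELETE statement and class name are illustrative, and HiveSqlAnalyzer's package import is omitted):

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.ParseDriver;

public class UpdateDeleteTableNamesDemo {
  public static void main(String[] args) throws Exception {
    // Parse a DELETE statement into an AST (Hive 1.x/2.x ParseDriver API).
    ParseDriver driver = new ParseDriver();
    ASTNode tree = driver.parse("DELETE FROM orders WHERE order_id = 1");

    List<String> tableNames = new ArrayList<String>();
    HiveSqlAnalyzer.getTableNamesForUpdateDelete(tableNames, tree);
    System.out.println(tableNames); // expected to print [orders]
  }
}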

Example 2: parseHiveType

import org.apache.hadoop.hive.ql.parse.HiveParser; // import the required package/class
private static ASTNode parseHiveType(final String hiveType) {
	try {
		final ParseDriver driver = new ParseDriver();
		final HiveLexer lexer = new HiveLexer(driver.new ANTLRNoCaseStringStream(hiveType));
		final HiveParser parser = new HiveParser(new TokenRewriteStream(lexer));
		parser.setTreeAdaptor(ParseDriver.adaptor);

		final HiveParser.type_return type = parser.type();

		final ASTNode ast = (ASTNode) type.getTree();
		ast.setUnknownTokenBoundaries();
		return ast;
	} catch (Exception e) {
		throw new IllegalArgumentException("invalid type: " + hiveType, e);
	}
}
 
Developer ID: CyberAgent, Project: hive-jq-udtf, Lines of code: 17, Source file: ObjectInspectors.java
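parseHiveType is private in its defining class, so the following standalone sketch shows the same pattern for turning a Hive type string into an AST; it assumes the ANTLR 3-based HiveLexer/HiveParser of Hive 1.x/2.x, and the type string and class name are illustrative.

import org.antlr.runtime.TokenRewriteStream;
import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.HiveLexer;
import org.apache.hadoop.hive.ql.parse.HiveParser;
import org.apache.hadoop.hive.ql.parse.ParseDriver;

public class HiveTypeAstDemo {
  public static void main(String[] args) throws Exception {
    ParseDriver driver = new ParseDriver();
    // ANTLRNoCaseStringStream is ParseDriver's case-insensitive character stream.
    HiveLexer lexer = new HiveLexer(driver.new ANTLRNoCaseStringStream("map<string,int>"));
    HiveParser parser = new HiveParser(new TokenRewriteStream(lexer));
    parser.setTreeAdaptor(ParseDriver.adaptor);

    ASTNode ast = (ASTNode) parser.type().getTree();
    // For "map<string,int>" the root token should be TOK_MAP with the key and value types as children.
    System.out.println(ast.dump());
  }
}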

Example 3: getTblAliasNameFromTabRef

import org.apache.hadoop.hive.ql.parse.HiveParser; // import the required package/class
/**
 * Get the table alias name from a table reference subtree.
 *
 * @param node the table reference AST node
 * @return the first table or subquery alias found, or null if none
 */
public static String getTblAliasNameFromTabRef(ASTNode node) {
  // return the first tableRef or subq alias found
  if (node.getType() == HiveParser.TOK_SUBQUERY) {
    return node.getChild(1).getText();
  } else if (node.getType() == HiveParser.TOK_TABNAME) {
    if (node.getChildCount() == 1) {
      return node.getChild(0).getText();
    } else {
      return node.getChild(1).getText();
    }
  }
  for (int i = 0; i < node.getChildCount(); i++) {
    String tab = getTblAliasNameFromTabRef((ASTNode) node.getChild(i));
    if (tab != null) {
      return tab;
    }
  }
  return null;
}
 
Developer ID: adrian-wang, Project: project-panthera-skin, Lines of code: 26, Source file: SqlXlateUtil.java
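A hypothetical usage sketch: because the method recurses and returns the first alias it finds, the whole parsed query tree can be passed in directly (Hive 1.x/2.x ParseDriver assumed; the query text and class name are illustrative, and SqlXlateUtil's package import is omitted).

import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.ParseDriver;

public class TabRefAliasDemo {
  public static void main(String[] args) throws Exception {
    ParseDriver driver = new ParseDriver();
    ASTNode tree = driver.parse("SELECT name FROM mydb.employees");

    // The first TOK_TABNAME found has two children (db, table),
    // so the second child ("employees") is returned.
    String alias = SqlXlateUtil.getTblAliasNameFromTabRef(tree);
    System.out.println(alias); // expected to print employees
  }
}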

Example 4: isCubeTableNode

import org.apache.hadoop.hive.ql.parse.HiveParser; // import the required package/class
private static boolean isCubeTableNode(ASTNode node, HiveConf conf) throws LensException {
  if (node.getType() == HiveParser.TOK_TABREF || node.getType() == HiveParser.TOK_TABNAME) {
    return isCubeTableNode((ASTNode) node.getChild(0), conf);
  }
  if (node.getText().contains("JOIN")) {
    if (isCubeTableNode((ASTNode) node.getChild(0), conf)) {
      for (int i = 1; i < node.getChildCount(); i += 2) {
        if (!isCubeTableNode((ASTNode) node.getChild(i), conf)) {
          return false;
        }
      }
      return true;
    }
  }
  return node.getType() == HiveParser.Identifier && getClient(conf).isLensQueryableTable(node.getText());
}
 
Developer ID: apache, Project: lens, Lines of code: 17, Source file: RewriteUtil.java

Example 5: getMockedCubeContext

import org.apache.hadoop.hive.ql.parse.HiveParser; // import the required package/class
/**
 * Gets the mocked cube context.
 *
 * @param ast the ast
 * @return the mocked cube context
 * @throws ParseException    the parse exception
 * @throws LensException  the lens exception
 */
private CubeQueryContext getMockedCubeContext(ASTNode ast) throws ParseException, LensException {
  CubeQueryContext context = Mockito.mock(CubeQueryContext.class);
  if (ast.getToken().getType() == HiveParser.TOK_QUERY) {
    if (((ASTNode) ast.getChild(0)).getToken().getType() == HiveParser.KW_CUBE) {
      // remove cube child from AST
      for (int i = 0; i < ast.getChildCount() - 1; i++) {
        ast.setChild(i, ast.getChild(i + 1));
      }
      ast.deleteChild(ast.getChildCount() - 1);
    }
  }
  StringBuilder builder = new StringBuilder();
  HQLParser.toInfixString(ast, builder);
  Mockito.when(context.toHQL()).thenReturn(builder.toString());
  Mockito.when(context.toAST(Matchers.any(Context.class))).thenReturn(ast);
  return context;
}
 
Developer ID: apache, Project: lens, Lines of code: 26, Source file: TestRewriting.java

Example 6: getWhereString

import org.apache.hadoop.hive.ql.parse.HiveParser; // import the required package/class
private ArrayList<String> getWhereString(ASTNode node, ArrayList<String> filters) throws LensException {

  if (node == null) {
    return null;
  }
  if (node.getToken().getType() == HiveParser.KW_AND) {
    // left child is "and" and right child is subquery
    if (node.getChild(0).getType() == HiveParser.KW_AND) {
      filters.add(getfilterSubquery(node, 1));
    } else if (node.getChildCount() > 1) {
      for (int i = 0; i < node.getChildCount(); i++) {
        filters.add(getfilterSubquery(node, i));
      }
    }
  } else if (node.getParent().getType() == HiveParser.TOK_WHERE
    && node.getToken().getType() != HiveParser.KW_AND) {
    filters.add(HQLParser.getString(node, HQLParser.AppendMode.DEFAULT));
  }
  // Note: this loop returns after recursing into the first child only.
  for (int i = 0; i < node.getChildCount(); i++) {
    ASTNode child = (ASTNode) node.getChild(i);
    return getWhereString(child, filters);
  }
  return filters;
}
 
Developer ID: apache, Project: lens, Lines of code: 25, Source file: DruidSQLRewriter.java

Example 7: isExpressionsAnswerableFromFact

import org.apache.hadoop.hive.ql.parse.HiveParser; // import the required package/class
/**
 * Check whether the expression is answerable from the fact; if so, it can be pushed to the fact pushdown subquery.
 *
 * @param node the select expression node
 * @return true if the expressions are answerable from the fact
 */
public boolean isExpressionsAnswerableFromFact(ASTNode node) {
  boolean isAnswerable = true;
  for (int i = 0; i < node.getChildCount(); i++) {
    if (node.getChild(i).getType() == HiveParser.TOK_SELEXPR) {
      int cnt = getColumnCount((ASTNode) node.getChild(i));
      if (cnt >= 2) {
        if (cnt == getNumFactTableInExpressions((ASTNode) node.getChild(i), new MutableInt(0))) {
          isAnswerable = true;
        } else {
          isAnswerable = false;
        }
      }
    }
  }
  return isAnswerable;
}
 
Developer ID: apache, Project: lens, Lines of code: 23, Source file: ColumnarSQLRewriter.java

Example 8: getNumFactTableInExpressions

import org.apache.hadoop.hive.ql.parse.HiveParser; // import the required package/class
/**
 * Get the number of fact columns used in an expression
 *
 * @param node
 * @param count
 * @return Number of fact columns used in expression
 */
protected int getNumFactTableInExpressions(ASTNode node, MutableInt count) {

  if (node == null) {
    log.debug("ASTNode is null ");
    return 0;
  }
  if (node.getToken().getType() == HiveParser.TOK_TABLE_OR_COL) {
    String factAlias = getFactAlias();
    String table = node.getChild(0).getText();
    if (table.equals(factAlias)) {
      count.add(1);
    }
  }
  for (int i = 0; i < node.getChildCount(); i++) {
    ASTNode child = (ASTNode) node.getChild(i);
    getNumFactTableInExpressions(child, count);
  }

  return count.intValue();
}
 
Developer ID: apache, Project: lens, Lines of code: 28, Source file: ColumnarSQLRewriter.java

Example 9: getFilterInJoinCond

import org.apache.hadoop.hive.ql.parse.HiveParser; // import the required package/class
/**
 * Gets the filter in join cond.
 *
 * @param node the node
 */
public void getFilterInJoinCond(ASTNode node) {

  if (node == null) {
    log.debug("Join AST is null ");
    return;
  }

  if (node.getToken().getType() == HiveParser.KW_AND) {
    ASTNode right = (ASTNode) node.getChild(1);
    String filterCond = HQLParser.getString(right);
    rightFilter.add(filterCond);
  }

  for (int i = 0; i < node.getChildCount(); i++) {
    ASTNode child = (ASTNode) node.getChild(i);
    getFilterInJoinCond(child);
  }
}
 
Developer ID: apache, Project: lens, Lines of code: 24, Source file: ColumnarSQLRewriter.java

Example 10: updateOuterHavingAST

import org.apache.hadoop.hive.ql.parse.HiveParser; // import the required package/class
/**
 * Update the outer HAVING AST so that it references the projected alias names.
 *
 * @param node the HAVING AST node
 * @return the updated node
 */
private ASTNode updateOuterHavingAST(ASTNode node) {
  if (node.getToken().getType() == HiveParser.TOK_FUNCTION
      && (HQLParser.isAggregateAST(node))) {
    if (innerToOuterSelectASTs.containsKey(new HQLParser.HashableASTNode(node))
        || innerToOuterHavingASTs.containsKey(new HQLParser.HashableASTNode(node))) {
      ASTNode expr = innerToOuterSelectASTs.containsKey(new HQLParser.HashableASTNode(node))
          ? innerToOuterSelectASTs.get(new HQLParser.HashableASTNode(node))
          : innerToOuterHavingASTs.get(new HQLParser.HashableASTNode(node));
      if (node.getChildCount() > 1) {
        node.replaceChildren(1, 1, expr.getChild(1));
      } else {
        node.replaceChildren(0, 0, expr);
      }
    }
  }
  for (int i = 0; i < node.getChildCount(); i++) {
    ASTNode child = (ASTNode) node.getChild(i);
    updateOuterHavingAST(child);
  }
  return node;
}
 
Developer ID: apache, Project: lens, Lines of code: 28, Source file: UnionQueryWriter.java

Example 11: updateOuterASTDuplicateAliases

import org.apache.hadoop.hive.ql.parse.HiveParser; // import the required package/class
private void updateOuterASTDuplicateAliases(ASTNode node, Map<String, List<String>> aliasMap) {
  if (node.getToken().getType() == HiveParser.DOT) {
    String col = node.getChild(1).toString();
    for (Map.Entry<String, List<String>> entry : aliasMap.entrySet()) {
      if (entry.getValue().contains(col)) {
        try {
          node.setChild(1, HQLParser.parseExpr(entry.getKey()));
        } catch (LensException e) {
          log.error("Unable to parse select expression: {}.", entry.getKey());
        }
      }

    }
  }
  for (int i = 0; i < node.getChildCount(); i++) {
    ASTNode child = (ASTNode) node.getChild(i);
    updateOuterASTDuplicateAliases(child, aliasMap);
  }
}
 
Developer ID: apache, Project: lens, Lines of code: 20, Source file: UnionQueryWriter.java

Example 12: getColsForHavingAST

import org.apache.hadoop.hive.ql.parse.HiveParser; // import the required package/class
private void getColsForHavingAST(CubeQueryContext cubeql, ASTNode clause) throws LensException {
  if (clause == null) {
    return;
  }

  // Split the having clause phrases to column level so that the having clause can be pushed to multiple facts if required.
  if (HQLParser.isAggregateAST(clause) || clause.getType() == HiveParser.TOK_TABLE_OR_COL
    || clause.getType() == HiveParser.DOT || clause.getChildCount() == 0) {
    QueriedPhraseContext qur = new QueriedPhraseContext(clause);
    qur.setAggregate(true);
    getColsForTree(cubeql, clause, qur, true);
    cubeql.addColumnsQueried(qur.getTblAliasToColumns());
    cubeql.addQueriedPhrase(qur);
  } else {
    for (Node child : clause.getChildren()) {
      getColsForHavingAST(cubeql, (ASTNode)child);
    }
  }
}
 
Developer ID: apache, Project: lens, Lines of code: 20, Source file: ColumnResolver.java

Example 13: transform

import org.apache.hadoop.hive.ql.parse.HiveParser; // import the required package/class
private ASTNode transform(CubeQueryContext cubeql, ASTNode parent, ASTNode node, int nodePos) throws LensException {
  if (node == null) {
    return node;
  }
  int nodeType = node.getToken().getType();

  if (!(HQLParser.isAggregateAST(node))) {
    if (nodeType == HiveParser.TOK_TABLE_OR_COL || nodeType == HiveParser.DOT) {
      // Leaf node
      ASTNode wrapped = wrapAggregate(cubeql, node);
      if (wrapped != node) {
        if (parent != null) {
          parent.setChild(nodePos, wrapped);
        } else {
          return wrapped;
        }
      }
    } else {
      // Dig deeper in non-leaf nodes
      for (int i = 0; i < node.getChildCount(); i++) {
        transform(cubeql, node, (ASTNode) node.getChild(i), i);
      }
    }
  }
  return node;
}
 
Developer ID: apache, Project: lens, Lines of code: 27, Source file: AggregateResolver.java

Example 14: generate

import org.apache.hadoop.hive.ql.parse.HiveParser; // import the required package/class
@Override
public boolean generate(ASTNode hiveRoot, CommonTree sqlRoot, ASTNode currentHiveNode,
    CommonTree currentSqlNode, TranslateContext context) throws SqlXlateException {

  ASTNode ret = SqlXlateUtil.newASTNode(HiveParser.Identifier, currentSqlNode.getText());
  super.attachHiveNode(hiveRoot, currentHiveNode, ret);

  CommonTree node = (CommonTree) (currentSqlNode.getChildCount() == 1 ? currentSqlNode
      .getChild(0) : currentSqlNode.getChild(1));
  if (node.getType() == PantheraParser_PLSQLParser.ASTERISK) {
    return true;
  }
  ASTNode hiveNode = new ASTNode();
  GeneratorFactory.getGenerator(node).generateHiveAST(null, null, hiveNode, node,
      context);
  super.attachHiveNode(hiveRoot, currentHiveNode, (ASTNode) hiveNode.getChild(0));
  return true;
}
 
Developer ID: adrian-wang, Project: project-panthera-skin, Lines of code: 19, Source file: CountGenerator.java

Example 15: isDistinctClauseUsed

import org.apache.hadoop.hive.ql.parse.HiveParser; // import the required package/class
private boolean isDistinctClauseUsed(ASTNode node) {
  if (node == null) {
    return false;
  }
  if (node.getToken() != null) {
    if (node.getToken().getType() == HiveParser.TOK_FUNCTIONDI
      || node.getToken().getType() == HiveParser.TOK_SELECTDI) {
      return true;
    }
  }
  for (int i = 0; i < node.getChildCount(); i++) {
    if (isDistinctClauseUsed((ASTNode) node.getChild(i))) {
      return true;
    }
  }
  return false;
}
 
Developer ID: apache, Project: lens, Lines of code: 18, Source file: AggregateResolver.java
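isDistinctClauseUsed is private to AggregateResolver, but the two token types it checks are easy to observe on a parsed query. A minimal sketch under the same Hive 1.x/2.x ParseDriver assumption (query text and class name are illustrative):

import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.HiveParser;
import org.apache.hadoop.hive.ql.parse.ParseDriver;

public class DistinctTokenDemo {
  public static void main(String[] args) throws Exception {
    ParseDriver driver = new ParseDriver();
    // SELECT DISTINCT produces a TOK_SELECTDI node; count(DISTINCT x) produces TOK_FUNCTIONDI.
    ASTNode selectDistinct = driver.parse("SELECT DISTINCT dept FROM employees");
    ASTNode countDistinct = driver.parse("SELECT count(DISTINCT dept) FROM employees");

    System.out.println(containsType(selectDistinct, HiveParser.TOK_SELECTDI));  // true
    System.out.println(containsType(countDistinct, HiveParser.TOK_FUNCTIONDI)); // true
  }

  private static boolean containsType(ASTNode node, int tokenType) {
    if (node.getType() == tokenType) {
      return true;
    }
    for (int i = 0; i < node.getChildCount(); i++) {
      if (containsType((ASTNode) node.getChild(i), tokenType)) {
        return true;
      }
    }
    return false;
  }
}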


Note: The org.apache.hadoop.hive.ql.parse.HiveParser class examples in this article were compiled by 纯净天空 from GitHub/MSDocs and other open-source code and documentation platforms. The code snippets are selected from open-source projects contributed by their respective authors, and copyright remains with the original authors; for distribution and use, please refer to the license of the corresponding project. Do not reproduce without permission.