当前位置: 首页>>代码示例>>Java>>正文


Java ASTNode类代码示例

本文整理汇总了Java中org.apache.hadoop.hive.ql.parse.ASTNode的典型用法代码示例。如果您正苦于以下问题:Java ASTNode类的具体用法?Java ASTNode怎么用?Java ASTNode使用的例子?那么, 这里精选的类代码示例或许可以为您提供帮助。


ASTNode类属于org.apache.hadoop.hive.ql.parse包,在下文中一共展示了ASTNode类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。

示例1: rewrite

import org.apache.hadoop.hive.ql.parse.ASTNode; //导入依赖的package包/类
/**
 * Parses the given HiveQL query, rewrites its AST in place and returns the
 * rewritten SQL text.
 *
 * @param sourceQry the original query text
 * @return the rewritten query text produced by {@code toSQL()}
 * @throws RewriteException if the query cannot be parsed
 */
public String rewrite(String sourceQry) throws RewriteException {
    try {
        final ParseDriver driver = new ParseDriver();
        ASTNode ast = driver.parse(sourceQry, queryContext, true);
        ast = ParseUtils.findRootNonNullToken(ast);
        // The rewrite context carries the original text, the AST and the
        // token stream so rewrites can be applied positionally.
        this.rwCtx = new RewriteContext(sourceQry, ast, queryContext.getTokenRewriteStream());
        rewrite(ast);
        return toSQL();
    } catch (ParseException e) {
        LOG.error("Could not parse the query {} ", sourceQry, e);
        throw new RewriteException("Could not parse query : ", e);
    }
}
 
开发者ID:apache,项目名称:incubator-atlas,代码行数:17,代码来源:HiveASTRewriter.java

示例2: getTableNamesForUpdateDelete

import org.apache.hadoop.hive.ql.parse.ASTNode; //导入依赖的package包/类
/**
 * Recursively collects the table names referenced by an UPDATE/DELETE
 * statement AST.
 *
 * @param isrcTableNames output list receiving the collected table names
 * @param input          current AST node; {@code null} is tolerated and ignored
 */
public static void getTableNamesForUpdateDelete(
        List<String> isrcTableNames, ASTNode input) {
    if (input == null) {
        return;
    }
    if (input.getToken().getType() == HiveParser.TOK_TABNAME) {
        // The original code had three byte-identical branches for 1, 2 and 3
        // children; in every case only the first child's text was recorded,
        // so they collapse to a single range check.
        // NOTE(review): for a qualified name (db.table — 2+ children) this
        // records only the first component; confirm that is intended.
        int nameChildCount = input.getChildCount();
        if (nameChildCount >= 1 && nameChildCount <= 3) {
            isrcTableNames.add(input.getChild(0).getText());
        }
        return;
    }
    // Not a table-name node: recurse into every child.
    int childCount = input.getChildCount();
    for (int i = 0; i < childCount; i++) {
        getTableNamesForUpdateDelete(isrcTableNames,
                (ASTNode) input.getChild(i));
    }
}
 
开发者ID:thomas-young-2013,项目名称:wherehowsX,代码行数:25,代码来源:HiveSqlAnalyzer.java

示例3: analyzeSql

import org.apache.hadoop.hive.ql.parse.ASTNode; //导入依赖的package包/类
/**
 * Parses a HiveQL statement and collects its source and destination table
 * names.
 *
 * @param sql            the query text to analyze
 * @param isrcTableNames output list receiving source table names
 * @param idesTableNames output list receiving destination table names
 * @return the detected operation type, or {@code "UNKNOW"} when parsing fails
 */
public static String analyzeSql(String sql, List<String> isrcTableNames, List<String> idesTableNames) {
    try {
        ParseDriver pd = new ParseDriver();
        ASTNode node = pd.parse(sql);
        node = findRootNonNullToken(node);
        return collectTableNamesNew(node, isrcTableNames, idesTableNames);
    } catch (Exception e) {
        // Pass the throwable as the final SLF4J argument so the full stack
        // trace is logged; string concatenation only captured e.toString().
        LOG.warn("analyzeSql sql fail {}", sql, e);
    }
    // NOTE(review): "UNKNOW" looks like a typo for "UNKNOWN", but callers may
    // compare against this exact value, so it is preserved.
    return "UNKNOW";
}
 
开发者ID:thomas-young-2013,项目名称:wherehowsX,代码行数:13,代码来源:HiveSqlAnalyzer.java

示例4: parseHiveType

import org.apache.hadoop.hive.ql.parse.ASTNode; //导入依赖的package包/类
/**
 * Parses a Hive type declaration string (e.g. {@code "array<struct<a:int>>"})
 * into its AST representation.
 *
 * @param hiveType the Hive type string to parse
 * @return the root AST node of the parsed type
 * @throws IllegalArgumentException if the string is not a valid Hive type
 */
private static ASTNode parseHiveType(final String hiveType) {
	try {
		final ParseDriver pd = new ParseDriver();
		final HiveLexer hiveLexer = new HiveLexer(pd.new ANTLRNoCaseStringStream(hiveType));
		final TokenRewriteStream tokens = new TokenRewriteStream(hiveLexer);
		final HiveParser hiveParser = new HiveParser(tokens);
		hiveParser.setTreeAdaptor(ParseDriver.adaptor);
		final ASTNode root = (ASTNode) hiveParser.type().getTree();
		// Clear token offsets so the tree is position-independent.
		root.setUnknownTokenBoundaries();
		return root;
	} catch (Exception e) {
		throw new IllegalArgumentException("invalid type: " + hiveType, e);
	}
}
 
开发者ID:CyberAgent,项目名称:hive-jq-udtf,代码行数:17,代码来源:ObjectInspectors.java

示例5: traverseTableName

import org.apache.hadoop.hive.ql.parse.ASTNode; //导入依赖的package包/类
/**
 * Visit table name
 */
/**
 * Visits the {@code <index>.<type>} table reference in the FROM clause and
 * forwards both parts to the visitor.
 *
 * @throws InvalidQueryException if the FROM clause does not contain exactly
 *         an index and a type
 */
private void traverseTableName() throws InvalidQueryException {
  try {
    final ASTNode tableNode = HQLParser.findNodeByPath(
      rootQueryNode,
      HiveParser.TOK_FROM,
      HiveParser.TOK_TABREF,
      HiveParser.TOK_TABNAME);
    // Both the node and its two-part structure are mandatory.
    Validate.notNull(tableNode, "Index and type not present");
    Validate.isTrue(tableNode.getChildren().size() == 2, "Index and type not present");
    visitor.visitFrom(tableNode.getChild(0).getText(), tableNode.getChild(1).getText());
  } catch (Exception e) {
    throw new InvalidQueryException("Error while traversing table name "
      + "- Expected grammar .. from <index>.<type>", e);
  }
}
 
开发者ID:apache,项目名称:lens,代码行数:21,代码来源:ASTTraverserForES.java

示例6: generate

import org.apache.hadoop.hive.ql.parse.ASTNode; //导入依赖的package包/类
/**
 * Rewrites a two-child TOK_SELEXPR node into a single DOT (cascaded element)
 * child: {@code (child0 . child1.child0)}.
 *
 * <p>Always returns {@code true}, whether or not a rewrite was performed —
 * NOTE(review): presumably "true" signals "continue generation"; confirm
 * against the generator framework's contract.
 */
@Override
public boolean generate(ASTNode hiveRoot, CommonTree sqlRoot, ASTNode currentHiveNode,
    CommonTree currentSqlNode, TranslateContext context) throws SqlXlateException {
  if (currentHiveNode.getChildCount() == 2&&currentHiveNode.getType()==HiveParser.TOK_SELEXPR) {

    ASTNode dot = super.newHiveASTNode(HiveParser.DOT, ".");
    dot.addChild((ASTNode) currentHiveNode.getChild(0));
    // if children count == 2 the second should only be text element
    dot.addChild((ASTNode) currentHiveNode.getChild(1).getChild(0));
    // deleteChild(0) is intentionally called twice: after the first delete
    // the remaining child shifts down to index 0.
    currentHiveNode.deleteChild(0);
    currentHiveNode.deleteChild(0);
    currentHiveNode.addChild(dot);
    LOG.debug("Generated Cascated Element : " + dot.toStringTree());
    return true;
  }
  return true;
}
 
开发者ID:adrian-wang,项目名称:project-panthera-skin,代码行数:18,代码来源:PostCascatedElementGenerator.java

示例7: getMockedCubeContext

import org.apache.hadoop.hive.ql.parse.ASTNode; //导入依赖的package包/类
/**
 * Gets the mocked cube context.
 *
 * @param ast the ast
 * @return the mocked cube context
 * @throws ParseException    the parse exception
 * @throws LensException  the lens exception
 */
/**
 * Gets the mocked cube context.
 *
 * @param ast the ast
 * @return the mocked cube context
 * @throws ParseException    the parse exception
 * @throws LensException  the lens exception
 */
private CubeQueryContext getMockedCubeContext(ASTNode ast) throws ParseException, LensException {
  CubeQueryContext context = Mockito.mock(CubeQueryContext.class);
  if (ast.getToken().getType() == HiveParser.TOK_QUERY) {
    if (((ASTNode) ast.getChild(0)).getToken().getType() == HiveParser.KW_CUBE) {
      // remove cube child from AST
      // Shift every child one position left, overwriting the KW_CUBE node
      // at index 0, then drop the now-duplicated last child.
      for (int i = 0; i < ast.getChildCount() - 1; i++) {
        ast.setChild(i, ast.getChild(i + 1));
      }
      ast.deleteChild(ast.getChildCount() - 1);
    }
  }
  // Render the (possibly trimmed) AST back to HQL and stub the mock so that
  // toHQL()/toAST() reflect this AST.
  StringBuilder builder = new StringBuilder();
  HQLParser.toInfixString(ast, builder);
  Mockito.when(context.toHQL()).thenReturn(builder.toString());
  Mockito.when(context.toAST(Matchers.any(Context.class))).thenReturn(ast);
  return context;
}
 
开发者ID:apache,项目名称:lens,代码行数:26,代码来源:TestRewriting.java

示例8: testLiteralCaseIsPreserved

import org.apache.hadoop.hive.ql.parse.ASTNode; //导入依赖的package包/类
@Test
public void testLiteralCaseIsPreserved() throws Exception {
  String literalQuery = "SELECT 'abc' AS col1, 'DEF' AS col2 FROM foo where col3='GHI' " + "AND col4 = 'JKLmno'";

  ASTNode tree = HQLParser.parseHQL(literalQuery, conf);

  ASTNode select = HQLParser.findNodeByPath(tree, TOK_INSERT, TOK_SELECT);
  String selectStr = HQLParser.getString(select).trim();
  String expectedSelect = "'abc' as `col1`, 'DEF' as `col2`";
  Assert.assertEquals(selectStr, expectedSelect);

  ASTNode where = HQLParser.findNodeByPath(tree, TOK_INSERT, TOK_WHERE);
  String whereStr = HQLParser.getString(where).trim();
  String expectedWhere = "((col3 = 'GHI') and (col4 = 'JKLmno'))";
  Assert.assertEquals(whereStr, expectedWhere);
}
 
开发者ID:apache,项目名称:lens,代码行数:17,代码来源:TestHQLParser.java

示例9: getNumFactTableInExpressions

import org.apache.hadoop.hive.ql.parse.ASTNode; //导入依赖的package包/类
/**
 * Get number of fact columns used in the an expression
 *
 * @param node
 * @param count
 * @return Number of fact columns used in expression
 */
/**
 * Counts how many fact-table column references appear in the given
 * expression subtree, accumulating into {@code count}.
 *
 * @param node  root of the expression AST (may be null)
 * @param count running total, incremented for every fact-column reference
 * @return the accumulated number of fact columns used in the expression
 */
protected int getNumFactTableInExpressions(ASTNode node, MutableInt count) {
  if (node == null) {
    log.debug("ASTNode is null ");
    return 0;
  }
  // A TOK_TABLE_OR_COL whose first child matches the fact alias is a
  // reference to a fact-table column.
  if (node.getToken().getType() == HiveParser.TOK_TABLE_OR_COL) {
    final String referenced = node.getChild(0).getText();
    if (referenced.equals(getFactAlias())) {
      count.add(1);
    }
  }
  final int children = node.getChildCount();
  for (int i = 0; i < children; i++) {
    getNumFactTableInExpressions((ASTNode) node.getChild(i), count);
  }
  return count.intValue();
}
 
开发者ID:apache,项目名称:lens,代码行数:28,代码来源:ColumnarSQLRewriter.java

示例10: getFilterInJoinCond

import org.apache.hadoop.hive.ql.parse.ASTNode; //导入依赖的package包/类
/**
 * Gets the filter in join cond.
 *
 * @param node the node
 */
/**
 * Gets the filter in join cond.
 *
 * <p>For every AND node in the subtree, the right-hand operand is rendered
 * back to HQL and recorded in {@code rightFilter}.
 *
 * @param node the join-condition AST (may be null)
 */
public void getFilterInJoinCond(ASTNode node) {
  if (node == null) {
    log.debug("Join AST is null ");
    return;
  }
  if (node.getToken().getType() == HiveParser.KW_AND) {
    // Right operand of AND is collected as a pushed-down filter condition.
    rightFilter.add(HQLParser.getString((ASTNode) node.getChild(1)));
  }
  for (int i = 0, n = node.getChildCount(); i < n; i++) {
    getFilterInJoinCond((ASTNode) node.getChild(i));
  }
}
 
开发者ID:apache,项目名称:lens,代码行数:24,代码来源:ColumnarSQLRewriter.java

示例11: replaceAlias

import org.apache.hadoop.hive.ql.parse.ASTNode; //导入依赖的package包/类
/**
 * Returns a copy of {@code expr} in which the table part of every
 * {@code table.column} reference is replaced by the alias registered in the
 * query context (when the alias differs from the table name).
 *
 * @param expr   expression AST to copy and rewrite; the input is untouched
 * @param cubeql query context supplying the table-name-to-alias mapping
 * @return the rewritten copy
 * @throws LensException propagated from the AST traversal
 */
private static ASTNode replaceAlias(final ASTNode expr, final CubeQueryContext cubeql) throws LensException {
  final ASTNode copy = MetastoreUtil.copyAST(expr);
  HQLParser.bft(copy, visited -> {
    final ASTNode node = visited.getNode();
    final ASTNode parent = visited.getParent() == null ? null : visited.getParent().getNode();

    // Only rewrite a TOK_TABLE_OR_COL that is the table part of a DOT.
    if (parent == null
        || node.getToken().getType() != TOK_TABLE_OR_COL
        || parent.getToken().getType() != DOT) {
      return;
    }
    final ASTNode tableIdent = (ASTNode) node.getChild(0);
    if (tableIdent.getToken().getType() == Identifier) {
      final String tableName = tableIdent.getToken().getText().toLowerCase();
      final String alias = cubeql.getAliasForTableName(tableName);
      if (!alias.equalsIgnoreCase(tableName)) {
        node.setChild(0, new ASTNode(new CommonToken(HiveParser.Identifier, alias)));
      }
    }
  });
  return copy;
}
 
开发者ID:apache,项目名称:lens,代码行数:23,代码来源:ExpressionResolver.java

示例12: getAllTablesfromFromAST

import org.apache.hadoop.hive.ql.parse.ASTNode; //导入依赖的package包/类
/**
 * Gets the all tablesfrom from ast.
 *
 * @param from       the from
 * @param fromTables the from tables
 * @return the all tablesfrom from ast
 */
/**
 * Gets the all tablesfrom from ast.
 *
 * <p>Each collected entry is {@code "db.table"} or {@code "table"},
 * optionally followed by a space and the table alias.
 *
 * @param from       the from
 * @param fromTables the from tables
 * @return the all tablesfrom from ast
 */
protected void getAllTablesfromFromAST(ASTNode from, ArrayList<String> fromTables) {
  if (from.getToken().getType() == TOK_TABREF) {
    final ASTNode tabName = (ASTNode) from.getChild(0);
    // Two children means the table name is qualified with a database.
    String table = tabName.getChildCount() == 2
        ? tabName.getChild(0).getText() + "." + tabName.getChild(1).getText()
        : tabName.getChild(0).getText();
    // A second child of TOK_TABREF is the table alias.
    if (from.getChildCount() > 1) {
      table = table + " " + from.getChild(1).getText();
    }
    fromTables.add(table);
  }
  for (int i = 0; i < from.getChildCount(); i++) {
    getAllTablesfromFromAST((ASTNode) from.getChild(i), fromTables);
  }
}
 
开发者ID:apache,项目名称:lens,代码行数:28,代码来源:ColumnarSQLRewriter.java

示例13: testEqualsAST

import org.apache.hadoop.hive.ql.parse.ASTNode; //导入依赖的package包/类
@Test
public void testEqualsAST() throws Exception {
  ASTNode expr1 = parseExpr("T1.a + T2.b - T2.c");
  ASTNode expr2 = parseExpr("t1.A + t2.B - t2.C");

  Assert.assertTrue(HQLParser.equalsAST(expr1, expr2));

  ASTNode literalExpr1 = parseExpr("A = 'FooBar'");
  ASTNode literalExpr2 = parseExpr("a = 'FooBar'");
  Assert.assertTrue(HQLParser.equalsAST(literalExpr1, literalExpr2));

  ASTNode literalExpr3 = parseExpr("A = 'fOObAR'");
  Assert.assertFalse(HQLParser.equalsAST(literalExpr1, literalExpr3));

  ASTNode literalExpr4 = parseExpr("A <> 'FooBar'");
  Assert.assertFalse(HQLParser.equalsAST(literalExpr1, literalExpr4));
}
 
开发者ID:apache,项目名称:lens,代码行数:18,代码来源:TestHQLParser.java

示例14: isAggregateDimExprUsedInSelect

import org.apache.hadoop.hive.ql.parse.ASTNode; //导入依赖的package包/类
/**
 * Returns {@code true} when any expression queried in the SELECT clause has
 * at least one expression spec that parses to an aggregate AST.
 *
 * @param cubeql    the cube query context holding queried expressions
 * @param selectAST the TOK_SELECT subtree of the query
 * @throws LensException propagated from expression parsing
 */
private boolean isAggregateDimExprUsedInSelect(CubeQueryContext cubeql, ASTNode selectAST) throws LensException {
  for (int i = 0; i < selectAST.getChildCount(); i++) {
    ASTNode child = (ASTNode) selectAST.getChild(i);
    // NOTE(review): reads grandchild (0,1) of each select expression —
    // presumably the textual key of the selected expression; confirm against
    // how getQueriedExprs() populates its entries.
    String expr = HQLParser.getString((ASTNode) child.getChild(0).getChild(1));
    if (cubeql.getQueriedExprs().contains(expr)) {
      // Check every candidate definition of the queried expression: if any
      // spec is an aggregate, the whole expression counts as aggregate.
      for (ExpressionResolver.ExpressionContext expressionContext
        : cubeql.getExprCtx().getAllExprsQueried().get(expr)) {
        for (ExprColumn.ExprSpec exprSpec : expressionContext.getExprCol().getExpressionSpecs()) {
          ASTNode exprAST = HQLParser.parseExpr(exprSpec.getExpr(), cubeql.getConf());
          if (HQLParser.isAggregateAST(exprAST)) {
            return true;
          }
        }
      }
    }
  }
  return false;
}
 
开发者ID:apache,项目名称:lens,代码行数:19,代码来源:AggregateResolver.java

示例15: trimHavingAndOrderby

import org.apache.hadoop.hive.ql.parse.ASTNode; //导入依赖的package包/类
/**
 * Trims the HAVING and ORDER BY clauses of {@code ast} down to the fields
 * available on the inner cube, removing a clause entirely when nothing of it
 * survives the trim.
 *
 * @param ast       root query AST containing the TOK_INSERT subtree
 * @param innerCube cube whose field names bound what may remain
 */
private void trimHavingAndOrderby(ASTNode ast, Cube innerCube) {
  ASTNode havingAst = HQLParser.findNodeByPath(ast, TOK_INSERT, TOK_HAVING);
  if (havingAst != null) {
    replaceOrRemoveChild(havingAst,
        HQLParser.trimHavingAst(havingAst, innerCube.getAllFieldNames()));
  }
  ASTNode orderByAst = HQLParser.findNodeByPath(ast, TOK_INSERT, TOK_ORDERBY);
  if (orderByAst != null) {
    replaceOrRemoveChild(orderByAst,
        HQLParser.trimOrderByAst(orderByAst, innerCube.getAllFieldNames()));
  }
}

/**
 * Replaces {@code oldNode} with {@code replacement} in its parent, or deletes
 * {@code oldNode} when the trimmed replacement is {@code null}.
 */
private static void replaceOrRemoveChild(ASTNode oldNode, ASTNode replacement) {
  if (replacement != null) {
    oldNode.getParent().setChild(oldNode.getChildIndex(), replacement);
  } else {
    oldNode.getParent().deleteChild(oldNode.getChildIndex());
  }
}
 
开发者ID:apache,项目名称:lens,代码行数:21,代码来源:SegmentationCandidate.java


注:本文中的org.apache.hadoop.hive.ql.parse.ASTNode类示例由纯净天空整理自Github/MSDocs等开源代码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。