This page collects typical usages of the Java method org.apache.hadoop.hive.ql.parse.ASTNode.getChildren. If you are wondering what ASTNode.getChildren does and how to use it, the curated examples below may help. You can also read further about the enclosing class, org.apache.hadoop.hive.ql.parse.ASTNode.
The following shows 12 code examples of ASTNode.getChildren, listed roughly in order of popularity.
Example 1: traverseOrderBy
import org.apache.hadoop.hive.ql.parse.ASTNode; // import the class the method depends on
/**
 * Visits each ORDER BY expression, reporting the column name and sort direction.
 */
private void traverseOrderBy() throws InvalidQueryException {
  try {
    final ASTNode orderByNode = HQLParser.findNodeByPath(rootQueryNode,
        HiveParser.TOK_INSERT, HiveParser.TOK_ORDERBY);
    if (orderByNode != null) {
      for (Node orderBy : orderByNode.getChildren()) {
        // TOK_TABSORTCOLNAMEDESC marks a descending sort; anything else is ascending.
        visitor.visitOrderBy(
            Helper.getColumnNameFrom(Helper.getFirstChild(orderBy)),
            orderBy.getName().equals(String.valueOf(HiveParser.TOK_TABSORTCOLNAMEDESC))
                ? ASTVisitor.OrderBy.DESC
                : ASTVisitor.OrderBy.ASC);
      }
    }
  } catch (Exception e) {
    throw new InvalidQueryException("Exception while parsing order by", e);
  }
}
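For context, rootQueryNode in this and the following traversal examples is the parsed query AST. A minimal sketch of obtaining one, assuming Hive's ParseDriver (the exact signature and return shape vary between Hive versions):

import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.ParseDriver;
import org.apache.hadoop.hive.ql.parse.ParseException;

// Hypothetical setup: parse a query into an AST; its children can then be
// walked with ASTNode.getChildren(), and HQLParser.findNodeByPath (as used
// above) can locate TOK_INSERT / TOK_ORDERBY beneath it.
ASTNode rootQueryNode = new ParseDriver().parse(
    "SELECT id, name FROM t ORDER BY name DESC"); // throws ParseException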
Example 2: getColsForHavingAST
import org.apache.hadoop.hive.ql.parse.ASTNode; // import the class the method depends on
private void getColsForHavingAST(CubeQueryContext cubeql, ASTNode clause) throws LensException {
  if (clause == null) {
    return;
  }
  // Split the having clause into column-level phrases so that it can be pushed
  // to multiple facts if required.
  if (HQLParser.isAggregateAST(clause) || clause.getType() == HiveParser.TOK_TABLE_OR_COL
      || clause.getType() == HiveParser.DOT || clause.getChildCount() == 0) {
    QueriedPhraseContext qur = new QueriedPhraseContext(clause);
    qur.setAggregate(true);
    getColsForTree(cubeql, clause, qur, true);
    cubeql.addColumnsQueried(qur.getTblAliasToColumns());
    cubeql.addQueriedPhrase(qur);
  } else {
    // Recurse into composite expressions (AND/OR, comparisons, etc.).
    for (Node child : clause.getChildren()) {
      getColsForHavingAST(cubeql, (ASTNode) child);
    }
  }
}
Example 3: updateOrderByWithFinalAlias
import org.apache.hadoop.hive.ql.parse.ASTNode; // import the class the method depends on
static void updateOrderByWithFinalAlias(ASTNode orderby, ASTNode select) throws LensException {
  if (orderby == null) {
    return;
  }
  for (Node orderbyNode : orderby.getChildren()) {
    ASTNode orderByChild = (ASTNode) orderbyNode;
    for (Node selectNode : select.getChildren()) {
      ASTNode selectChild = (ASTNode) selectNode;
      if (selectChild.getChildCount() == 2) {
        // The select expression carries an alias; if its expression matches the
        // order-by expression, substitute the alias into the order-by tree.
        if (HQLParser.getString((ASTNode) selectChild.getChild(0))
            .equals(HQLParser.getString((ASTNode) orderByChild.getChild(0)))) {
          ASTNode alias = new ASTNode((ASTNode) selectChild.getChild(1));
          if (!alias.toString().matches("\\S+")) {
            throw new LensException(LensCubeErrorCode.ORDERBY_ALIAS_CONTAINING_WHITESPACE.getLensErrorInfo(), alias);
          }
          orderByChild.replaceChildren(0, 0, alias);
          break;
        }
      }
    }
  }
}
Example 4: newObjectInspectorFromHiveType
import org.apache.hadoop.hive.ql.parse.ASTNode; // import the class the method depends on
private static ObjectInspector newObjectInspectorFromHiveType(final ASTNode type) {
  // Matching by token names, because token IDs (which are static final) change drastically between versions.
  switch (type.getToken().getText()) {
  case "TOK_STRING":
    return PrimitiveObjectInspectorFactory.writableStringObjectInspector;
  case "TOK_INT":
    return PrimitiveObjectInspectorFactory.writableIntObjectInspector;
  case "TOK_DOUBLE":
    return PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
  case "TOK_FLOAT":
    return PrimitiveObjectInspectorFactory.writableFloatObjectInspector;
  case "TOK_BIGINT":
    return PrimitiveObjectInspectorFactory.writableLongObjectInspector;
  case "TOK_BOOLEAN":
    return PrimitiveObjectInspectorFactory.writableBooleanObjectInspector;
  case "TOK_STRUCT": {
    // A struct type has a column-list child whose children are (name, type) pairs.
    final ASTNode tabColList = (ASTNode) type.getChild(0);
    final List<String> names = new ArrayList<>();
    final List<ObjectInspector> ois = new ArrayList<>();
    for (final Node tabCol : tabColList.getChildren()) {
      final ASTNode a = (ASTNode) tabCol;
      names.add(a.getChild(0).toString());
      ois.add(newObjectInspectorFromHiveType((ASTNode) a.getChild(1)));
    }
    return ObjectInspectorFactory.getStandardStructObjectInspector(names, ois);
  }
  case "TOK_MAP": {
    final ObjectInspector keyType = newObjectInspectorFromHiveType((ASTNode) type.getChild(0));
    final ObjectInspector valueType = newObjectInspectorFromHiveType((ASTNode) type.getChild(1));
    return ObjectInspectorFactory.getStandardMapObjectInspector(keyType, valueType);
  }
  case "TOK_LIST": {
    final ObjectInspector itemType = newObjectInspectorFromHiveType((ASTNode) type.getChild(0));
    return ObjectInspectorFactory.getStandardListObjectInspector(itemType);
  }
  default:
    throw new IllegalArgumentException("unsupported type: " + type.toStringTree());
  }
}
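A minimal usage sketch, assuming this private helper is made accessible for testing; the ASTNode is built by hand with ANTLR's CommonToken, since the switch above matches on the token text:

import org.antlr.runtime.CommonToken;
import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.HiveParser;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;

// Hypothetical: construct a TOK_STRING type node and resolve it.
ASTNode stringType = new ASTNode(new CommonToken(HiveParser.TOK_STRING, "TOK_STRING"));
ObjectInspector oi = newObjectInspectorFromHiveType(stringType);
// oi is now PrimitiveObjectInspectorFactory.writableStringObjectInspector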
Example 5: rewrite
import org.apache.hadoop.hive.ql.parse.ASTNode; // import the class the method depends on
private void rewrite(ASTNode origin) throws RewriteException {
  ASTNode node = origin;
  if (node != null) {
    // Apply every registered rewriter to this node, then recurse into children.
    for (ASTRewriter rewriter : rewriters) {
      rewriter.rewrite(rwCtx, node);
    }
    if (node.getChildren() != null) {
      for (int i = 0; i < node.getChildren().size(); i++) {
        rewrite((ASTNode) node.getChild(i));
      }
    }
  }
}
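ASTRewriter and rwCtx are specific to the surrounding project; the call site only tells us the rough contract. A hedged sketch of what the interface presumably looks like (RewriteContext is a hypothetical name):

import org.apache.hadoop.hive.ql.parse.ASTNode;

// Hypothetical contract assumed by rewrite() above: each rewriter mutates
// the visited node in place, sharing state through a context object.
interface ASTRewriter {
  void rewrite(RewriteContext ctx, ASTNode node) throws RewriteException;
}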
Example 6: traverseSelects
import org.apache.hadoop.hive.ql.parse.ASTNode; // import the class the method depends on
/**
 * Visits each select expression, dispatching aggregations, '*' references
 * and simple columns to the corresponding visitor callbacks.
 */
public void traverseSelects() throws InvalidQueryException {
  final ASTNode selectNode = HQLParser.findNodeByPath(rootQueryNode, HiveParser.TOK_INSERT, HiveParser.TOK_SELECT);
  if (selectNode == null) {
    throw new InvalidQueryException("No columns are selected!");
  }
  try {
    for (Node selectExp : selectNode.getChildren()) {
      final Node innerNode = Helper.getFirstChild(selectExp);
      final String alias = Helper.getAliasFromSelectExpr(selectExp);
      if (innerNode.getName().equals(String.valueOf(HiveParser.TOK_FUNCTION))) {
        // Aggregation: the function name is the first child, the column the second.
        Validate.isTrue(innerNode.getChildren().size() == 2);
        visitor.visitAggregation(
            Helper.getFirstChild(innerNode).toString(),
            Helper.getColumnNameFrom(innerNode.getChildren().get(1)),
            alias);
      } else if (innerNode.getName().equals(String.valueOf(HiveParser.TOK_ALLCOLREF))) {
        visitor.visitAllCols();
      } else if (innerNode.getName().equals(String.valueOf(HiveParser.TOK_TABLE_OR_COL))
          || innerNode.toString().equals(".")) {
        visitor.visitSimpleSelect(Helper.getColumnNameFrom(innerNode), alias);
      } else {
        throw new InvalidQueryException(selectExp.getName() + " seems to be invalid");
      }
    }
  } catch (Exception e) {
    throw new InvalidQueryException("Exception while traversing select expressions", e);
  }
}
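For reference, a TOK_SELECT subtree has one TOK_SELEXPR child per select expression, with the expression as the first child and an optional alias identifier as the second. For SELECT count(id) AS cnt, name ... the relevant part of the tree looks roughly like:

TOK_SELECT
  TOK_SELEXPR
    TOK_FUNCTION
      count
      TOK_TABLE_OR_COL
        id
    cnt
  TOK_SELEXPR
    TOK_TABLE_OR_COL
      name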
Example 7: traverseGroupBy
import org.apache.hadoop.hive.ql.parse.ASTNode; // import the class the method depends on
/**
 * Visit group by
 */
private void traverseGroupBy() throws InvalidQueryException {
  try {
    final ASTNode groupByNode = HQLParser.findNodeByPath(rootQueryNode,
        HiveParser.TOK_INSERT, HiveParser.TOK_GROUPBY);
    if (groupByNode != null) {
      for (Node groupBy : groupByNode.getChildren()) {
        visitor.visitGroupBy(Helper.getColumnNameFrom(groupBy));
      }
    }
  } catch (Exception e) {
    throw new InvalidQueryException("Exception while parsing group by", e);
  }
}
Example 8: copyAST
import org.apache.hadoop.hive.ql.parse.ASTNode; // import the class the method depends on
public static ASTNode copyAST(ASTNode original,
    Function<ASTNode, Pair<ASTNode, Boolean>> overrideCopyFunction) {
  // The override function returns the copied node and whether to recurse into children.
  Pair<ASTNode, Boolean> copy1 = overrideCopyFunction.apply(original);
  ASTNode copy = copy1.getLeft();
  if (copy1.getRight()) {
    if (original.getChildren() != null) {
      for (Node o : original.getChildren()) {
        copy.addChild(copyAST((ASTNode) o, overrideCopyFunction));
      }
    }
  }
  return copy;
}
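A minimal usage sketch; the full-copy function below is an assumption, pieced together from the ASTNode copy constructor used elsewhere on this page and org.apache.commons.lang3.tuple.Pair:

import org.apache.commons.lang3.tuple.Pair;
import org.apache.hadoop.hive.ql.parse.ASTNode;

// Hypothetical: deep-copy an AST by cloning every node and always recursing.
ASTNode deepCopy = copyAST(original, node -> Pair.of(new ASTNode(node), true));

Returning Boolean.FALSE from the function prunes recursion, which is what lets a caller swap in a replacement subtree for selected nodes.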
Example 9: processGroupByExpression
import org.apache.hadoop.hive.ql.parse.ASTNode; // import the class the method depends on
/**
 * The group-by AST contains only dimension columns, all of which should already
 * have been projected. Look up the alias of each projected column and add it to
 * the outer group by clause.
 *
 * @param astNode the inner group-by AST
 * @return the outer group-by AST
 * @throws LensException
 */
private ASTNode processGroupByExpression(ASTNode astNode) throws LensException {
  ASTNode outerExpression = new ASTNode(astNode);
  // Iterate over all children of the AST and get the outer AST corresponding to each.
  for (Node child : astNode.getChildren()) {
    // Columns in group by should have been projected, as they are dimension columns.
    if (innerToOuterSelectASTs.containsKey(new HQLParser.HashableASTNode((ASTNode) child))) {
      outerExpression.addChild(innerToOuterSelectASTs.get(new HQLParser.HashableASTNode((ASTNode) child)));
    }
  }
  return outerExpression;
}
Example 10: attachHiveNode
import org.apache.hadoop.hive.ql.parse.ASTNode; // import the class the method depends on
void attachHiveNode(ASTNode hiveRoot, ASTNode currentHiveNode,
    ASTNode ret) {
  currentHiveNode.addChild(ret);
  // If the root is still empty, hang the current subtree off it.
  if (hiveRoot != null && (hiveRoot.getChildren() == null || hiveRoot.getChildren().size() == 0)) {
    hiveRoot.addChild(currentHiveNode);
  }
}
Example 11: getASTNodeChildIndex
import org.apache.hadoop.hive.ql.parse.ASTNode; // import the class the method depends on
public static int getASTNodeChildIndex(ASTNode parent, ASTNode child) {
  List<Node> children = parent.getChildren();
  if (children == null) {
    // Leaf nodes have a null children list.
    return -1;
  }
  for (int i = 0; i < children.size(); i++) {
    // Compare by identity: we want the position of this exact node instance.
    if (child == children.get(i)) {
      return i;
    }
  }
  return -1;
}
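A typical use, sketched with hypothetical parent/oldChild/newChild nodes from the same tree: find the identity-based index, then splice in a replacement via replaceChildren (the same call Example 3 relies on):

// Hypothetical: replace one child of a parent node in place.
int idx = getASTNodeChildIndex(parent, oldChild);
if (idx >= 0) {
  parent.replaceChildren(idx, idx, newChild);
}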
Example 12: processOrderbyExpression
import org.apache.hadoop.hive.ql.parse.ASTNode; // import the class the method depends on
private ASTNode processOrderbyExpression(ASTNode astNode) throws LensException {
  if (astNode == null) {
    return null;
  }
  ASTNode outerExpression = new ASTNode(astNode);
  // A sample ORDER BY AST looks like the following:
  /*
  TOK_ORDERBY
    TOK_TABSORTCOLNAMEDESC
      TOK_NULLS_LAST
        .
          TOK_TABLE_OR_COL
            testcube
          cityid
    TOK_TABSORTCOLNAMEASC
      TOK_NULLS_FIRST
        .
          TOK_TABLE_OR_COL
            testcube
          stateid
    TOK_TABSORTCOLNAMEASC
      TOK_NULLS_FIRST
        .
          TOK_TABLE_OR_COL
            testcube
          zipcode
  */
  for (Node node : astNode.getChildren()) {
    ASTNode child = (ASTNode) node;
    ASTNode outerOrderby = new ASTNode(child);
    ASTNode tokNullsChild = (ASTNode) child.getChild(0);
    ASTNode outerTokNullsChild = new ASTNode(tokNullsChild);
    if (((ASTNode) tokNullsChild.getChild(0)).getToken().getType() == HiveParser.DOT
        || ((ASTNode) tokNullsChild.getChild(0)).getToken().getType() == HiveParser.TOK_FUNCTION) {
      // Column or function references are replaced by their projected outer-select AST.
      outerTokNullsChild.addChild(innerToOuterSelectASTs.get(new HQLParser.HashableASTNode((ASTNode) tokNullsChild)));
    } else {
      outerTokNullsChild.addChild(tokNullsChild);
    }
    outerOrderby.addChild(outerTokNullsChild);
    outerExpression.addChild(outerOrderby);
  }
  return outerExpression;
}
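Most examples above share one guard: ASTNode.getChildren() returns null for leaf nodes rather than an empty list. A self-contained sketch of the resulting traversal pattern (the dump helper itself is hypothetical):

import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.parse.ASTNode;

// Hypothetical: print an AST one node per line, indented by depth.
static void dump(ASTNode node, int depth) {
  StringBuilder indent = new StringBuilder();
  for (int i = 0; i < depth; i++) {
    indent.append("  ");
  }
  System.out.println(indent + node.getToken().getText());
  if (node.getChildren() != null) { // null for leaves, not an empty list
    for (Node child : node.getChildren()) {
      dump((ASTNode) child, depth + 1);
    }
  }
}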