This article collects typical usage examples of the Java class org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc: what the class is for, how it is used, and what real-world code that uses it looks like.
The ExprNodeGenericFuncDesc class belongs to the org.apache.hadoop.hive.ql.plan package. Fifteen code examples of the class are shown below, ordered roughly by popularity.
Example 1: addChildNode
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc; // import of the required package/class
@Override
public void addChildNode( final ExprNodeGenericFuncDesc exprNodeDesc ){
  GenericUDF udf = exprNodeDesc.getGenericUDF();
  if( udf instanceof GenericUDFOPAnd ){
    childNodeList.add( new HiveExprAndNode( exprNodeDesc.getChildren() ) );
  }
  else if( udf instanceof GenericUDFOPOr ){
    childNodeList.add( new HiveExprOrNode( exprNodeDesc.getChildren() ) );
  }
  else if( udf instanceof GenericUDFOPNot ){
    childNodeList.add( new HiveExprNotNode( exprNodeDesc.getChildren() ) );
  }
  else{
    childNodeList.add( HiveExprFactory.get( exprNodeDesc , udf , exprNodeDesc.getChildren() ) );
  }
}
Example 2: decomposePredicate
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc; // import of the required package/class
/**
 * Decompose the predicates (filter expressions) provided in the Hive query and, if some
 * predicates can be pushed down to Monarch, use them at query time to reduce the
 * data queried from Monarch (Geode). The residual predicates (the ones that cannot
 * be executed on Monarch/Geode) will need to be executed in the Hive query engine.
 * <p>
 * The predicates to be executed on Monarch are decided by the column type and
 * predicate operation. Following is the current list supported for execution
 * on the Monarch/Geode side (as of 2015-12-23):
 * - Predicate Operations:
 *   -- EQUAL
 *   -- LESS THAN
 *   -- LESS THAN OR EQUAL
 * - Column Types:
 *   -- INT
 *   -- LONG
 *   -- STRING
 *
 * @param jobConf      the job configuration
 * @param deserializer the deserializer
 * @param exprNodeDesc the Hive expression to be decomposed
 * @return the decomposed predicate indicating which predicates will be executed on Monarch
 *         and which (residual) predicates will be executed by the Hive query engine
 */
public static DecomposedPredicate decomposePredicate(final JobConf jobConf,
                                                     final MonarchSerDe deserializer,
                                                     final ExprNodeDesc exprNodeDesc) {
  List<IndexSearchCondition> indexSearchConditions = new ArrayList<>(5);
  IndexPredicateAnalyzer ipa = getIndexPredicateAnalyzer(deserializer);
  ExprNodeDesc residual = ipa.analyzePredicate(exprNodeDesc, indexSearchConditions);
  ipa.clearAllowedColumnNames();
  if (indexSearchConditions.isEmpty()) {
    if (logger.isDebugEnabled())
      logger.debug("nothing to decompose. Returning");
    return null;
  }
  DecomposedPredicate dp = new DecomposedPredicate();
  dp.pushedPredicate = ipa.translateSearchConditions(indexSearchConditions);
  dp.residualPredicate = (ExprNodeGenericFuncDesc) residual;
  dp.pushedPredicateObject = null;
  if (logger.isDebugEnabled()) {
    logger.debug("[To Monarch -->] PushedPredicate= {}", dp.pushedPredicate);
    logger.debug("[In Hive -->] ResidualPredicate= {}", dp.residualPredicate);
  }
  return dp;
}
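A minimal usage sketch of the decomposition above, assuming jobConf is the task's JobConf and serDe is a MonarchSerDe initialized for a table with an INT column c1 (the tests near the end of this page simply pass null for the JobConf); the expression classes come from org.apache.hadoop.hive.ql.plan and the UDFs from org.apache.hadoop.hive.ql.udf.generic:

// Build the Hive expression c1 = 10 by hand; the column name and constant are illustrative.
List<ExprNodeDesc> children = new ArrayList<>(2);
children.add(new ExprNodeColumnDesc(TypeInfoFactory.intTypeInfo, "c1", null, false));
children.add(new ExprNodeConstantDesc(TypeInfoFactory.intTypeInfo, 10));
ExprNodeDesc predicate = new ExprNodeGenericFuncDesc(
    TypeInfoFactory.booleanTypeInfo, new GenericUDFOPEqual(), children);

DecomposedPredicate dp = MonarchPredicateHandler.decomposePredicate(jobConf, serDe, predicate);
// dp.pushedPredicate   -> evaluated on Monarch/Geode (EQUAL on an INT column is supported)
// dp.residualPredicate -> evaluated by the Hive query engine (null here, nothing is left over)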
Example 3: convertToExpression
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc; // import of the required package/class
/**
 * Convert generic Ampool filter(s) to the corresponding generic UDF(s).
 *
 * @param filter the Ampool filters
 * @param td     the Ampool table descriptor
 * @return the generic ORC predicates
 */
public static ExprNodeDesc convertToExpression(final Filter filter, final TableDescriptor td)
    throws IOException {
  if (filter instanceof FilterList) {
    FilterList fl = (FilterList) filter;
    ExprNodeDesc expression = new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo,
        fl.getOperator() == FilterList.Operator.MUST_PASS_ALL ? new GenericUDFOPAnd()
            : new GenericUDFOPOr(),
        new ArrayList<>());
    for (Filter f : fl.getFilters()) {
      expression.getChildren().add(convertToExpression(f, td));
    }
    return expression;
  } else if (filter instanceof SingleColumnValueFilter) {
    SingleColumnValueFilter cf = (SingleColumnValueFilter) filter;
    if (!UDF_CONVERT_MAP.containsKey(cf.getOperator())) {
      throw new IOException("Failed to convert ComparisonOperator: " + cf.getOperator());
    }
    return UDF_CONVERT_MAP.get(cf.getOperator()).apply(cf, td);
  } else {
    return null;
  }
}
Example 4: pushPredicate
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc; // import of the required package/class
public DecomposedPredicate pushPredicate(Map<String, String> hiveTypeMapping,
                                         ExprNodeDesc predicate) {
  log.info("Checking predicates for pushdown in DynamoDB query");
  List<IndexSearchCondition> searchConditions =
      getGenericSearchConditions(hiveTypeMapping, predicate);
  log.info("Pushed predicates: " + searchConditions);
  if (searchConditions.isEmpty()) {
    return null;
  } else {
    List<IndexSearchCondition> finalSearchCondition =
        prioritizeSearchConditions(searchConditions);
    IndexPredicateAnalyzer analyzer = new IndexPredicateAnalyzer();
    DecomposedPredicate decomposedPredicate = new DecomposedPredicate();
    decomposedPredicate.pushedPredicate =
        analyzer.translateSearchConditions(finalSearchCondition);
    decomposedPredicate.residualPredicate = (ExprNodeGenericFuncDesc) predicate;
    return decomposedPredicate;
  }
}
Example 5: dumpFilterExpr
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc; // import of the required package/class
protected static void dumpFilterExpr(ExprNodeDesc node) {
  if (node != null) {
    log.debug("dump: " + node.getClass().getName() + " name=" + node.getName()
        + " expr=" + node.getExprString() + "[ ");
    if (node instanceof ExprNodeGenericFuncDesc) {
      log.debug(" func=" + ((ExprNodeGenericFuncDesc) node).getGenericUDF());
    }
    List<ExprNodeDesc> children = node.getChildren();
    if (children != null) {
      for (ExprNodeDesc child : children) {
        if (child != null) dumpFilterExpr(child);
        log.debug(",");
      }
    }
    log.debug("]");
  }
}
Example 6: HiveExprAndNode
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc; // import of the required package/class
public HiveExprAndNode( final List<ExprNodeDesc> childExprNodeDesc ){
  for( ExprNodeDesc nodeChild : childExprNodeDesc ){
    if( nodeChild instanceof ExprNodeGenericFuncDesc ){
      addChildNode( (ExprNodeGenericFuncDesc)nodeChild );
    }
    else if( ( nodeChild instanceof ExprNodeColumnDesc ) || ( nodeChild instanceof ExprNodeFieldDesc ) ){
      childNodeList.add( new BooleanHiveExpr( nodeChild ) );
    }
    else{
      childNodeList.add( new UnsupportHiveExpr() );
    }
  }
}
Example 7: HiveExprOrNode
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc; // import of the required package/class
public HiveExprOrNode( final List<ExprNodeDesc> childExprNodeDesc ){
  for( ExprNodeDesc nodeChild : childExprNodeDesc ){
    if( nodeChild instanceof ExprNodeGenericFuncDesc ){
      addChildNode( (ExprNodeGenericFuncDesc)nodeChild );
    }
    else if( ( nodeChild instanceof ExprNodeColumnDesc ) || ( nodeChild instanceof ExprNodeFieldDesc ) ){
      childNodeList.add( new BooleanHiveExpr( nodeChild ) );
    }
    else{
      childNodeList.add( new UnsupportHiveExpr() );
    }
  }
}
Example 8: HiveExprNotNode
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc; // import of the required package/class
public HiveExprNotNode( final List<ExprNodeDesc> childExprNodeDesc ){
  for( ExprNodeDesc nodeChild : childExprNodeDesc ){
    if( nodeChild instanceof ExprNodeGenericFuncDesc ){
      addChildNode( (ExprNodeGenericFuncDesc)nodeChild );
    }
    else if( ( nodeChild instanceof ExprNodeColumnDesc ) || ( nodeChild instanceof ExprNodeFieldDesc ) ){
      childNodeList.add( new BooleanHiveExpr( nodeChild ) );
    }
    else{
      childNodeList.add( new UnsupportHiveExpr() );
    }
  }
}
Example 9: getExtractNode
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc; // import of the required package/class
public static IExtractNode getExtractNode( final ExprNodeDesc target ){
  if( target instanceof ExprNodeGenericFuncDesc ){
    return getExtractNodeFromGenericFunc( (ExprNodeGenericFuncDesc)target );
  }
  else if( target instanceof ExprNodeFieldDesc ){
    return getExtractNodeFromField( (ExprNodeFieldDesc)target );
  }
  else if( target instanceof ExprNodeColumnDesc ){
    if( ( (ExprNodeColumnDesc)target ).getIsPartitionColOrVirtualCol() ){
      return null;
    }
    return getExtractNodeFromColumn( (ExprNodeColumnDesc)target );
  }
  return null;
}
Example 10: getExtractNodeFromGenericFunc
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc; // import of the required package/class
public static IExtractNode getExtractNodeFromGenericFunc( final ExprNodeGenericFuncDesc target ){
  GenericUDF udf = target.getGenericUDF();
  if( ! ( udf instanceof GenericUDFIndex ) ){
    return null;
  }
  return getExtractNodeFromGenericIndex( target , (GenericUDFIndex)udf );
}
Example 11: createExpressionNode
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc; // import of the required package/class
public IExpressionNode createExpressionNode( final List<ExprNodeGenericFuncDesc> filterExprs ){
  HiveExprOrNode hiveOrNode = new HiveExprOrNode();
  for( ExprNodeGenericFuncDesc filterExpr : filterExprs ){
    if( filterExpr != null ){
      hiveOrNode.addChildNode( filterExpr );
    }
  }
  return hiveOrNode.getPushDownFilterNode();
}
Example 12: deserializeExpression
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc; // import of the required package/class
/**
 * Deserialize the expression using the Hive-version-specific utility classes.
 * @param expression the expression to deserialize
 * @return the deserialized ExprNodeGenericFuncDesc, or null if deserialization fails
 */
public static ExprNodeGenericFuncDesc deserializeExpression(String expression){
  try {
    return (ExprNodeGenericFuncDesc) deserializeMethod.invoke(null, expression);
  } catch (IllegalAccessException | InvocationTargetException e) {
    logger.error("Exception in invoking deserialization method.", e);
  }
  logger.info("Pushdown filters are skipped as the respective deserialization expression method was not found.");
  return null;
}
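The deserializeMethod field used above is expected to be resolved elsewhere via reflection so the same code works across Hive versions: Hive 2.x exposes deserializeExpression(String) on org.apache.hadoop.hive.ql.exec.SerializationUtilities, while Hive 1.x has it on org.apache.hadoop.hive.ql.exec.Utilities. A minimal sketch of such a lookup; the method name resolveDeserializeMethod and the fallback order are assumptions for illustration:

private static java.lang.reflect.Method resolveDeserializeMethod() {
  // Try the Hive 2.x location first, then fall back to the Hive 1.x one.
  for (String className : new String[]{
      "org.apache.hadoop.hive.ql.exec.SerializationUtilities",
      "org.apache.hadoop.hive.ql.exec.Utilities"}) {
    try {
      return Class.forName(className).getMethod("deserializeExpression", String.class);
    } catch (ReflectiveOperationException ignored) {
      // Class or method not present in this Hive version; try the next candidate.
    }
  }
  return null; // no known deserializer on the classpath; pushdown will be skipped
}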
Example 13: testPredicate_Multiple_1
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc; // import of the required package/class
@Test
public void testPredicate_Multiple_1() {
  ExprNodeDesc expr1 = getExprNodeDesc(TypeInfoFactory.intTypeInfo, "c1", 10,
      TypeInfoFactory.booleanTypeInfo, new GenericUDFOPEqual());
  ExprNodeDesc expr2 = getExprNodeDesc(TypeInfoFactory.intTypeInfo, "c1", 10,
      TypeInfoFactory.booleanTypeInfo, new GenericUDFOPEqual());
  ExprNodeDesc exprT = new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo,
      new GenericUDFOPAnd(), new ArrayList<>(2));
  exprT.getChildren().add(expr1);
  exprT.getChildren().add(expr2);
  DecomposedPredicate dp = MonarchPredicateHandler.decomposePredicate(null, serDe, exprT);
  assertNotNull(dp);
  assertNotNull(dp.pushedPredicate);
  assertNull(dp.residualPredicate);
  assertEquals(dp.pushedPredicate.toString(), exprT.toString());
}
Example 14: testPredicate_Multiple_2
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc; // import of the required package/class
@Test
public void testPredicate_Multiple_2() {
  ExprNodeDesc expr1 = getExprNodeDesc(TypeInfoFactory.intTypeInfo, "c1", 10,
      TypeInfoFactory.booleanTypeInfo, new GenericUDFOPEqual());
  ExprNodeDesc expr2 = getExprNodeDesc(TypeInfoFactory.intTypeInfo, "c1", 100,
      TypeInfoFactory.booleanTypeInfo, new GenericUDFOPLessThan());
  ExprNodeDesc exprT = new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo,
      new GenericUDFOPAnd(), new ArrayList<>(2));
  exprT.getChildren().add(expr1);
  exprT.getChildren().add(expr2);
  DecomposedPredicate dp = MonarchPredicateHandler.decomposePredicate(null, serDe, exprT);
  assertNotNull(dp);
  assertNotNull(dp.pushedPredicate);
  assertNull(dp.residualPredicate);
  assertEquals(dp.pushedPredicate.toString(), exprT.toString());
}
Example 15: testPredicate_Multiple_3
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc; // import of the required package/class
@Test
public void testPredicate_Multiple_3() {
  ExprNodeDesc expr1 = getExprNodeDesc(TypeInfoFactory.intTypeInfo, "c1", 10,
      TypeInfoFactory.booleanTypeInfo, new GenericUDFOPEqual());
  ExprNodeDesc expr2 = getExprNodeDesc(TypeInfoFactory.intTypeInfo, "c1", 10,
      TypeInfoFactory.booleanTypeInfo, new GenericUDFOPMinus());
  ExprNodeDesc exprT = new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo,
      new GenericUDFOPAnd(), new ArrayList<>(2));
  exprT.getChildren().add(expr1);
  exprT.getChildren().add(expr2);
  DecomposedPredicate dp = MonarchPredicateHandler.decomposePredicate(null, serDe, exprT);
  assertNotNull(dp);
  assertNotNull(dp.pushedPredicate);
  assertNotNull(dp.residualPredicate);
  assertEquals(dp.pushedPredicate.toString(), expr1.toString());
  assertEquals(dp.residualPredicate.toString(), expr2.toString());
}
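The three tests above rely on a getExprNodeDesc(...) helper that is not included in this collection. A plausible sketch of what it might look like, under the assumption that it simply builds a column-operator-constant expression (the real helper in the test class may differ):

private static ExprNodeDesc getExprNodeDesc(TypeInfo columnType, String columnName, Object constant,
                                            TypeInfo returnType, GenericUDF udf) {
  List<ExprNodeDesc> children = new ArrayList<>(2);
  children.add(new ExprNodeColumnDesc(columnType, columnName, null, false)); // the column operand
  children.add(new ExprNodeConstantDesc(columnType, constant));              // the constant operand
  return new ExprNodeGenericFuncDesc(returnType, udf, children);             // e.g. c1 = 10
}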