This article collects typical usage examples of the Java class org.apache.hadoop.hive.ql.plan.ExprNodeDesc. If you are wondering what ExprNodeDesc is, what it is good for, and how to use it, the curated examples below may help.
The ExprNodeDesc class belongs to the org.apache.hadoop.hive.ql.plan package. 15 code examples of the class are presented below, sorted by popularity by default.
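For context before the examples: an ExprNodeDesc tree is how Hive represents an expression in its query plan, with function nodes (ExprNodeGenericFuncDesc) over column references (ExprNodeColumnDesc) and literals (ExprNodeConstantDesc). The following is a minimal sketch that builds the tree for a predicate like `id = 5`; the column name, table alias, and types are illustrative, not taken from any example below.

import java.util.Arrays;

import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqual;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class ExprNodeDescPrimer {
  public static void main(String[] args) {
    // column reference: a hypothetical INT column "id" on table alias "t"
    ExprNodeDesc column = new ExprNodeColumnDesc(TypeInfoFactory.intTypeInfo, "id", "t", false);
    // literal: the constant 5
    ExprNodeDesc constant = new ExprNodeConstantDesc(TypeInfoFactory.intTypeInfo, 5);
    // function node: (id = 5), a boolean-typed generic UDF over the two children
    ExprNodeDesc predicate = new ExprNodeGenericFuncDesc(
        TypeInfoFactory.booleanTypeInfo, new GenericUDFOPEqual(), Arrays.asList(column, constant));
    System.out.println(predicate.getExprString());
  }
}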
Example 1: getPushDownFilterNode
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc; // import the package/class this example depends on
@Override
public IExpressionNode getPushDownFilterNode(){
  if( nodeDescList.size() != 1 ){
    return null;
  }
  ExprNodeDesc columnDesc = nodeDescList.get( 0 );
  if( ! ( columnDesc instanceof ExprNodeColumnDesc ) ){
    return null;
  }
  IExtractNode extractNode = CreateExtractNodeUtil.getExtractNode( columnDesc );
  if( extractNode == null ){
    return null;
  }
  ColumnType targetColumnType = MDSColumnTypeUtil.typeInfoToColumnType( columnDesc.getTypeInfo() );
  return new ExecuterNode( extractNode , new NullFilter( targetColumnType ) );
}
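For reference, the single-element nodeDescList consumed above is the child list of Hive's IS NULL function node. A minimal sketch of the expression shape this handler matches (column name and type are hypothetical):

import java.util.Arrays;

import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNull;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class IsNullExprSketch {
  public static ExprNodeDesc build(){
    // the single child: a column reference, as checked by the instanceof above
    ExprNodeDesc column = new ExprNodeColumnDesc( TypeInfoFactory.stringTypeInfo , "name" , "t" , false );
    // "name IS NULL" as a boolean function node (Example 2 below is the IS NOT NULL twin)
    return new ExprNodeGenericFuncDesc(
        TypeInfoFactory.booleanTypeInfo , new GenericUDFOPNull() , Arrays.asList( column ) );
  }
}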
Example 2: getPushDownFilterNode
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc; // import the package/class this example depends on
@Override
public IExpressionNode getPushDownFilterNode(){
  if( nodeDescList.size() != 1 ){
    return null;
  }
  ExprNodeDesc columnDesc = nodeDescList.get( 0 );
  if( ! ( columnDesc instanceof ExprNodeColumnDesc ) ){
    return null;
  }
  IExtractNode extractNode = CreateExtractNodeUtil.getExtractNode( columnDesc );
  if( extractNode == null ){
    return null;
  }
  ColumnType targetColumnType = MDSColumnTypeUtil.typeInfoToColumnType( columnDesc.getTypeInfo() );
  return new ExecuterNode( extractNode , new NotNullFilter( targetColumnType ) );
}
Example 3: decomposePredicate
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc; // import the package/class this example depends on
/**
 * Decompose the predicates (filter expressions) provided in the Hive query; if some
 * predicates can be pushed down to Monarch, use them at query time to reduce the
 * data queried from Monarch (Geode). The residual predicates (the ones that cannot
 * be executed on Monarch/Geode) will still need to be executed by the Hive query engine.
 * <p>
 * The predicates to be executed on Monarch are decided by the column type and
 * predicate operation. The following is the current list supported for execution
 * on the Monarch/Geode side (as of 2015-12-23):
 * - Predicate Operations:
 *   -- EQUAL
 *   -- LESS THAN
 *   -- LESS THAN OR EQUAL
 * - Column Types:
 *   -- INT
 *   -- LONG
 *   -- STRING
 *
 * @param jobConf      the job configuration
 * @param deserializer the deserializer
 * @param exprNodeDesc the Hive expression to be decomposed
 * @return the decomposed predicate indicating which predicates will be executed on Monarch
 *   and which (residual) predicates will be executed by the Hive query engine
 */
public static DecomposedPredicate decomposePredicate(final JobConf jobConf,
                                                     final MonarchSerDe deserializer,
                                                     final ExprNodeDesc exprNodeDesc) {
  List<IndexSearchCondition> indexSearchConditions = new ArrayList<>(5);
  IndexPredicateAnalyzer ipa = getIndexPredicateAnalyzer(deserializer);
  ExprNodeDesc residual = ipa.analyzePredicate(exprNodeDesc, indexSearchConditions);
  ipa.clearAllowedColumnNames();
  if (indexSearchConditions.isEmpty()) {
    if (logger.isDebugEnabled())
      logger.debug("nothing to decompose. Returning");
    return null;
  }
  DecomposedPredicate dp = new DecomposedPredicate();
  dp.pushedPredicate = ipa.translateSearchConditions(indexSearchConditions);
  dp.residualPredicate = (ExprNodeGenericFuncDesc) residual;
  dp.pushedPredicateObject = null;
  if (logger.isDebugEnabled()) {
    logger.debug("[To Monarch -->] PushedPredicate= {}", dp.pushedPredicate);
    logger.debug("[In Hive -->] ResidualPredicate= {}", dp.residualPredicate);
  }
  return dp;
}
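The heavy lifting above is done by Hive's IndexPredicateAnalyzer. Below is a minimal, standalone sketch of the same decomposition flow; the allowed column and comparison operator are hypothetical stand-ins for whatever getIndexPredicateAnalyzer configures from the SerDe.

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hive.ql.index.IndexPredicateAnalyzer;
import org.apache.hadoop.hive.ql.index.IndexSearchCondition;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqual;

public class DecomposeSketch {
  // Returns { pushedPredicate, residualPredicate }; either element may be null.
  public static ExprNodeDesc[] split(final ExprNodeDesc predicate) {
    IndexPredicateAnalyzer analyzer = new IndexPredicateAnalyzer();
    analyzer.allowColumnName("id"); // hypothetical pushable column
    analyzer.addComparisonOp(GenericUDFOPEqual.class.getName());
    List<IndexSearchCondition> conditions = new ArrayList<>();
    // analyzePredicate returns what could NOT be converted into search conditions
    ExprNodeDesc residual = analyzer.analyzePredicate(predicate, conditions);
    ExprNodeDesc pushed = conditions.isEmpty()
        ? null : analyzer.translateSearchConditions(conditions);
    return new ExprNodeDesc[]{ pushed, residual };
  }
}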
Example 4: testPredicate_Single
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc; // import the package/class this example depends on
/**
 * Test single-function predicates.
 *
 * @param exprNodeDesc  the node expression
 * @param checkPushed   the null check for the pushed predicate
 * @param checkResidual the null check for the residual predicate
 * @throws Exception
 */
@Test(dataProvider = "getDataForSinglePredicate")
public void testPredicate_Single(final ExprNodeDesc exprNodeDesc,
                                 final NullCheck checkPushed, final NullCheck checkResidual,
                                 final Filter expectedPredicate) throws Exception {
  DecomposedPredicate dp = MonarchPredicateHandler.decomposePredicate(null, serDe, exprNodeDesc);
  //assertNotNull(dp);
  //checkPushed.check(dp.pushedPredicate);
  //checkResidual.check(dp.residualPredicate);
  if (checkPushed.equals(NullCheck.NotNull)) {
    final String expression = Utilities.serializeExpression(dp.pushedPredicate);
    final String[] cols = Arrays.stream(properties.getProperty("columns").split(",")).toArray(String[]::new);
    MPredicateHolder[] phs1 = MonarchPredicateHandler.getPushDownPredicates(expression, cols);
    FilterList phs = MonarchPredicateHandler.getPushDownFilters(expression, cols);
    assertEquals(phs.getFilters().size(), 1);
    assertEquals(phs.getFilters().get(0).toString(), expectedPredicate.toString());
  }
}
Example 5: convertToExpression
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc; // import the package/class this example depends on
/**
 * Convert generic Ampool filter(s) to the corresponding generic UDF(s).
 *
 * @param filter the Ampool filters
 * @param td     the Ampool table descriptor
 * @return the generic ORC predicates
 */
public static ExprNodeDesc convertToExpression(final Filter filter, final TableDescriptor td)
    throws IOException {
  if (filter instanceof FilterList) {
    FilterList fl = (FilterList) filter;
    ExprNodeDesc expression = new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo,
        fl.getOperator() == FilterList.Operator.MUST_PASS_ALL ? new GenericUDFOPAnd()
            : new GenericUDFOPOr(),
        new ArrayList<>());
    for (Filter f : fl.getFilters()) {
      expression.getChildren().add(convertToExpression(f, td));
    }
    return expression;
  } else if (filter instanceof SingleColumnValueFilter) {
    SingleColumnValueFilter cf = (SingleColumnValueFilter) filter;
    if (!UDF_CONVERT_MAP.containsKey(cf.getOperator())) {
      throw new IOException("Failed to convert ComparisonOperator: " + cf.getOperator());
    }
    return UDF_CONVERT_MAP.get(cf.getOperator()).apply(cf, td);
  } else {
    return null;
  }
}
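A side note on the construction pattern above: the function node is created with an empty child list and then filled through getChildren(), because the children are only known after recursing. Where all children are in hand up front, the equivalent one-shot form is a sketch like this (left and right stand for any previously converted sub-expressions):

import java.util.Arrays;

import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPAnd;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class AndNodeSketch {
  // builds (left AND right) in one shot; the child list is fixed at construction
  public static ExprNodeDesc and(final ExprNodeDesc left, final ExprNodeDesc right) {
    return new ExprNodeGenericFuncDesc(
        TypeInfoFactory.booleanTypeInfo, new GenericUDFOPAnd(), Arrays.asList(left, right));
  }
}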
Example 6: decomposePredicate
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc; // import the package/class this example depends on
@Override
public DecomposedPredicate decomposePredicate(JobConf jobConf, Deserializer deserializer, ExprNodeDesc predicate) {
//  PhoenixSerDe phoenixSerDe = (PhoenixSerDe) deserializer;
//  String tableName = phoenixSerDe.getTableProperties().getProperty(PhoenixStorageHandlerConstants.PHOENIX_TABLE_NAME);
//  String predicateKey = PhoenixStorageHandlerUtil.getTableKeyOfSession(jobConf, tableName);
//
//  if (LOG.isDebugEnabled()) {
//    LOG.debug("<<<<<<<<<< predicateKey : " + predicateKey + " >>>>>>>>>>");
//  }
//
//  List<String> columnNameList = phoenixSerDe.getSerdeParams().getColumnNames();
//  PhoenixPredicateDecomposer predicateDecomposer = PhoenixPredicateDecomposerManager.createPredicateDecomposer(predicateKey, columnNameList);
//
//  return predicateDecomposer.decomposePredicate(predicate);
  // 2016-04-04 modified by JeongMin Ju: moved predicate push-down processing to the Tez path; see PhoenixInputFormat.getSplits.
  return null;
}
Example 7: getFilter
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc; // import the package/class this example depends on
public DynamoDBFilter getFilter(DynamoDBFilterOperator operator, String columnName,
    String columnType, IndexSearchCondition condition) {
  switch (operator.getType()) {
    case UNARY:
      return getFilter(operator, columnName, columnType);
    case BINARY:
      return getFilter(operator, columnName, columnType,
          condition.getConstantDesc().getValue().toString());
    case NARY:
      List<ExprNodeDesc> children = ShimsLoader.getHiveShims().getIndexExpression(condition)
          .getChildren();
      String[] values = new String[children.size() - 1];
      // This currently supports IN clause only.
      // The first element is the column name and the rest of the elements are
      // the values it can take.
      for (int i = 1; i < children.size(); i++) {
        values[i - 1] = ((ExprNodeConstantDesc) children.get(i)).getValue().toString();
      }
      return getFilter(operator, columnName, columnType, values);
    default:
      throw new RuntimeException("Unknown operator type. Operator: " + operator
          + " OperatorType: " + operator.getType());
  }
}
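The NARY branch above expects the IN expression's children laid out as [column, value1, value2, ...]. A sketch of a Hive expression with exactly that shape, built with GenericUDFIn (column name and values are hypothetical):

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFIn;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class InClauseSketch {
  // builds the tree for: status IN ('OPEN', 'CLOSED')
  public static ExprNodeDesc buildIn() {
    List<ExprNodeDesc> children = new ArrayList<>();
    // first child: the column; remaining children: the values it may take
    children.add(new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "status", "t", false));
    children.add(new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "OPEN"));
    children.add(new ExprNodeConstantDesc(TypeInfoFactory.stringTypeInfo, "CLOSED"));
    return new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo, new GenericUDFIn(), children);
  }
}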
Example 8: pushPredicate
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc; // import the package/class this example depends on
public DecomposedPredicate pushPredicate(Map<String, String> hiveTypeMapping,
    ExprNodeDesc predicate) {
  log.info("Checking predicates for pushdown in DynamoDB query");
  List<IndexSearchCondition> searchConditions = getGenericSearchConditions(hiveTypeMapping,
      predicate);
  log.info("Pushed predicates: " + searchConditions);
  if (searchConditions.isEmpty()) {
    return null;
  } else {
    List<IndexSearchCondition> finalSearchCondition =
        prioritizeSearchConditions(searchConditions);
    IndexPredicateAnalyzer analyzer = new IndexPredicateAnalyzer();
    DecomposedPredicate decomposedPredicate = new DecomposedPredicate();
    decomposedPredicate.pushedPredicate =
        analyzer.translateSearchConditions(finalSearchCondition);
    decomposedPredicate.residualPredicate = (ExprNodeGenericFuncDesc) predicate;
    return decomposedPredicate;
  }
}
Example 9: getGenericSearchConditions
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc; // import the package/class this example depends on
private List<IndexSearchCondition> getGenericSearchConditions(Map<String, String> hiveTypeMapping,
    ExprNodeDesc predicate) {
  IndexPredicateAnalyzer analyzer = new IndexPredicateAnalyzer();
  // DynamoDB does not support filters on columns of type set
  for (Entry<String, String> entry : hiveTypeMapping.entrySet()) {
    if (eligibleHiveTypes.contains(entry.getValue())) {
      analyzer.allowColumnName(entry.getKey());
    }
  }
  for (DynamoDBFilterOperator op : DynamoDBFilterOperator.values()) {
    if (op.getHiveClass() != null) {
      analyzer.addComparisonOp(op.getHiveClass());
    }
  }
  List<IndexSearchCondition> searchConditions = new ArrayList<>();
  analyzer.analyzePredicate(predicate, searchConditions);
  return searchConditions;
}
Example 10: getQueryFilter
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc; // import the package/class this example depends on
private DynamoDBQueryFilter getQueryFilter(JobConf conf, Map<String, String> hiveDynamoDBMapping,
    Map<String, String> hiveTypeMapping) throws IOException {
  if (hiveDynamoDBMapping == null) {
    /*
     * Column mapping may be null when the user has mapped a DynamoDB item
     * onto a single hive map<string, string> column.
     */
    return new DynamoDBQueryFilter();
  }
  DynamoDBClient client = new DynamoDBClient(conf);
  String filterExprSerialized = conf.get(TableScanDesc.FILTER_EXPR_CONF_STR);
  if (filterExprSerialized == null) {
    return new DynamoDBQueryFilter();
  }
  ExprNodeDesc filterExpr =
      ShimsLoader.getHiveShims().deserializeExpression(filterExprSerialized);
  DynamoDBFilterPushdown pushdown = new DynamoDBFilterPushdown();
  List<KeySchemaElement> schema =
      client.describeTable(conf.get(DynamoDBConstants.TABLE_NAME)).getKeySchema();
  DynamoDBQueryFilter queryFilter = pushdown.predicateToDynamoDBFilter(
      schema, hiveDynamoDBMapping, hiveTypeMapping, filterExpr);
  return queryFilter;
}
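For context, the serialized expression read here is placed into the job configuration by Hive itself once a storage handler returns a pushed predicate; the serialization API has moved between Hive versions. A hedged sketch of the producing side, assuming an older Hive where Utilities.serializeExpression is available (the same call used in Example 4 above):

import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
import org.apache.hadoop.hive.ql.plan.TableScanDesc;
import org.apache.hadoop.mapred.JobConf;

public class FilterConfSketch {
  public static void setFilter(final JobConf conf, final ExprNodeGenericFuncDesc pushedPredicate) {
    // serialize the pushed predicate under the key that the readers above look up
    conf.set(TableScanDesc.FILTER_EXPR_CONF_STR, Utilities.serializeExpression(pushedPredicate));
  }
}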
Example 11: SolrTable
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc; // import the package/class this example depends on
public SolrTable(JobConf conf) {
  String filterExprSerialized = conf.get(TableScanDesc.FILTER_EXPR_CONF_STR);
  if (filterExprSerialized != null) {
    ExprNodeDesc filterExpr = Utilities.deserializeExpression(filterExprSerialized, conf);
    log.debug("filterExpr=" + filterExpr.getExprString());
    SolrStorageHandler.buildQuery(filterExpr, fq, q);
  }
  this.url = ConfigurationUtil.getUrl(conf);
  this.qs = ConfigurationUtil.getQs(conf);
  this.fields = ConfigurationUtil.getAllColumns(conf.get(ConfigurationUtil.SOLR_COLUMN_MAPPING));
  this.facetType = conf.get(ConfigurationUtil.SOLR_FACET_MAPPING);
  log.info("solr.url=" + url + " solr.qs=" + qs + " fq=" + fq + " q=" + q);
  this.solrSplitSize = ConfigurationUtil.getSolrSplitSize(conf);
  this.outputBuffer = new ArrayList<SolrInputDocument>(solrSplitSize);
  this.server = new HttpSolrServer(url);
}
Example 12: decomposePredicate
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc; // import the package/class this example depends on
@Override
public DecomposedPredicate decomposePredicate(JobConf jobConf, Deserializer deserializer, ExprNodeDesc predicate) {
  SolrSerDe serDe = (SolrSerDe) deserializer;
  log.debug(ConfigurationUtil.SOLR_COLUMN_MAPPING + "=" + serDe.colNames
      + " predicate columns=" + predicate.getCols());
  boolean found = false;
  for (String s : predicate.getCols()) {
    if (serDe.colNames.contains(s)) found = true;
  }
  if (!found) return null;
  log.debug(" predicate=" + predicate.getExprString());
  DecomposedPredicate dp = new DecomposedPredicate();
  if (pushDownFilter(serDe.colNames, predicate, dp)) {
    log.debug("decomposed pushed: " + dp.pushedPredicate.getExprString());
    log.debug("decomposed residual: "
        + ((dp.residualPredicate == null) ? null : dp.residualPredicate.getExprString()));
    return dp;
  }
  return null;
}
Example 13: dumpFilterExpr
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc; // import the package/class this example depends on
protected static void dumpFilterExpr(ExprNodeDesc node) {
  if (node != null) {
    log.debug("dump: " + node.getClass().getName() + " name=" + node.getName()
        + " expr=" + node.getExprString() + "[ ");
    if (node instanceof ExprNodeGenericFuncDesc) {
      log.debug(" func=" + ((ExprNodeGenericFuncDesc) node).getGenericUDF());
    }
    List<ExprNodeDesc> children = node.getChildren();
    if (children != null) {
      for (ExprNodeDesc child : children) {
        if (child != null) dumpFilterExpr(child);
        log.debug(",");
      }
    }
    log.debug("]");
  }
}
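A hypothetical call site for this helper, assuming it sits in the same class and reusing the imports from the primer near the top of this page plus org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPNotNull; for `name IS NOT NULL` the function node is logged first, then the recursion visits the column child:

ExprNodeDesc column = new ExprNodeColumnDesc(TypeInfoFactory.stringTypeInfo, "name", "t", false);
ExprNodeDesc notNull = new ExprNodeGenericFuncDesc(
    TypeInfoFactory.booleanTypeInfo, new GenericUDFOPNotNull(), Arrays.asList(column));
dumpFilterExpr(notNull); // logs the GenericUDFOPNotNull node, then the column child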
Example 14: decompose
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc; // import the package/class this example depends on
/**
 * Decompose the given predicate into search conditions that can be pushed down
 * and a residual predicate that Hive must evaluate itself.
 *
 * @param conf JobConf
 * @param desc predicate expression node.
 * @return DecomposedPredicate containing translated search conditions the analyzer can support.
 */
public DecomposedPredicate decompose(JobConf conf, ExprNodeDesc desc) {
  IndexPredicateAnalyzer analyzer = newAnalyzer(conf);
  List<IndexSearchCondition> sConditions = new ArrayList<IndexSearchCondition>();
  ExprNodeDesc residualPredicate = analyzer.analyzePredicate(desc, sConditions);
  if (sConditions.size() == 0) {
    if (log.isInfoEnabled())
      log.info("nothing to decompose. Returning");
    return null;
  }
  DecomposedPredicate decomposedPredicate = new DecomposedPredicate();
  decomposedPredicate.pushedPredicate = analyzer.translateSearchConditions(sConditions);
  decomposedPredicate.residualPredicate = residualPredicate;
  return decomposedPredicate;
}
Example 15: HiveExprOrNode
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc; // import the package/class this example depends on
public HiveExprOrNode( final List<ExprNodeDesc> childExprNodeDesc ){
  for( ExprNodeDesc nodeChild : childExprNodeDesc ){
    if( nodeChild instanceof ExprNodeGenericFuncDesc ){
      addChildNode( (ExprNodeGenericFuncDesc)nodeChild );
    }
    else if( ( nodeChild instanceof ExprNodeColumnDesc ) || ( nodeChild instanceof ExprNodeFieldDesc ) ){
      childNodeList.add( new BooleanHiveExpr( nodeChild ) );
    }
    else{
      childNodeList.add( new UnsupportHiveExpr() );
    }
  }
}
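A hedged usage sketch: the list this constructor expects is typically the child list of an OR function node, obtained via getChildren(). For `id = 1 OR id = 2` (all names illustrative, and assuming the same Hive imports as in the primer near the top of this page):

ExprNodeDesc id = new ExprNodeColumnDesc( TypeInfoFactory.intTypeInfo , "id" , "t" , false );
ExprNodeDesc eq1 = new ExprNodeGenericFuncDesc( TypeInfoFactory.booleanTypeInfo ,
    new GenericUDFOPEqual() , Arrays.asList( id , new ExprNodeConstantDesc( TypeInfoFactory.intTypeInfo , 1 ) ) );
ExprNodeDesc eq2 = new ExprNodeGenericFuncDesc( TypeInfoFactory.booleanTypeInfo ,
    new GenericUDFOPEqual() , Arrays.asList( id , new ExprNodeConstantDesc( TypeInfoFactory.intTypeInfo , 2 ) ) );
ExprNodeGenericFuncDesc or = new ExprNodeGenericFuncDesc( TypeInfoFactory.booleanTypeInfo ,
    new GenericUDFOPOr() , Arrays.asList( eq1 , eq2 ) );
// the OR node's children are exactly what HiveExprOrNode consumes
HiveExprOrNode orNode = new HiveExprOrNode( or.getChildren() );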