

Java SchemaPath Class Code Examples

This article collects typical usage examples of the Java class org.apache.drill.common.expression.SchemaPath. If you are wondering what the SchemaPath class does, how to use it, or what it looks like in real code, the hand-picked class code examples below should help.


The SchemaPath class belongs to the org.apache.drill.common.expression package. Fifteen code examples of the class are shown below, sorted by popularity by default.
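Before the individual examples, here is a minimal sketch of the two factory methods the examples below rely on most often, SchemaPath.getSimplePath and SchemaPath.getCompoundPath. The wrapper class SchemaPathSketch and the column names are made up purely for illustration, and the sketch assumes a standard Drill dependency on the classpath.

import java.util.Arrays;
import java.util.List;

import org.apache.drill.common.expression.SchemaPath;

public class SchemaPathSketch { // hypothetical class, for illustration only
  public static void main(String[] args) {
    // A reference to a single top-level column, e.g. "key1".
    SchemaPath simple = SchemaPath.getSimplePath("key1");

    // A path built from several name segments, e.g. the nested field a.b.
    SchemaPath compound = SchemaPath.getCompoundPath("a", "b");

    // Record readers and group scans typically receive the projected columns as a List<SchemaPath>.
    List<SchemaPath> projectedColumns = Arrays.asList(simple, compound);
    System.out.println(projectedColumns);
  }
}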

Example 1: MongoRecordReader

import org.apache.drill.common.expression.SchemaPath; // import the required package/class
public MongoRecordReader(
    MongoSubScan.MongoSubScanSpec subScanSpec,
    List<SchemaPath> projectedColumns,
    FragmentContext context,
    MongoStoragePlugin plugin) {

  fields = new BasicDBObject();
  // exclude _id field, if not mentioned by user.
  fields.put(DrillMongoConstants.ID, Integer.valueOf(0));
  setColumns(projectedColumns);
  fragmentContext = context;
  this.plugin = plugin;
  filters = new BasicDBObject();
  Map<String, List<BasicDBObject>> mergedFilters = MongoUtils.mergeFilters(
      subScanSpec.getMinFilters(), subScanSpec.getMaxFilters());
  buildFilters(subScanSpec.getFilter(), mergedFilters);
  enableAllTextMode = fragmentContext.getOptions().getOption(ExecConstants.MONGO_ALL_TEXT_MODE).bool_val;
  readNumbersAsDouble = fragmentContext.getOptions().getOption(ExecConstants.MONGO_READER_READ_NUMBERS_AS_DOUBLE).bool_val;
  init(subScanSpec);
}
 
Developer ID: skhalifa, Project: QDrill, Lines: 21, Source: MongoRecordReader.java

Example 2: fieldSelected

import org.apache.drill.common.expression.SchemaPath; // import the required package/class
private boolean fieldSelected(MaterializedField field) {
  // TODO - not sure if this is how we want to represent this
  // for now it makes the existing tests pass, simply selecting
  // all available data if no columns are provided
  if (isStarQuery()) {
    return true;
  }

  int i = 0;
  for (SchemaPath expr : getColumns()) {
    if (field.matches(expr)) {
      columnsFound[i] = true;
      return true;
    }
    i++;
  }
  return false;
}
 
Developer ID: skhalifa, Project: QDrill, Lines: 19, Source: ParquetRecordReader.java

Example 3: ProjectPushInfo

import org.apache.drill.common.expression.SchemaPath; // import the required package/class
public ProjectPushInfo(List<SchemaPath> columns, ImmutableList<DesiredField> desiredFields) {
  super();
  this.columns = columns;
  this.desiredFields = desiredFields;

  this.fieldNames = Lists.newArrayListWithCapacity(desiredFields.size());
  this.types = Lists.newArrayListWithCapacity(desiredFields.size());
  IntIntOpenHashMap oldToNewIds = new IntIntOpenHashMap();

  int i = 0;
  for (DesiredField f : desiredFields) {
    fieldNames.add(f.name);
    types.add(f.field.getType());
    oldToNewIds.put(f.origIndex, i);
    i++;
  }
  this.rewriter = new InputRewriter(oldToNewIds);
}
 
Developer ID: skhalifa, Project: QDrill, Lines: 19, Source: PrelUtil.java

Example 4: process

import org.apache.drill.common.expression.SchemaPath; // import the required package/class
public static CompareFunctionsProcessor process(FunctionCall call, boolean nullComparatorSupported) {
  String functionName = call.getName();
  LogicalExpression nameArg = call.args.get(0);
  LogicalExpression valueArg = call.args.size() >= 2 ? call.args.get(1) : null;
  CompareFunctionsProcessor evaluator = new CompareFunctionsProcessor(functionName);

  if (valueArg != null) { // binary function
    if (VALUE_EXPRESSION_CLASSES.contains(nameArg.getClass())) {
      LogicalExpression swapArg = valueArg;
      valueArg = nameArg;
      nameArg = swapArg;
      evaluator.functionName = COMPARE_FUNCTIONS_TRANSPOSE_MAP.get(functionName);
    }
    evaluator.success = nameArg.accept(evaluator, valueArg);
  } else if (nullComparatorSupported && call.args.get(0) instanceof SchemaPath) {
    evaluator.success = true;
    evaluator.path = (SchemaPath) nameArg;
  }

  return evaluator;
}
 
Developer ID: skhalifa, Project: QDrill, Lines: 22, Source: CompareFunctionsProcessor.java

Example 5: HiveTextRecordReader

import org.apache.drill.common.expression.SchemaPath; // import the required package/class
public HiveTextRecordReader(Table table, Partition partition, InputSplit inputSplit, List<SchemaPath> projectedColumns, FragmentContext context) throws ExecutionSetupException {
  super(table, partition, inputSplit, projectedColumns, context, null);
  String d = table.getSd().getSerdeInfo().getParameters().get("field.delim");
  if (d != null) {
    delimiter = d.getBytes()[0];
  } else {
    delimiter = (byte) 1;
  }
  assert delimiter > 0;
  List<Integer> ids = Lists.newArrayList();
  for (int i = 0; i < tableColumns.size(); i++) {
    if (selectedColumnNames.contains(tableColumns.get(i))) {
      ids.add(i);
    }
  }
  columnIds = ids;
  numCols = tableColumns.size();
}
 
Developer ID: skhalifa, Project: QDrill, Lines: 19, Source: HiveTextRecordReader.java

Example 6: testNullableFilter

import org.apache.drill.common.expression.SchemaPath; // import the required package/class
@Test
public void testNullableFilter() throws Exception {
  final List<QueryDataBatch> result = testSqlWithResults(
      "select count(wr_return_quantity) as row_count from dfs.`/tmp/web_returns` where wr_return_quantity = 1");
  assertEquals("Only expected one batch with data, and then the empty finishing batch.", 2, result.size());
  final RecordBatchLoader loader = new RecordBatchLoader(getDrillbitContext().getAllocator());

  final QueryDataBatch b = result.get(0);
  loader.load(b.getHeader().getDef(), b.getData());

  final VectorWrapper vw = loader.getValueAccessorById(
      BigIntVector.class,
      loader.getValueVectorId(SchemaPath.getCompoundPath("row_count")).getFieldIds()
  );
  assertEquals(3573L, vw.getValueVector().getAccessor().getObject(0));
  b.release();
  loader.clear();
}
 
Developer ID: skhalifa, Project: QDrill, Lines: 19, Source: ParquetRecordReaderTest.java

Example 7: initFieldWriters

import org.apache.drill.common.expression.SchemaPath; // import the required package/class
private void initFieldWriters() throws IOException {
  fieldConverters = Lists.newArrayList();
  try {
    int fieldId = 0;
    for (VectorWrapper w : batch) {
      if (w.getField().getPath().equals(SchemaPath.getSimplePath(WriterPrel.PARTITION_COMPARATOR_FIELD))) {
        continue;
      }
      FieldReader reader = w.getValueVector().getReader();
      FieldConverter converter = getConverter(recordWriter, fieldId++, w.getField().getLastName(), reader);
      fieldConverters.add(converter);
    }
  } catch(Exception e) {
    logger.error("Failed to create FieldWriter.", e);
    throw new IOException("Failed to initialize FieldWriters.", e);
  }
}
 
Developer ID: skhalifa, Project: QDrill, Lines: 18, Source: EventBasedRecordWriter.java

Example 8: TestBuilder

import org.apache.drill.common.expression.SchemaPath; // import the required package/class
public TestBuilder(BufferAllocator allocator, String query, UserBitShared.QueryType queryType, Boolean ordered,
                   boolean approximateEquality, Map<SchemaPath, TypeProtos.MajorType> baselineTypeMap,
                   String baselineOptionSettingQueries, String testOptionSettingQueries, boolean highPerformanceComparison,
                   int expectedNumBatches) {
  this(allocator);
  if (ordered == null) {
    throw new RuntimeException("Ordering not set, when using a baseline file or query you must explicitly call the ordered() or unOrdered() method on the " + this.getClass().getSimpleName());
  }
  this.query = query;
  this.queryType = queryType;
  this.ordered = ordered;
  this.approximateEquality = approximateEquality;
  this.baselineTypeMap = baselineTypeMap;
  this.baselineOptionSettingQueries = baselineOptionSettingQueries;
  this.testOptionSettingQueries = testOptionSettingQueries;
  this.highPerformanceComparison = highPerformanceComparison;
  this.expectedNumBatches = expectedNumBatches;
}
 
Developer ID: skhalifa, Project: QDrill, Lines: 19, Source: TestBuilder.java

Example 9: testMaterializingLateboundField

import org.apache.drill.common.expression.SchemaPath; // import the required package/class
@Test
public void testMaterializingLateboundField(final @Injectable RecordBatch batch) throws SchemaChangeException {
  final SchemaBuilder builder = BatchSchema.newBuilder();
  builder.addField(getField(2, "test", bigIntType));
  final BatchSchema schema = builder.build();

  new NonStrictExpectations() {
    {
      batch.getValueVectorId(new SchemaPath("test", ExpressionPosition.UNKNOWN));
      result = new TypedFieldId(Types.required(MinorType.BIGINT), -5);
    }
  };

  ErrorCollector ec = new ErrorCollectorImpl();
  LogicalExpression expr = ExpressionTreeMaterializer.materialize(new FieldReference("test",
      ExpressionPosition.UNKNOWN), batch, ec, registry);
  assertEquals(bigIntType, expr.getMajorType());
  assertFalse(ec.hasErrors());
}
 
Developer ID: skhalifa, Project: QDrill, Lines: 20, Source: ExpressionTreeMaterializerTest.java

Example 10: MongoGroupScan

import org.apache.drill.common.expression.SchemaPath; // import the required package/class
public MongoGroupScan(String userName, MongoStoragePlugin storagePlugin,
    MongoScanSpec scanSpec, List<SchemaPath> columns) throws IOException {
  super(userName);
  this.storagePlugin = storagePlugin;
  this.storagePluginConfig = storagePlugin.getConfig();
  this.scanSpec = scanSpec;
  this.columns = columns;
  this.storagePluginConfig.getConnection();
  init();
}
 
Developer ID: skhalifa, Project: QDrill, Lines: 11, Source: MongoGroupScan.java

Example 11: DrillConfig

import org.apache.drill.common.expression.SchemaPath; // import the required package/class
@VisibleForTesting
public DrillConfig(Config config, boolean enableServerConfigs) {
  super(config);
  logger.debug("Setting up DrillConfig object.");
  logger.trace("Given Config object is:\n{}",
               config.root().render(ConfigRenderOptions.defaults()));
  mapper = new ObjectMapper();

  if (enableServerConfigs) {
    SimpleModule deserModule = new SimpleModule("LogicalExpressionDeserializationModule")
      .addDeserializer(LogicalExpression.class, new LogicalExpression.De(this))
      .addDeserializer(SchemaPath.class, new SchemaPath.De());

    mapper.registerModule(deserModule);
    mapper.enable(SerializationFeature.INDENT_OUTPUT);
    mapper.configure(Feature.ALLOW_UNQUOTED_FIELD_NAMES, true);
    mapper.configure(JsonGenerator.Feature.QUOTE_FIELD_NAMES, true);
    mapper.configure(Feature.ALLOW_COMMENTS, true);
    mapper.registerSubtypes(LogicalOperatorBase.getSubTypes(this));
    mapper.registerSubtypes(StoragePluginConfigBase.getSubTypes(this));
    mapper.registerSubtypes(FormatPluginConfigBase.getSubTypes(this));
  }

  RuntimeMXBean bean = ManagementFactory.getRuntimeMXBean();
  this.startupArguments = ImmutableList.copyOf(bean.getInputArguments());
  logger.debug("DrillConfig object initialized.");
}
 
Developer ID: skhalifa, Project: QDrill, Lines: 28, Source: DrillConfig.java

Example 12: getIdIfValid

import org.apache.drill.common.expression.SchemaPath; // import the required package/class
@Override
public Integer getIdIfValid(String name) {
  SchemaPath schemaPath = SchemaPath.getSimplePath(name);
  int id = partitionColumns.indexOf(schemaPath);
  if (id == -1) {
    return null;
  }
  return id;
}
 
Developer ID: skhalifa, Project: QDrill, Lines: 10, Source: ParquetPartitionDescriptor.java

Example 13: twoKeyAgg

import org.apache.drill.common.expression.SchemaPath; // import the required package/class
@Test
public void twoKeyAgg(@Injectable final DrillbitContext bitContext, @Injectable UserClientConnection connection) throws Throwable {
  SimpleRootExec exec = doTest(bitContext, connection, "/agg/twokey.json");

  while (exec.next()) {
    final IntVector key1 = exec.getValueVectorById(SchemaPath.getSimplePath("key1"), IntVector.class);
    final BigIntVector key2 = exec.getValueVectorById(SchemaPath.getSimplePath("key2"), BigIntVector.class);
    final BigIntVector cnt = exec.getValueVectorById(SchemaPath.getSimplePath("cnt"), BigIntVector.class);
    final NullableBigIntVector total = exec.getValueVectorById(SchemaPath.getSimplePath("total"), NullableBigIntVector.class);
    final Integer[] keyArr1 = {Integer.MIN_VALUE, Integer.MIN_VALUE, Integer.MIN_VALUE, Integer.MAX_VALUE, Integer.MAX_VALUE, Integer.MAX_VALUE};
    final long[] keyArr2 = {0, 1, 2, 0, 1, 2};
    final long[] cntArr = {34, 34, 34, 34, 34, 34};
    final long[] totalArr = {0, 34, 68, 0, 34, 68};

    for (int i = 0; i < exec.getRecordCount(); i++) {
      assertEquals((Long) cntArr[i], cnt.getAccessor().getObject(i));
      assertEquals(keyArr1[i], key1.getAccessor().getObject(i));
      assertEquals((Long) keyArr2[i], key2.getAccessor().getObject(i));
      assertEquals((Long) totalArr[i], total.getAccessor().getObject(i));
    }
  }

  if (exec.getContext().getFailureCause() != null) {
    throw exec.getContext().getFailureCause();
  }
  assertTrue(!exec.getContext().isFailed());
}
 
Developer ID: skhalifa, Project: QDrill, Lines: 37, Source: TestAgg.java

Example 14: visitUnknown

import org.apache.drill.common.expression.SchemaPath; // import the required package/class
@Override
public Set<SchemaPath> visitUnknown(LogicalExpression e, Void value) {
  Set<SchemaPath> paths = Sets.newHashSet();
  for (LogicalExpression ex : e) {
    paths.addAll(ex.accept(this, null));
  }
  return paths;
}
 
Developer ID: skhalifa, Project: QDrill, Lines: 9, Source: ScanFieldDeterminer.java

Example 15: isStarQuery

import org.apache.drill.common.expression.SchemaPath; // import the required package/class
@Override
public boolean isStarQuery() {
  if(settings.isUseRepeatedVarChar()) {
    return super.isStarQuery() || Iterables.tryFind(getColumns(), new Predicate<SchemaPath>() {
      @Override
      public boolean apply(@Nullable SchemaPath path) {
        return path.equals(RepeatedVarCharOutput.COLUMNS);
      }
    }).isPresent();
  }
  return super.isStarQuery();
}
 
Developer ID: skhalifa, Project: QDrill, Lines: 13, Source: CompliantTextRecordReader.java


Note: The org.apache.drill.common.expression.SchemaPath class examples in this article were compiled by 纯净天空 from GitHub, MSDocs, and other open-source code and documentation platforms. The snippets were selected from open-source projects contributed by many developers; copyright in the source code remains with the original authors. Refer to the corresponding project's license before distributing or using the code, and do not reproduce this article without permission.