

Java TypeInfoFactory Class Code Examples

This article collects typical usage examples of the Java class org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory. If you are wondering what TypeInfoFactory does, how to use it, or want to see it applied in real code, the curated class examples below should help.


TypeInfoFactory belongs to the org.apache.hadoop.hive.serde2.typeinfo package. Fifteen code examples of the class are shown below, ordered by popularity.
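
Before diving into the examples, here is a minimal, self-contained sketch of the most common TypeInfoFactory entry points. The fields and factory methods below are part of Hive's public serde2 API; the variable names are ours.

import java.util.Arrays;

import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

public class TypeInfoFactoryTour {
  public static void main(String[] args) {
    // Primitive types are exposed as shared singleton fields.
    TypeInfo str = TypeInfoFactory.stringTypeInfo;
    TypeInfo i64 = TypeInfoFactory.longTypeInfo;

    // Parameterized primitives come from factory methods.
    TypeInfo dec = TypeInfoFactory.getDecimalTypeInfo(10, 2); // decimal(10,2)
    TypeInfo vc = TypeInfoFactory.getVarcharTypeInfo(255);    // varchar(255)

    // Complex types compose other TypeInfos.
    TypeInfo list = TypeInfoFactory.getListTypeInfo(str);     // array<string>
    TypeInfo map = TypeInfoFactory.getMapTypeInfo(str, i64);  // map<string,bigint>
    TypeInfo row = TypeInfoFactory.getStructTypeInfo(
        Arrays.asList("name", "age"),
        Arrays.asList(str, TypeInfoFactory.intTypeInfo));

    System.out.println(row.getTypeName()); // struct<name:string,age:int>
  }
}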

Example 1: convertToExpression

import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; // import the required package/class
/**
 * Recursively converts an Ampool filter (or a FilterList of nested
 * filters) into the equivalent Hive expression tree of generic UDFs.
 *
 * @param filter the Ampool filter to convert
 * @param td the Ampool table descriptor
 * @return the equivalent ExprNodeDesc predicate tree, or null if the
 *         filter type is not supported
 */
public static ExprNodeDesc convertToExpression(final Filter filter, final TableDescriptor td)
    throws IOException {
  if (filter instanceof FilterList) {
    FilterList fl = (FilterList) filter;
    ExprNodeDesc expression = new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo,
        fl.getOperator() == FilterList.Operator.MUST_PASS_ALL ? new GenericUDFOPAnd()
            : new GenericUDFOPOr(),
        new ArrayList<>());
    for (Filter f : fl.getFilters()) {
      expression.getChildren().add(convertToExpression(f, td));
    }
    return expression;
  } else if (filter instanceof SingleColumnValueFilter) {
    SingleColumnValueFilter cf = (SingleColumnValueFilter) filter;
    if (!UDF_CONVERT_MAP.containsKey(cf.getOperator())) {
      throw new IOException("Failed to convert ComparisonOperator: " + cf.getOperator());
    }
    return UDF_CONVERT_MAP.get(cf.getOperator()).apply(cf, td);
  } else {
    return null;
  }
}
 
Developer ID: ampool, Project: monarch, Lines: 30, Source: OrcUtils.java
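
The UDF_CONVERT_MAP referenced above is not shown in the snippet. A plausible shape for one of its entries is sketched below; the Ampool CompareOp enum and the getColumnNameString()/getValue() accessors are assumptions inferred from the usage above, not confirmed API. The Hive classes (ExprNodeColumnDesc, ExprNodeConstantDesc, GenericUDFOPEqual) are real.

import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.function.BiFunction;

import org.apache.hadoop.hive.ql.plan.ExprNodeColumnDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeConstantDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeDesc;
import org.apache.hadoop.hive.ql.plan.ExprNodeGenericFuncDesc;
import org.apache.hadoop.hive.ql.udf.generic.GenericUDFOPEqual;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;

// Hypothetical sketch; CompareOp, getColumnNameString() and getValue()
// are assumed from context and may differ in Ampool's actual API.
private static final Map<CompareOp, BiFunction<SingleColumnValueFilter, TableDescriptor, ExprNodeDesc>>
    UDF_CONVERT_MAP = new HashMap<>();

static {
  UDF_CONVERT_MAP.put(CompareOp.EQUAL, (cf, td) -> {
    // column = constant, expressed as a boolean GenericUDFOPEqual call
    ExprNodeDesc column = new ExprNodeColumnDesc(
        TypeInfoFactory.stringTypeInfo, cf.getColumnNameString(), null, false);
    ExprNodeDesc constant = new ExprNodeConstantDesc(cf.getValue());
    return new ExprNodeGenericFuncDesc(TypeInfoFactory.booleanTypeInfo,
        new GenericUDFOPEqual(), Arrays.asList(column, constant));
  });
}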

Example 2: newPartitionFields

import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; // import the required package/class
@Override
public List<FieldSchema> newPartitionFields(String format) {
  Boolean hiveIntegration = (Boolean) config.get(HiveConfig.HIVE_INTEGRATION_CONFIG);
  hiveIntegration = hiveIntegration == null
                    ? HiveConfig.HIVE_INTEGRATION_DEFAULT
                    : hiveIntegration;
  String delim = (String) config.get(StorageCommonConfig.DIRECTORY_DELIM_CONFIG);
  delim = delim == null ? StorageCommonConfig.DIRECTORY_DELIM_DEFAULT : delim;
  if (hiveIntegration && !verifyDateTimeFormat(format, delim)) {
    throw new IllegalArgumentException(
        "Path format doesn't meet the requirements for Hive integration, "
        + "which require prefixing each DateTime component with its name."
    );
  }

  List<FieldSchema> fields = new ArrayList<>();

  for (String field : format.split(delim)) {
    String[] parts = field.split("=");
    FieldSchema fieldSchema =
        new FieldSchema(parts[0].replace("'", ""), TypeInfoFactory.stringTypeInfo.toString(), "");
    fields.add(fieldSchema);
  }

  return fields;
}
 
Developer ID: confluentinc, Project: kafka-connect-storage-common, Lines: 27, Source: TimeBasedSchemaGenerator.java
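
With Hive integration on and the default '/' delimiter, a Hive-compatible path format yields one string-typed partition column per DateTime component; note how the single quotes around each name are stripped. A hypothetical usage (the generator's construction and configuration wiring are elided):

// Hypothetical usage; assumes a configured TimeBasedSchemaGenerator instance.
List<FieldSchema> fields = generator.newPartitionFields("'year'=YYYY/'month'=MM/'day'=dd");
for (FieldSchema f : fields) {
  System.out.println(f.getName() + " : " + f.getType());
}
// Prints:
// year : string
// month : string
// day : string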

Example 3: getObjectInspector

import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; // import the required package/class
private ObjectInspector getObjectInspector(final TypeInfo typeInfo) {
    if (typeInfo.equals(TypeInfoFactory.doubleTypeInfo)) {
        return PrimitiveObjectInspectorFactory.writableDoubleObjectInspector;
    } else if (typeInfo.equals(TypeInfoFactory.booleanTypeInfo)) {
        return PrimitiveObjectInspectorFactory.writableBooleanObjectInspector;
    } else if (typeInfo.equals(TypeInfoFactory.floatTypeInfo)) {
        return PrimitiveObjectInspectorFactory.writableFloatObjectInspector;
    } else if (typeInfo.equals(TypeInfoFactory.intTypeInfo)) {
        return PrimitiveObjectInspectorFactory.writableIntObjectInspector;
    } else if (typeInfo.equals(TypeInfoFactory.longTypeInfo)) {
        return PrimitiveObjectInspectorFactory.writableLongObjectInspector;
    } else if (typeInfo.equals(TypeInfoFactory.stringTypeInfo)) {
        return PrimitiveObjectInspectorFactory.writableStringObjectInspector;
    } else if (typeInfo.equals(TypeInfoFactory.timestampTypeInfo)) {
        return PrimitiveObjectInspectorFactory.writableTimestampObjectInspector;
    } else if (typeInfo.equals(TypeInfoFactory.dateTypeInfo)) {
        return PrimitiveObjectInspectorFactory.writableDateObjectInspector;
    } else {
        throw new UnsupportedOperationException("Unknown field type: " + typeInfo);
    }
}
 
Developer ID: shunfei, Project: indexr, Lines: 22, Source: ArrayWritableObjectInspector.java
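
Since Hive 0.13 the same dispatch can usually be collapsed into one factory call. A sketch, assuming that version's getPrimitiveWritableObjectInspector(PrimitiveTypeInfo) overload is available (verify against your Hive version):

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.typeinfo.PrimitiveTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;

// Sketch: one call covers every primitive TypeInfo handled by the
// if/else chain above (and more, e.g. char/varchar/decimal).
private ObjectInspector getObjectInspector(final TypeInfo typeInfo) {
  if (typeInfo instanceof PrimitiveTypeInfo) {
    return PrimitiveObjectInspectorFactory
        .getPrimitiveWritableObjectInspector((PrimitiveTypeInfo) typeInfo);
  }
  throw new UnsupportedOperationException("Unknown field type: " + typeInfo);
}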

Example 4: unionStringLongJava

import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; // import the required package/class
@Test
public void unionStringLongJava() throws UnexpectedTypeException {
  List<TypeInfo> typeInfos = Arrays.asList((TypeInfo) TypeInfoFactory.stringTypeInfo, TypeInfoFactory.longTypeInfo);
  TypeInfo typeInfo = TypeInfoFactory.getUnionTypeInfo(typeInfos);
  Converter converter = getConverter(typeInfo);

  assertThat(converter.toJavaObject(null), is(nullValue()));
  assertThat(converter.toJavaObject(new StandardUnion((byte) 0, new Text("a"))), is((Object) "a"));
  assertThat(converter.toJavaObject(new StandardUnion((byte) 1, new LongWritable(1L))), is((Object) 1L));

  try {
    converter.toJavaObject(new StandardUnion((byte) 1, new IntWritable(1)));
  } catch (UnexpectedTypeException e) {
    return;
  }
  fail();
}
 
Developer ID: HotelsDotCom, Project: corc, Lines: 18, Source: DefaultConverterFactoryTest.java

Example 5: readFullyReadSchemaFromSplit

import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; // import the required package/class
@Test
public void readFullyReadSchemaFromSplit() throws IOException {
  StructTypeInfo typeInfo = new StructTypeInfoBuilder()
      .add("a", TypeInfoFactory.stringTypeInfo)
      .add("b", TypeInfoFactory.stringTypeInfo)
      .build();
  CorcInputFormat.setTypeInfo(conf, typeInfo);
  CorcInputFormat.setConverterFactoryClass(conf, DefaultConverterFactory.class);

  RecordReader<NullWritable, Corc> reader = inputFormat.getRecordReader(split, conf, reporter);

  Corc corc = reader.createValue();

  reader.next(NullWritable.get(), corc);
  assertThat(corc.get("a"), is((Object) "A1"));
  assertThat(corc.get("b"), is((Object) "B1"));
  reader.close();
}
 
Developer ID: HotelsDotCom, Project: corc, Lines: 19, Source: CorcInputFormatTest.java

Example 6: readFullyDeclaredSchema

import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; // import the required package/class
@Test
public void readFullyDeclaredSchema() throws IOException {
  StructTypeInfo typeInfo = new StructTypeInfoBuilder()
      .add("a", TypeInfoFactory.stringTypeInfo)
      .add("b", TypeInfoFactory.stringTypeInfo)
      .build();
  CorcInputFormat.setTypeInfo(conf, typeInfo);
  CorcInputFormat.setSchemaTypeInfo(conf, typeInfo);
  CorcInputFormat.setConverterFactoryClass(conf, DefaultConverterFactory.class);

  RecordReader<NullWritable, Corc> reader = inputFormat.getRecordReader(split, conf, reporter);

  Corc corc = reader.createValue();

  reader.next(NullWritable.get(), corc);
  assertThat(corc.get("a"), is((Object) "A1"));
  assertThat(corc.get("b"), is((Object) "B1"));
  reader.close();
}
 
Developer ID: HotelsDotCom, Project: corc, Lines: 20, Source: CorcInputFormatTest.java

Example 7: typical

import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; // import the required package/class
@Test
public void typical() throws IOException {
  Path path = new Path(temporaryFolder.getRoot().getCanonicalPath(), "part-00000");

  try (OrcWriter writer = new OrcWriter.Builder(conf, path).addField("a", TypeInfoFactory.stringTypeInfo).build()) {
    writer.addRow("hello");
  }

  ReaderOptions options = OrcFile.readerOptions(conf);
  Reader reader = OrcFile.createReader(path, options);
  RecordReader rows = reader.rows();

  @SuppressWarnings("unchecked")
  List<Object> next = (List<Object>) ObjectInspectorUtils.copyToStandardJavaObject(rows.next(null),
      reader.getObjectInspector());
  assertThat(next.size(), is(1));
  assertThat(next.get(0), is((Object) "hello"));
  assertThat(rows.hasNext(), is(false));

  rows.close();
}
 
Developer ID: HotelsDotCom, Project: corc, Lines: 22, Source: OrcWriterTest.java

Example 8: newStructTypeInfo

import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; // import the required package/class
static StructTypeInfo newStructTypeInfo(Fields fields) {
  List<String> names = new ArrayList<>();
  List<TypeInfo> typeInfos = new ArrayList<>();

  for (int i = 0; i < fields.size(); i++) {
    String name = fields.get(i).toString();
    if (ROW_ID_NAME.equals(name)) {
      if (!fields.getTypeClass(i).equals(RecordIdentifier.class)) {
        throw new IllegalArgumentException(ROW_ID_NAME + " column is not of type "
            + RecordIdentifier.class.getSimpleName() + ". Found type: " + fields.getTypeClass(i));
      }
      continue;
    }
    names.add(name.toLowerCase());
    Class<?> type = fields.getTypeClass(i);
    if (type == null) {
      throw new IllegalArgumentException("Missing type information for field: " + name);
    }

    TypeInfo typeInfo = getTypeInfoFromClass(type);
    typeInfos.add(typeInfo);
  }

  return (StructTypeInfo) TypeInfoFactory.getStructTypeInfo(names, typeInfos);
}
 
Developer ID: HotelsDotCom, Project: corc, Lines: 26, Source: SchemaFactory.java
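
The getTypeInfoFromClass helper is not shown. Judging by the Java classes exercised in Example 10 below, a plausible reconstruction looks like this (the body is our assumption, not corc's actual code):

// Hypothetical reconstruction of getTypeInfoFromClass; the real
// implementation in corc may differ.
static TypeInfo getTypeInfoFromClass(Class<?> type) {
  if (type == String.class) return TypeInfoFactory.stringTypeInfo;
  if (type == Boolean.class) return TypeInfoFactory.booleanTypeInfo;
  if (type == Byte.class) return TypeInfoFactory.byteTypeInfo;
  if (type == Short.class) return TypeInfoFactory.shortTypeInfo;
  if (type == Integer.class) return TypeInfoFactory.intTypeInfo;
  if (type == Long.class) return TypeInfoFactory.longTypeInfo;
  if (type == Float.class) return TypeInfoFactory.floatTypeInfo;
  if (type == Double.class) return TypeInfoFactory.doubleTypeInfo;
  if (type == java.sql.Timestamp.class) return TypeInfoFactory.timestampTypeInfo;
  if (type == java.sql.Date.class) return TypeInfoFactory.dateTypeInfo;
  if (type == byte[].class) return TypeInfoFactory.binaryTypeInfo;
  throw new IllegalArgumentException("Unsupported field type: " + type);
}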

Example 9: createTypeInfo

import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; // import the required package/class
private StructTypeInfo createTypeInfo() {
  return new StructTypeInfoBuilder()
      .add("a", TypeInfoFactory.stringTypeInfo)
      .add("b", TypeInfoFactory.booleanTypeInfo)
      .add("c", TypeInfoFactory.byteTypeInfo)
      .add("d", TypeInfoFactory.shortTypeInfo)
      .add("e", TypeInfoFactory.intTypeInfo)
      .add("f", TypeInfoFactory.longTypeInfo)
      .add("g", TypeInfoFactory.floatTypeInfo)
      .add("h", TypeInfoFactory.doubleTypeInfo)
      .add("i", TypeInfoFactory.timestampTypeInfo)
      .add("j", TypeInfoFactory.dateTypeInfo)
      .add("k", TypeInfoFactory.binaryTypeInfo)
      .add("l", TypeInfoFactory.decimalTypeInfo)
      .add("m", TypeInfoFactory.getListTypeInfo(TypeInfoFactory.intTypeInfo))
      .add("n", TypeInfoFactory.getMapTypeInfo(TypeInfoFactory.intTypeInfo, TypeInfoFactory.intTypeInfo))
      .add("o", new StructTypeInfoBuilder().add("a", TypeInfoFactory.intTypeInfo).build())
      .add("p", TypeInfoFactory.getUnionTypeInfo(Arrays.asList((TypeInfo) TypeInfoFactory.stringTypeInfo)))
      .build();
}
 
Developer ID: HotelsDotCom, Project: corc, Lines: 21, Source: OrcFileSinkPerformanceTest.java

Example 10: createStructTypeInfo

import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; // import the required package/class
@Test
public void createStructTypeInfo() {
  String[] names = new String[] { "a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k" };
  Class<?>[] types = new Class<?>[] { String.class, Boolean.class, Byte.class, Short.class, Integer.class, Long.class,
      Float.class, Double.class, Timestamp.class, Date.class, byte[].class };

  Fields fields = new Fields(names, types);

  StructTypeInfo typeInfo = SchemaFactory.newStructTypeInfo(fields);

  assertThat(typeInfo.getStructFieldTypeInfo("a"), is((TypeInfo) TypeInfoFactory.stringTypeInfo));
  assertThat(typeInfo.getStructFieldTypeInfo("b"), is((TypeInfo) TypeInfoFactory.booleanTypeInfo));
  assertThat(typeInfo.getStructFieldTypeInfo("c"), is((TypeInfo) TypeInfoFactory.byteTypeInfo));
  assertThat(typeInfo.getStructFieldTypeInfo("d"), is((TypeInfo) TypeInfoFactory.shortTypeInfo));
  assertThat(typeInfo.getStructFieldTypeInfo("e"), is((TypeInfo) TypeInfoFactory.intTypeInfo));
  assertThat(typeInfo.getStructFieldTypeInfo("f"), is((TypeInfo) TypeInfoFactory.longTypeInfo));
  assertThat(typeInfo.getStructFieldTypeInfo("g"), is((TypeInfo) TypeInfoFactory.floatTypeInfo));
  assertThat(typeInfo.getStructFieldTypeInfo("h"), is((TypeInfo) TypeInfoFactory.doubleTypeInfo));
  assertThat(typeInfo.getStructFieldTypeInfo("i"), is((TypeInfo) TypeInfoFactory.timestampTypeInfo));
  assertThat(typeInfo.getStructFieldTypeInfo("j"), is((TypeInfo) TypeInfoFactory.dateTypeInfo));
  assertThat(typeInfo.getStructFieldTypeInfo("k"), is((TypeInfo) TypeInfoFactory.binaryTypeInfo));
}
 
Developer ID: HotelsDotCom, Project: corc, Lines: 23, Source: SchemaFactoryTest.java

Example 11: writeString

import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; // import the required package/class
@Test
public void writeString() throws IOException {
  List<Object> values = new ArrayList<>();
  values.add("hello");
  values.add(null);

  write(TypeInfoFactory.stringTypeInfo, values);

  try (OrcReader reader = getOrcReader()) {
    assertThat(reader.hasNext(), is(true));
    assertThat(reader.next().get(0), is((Object) "hello"));

    assertThat(reader.hasNext(), is(true));
    assertThat(reader.next().get(0), is(nullValue()));

    assertThat(reader.hasNext(), is(false));
  }
}
 
Developer ID: HotelsDotCom, Project: corc, Lines: 19, Source: OrcFileTest.java

Example 12: writeChar

import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; // import the required package/class
@Test
public void writeChar() throws IOException {
  List<Object> values = new ArrayList<>();
  values.add("hello");
  values.add(new HiveChar("world", 1));
  values.add(null);

  write(TypeInfoFactory.getCharTypeInfo(1), values);

  try (OrcReader reader = getOrcReader()) {
    assertThat(reader.hasNext(), is(true));
    assertThat(((HiveChar) reader.next().get(0)).getValue(), is("h"));

    assertThat(reader.hasNext(), is(true));
    assertThat(((HiveChar) reader.next().get(0)).getValue(), is("w"));

    assertThat(reader.hasNext(), is(true));
    assertThat(reader.next().get(0), is(nullValue()));

    assertThat(reader.hasNext(), is(false));
  }
}
 
Developer ID: HotelsDotCom, Project: corc, Lines: 23, Source: OrcFileTest.java

Example 13: writeVarchar

import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; // import the required package/class
@Test
public void writeVarchar() throws IOException {
  List<Object> values = new ArrayList<>();
  values.add("hello");
  values.add(new HiveVarchar("world", 1));
  values.add(null);

  write(TypeInfoFactory.getVarcharTypeInfo(1), values);

  try (OrcReader reader = getOrcReader()) {
    assertThat(reader.hasNext(), is(true));
    assertThat(((HiveVarchar) reader.next().get(0)).getValue(), is("h"));

    assertThat(reader.hasNext(), is(true));
    assertThat(((HiveVarchar) reader.next().get(0)).getValue(), is("w"));

    assertThat(reader.hasNext(), is(true));
    assertThat(reader.next().get(0), is(nullValue()));

    assertThat(reader.hasNext(), is(false));
  }
}
 
Developer ID: HotelsDotCom, Project: corc, Lines: 23, Source: OrcFileTest.java

Example 14: writeDecimal

import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; // import the required package/class
@Test
public void writeDecimal() throws IOException {
  List<Object> values = new ArrayList<>();
  values.add(HiveDecimal.create(new BigDecimal("1.23")));
  values.add(new BigDecimal("2.34"));
  values.add("3.45");
  values.add(null);

  write(TypeInfoFactory.getDecimalTypeInfo(2, 1), values);

  try (OrcReader reader = getOrcReader()) {
    assertThat(reader.hasNext(), is(true));
    assertThat(((HiveDecimal) reader.next().get(0)).bigDecimalValue(), is(new BigDecimal("1.2")));

    assertThat(reader.hasNext(), is(true));
    assertThat(((HiveDecimal) reader.next().get(0)).bigDecimalValue(), is(new BigDecimal("2.3")));

    assertThat(reader.hasNext(), is(true));
    assertThat(((HiveDecimal) reader.next().get(0)).bigDecimalValue(), is(new BigDecimal("3.5")));

    assertThat(reader.hasNext(), is(true));
    assertThat(reader.next().get(0), is(nullValue()));

    assertThat(reader.hasNext(), is(false));
  }
}
 
Developer ID: HotelsDotCom, Project: corc, Lines: 27, Source: OrcFileTest.java
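
Note how decimal(2,1) reshapes the inputs: each value is rounded to a single decimal place (half-up), which is why "3.45" reads back as 3.5. The enforcement step can be reproduced directly; a minimal sketch using HiveDecimal's public API:

import java.math.BigDecimal;
import org.apache.hadoop.hive.common.type.HiveDecimal;

// decimal(2,1): max precision 2, max scale 1 -> 3.45 rounds up to 3.5
HiveDecimal d = HiveDecimal.enforcePrecisionScale(
    HiveDecimal.create(new BigDecimal("3.45")), 2, 1);
System.out.println(d); // 3.5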

Example 15: writeListString

import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory; // import the required package/class
@Test
public void writeListString() throws IOException {
  List<Object> values = new ArrayList<>();
  values.add(Arrays.asList("hello"));
  values.add(null);

  write(TypeInfoFactory.getListTypeInfo(TypeInfoFactory.stringTypeInfo), values);

  try (OrcReader reader = getOrcReader()) {
    assertThat(reader.hasNext(), is(true));
    assertThat(reader.next().get(0), is((Object) Arrays.asList("hello")));

    assertThat(reader.hasNext(), is(true));
    assertThat(reader.next().get(0), is(nullValue()));

    assertThat(reader.hasNext(), is(false));
  }
}
 
Developer ID: HotelsDotCom, Project: corc, Lines: 19, Source: OrcFileTest.java


Note: The org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by their respective developers; copyright in the source code remains with the original authors. Consult each project's license before using or redistributing the code, and do not republish without permission.