

Java SerDe Class Code Examples

This article collects typical usage examples of the Java class org.apache.hadoop.hive.serde2.SerDe. If you are wondering what the SerDe class does and how to use it, the curated code examples below should help.


The SerDe class belongs to the org.apache.hadoop.hive.serde2 package. Fifteen code examples of the class are shown below, sorted by popularity by default. You can upvote the examples you find useful; your feedback helps the system surface better Java code examples.
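Before the examples, a quick orientation: SerDe (serializer/deserializer) is Hive's pluggable record-translation interface, deprecated in newer Hive releases in favor of AbstractSerDe, which is why the examples below carry @SuppressWarnings("deprecation"). Its lifecycle is always the same: initialize the instance with table properties, then call deserialize plus getObjectInspector to read records (or serialize to write them). The following minimal sketch uses the standard Hive LazySimpleSerDe for ctrl-A-delimited text to illustrate that lifecycle; the column names and sample record are made up for illustration and appear in none of the examples below.

import java.util.Properties;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.serde.serdeConstants;
import org.apache.hadoop.hive.serde2.SerDe;
import org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe;
import org.apache.hadoop.io.Text;

public class SerDeLifecycleSketch {
    public static void main(String[] args) throws Exception {
        // 1. Instantiate a concrete SerDe; LazySimpleSerDe handles delimited text.
        @SuppressWarnings("deprecation")
        SerDe serDe = new LazySimpleSerDe();

        // 2. Initialize with table-level properties: column names and types.
        //    (This two-column schema is hypothetical, for illustration only.)
        Properties props = new Properties();
        props.setProperty(serdeConstants.LIST_COLUMNS, "id,name");
        props.setProperty(serdeConstants.LIST_COLUMN_TYPES, "int,string");
        serDe.initialize(new Configuration(), props);

        // 3. Deserialize one raw record (fields separated by \u0001, the
        //    LazySimpleSerDe default) and inspect the resulting row type.
        Object row = serDe.deserialize(new Text("42\u0001alice"));
        System.out.println(serDe.getObjectInspector().getTypeName()); // struct<id:int,name:string>
    }
}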

Example 1: runImportRCFile

import org.apache.hadoop.hive.serde2.SerDe; // import the required package/class
private void runImportRCFile(ExaIterator ctx, List<HCatTableColumn> columns, List<HCatTableColumn> partitionColumns, List<OutputColumnSpec> outputColumns, String file) throws Exception {
    // Hive expects at least the serialization format among the SerDe parameters.
    List<HCatSerDeParameter> serDeParameters = new ArrayList<>();
    serDeParameters.add(new HCatSerDeParameter("serialization.format", "1"));
    
    String inputFormatClassName = "org.apache.hadoop.hive.ql.io.RCFileInputFormat";
    String serDeClassName = "org.apache.hadoop.hive.serde2.columnar.ColumnarSerDe";
    String hdfsUser = "hdfs";
    boolean useKerberos = false;
    
    // Point the "HDFS" service at the local filesystem for this test.
    List<String> hdfsServers = new ArrayList<>();
    hdfsServers.add("file:///");
    final Configuration conf = new Configuration();
    FileSystem fs = HdfsService.getFileSystem(hdfsServers, conf);
    
    // Instantiate the input format and SerDe reflectively, then run the import.
    InputFormat<?, ?> inputFormat = (InputFormat<?, ?>) UdfUtils.getInstanceByName(inputFormatClassName);
    SerDe serDe = (SerDe) UdfUtils.getInstanceByName(serDeClassName);
    HdfsSerDeImportService.importFile(fs, file, partitionColumns, inputFormat, serDe, serDeParameters, hdfsServers, hdfsUser, columns, outputColumns, useKerberos, ctx);
}
 
Developer: EXASOL, Project: hadoop-etl-udfs, Lines: 19, Source: HdfsSerDeImportServiceTest.java

Example 2: BenchmarkFile

import org.apache.hadoop.hive.serde2.SerDe; // import the required package/class
private BenchmarkFile(
        String name,
        InputFormat<?, ? extends Writable> inputFormat,
        HiveOutputFormat<?, ?> outputFormat,
        @SuppressWarnings("deprecation") SerDe serDe,
        Iterable<? extends HiveRecordCursorProvider> recordCursorProviders,
        Iterable<? extends HivePageSourceFactory> pageSourceFactories)
        throws Exception
{
    this.name = name;
    this.inputFormat = inputFormat;
    this.outputFormat = outputFormat;
    this.serDe = serDe;
    this.recordCursorProviders = ImmutableList.copyOf(recordCursorProviders);
    this.pageSourceFactories = ImmutableList.copyOf(pageSourceFactories);

    serDe.initialize(new Configuration(), createTableProperties(COLUMNS));
}
 
Developer: y-lan, Project: presto, Lines: 19, Source: BenchmarkHiveFileFormats.java

Example 3: testRCText

import org.apache.hadoop.hive.serde2.SerDe; // import the required package/class
@Test
public void testRCText()
        throws Exception
{
    List<TestColumn> testColumns = ImmutableList.copyOf(filter(TEST_COLUMNS, testColumn -> {
        return !testColumn.getName().equals("t_struct_null") // TODO: This is a bug in the RC text reader
                && !testColumn.getName().equals("t_map_null_key_complex_key_value"); // RC file does not support complex type as key of a map
    }));

    HiveOutputFormat<?, ?> outputFormat = new RCFileOutputFormat();
    InputFormat<?, ?> inputFormat = new RCFileInputFormat<>();
    @SuppressWarnings("deprecation")
    SerDe serde = new ColumnarSerDe();
    File file = File.createTempFile("presto_test", "rc-text");
    try {
        FileSplit split = createTestFile(file.getAbsolutePath(), outputFormat, serde, null, testColumns, NUM_ROWS);
        testCursorProvider(new ColumnarTextHiveRecordCursorProvider(), split, inputFormat, serde, testColumns, NUM_ROWS);
        testCursorProvider(new GenericHiveRecordCursorProvider(), split, inputFormat, serde, testColumns, NUM_ROWS);
    }
    finally {
        //noinspection ResultOfMethodCallIgnored
        file.delete();
    }
}
 
Developer: y-lan, Project: presto, Lines: 25, Source: TestHiveFileFormats.java

Example 4: testRcTextPageSource

import org.apache.hadoop.hive.serde2.SerDe; // import the required package/class
@Test(enabled = false)
public void testRcTextPageSource()
        throws Exception
{
    HiveOutputFormat<?, ?> outputFormat = new RCFileOutputFormat();
    InputFormat<?, ?> inputFormat = new RCFileInputFormat<>();
    @SuppressWarnings("deprecation")
    SerDe serde = new ColumnarSerDe();
    File file = File.createTempFile("presto_test", "rc-binary");
    file.delete();
    try {
        FileSplit split = createTestFile(file.getAbsolutePath(), outputFormat, serde, null, TEST_COLUMNS, NUM_ROWS);
        testPageSourceFactory(new RcFilePageSourceFactory(TYPE_MANAGER), split, inputFormat, serde, TEST_COLUMNS);
    }
    finally {
        //noinspection ResultOfMethodCallIgnored
        file.delete();
    }
}
 
Developer: y-lan, Project: presto, Lines: 20, Source: TestHiveFileFormats.java

Example 5: testRCBinary

import org.apache.hadoop.hive.serde2.SerDe; // import the required package/class
@Test
public void testRCBinary()
        throws Exception
{
    List<TestColumn> testColumns = ImmutableList.copyOf(filter(TEST_COLUMNS, testColumn -> {
        // RC file does not support complex type as key of a map
        return !testColumn.getName().equals("t_map_null_key_complex_key_value");
    }));

    HiveOutputFormat<?, ?> outputFormat = new RCFileOutputFormat();
    InputFormat<?, ?> inputFormat = new RCFileInputFormat<>();
    @SuppressWarnings("deprecation")
    SerDe serde = new LazyBinaryColumnarSerDe();
    File file = File.createTempFile("presto_test", "rc-binary");
    try {
        FileSplit split = createTestFile(file.getAbsolutePath(), outputFormat, serde, null, testColumns, NUM_ROWS);
        testCursorProvider(new ColumnarBinaryHiveRecordCursorProvider(), split, inputFormat, serde, testColumns, NUM_ROWS);
        testCursorProvider(new GenericHiveRecordCursorProvider(), split, inputFormat, serde, testColumns, NUM_ROWS);
    }
    finally {
        //noinspection ResultOfMethodCallIgnored
        file.delete();
    }
}
 
Developer: y-lan, Project: presto, Lines: 25, Source: TestHiveFileFormats.java

Example 6: testRcBinaryPageSource

import org.apache.hadoop.hive.serde2.SerDe; // import the required package/class
@Test(enabled = false)
public void testRcBinaryPageSource()
        throws Exception
{
    HiveOutputFormat<?, ?> outputFormat = new RCFileOutputFormat();
    InputFormat<?, ?> inputFormat = new RCFileInputFormat<>();
    @SuppressWarnings("deprecation")
    SerDe serde = new LazyBinaryColumnarSerDe();
    File file = File.createTempFile("presto_test", "rc-binary");
    file.delete();
    try {
        FileSplit split = createTestFile(file.getAbsolutePath(), outputFormat, serde, null, TEST_COLUMNS, NUM_ROWS);
        testPageSourceFactory(new RcFilePageSourceFactory(TYPE_MANAGER), split, inputFormat, serde, TEST_COLUMNS);
    }
    finally {
        //noinspection ResultOfMethodCallIgnored
        file.delete();
    }
}
 
Developer: y-lan, Project: presto, Lines: 20, Source: TestHiveFileFormats.java

Example 7: testOrc

import org.apache.hadoop.hive.serde2.SerDe; // import the required package/class
@Test
public void testOrc()
        throws Exception
{
    HiveOutputFormat<?, ?> outputFormat = new org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat();
    InputFormat<?, ?> inputFormat = new org.apache.hadoop.hive.ql.io.orc.OrcInputFormat();
    @SuppressWarnings("deprecation")
    SerDe serde = new org.apache.hadoop.hive.ql.io.orc.OrcSerde();
    File file = File.createTempFile("presto_test", "orc");
    file.delete();
    try {
        FileSplit split = createTestFile(file.getAbsolutePath(), outputFormat, serde, null, TEST_COLUMNS, NUM_ROWS);
        testCursorProvider(new OrcRecordCursorProvider(), split, inputFormat, serde, TEST_COLUMNS, NUM_ROWS);
    }
    finally {
        //noinspection ResultOfMethodCallIgnored
        file.delete();
    }
}
 
Developer: y-lan, Project: presto, Lines: 20, Source: TestHiveFileFormats.java

Example 8: testOrcDataStream

import org.apache.hadoop.hive.serde2.SerDe; // import the required package/class
@Test
public void testOrcDataStream()
        throws Exception
{
    HiveOutputFormat<?, ?> outputFormat = new org.apache.hadoop.hive.ql.io.orc.OrcOutputFormat();
    InputFormat<?, ?> inputFormat = new org.apache.hadoop.hive.ql.io.orc.OrcInputFormat();
    @SuppressWarnings("deprecation")
    SerDe serde = new org.apache.hadoop.hive.ql.io.orc.OrcSerde();
    File file = File.createTempFile("presto_test", "orc");
    file.delete();
    try {
        FileSplit split = createTestFile(file.getAbsolutePath(), outputFormat, serde, null, TEST_COLUMNS, NUM_ROWS);
        testPageSourceFactory(new OrcPageSourceFactory(TYPE_MANAGER), split, inputFormat, serde, TEST_COLUMNS);
    }
    finally {
        //noinspection ResultOfMethodCallIgnored
        file.delete();
    }
}
 
Developer: y-lan, Project: presto, Lines: 20, Source: TestHiveFileFormats.java

Example 9: testParquet

import org.apache.hadoop.hive.serde2.SerDe; // import the required package/class
@Test
public void testParquet()
        throws Exception
{
    List<TestColumn> testColumns = getTestColumnsSupportedByParquet();

    HiveOutputFormat<?, ?> outputFormat = new MapredParquetOutputFormat();
    InputFormat<?, ?> inputFormat = new MapredParquetInputFormat();
    @SuppressWarnings("deprecation")
    SerDe serde = new ParquetHiveSerDe();
    File file = File.createTempFile("presto_test", "parquet");
    file.delete();
    try {
        FileSplit split = createTestFile(file.getAbsolutePath(), outputFormat, serde, null, testColumns, NUM_ROWS);
        HiveRecordCursorProvider cursorProvider = new ParquetRecordCursorProvider(false);
        testCursorProvider(cursorProvider, split, inputFormat, serde, testColumns, NUM_ROWS);
    }
    finally {
        //noinspection ResultOfMethodCallIgnored
        file.delete();
    }
}
 
Developer: y-lan, Project: presto, Lines: 23, Source: TestHiveFileFormats.java

Example 10: testParquetPageSource

import org.apache.hadoop.hive.serde2.SerDe; // import the required package/class
@Test(enabled = false)
public void testParquetPageSource()
        throws Exception
{
    List<TestColumn> testColumns = getTestColumnsSupportedByParquet();
    testColumns = testColumns.stream()
            .filter(column -> column.getObjectInspector().getCategory() == Category.PRIMITIVE)
            .collect(toList());

    HiveOutputFormat<?, ?> outputFormat = new MapredParquetOutputFormat();
    InputFormat<?, ?> inputFormat = new MapredParquetInputFormat();
    @SuppressWarnings("deprecation")
    SerDe serde = new ParquetHiveSerDe();
    File file = File.createTempFile("presto_test", "parquet");
    file.delete();
    try {
        FileSplit split = createTestFile(file.getAbsolutePath(), outputFormat, serde, null, testColumns, NUM_ROWS);
        TestingConnectorSession session = new TestingConnectorSession(
                new HiveSessionProperties(new HiveClientConfig().setParquetOptimizedReaderEnabled(true)).getSessionProperties());
        testPageSourceFactory(new ParquetPageSourceFactory(TYPE_MANAGER, false), split, inputFormat, serde, testColumns, session);
    }
    finally {
        //noinspection ResultOfMethodCallIgnored
        file.delete();
    }
}
 
Developer: y-lan, Project: presto, Lines: 27, Source: TestHiveFileFormats.java

Example 11: testParquetUseColumnNames

import org.apache.hadoop.hive.serde2.SerDe; // import the required package/class
@Test
public void testParquetUseColumnNames()
        throws Exception
{
    List<TestColumn> testColumns = getTestColumnsSupportedByParquet();

    HiveOutputFormat<?, ?> outputFormat = new MapredParquetOutputFormat();
    InputFormat<?, ?> inputFormat = new MapredParquetInputFormat();
    @SuppressWarnings("deprecation")
    SerDe serde = new ParquetHiveSerDe();
    File file = File.createTempFile("presto_test", "parquet");
    file.delete();
    try {
        FileSplit split = createTestFile(file.getAbsolutePath(), outputFormat, serde, null, testColumns, NUM_ROWS);
        // Reverse the order of the columns to test access by name, not by index
        Collections.reverse(testColumns);
        HiveRecordCursorProvider cursorProvider = new ParquetRecordCursorProvider(true);
        testCursorProvider(cursorProvider, split, inputFormat, serde, testColumns, NUM_ROWS);
    }
    finally {
        //noinspection ResultOfMethodCallIgnored
        file.delete();
    }
}
 
Developer: y-lan, Project: presto, Lines: 25, Source: TestHiveFileFormats.java

Example 12: testDwrf

import org.apache.hadoop.hive.serde2.SerDe; // import the required package/class
@Test
public void testDwrf()
        throws Exception
{
    List<TestColumn> testColumns = ImmutableList.copyOf(filter(TEST_COLUMNS, testColumn -> {
        ObjectInspector objectInspector = testColumn.getObjectInspector();
        return !hasType(objectInspector, PrimitiveCategory.DATE);
    }));

    HiveOutputFormat<?, ?> outputFormat = new com.facebook.hive.orc.OrcOutputFormat();
    InputFormat<?, ?> inputFormat = new com.facebook.hive.orc.OrcInputFormat();
    @SuppressWarnings("deprecation")
    SerDe serde = new com.facebook.hive.orc.OrcSerde();
    File file = File.createTempFile("presto_test", "dwrf");
    file.delete();
    try {
        FileSplit split = createTestFile(file.getAbsolutePath(), outputFormat, serde, null, testColumns, NUM_ROWS);
        testCursorProvider(new DwrfRecordCursorProvider(), split, inputFormat, serde, testColumns, NUM_ROWS);
    }
    finally {
        //noinspection ResultOfMethodCallIgnored
        file.delete();
    }
}
 
Developer: y-lan, Project: presto, Lines: 25, Source: TestHiveFileFormats.java

Example 13: testDwrfDataStream

import org.apache.hadoop.hive.serde2.SerDe; // import the required package/class
@Test
public void testDwrfDataStream()
        throws Exception
{
    List<TestColumn> testColumns = ImmutableList.copyOf(filter(TEST_COLUMNS, testColumn -> {
        ObjectInspector objectInspector = testColumn.getObjectInspector();
        return !hasType(objectInspector, PrimitiveCategory.DATE);
    }));

    HiveOutputFormat<?, ?> outputFormat = new com.facebook.hive.orc.OrcOutputFormat();
    InputFormat<?, ?> inputFormat = new com.facebook.hive.orc.OrcInputFormat();
    @SuppressWarnings("deprecation")
    SerDe serde = new com.facebook.hive.orc.OrcSerde();
    File file = File.createTempFile("presto_test", "dwrf");
    file.delete();
    try {
        FileSplit split = createTestFile(file.getAbsolutePath(), outputFormat, serde, null, testColumns, NUM_ROWS);
        testPageSourceFactory(new DwrfPageSourceFactory(TYPE_MANAGER), split, inputFormat, serde, testColumns);
    }
    finally {
        //noinspection ResultOfMethodCallIgnored
        file.delete();
    }
}
 
Developer: y-lan, Project: presto, Lines: 25, Source: TestHiveFileFormats.java

Example 14: initHeaderSerde

import org.apache.hadoop.hive.serde2.SerDe; // import the required package/class
/**
 * Inits the header serde.
 *
 * @throws ClassNotFoundException the class not found exception
 * @throws SerDeException         the ser de exception
 */
@SuppressWarnings("unchecked")
private void initHeaderSerde() throws ClassNotFoundException, SerDeException {
  if (headerSerde == null) {
    headerSerde = ReflectionUtils.newInstance(
      ctx.getConf().getClass(LensConfConstants.QUERY_OUTPUT_SERDE,
        (Class<? extends AbstractSerDe>) Class.forName(LensConfConstants.DEFAULT_OUTPUT_SERDE), SerDe.class),
      ctx.getConf());

    Properties hprops = new Properties();
    if (columnNames.size() > 0) {
      hprops.setProperty(serdeConstants.LIST_COLUMNS, StringUtils.join(escapedColumnNames, ","));
    }
    if (htypes.length() > 0) {
      hprops.setProperty(serdeConstants.LIST_COLUMN_TYPES, htypes);
    }
    headerSerde.initialize(ctx.getConf(), hprops);

    headerOI = ObjectInspectorFactory.getStandardStructObjectInspector(columnNames, columnHeaderOIs);
  }
}
 
Developer: apache, Project: lens, Lines: 27, Source: AbstractOutputFormatter.java

Example 15: createSerDe

import org.apache.hadoop.hive.serde2.SerDe; // import the required package/class
/**
 * Utility method which creates a SerDe object for given SerDe class name and properties.
 */
public static SerDe createSerDe(final JobConf job, final String sLib, final Properties properties) throws Exception {
  final Class<? extends SerDe> c = Class.forName(sLib).asSubclass(SerDe.class);
  final SerDe serde = c.getConstructor().newInstance();
  serde.initialize(job, properties);

  return serde;
}
 
Developer: dremio, Project: dremio-oss, Lines: 11, Source: HiveAbstractReader.java
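A possible call site, as a hedged sketch: createSerDe is a public static helper, so assuming a JobConf is at hand and using the standard Hive LazySimpleSerDe class name, it could be invoked as below. The two-column schema is illustrative and not taken from the Dremio reader itself.

// Hypothetical usage; class name and properties are assumptions.
JobConf job = new JobConf();
Properties props = new Properties();
props.setProperty(serdeConstants.LIST_COLUMNS, "id,name");
props.setProperty(serdeConstants.LIST_COLUMN_TYPES, "int,string");
SerDe serde = HiveAbstractReader.createSerDe(job, "org.apache.hadoop.hive.serde2.lazy.LazySimpleSerDe", props);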


Note: The org.apache.hadoop.hive.serde2.SerDe class examples in this article were compiled by 纯净天空 from GitHub, MSDocs, and other open-source code and documentation platforms. The snippets were selected from open-source projects contributed by many developers; copyright remains with the original authors. For redistribution and use, please refer to the corresponding project's license; do not reproduce without permission.