

Java RecordWriter Class Code Examples

This article collects typical usage examples of the Java class org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter. If you have been wondering what RecordWriter is for, how to use it, or what working code looks like, the curated class examples below should help.


RecordWriter is nested in org.apache.hadoop.hive.ql.exec.FileSinkOperator. The sections below present 15 code examples of the RecordWriter class, sorted by popularity by default.
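The interface itself is small: a RecordWriter receives each row as a Writable through write and finishes with close, whose boolean flag signals whether the task is aborting (Examples 9 and 10 below implement it directly). Here is a minimal illustrative sketch; CountingRecordWriter is a made-up name, not taken from any of the projects below.

import java.io.IOException;

import org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter;
import org.apache.hadoop.io.Writable;

// Illustrative only: counts rows instead of persisting them.
public class CountingRecordWriter implements RecordWriter {
  private long rows;

  @Override
  public void write(Writable w) throws IOException {
    rows++; // a real writer would serialize w to its sink here
  }

  @Override
  public void close(boolean abort) throws IOException {
    // abort == true means the task failed and partial output should be discarded
    System.out.println((abort ? "aborted after " : "wrote ") + rows + " rows");
  }
}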

Example 1: getWriter

import org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter; // import the required package/class
private RecordWriter getWriter() throws IOException {
  try {
    // Instantiate the HiveOutputFormat implementation named in the writer properties
    HiveOutputFormat<?, ?> outputFormat = HiveOutputFormat.class
        .cast(Class.forName(this.properties.getProp(HiveWritableHdfsDataWriterBuilder.WRITER_OUTPUT_FORMAT_CLASS))
            .newInstance());

    // The Writable subclass this writer will be asked to persist
    @SuppressWarnings("unchecked")
    Class<? extends Writable> writableClass = (Class<? extends Writable>) Class
        .forName(this.properties.getProp(HiveWritableHdfsDataWriterBuilder.WRITER_WRITABLE_CLASS));

    // isCompressed = true; no Progressable is supplied
    return outputFormat.getHiveRecordWriter(new JobConf(), this.stagingFile, writableClass, true,
        this.properties.getProperties(), null);
  } catch (Throwable t) {
    throw new IOException("Failed to create writer", t);
  }
}
 
Author: Hanmourang | Project: Gobblin | Lines: 17 | Source: HiveWritableHdfsDataWriter.java

Example 2: createOrcRecordWriter

import org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter; // import the required package/class
static RecordWriter createOrcRecordWriter(File outputFile, Format format, Compression compression, ObjectInspector columnObjectInspector)
        throws IOException
{
    JobConf jobConf = new JobConf();
    jobConf.set("hive.exec.orc.write.format", format == ORC_12 ? "0.12" : "0.11");
    jobConf.set("hive.exec.orc.default.compress", compression.name());
    ReaderWriterProfiler.setProfilerOptions(jobConf);

    return new OrcOutputFormat().getHiveRecordWriter(
            jobConf,
            new Path(outputFile.toURI()),
            Text.class,
            compression != NONE,
            createTableProperties("test", columnObjectInspector.getTypeName()),
            () -> { }
    );
}
 
Author: y-lan | Project: presto | Lines: 18 | Source: OrcTester.java
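As a usage note, a writer returned by createOrcRecordWriter is typically driven by serializing each row against a matching ObjectInspector with Hive's OrcSerde, then closed. A minimal sketch under assumed names: writeSampleRows is hypothetical, while ORC_12, Compression, and createOrcRecordWriter refer to the example above.

import java.io.File;
import java.util.Arrays;

import org.apache.hadoop.hive.ql.io.orc.OrcSerde;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

static void writeSampleRows(File outputFile, Compression compression) throws Exception {
    // Assumed one-column schema: struct<value:string>
    ObjectInspector inspector = ObjectInspectorFactory.getStandardStructObjectInspector(
            Arrays.asList("value"),
            Arrays.<ObjectInspector>asList(PrimitiveObjectInspectorFactory.javaStringObjectInspector));

    OrcSerde serde = new OrcSerde();
    RecordWriter writer = createOrcRecordWriter(outputFile, ORC_12, compression, inspector);
    for (String value : Arrays.asList("a", "b", "c")) {
        // Each row is a one-field struct matching the inspector above
        writer.write(serde.serialize(Arrays.asList(value), inspector));
    }
    writer.close(false); // false = normal close, not an abort
}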

Example 3: createDwrfRecordWriter

import org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter; // import the required package/class
private static RecordWriter createDwrfRecordWriter(File outputFile, Compression compressionCodec, ObjectInspector columnObjectInspector)
        throws IOException
{
    JobConf jobConf = new JobConf();
    jobConf.set("hive.exec.orc.default.compress", compressionCodec.name());
    jobConf.set("hive.exec.orc.compress", compressionCodec.name());
    OrcConf.setIntVar(jobConf, OrcConf.ConfVars.HIVE_ORC_ENTROPY_STRING_THRESHOLD, 1);
    OrcConf.setIntVar(jobConf, OrcConf.ConfVars.HIVE_ORC_DICTIONARY_ENCODING_INTERVAL, 2);
    OrcConf.setBoolVar(jobConf, OrcConf.ConfVars.HIVE_ORC_BUILD_STRIDE_DICTIONARY, true);
    ReaderWriterProfiler.setProfilerOptions(jobConf);

    return new com.facebook.hive.orc.OrcOutputFormat().getHiveRecordWriter(
            jobConf,
            new Path(outputFile.toURI()),
            Text.class,
            compressionCodec != NONE,
            createTableProperties("test", columnObjectInspector.getTypeName()),
            () -> { }
    );
}
 
Author: y-lan | Project: presto | Lines: 21 | Source: OrcTester.java

Example 4: createRecordWriter

import org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter; // import the required package/class
private static RecordWriter createRecordWriter(Path target, Configuration conf)
        throws IOException
{
    try (ThreadContextClassLoader ignored = new ThreadContextClassLoader(FileSystem.class.getClassLoader())) {
        WriterOptions options = new OrcWriterOptions(conf)
                .memory(new NullMemoryManager(conf))
                .compress(ZLIB);

        try {
            return WRITER_CONSTRUCTOR.newInstance(target, options);
        }
        catch (ReflectiveOperationException e) {
            throw new RuntimeException(e);
        }
    }
}
 
Author: y-lan | Project: presto | Lines: 17 | Source: TestOrcPageSourceMemoryTracking.java
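(The WRITER_CONSTRUCTOR used here is the Constructor<? extends RecordWriter> obtained reflectively by getOrcWriterConstructor in Example 14; both snippets come from the same file, TestOrcPageSourceMemoryTracking.java.)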

Example 5: getHiveRecordWriter

import org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter; // import the required package/class
@Override
public RecordWriter getHiveRecordWriter(JobConf jobConf, Path finalOutPath, Class<? extends Writable> valueClass, boolean isCompressed, Properties tableProperties, Progressable progress)
        throws IOException {
    if (LOG.isDebugEnabled()) {
        LOG.debug("jobConf: " + jobConf);
        LOG.debug("tableProperties: " + tableProperties);
    }

    DBRecordWriter dbwriter = (DBRecordWriter) super.getRecordWriter(null, jobConf, null, progress);

    /*
    DBInputFormat.setInput(jobConf, DbRecordWritable.class, inputClass, inputQuery, inputCountQuery);
    DBInputFormat.setInput(jobConf, DbRecordWritable.class, tableName, conditions, orderBy, fieldNames);        
    DBOutputFormat.setOutput(jobConf, tableName, fieldNames);
    */

    return new JdbcRecordWriter(dbwriter);
}
 
Author: myui | Project: HiveJdbcStorageHandler | Lines: 19 | Source: JdbcOutputFormat.java

Example 6: createOrcRecordWriter

import org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter; // import the required package/class
static RecordWriter createOrcRecordWriter(File outputFile, Format format, Compression compression, ObjectInspector columnObjectInspector)
        throws IOException
{
    JobConf jobConf = new JobConf();
    jobConf.set("hive.exec.orc.write.format", format == ORC_12 ? "0.12" : "0.11");
    jobConf.set("hive.exec.orc.default.compress", compression.name());

    return new OrcOutputFormat().getHiveRecordWriter(
            jobConf,
            new Path(outputFile.toURI()),
            Text.class,
            compression != NONE,
            createTableProperties("test", columnObjectInspector.getTypeName()),
            () -> { }
    );
}
 
Author: splicemachine | Project: spliceengine | Lines: 17 | Source: OrcTester.java

Example 7: createDwrfRecordWriter

import org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter; // import the required package/class
private static RecordWriter createDwrfRecordWriter(File outputFile, Compression compressionCodec, ObjectInspector columnObjectInspector)
        throws IOException
{
    JobConf jobConf = new JobConf();
    jobConf.set("hive.exec.orc.default.compress", compressionCodec.name());
    jobConf.set("hive.exec.orc.compress", compressionCodec.name());
    OrcConf.setIntVar(jobConf, OrcConf.ConfVars.HIVE_ORC_ENTROPY_STRING_THRESHOLD, 1);
    OrcConf.setIntVar(jobConf, OrcConf.ConfVars.HIVE_ORC_DICTIONARY_ENCODING_INTERVAL, 2);
    OrcConf.setBoolVar(jobConf, OrcConf.ConfVars.HIVE_ORC_BUILD_STRIDE_DICTIONARY, true);
    return new OrcOutputFormat().getHiveRecordWriter(
            jobConf,
            new Path(outputFile.toURI()),
            Text.class,
            compressionCodec != NONE,
            createTableProperties("test", columnObjectInspector.getTypeName()),
            () -> { }
    );
}
 
Author: splicemachine | Project: spliceengine | Lines: 19 | Source: OrcTester.java

Example 8: createEmptyFile

import org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter; // import the required package/class
@SuppressWarnings({"rawtypes", "unchecked"})
private static Path createEmptyFile(Path hiveScratchDir,
    HiveOutputFormat outFileFormat, JobConf job,
    int sequenceNumber, Properties props, boolean dummyRow)
        throws IOException, InstantiationException, IllegalAccessException {

  // create a dummy empty file in a new directory
  String newDir = hiveScratchDir + Path.SEPARATOR + sequenceNumber;
  Path newPath = new Path(newDir);
  FileSystem fs = newPath.getFileSystem(job);
  fs.mkdirs(newPath);
  //Qualify the path against the file system. The user-configured path might contain a default port that is
  //dropped in the file status. Qualifying ensures that every path that goes into PathToPartitionInfo matches
  //the listed-status file path.
  newPath = fs.makeQualified(newPath);
  String newFile = newDir + Path.SEPARATOR + "emptyFile";
  Path newFilePath = new Path(newFile);

  RecordWriter recWriter = outFileFormat.getHiveRecordWriter(job, newFilePath,
      Text.class, false, props, null);
  if (dummyRow) {
    // Empty files are omitted by CombineHiveInputFormat.
    // For a metadata-only query this effectively makes the partition columns disappear;
    // that could be fixed in other ways, but this seemed the easiest (HIVE-2955).
    recWriter.write(new Text("empty"));  // written via HiveIgnoreKeyTextOutputFormat
  }
  recWriter.close(false);

  return newPath;
}
 
Author: mini666 | Project: hive-phoenix-handler | Lines: 31 | Source: Utilities.java

Example 9: getHiveRecordWriter

import org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter; // import the required package/class
@Override
public RecordWriter getHiveRecordWriter(JobConf jobConf, Path finalOutPath,
                                        Class valueClass, boolean isCompressed, Properties tableProperties,
                                        Progressable progressable) throws IOException {

  final Text text = new Text();
  final FusionRecordWriter writer = new FusionRecordWriter(jobConf, "FusionHiveWriter", progressable);
  LOG.info("Got new FusionRecordWriter for Hive");

  return new RecordWriter() {
    @Override
    public void write(Writable w) throws IOException {
      if (w instanceof LWDocumentWritable) {
        writer.write(text, (LWDocumentWritable) w);
      } else {
        throw new IOException(
            "Expected LWDocumentWritable type, but found "
                + w.getClass().getName());
      }
    }

    @Override
    public void close(boolean abort) throws IOException {
      LOG.info("Closing FusionRecordWriter for Hive");
      writer.close(Reporter.NULL);
    }
  };
}
 
Author: lucidworks | Project: hive-solr | Lines: 29 | Source: FusionHiveOutputFormat.java

Example 10: getHiveRecordWriter

import org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter; // import the required package/class
@Override
public RecordWriter getHiveRecordWriter(JobConf jc, Path finalOutPath,
        Class valueClass, boolean isCompressed, Properties tableProperties,
        Progressable progress) throws IOException {

  final Text text = new Text();
  final LucidWorksWriter writer = new LucidWorksWriter(progress);

  writer.open(jc, "HiveWriter");
  LOG.info("Got new LucidWorksWriter for Hive");

  return new RecordWriter() {
    @Override
    public void write(Writable w) throws IOException {
      if (w instanceof LWDocumentWritable) {
        writer.write(text, (LWDocumentWritable) w);
      } else {
        throw new IOException(
                "Expected LWDocumentWritable type, but found "
                        + w.getClass().getName());
      }
    }

    @Override
    public void close(boolean abort) throws IOException {
      LOG.info("Closing LucidWorksWriter for Hive");
      writer.close();
    }
  };
}
 
Author: lucidworks | Project: hive-solr | Lines: 31 | Source: LWHiveOutputFormat.java

Example 11: writeOrcColumn

import org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter; // import the required package/class
static DataSize writeOrcColumn(File outputFile, Format format, Compression compression, ObjectInspector columnObjectInspector, Iterator<?> values)
        throws Exception
{
    RecordWriter recordWriter;
    if (DWRF == format) {
        recordWriter = createDwrfRecordWriter(outputFile, compression, columnObjectInspector);
    }
    else {
        recordWriter = createOrcRecordWriter(outputFile, format, compression, columnObjectInspector);
    }
    return writeOrcColumn(outputFile, format, recordWriter, columnObjectInspector, values);
}
 
Author: y-lan | Project: presto | Lines: 13 | Source: OrcTester.java

Example 12: setDwrfLowMemoryFlag

import org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter; // import the required package/class
private static void setDwrfLowMemoryFlag(RecordWriter recordWriter)
{
    Object writer = getFieldValue(recordWriter, "writer");
    Object memoryManager = getFieldValue(writer, "memoryManager");
    setFieldValue(memoryManager, "lowMemoryMode", true);
    try {
        writer.getClass().getMethod("enterLowMemoryMode").invoke(writer);
    }
    catch (Exception e) {
        throw Throwables.propagate(e);
    }
}
 
Author: y-lan | Project: presto | Lines: 13 | Source: OrcTester.java

Example 13: createRecordWriter

import org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter; // import the required package/class
public static RecordWriter createRecordWriter(Path target, JobConf conf, Properties properties, String outputFormatName)
{
    try {
        Object writer = Class.forName(outputFormatName).getConstructor().newInstance();
        boolean isCompressed = HiveConf.getBoolVar(conf, COMPRESSRESULT);
        return ((HiveOutputFormat<?, ?>) writer).getHiveRecordWriter(conf, target, Text.class, isCompressed, properties, Reporter.NULL);
    }
    catch (IOException | ReflectiveOperationException e) {
        throw new PrestoException(HIVE_WRITER_ERROR, e);
    }
}
 
Author: y-lan | Project: presto | Lines: 12 | Source: HiveWriteUtils.java
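A hypothetical call site for this helper might look as follows. HiveIgnoreKeyTextOutputFormat is a real Hive output format (it also appears in Example 8's comment), but the target path and the empty Properties are placeholders.

static void demo() throws IOException {
    Properties tableProperties = new Properties(); // placeholder: no table properties
    RecordWriter writer = createRecordWriter(
            new Path("/tmp/example-output"),       // placeholder target path
            new JobConf(),
            tableProperties,
            "org.apache.hadoop.hive.ql.io.HiveIgnoreKeyTextOutputFormat");
    writer.write(new Text("hello"));
    writer.close(false);
}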

Example 14: getOrcWriterConstructor

import org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter; // import the required package/class
private static Constructor<? extends RecordWriter> getOrcWriterConstructor()
{
    try {
        Constructor<? extends RecordWriter> constructor = OrcOutputFormat.class.getClassLoader()
                .loadClass(ORC_RECORD_WRITER)
                .asSubclass(RecordWriter.class)
                .getDeclaredConstructor(Path.class, WriterOptions.class);
        constructor.setAccessible(true);
        return constructor;
    }
    catch (ReflectiveOperationException e) {
        throw Throwables.propagate(e);
    }
}
 
Author: y-lan | Project: presto | Lines: 15 | Source: TestOrcPageSourceMemoryTracking.java

Example 15: getHiveRecordWriter

import org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter; // import the required package/class
@Override
public RecordWriter getHiveRecordWriter(JobConf jc, Path finalOutPath,
    Class<? extends Writable> valueClass, boolean isCompressed,
    Properties tableProperties, Progressable progress) throws IOException {

  Base64RecordWriter writer = new Base64RecordWriter(super
      .getHiveRecordWriter(jc, finalOutPath, BytesWritable.class,
      isCompressed, tableProperties, progress));
  writer.configure(jc);
  return writer;
}
 
Author: micmiu | Project: bigdata-tutorial | Lines: 12 | Source: Base64TextOutputFormat.java


Note: The org.apache.hadoop.hive.ql.exec.FileSinkOperator.RecordWriter examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by many developers; copyright remains with the original authors, and redistribution and use must comply with each project's license. Please do not republish without permission.