This article collects typical usage examples of the Java class org.apache.drill.exec.store.RecordWriter. If you are wondering what the RecordWriter class is for, how to use it, or what real-world usage looks like, the curated examples below should help.
RecordWriter belongs to the org.apache.drill.exec.store package. Thirteen code examples are shown below, sorted by popularity by default.
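All thirteen examples share the same shape: build a map of writer options (output location, a per-fragment file-name prefix, the target file system, plus format-specific settings), construct a format-specific RecordWriter, and call init(options) before returning it. The sketch below distills that shared pattern from the examples that follow; FooRecordWriter is a hypothetical writer class standing in for JsonRecordWriter, DrillTextRecordWriter, ParquetRecordWriter, and the like.

import java.io.IOException;
import java.util.Map;

import org.apache.drill.exec.ops.FragmentContext;
import org.apache.drill.exec.proto.ExecProtos.FragmentHandle;
import org.apache.drill.exec.store.RecordWriter;
import org.apache.drill.exec.store.dfs.FileSystemConfig;
import org.apache.drill.exec.store.dfs.easy.EasyWriter;
import org.apache.hadoop.fs.FileSystem;

import com.google.common.collect.Maps;

public RecordWriter getRecordWriter(FragmentContext context, EasyWriter writer) throws IOException {
  Map<String, String> options = Maps.newHashMap();
  // where the output files go
  options.put("location", writer.getLocation());
  // major/minor fragment ids make the file-name prefix unique across parallel writers
  FragmentHandle handle = context.getHandle();
  options.put("prefix", String.format("%d_%d", handle.getMajorFragmentId(), handle.getMinorFragmentId()));
  // target file system taken from the storage plugin configuration
  options.put(FileSystem.FS_DEFAULT_NAME_KEY, ((FileSystemConfig) writer.getStorageConfig()).connection);
  // format-specific settings (delimiter, extension, ...) would be added here as well
  RecordWriter recordWriter = new FooRecordWriter(); // hypothetical format-specific writer
  recordWriter.init(options);
  return recordWriter;
}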
Example 1: getRecordWriter
import org.apache.drill.exec.store.RecordWriter; // import the required package/class
@Override
public RecordWriter getRecordWriter(FragmentContext context, EasyWriter writer) throws IOException {
  Map<String, String> options = Maps.newHashMap();
  options.put("location", writer.getLocation());
  // the major/minor fragment ids give each parallel writer a unique file-name prefix
  FragmentHandle handle = context.getHandle();
  String fragmentId = String.format("%d_%d", handle.getMajorFragmentId(), handle.getMinorFragmentId());
  options.put("prefix", fragmentId);
  options.put("separator", " ");
  options.put(FileSystem.FS_DEFAULT_NAME_KEY, ((FileSystemConfig)writer.getStorageConfig()).connection);
  options.put("extension", "json");
  options.put("extended", Boolean.toString(context.getOptions().getOption(ExecConstants.JSON_EXTENDED_TYPES)));
  RecordWriter recordWriter = new JsonRecordWriter();
  recordWriter.init(options);
  return recordWriter;
}
Example 2: getRecordWriter
import org.apache.drill.exec.store.RecordWriter; // import the required package/class
@Override
public RecordWriter getRecordWriter(FragmentContext context, EasyWriter writer) throws IOException {
  Map<String, String> options = Maps.newHashMap();
  options.put("location", writer.getLocation());
  FragmentHandle handle = context.getHandle();
  String fragmentId = String.format("%d_%d", handle.getMajorFragmentId(), handle.getMinorFragmentId());
  options.put("prefix", fragmentId);
  options.put("separator", ((ModelFormatConfig)getConfig()).getDelimiter());
  options.put(FileSystem.FS_DEFAULT_NAME_KEY, ((FileSystemConfig)writer.getStorageConfig()).connection);
  options.put("extension", ((ModelFormatConfig)getConfig()).getExtensions().get(0));
  RecordWriter recordWriter = new DrillModelWriter(/*context.getAllocator()*/);
  recordWriter.init(options);
  return recordWriter;
}
Example 3: getRecordWriter
import org.apache.drill.exec.store.RecordWriter; // import the required package/class
@Override
public RecordWriter getRecordWriter(final FragmentContext context, final EasyWriter writer) throws IOException {
  final Map<String, String> options = Maps.newHashMap();
  options.put("location", writer.getLocation());
  FragmentHandle handle = context.getHandle();
  String fragmentId = String.format("%d_%d", handle.getMajorFragmentId(), handle.getMinorFragmentId());
  options.put("prefix", fragmentId);
  options.put("separator", ((TextFormatConfig)getConfig()).getFieldDelimiterAsString());
  options.put(FileSystem.FS_DEFAULT_NAME_KEY, ((FileSystemConfig)writer.getStorageConfig()).connection);
  options.put("extension", ((TextFormatConfig)getConfig()).getExtensions().get(0));
  RecordWriter recordWriter = new DrillTextRecordWriter(context.getAllocator());
  recordWriter.init(options);
  return recordWriter;
}
Example 4: getRecordWriter
import org.apache.drill.exec.store.RecordWriter; // import the required package/class
public RecordWriter getRecordWriter(FragmentContext context, ParquetWriter writer) throws IOException, OutOfMemoryException {
  Map<String, String> options = Maps.newHashMap();
  options.put("location", writer.getLocation());
  FragmentHandle handle = context.getHandle();
  String fragmentId = String.format("%d_%d", handle.getMajorFragmentId(), handle.getMinorFragmentId());
  options.put("prefix", fragmentId);
  options.put(FileSystem.FS_DEFAULT_NAME_KEY, ((FileSystemConfig)writer.getStorageConfig()).connection);
  // Parquet writer settings are read from the current session/system options
  options.put(ExecConstants.PARQUET_BLOCK_SIZE, context.getOptions().getOption(ExecConstants.PARQUET_BLOCK_SIZE).num_val.toString());
  options.put(ExecConstants.PARQUET_PAGE_SIZE, context.getOptions().getOption(ExecConstants.PARQUET_PAGE_SIZE).num_val.toString());
  options.put(ExecConstants.PARQUET_DICT_PAGE_SIZE, context.getOptions().getOption(ExecConstants.PARQUET_DICT_PAGE_SIZE).num_val.toString());
  options.put(ExecConstants.PARQUET_WRITER_COMPRESSION_TYPE,
      context.getOptions().getOption(ExecConstants.PARQUET_WRITER_COMPRESSION_TYPE).string_val);
  options.put(ExecConstants.PARQUET_WRITER_ENABLE_DICTIONARY_ENCODING,
      context.getOptions().getOption(ExecConstants.PARQUET_WRITER_ENABLE_DICTIONARY_ENCODING).bool_val.toString());
  RecordWriter recordWriter = new ParquetRecordWriter(context, writer);
  recordWriter.init(options);
  return recordWriter;
}
Example 5: getRecordWriter
import org.apache.drill.exec.store.RecordWriter; // import the required package/类
@Override
public RecordWriter getRecordWriter(FragmentContext context, EasyWriter writer) throws IOException {
  Map<String, String> options = Maps.newHashMap();
  options.put("location", writer.getLocation());
  FragmentHandle handle = context.getHandle();
  String fragmentId = String.format("%d_%d", handle.getMajorFragmentId(), handle.getMinorFragmentId());
  options.put("prefix", fragmentId);
  options.put("separator", " ");
  options.put(FileSystem.FS_DEFAULT_NAME_KEY, ((FileSystemConfig)writer.getStorageConfig()).connection);
  options.put("extension", "json");
  options.put("extended", Boolean.toString(context.getOptions().getOption(ExecConstants.JSON_EXTENDED_TYPES)));
  options.put("uglify", Boolean.toString(context.getOptions().getOption(ExecConstants.JSON_WRITER_UGLIFY)));
  options.put("skipnulls", Boolean.toString(context.getOptions().getOption(ExecConstants.JSON_WRITER_SKIPNULLFIELDS)));
  options.put("enableNanInf", Boolean.toString(context.getOptions().getOption(ExecConstants.JSON_WRITER_NAN_INF_NUMBERS_VALIDATOR)));
  RecordWriter recordWriter = new JsonRecordWriter(writer.getStorageStrategy());
  recordWriter.init(options);
  return recordWriter;
}
Example 6: getRecordWriter
import org.apache.drill.exec.store.RecordWriter; // import the required package/class
@Override
public RecordWriter getRecordWriter(final FragmentContext context, final EasyWriter writer) throws IOException {
  final Map<String, String> options = Maps.newHashMap();
  options.put("location", writer.getLocation());
  FragmentHandle handle = context.getHandle();
  String fragmentId = String.format("%d_%d", handle.getMajorFragmentId(), handle.getMinorFragmentId());
  options.put("prefix", fragmentId);
  options.put("separator", ((TextFormatConfig)getConfig()).getFieldDelimiterAsString());
  options.put(FileSystem.FS_DEFAULT_NAME_KEY, ((FileSystemConfig)writer.getStorageConfig()).connection);
  options.put("extension", ((TextFormatConfig)getConfig()).getExtensions().get(0));
  RecordWriter recordWriter = new DrillTextRecordWriter(context.getAllocator(), writer.getStorageStrategy());
  recordWriter.init(options);
  return recordWriter;
}
Example 7: WriterRecordBatch
import org.apache.drill.exec.store.RecordWriter; // import the required package/class
public WriterRecordBatch(Writer writer, RecordBatch incoming, FragmentContext context, RecordWriter recordWriter) throws OutOfMemoryException {
  super(writer, context, false);
  this.incoming = incoming;
  // build the major_minor fragment id string for this fragment
  final FragmentHandle handle = context.getHandle();
  fragmentUniqueId = String.format("%d_%d", handle.getMajorFragmentId(), handle.getMinorFragmentId());
  this.recordWriter = recordWriter;
}
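WriterRecordBatch is the operator that drives a RecordWriter against each incoming batch of records. A format plugin typically creates it by pairing the incoming RecordBatch with the RecordWriter produced by one of the getRecordWriter methods above. The sketch below shows that wiring; the getWriterBatch method name and signature follow the usual EasyFormatPlugin pattern but are an assumption here, so verify them against your Drill version.

import java.io.IOException;

import org.apache.drill.common.exceptions.ExecutionSetupException;
import org.apache.drill.exec.ops.FragmentContext;
import org.apache.drill.exec.physical.impl.WriterRecordBatch;
import org.apache.drill.exec.record.RecordBatch;
import org.apache.drill.exec.store.dfs.easy.EasyWriter;

// Assumed wiring inside an EasyFormatPlugin subclass: hand the format-specific
// RecordWriter to the WriterRecordBatch constructor shown in Example 7.
public RecordBatch getWriterBatch(FragmentContext context, RecordBatch incoming, EasyWriter writer)
    throws ExecutionSetupException {
  try {
    return new WriterRecordBatch(writer, incoming, context, getRecordWriter(context, writer));
  } catch (IOException e) {
    throw new ExecutionSetupException("Failed to create the WriterRecordBatch", e);
  }
}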
Example 8: getRecordWriter
import org.apache.drill.exec.store.RecordWriter; // import the required package/class
public RecordWriter getRecordWriter(FragmentContext context, ParquetWriter writer) throws IOException, OutOfMemoryException {
  Map<String, String> options = Maps.newHashMap();
  options.put("location", writer.getLocation());
  FragmentHandle handle = context.getHandle();
  String fragmentId = String.format("%d_%d", handle.getMajorFragmentId(), handle.getMinorFragmentId());
  options.put("prefix", fragmentId);
  options.put(FileSystem.FS_DEFAULT_NAME_KEY, ((FileSystemConfig)writer.getStorageConfig()).connection);
  // Parquet writer settings are read from the current session/system options
  options.put(ExecConstants.PARQUET_BLOCK_SIZE, context.getOptions().getOption(ExecConstants.PARQUET_BLOCK_SIZE).num_val.toString());
  options.put(ExecConstants.PARQUET_WRITER_USE_SINGLE_FS_BLOCK,
      context.getOptions().getOption(ExecConstants.PARQUET_WRITER_USE_SINGLE_FS_BLOCK).bool_val.toString());
  options.put(ExecConstants.PARQUET_PAGE_SIZE, context.getOptions().getOption(ExecConstants.PARQUET_PAGE_SIZE).num_val.toString());
  options.put(ExecConstants.PARQUET_DICT_PAGE_SIZE, context.getOptions().getOption(ExecConstants.PARQUET_DICT_PAGE_SIZE).num_val.toString());
  options.put(ExecConstants.PARQUET_WRITER_COMPRESSION_TYPE,
      context.getOptions().getOption(ExecConstants.PARQUET_WRITER_COMPRESSION_TYPE).string_val);
  options.put(ExecConstants.PARQUET_WRITER_ENABLE_DICTIONARY_ENCODING,
      context.getOptions().getOption(ExecConstants.PARQUET_WRITER_ENABLE_DICTIONARY_ENCODING).bool_val.toString());
  RecordWriter recordWriter = new ParquetRecordWriter(context, writer);
  recordWriter.init(options);
  return recordWriter;
}
Example 9: getRecordWriter
import org.apache.drill.exec.store.RecordWriter; // import the required package/class
@Override
public RecordWriter getRecordWriter(FragmentContext context, EasyWriter writer) throws IOException {
  throw new UnsupportedOperationException("unimplemented");
}
Example 10: getRecordWriter
import org.apache.drill.exec.store.RecordWriter; // import the required package/class
@Override
public RecordWriter getRecordWriter(FragmentContext context, EasyWriter writer) throws IOException {
  return null;
}
Example 11: getRecordWriter
import org.apache.drill.exec.store.RecordWriter; // import the required package/class
@Override
public RecordWriter getRecordWriter(final FragmentContext context, final EasyWriter writer) throws IOException {
  throw new UnsupportedOperationException("Drill doesn't currently support writing HTTPd logs");
}
Example 12: getRecordWriter
import org.apache.drill.exec.store.RecordWriter; // import the required package/class
@Override
public RecordWriter getRecordWriter(FragmentContext context, EasyWriter writer) throws IOException {
  throw new UnsupportedOperationException();
}
Example 13: getRecordWriter
import org.apache.drill.exec.store.RecordWriter; // import the required package/class
public abstract RecordWriter getRecordWriter(FragmentContext context, EasyWriter writer) throws IOException;