This article collects typical usage examples of the Java method org.apache.orc.OrcFile.createWriter. If you have been wondering what OrcFile.createWriter does, how to call it, or what it looks like in real code, the curated examples below should help. You can also explore further usage examples of its enclosing class, org.apache.orc.OrcFile.
The following presents 5 code examples of the OrcFile.createWriter method.
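Before the individual examples, it may help to see the end-to-end shape most of them share: build a TypeDescription schema, pass it to OrcFile.writerOptions, create the writer with OrcFile.createWriter, fill a VectorizedRowBatch, and close the writer. The snippet below is a minimal, self-contained sketch against the ORC core API; the class name MinimalOrcWriteSketch, the output path /tmp/people.orc, and the two-column schema are made up purely for illustration.

import java.nio.charset.StandardCharsets;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.exec.vector.BytesColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.LongColumnVector;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.orc.OrcFile;
import org.apache.orc.TypeDescription;
import org.apache.orc.Writer;

public class MinimalOrcWriteSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Hypothetical schema and output path, used only for illustration.
        TypeDescription schema = TypeDescription.fromString("struct<name:string,age:int>");
        Writer writer = OrcFile.createWriter(new Path("/tmp/people.orc"),
                OrcFile.writerOptions(conf).setSchema(schema));
        VectorizedRowBatch batch = schema.createRowBatch();
        BytesColumnVector name = (BytesColumnVector) batch.cols[0];
        LongColumnVector age = (LongColumnVector) batch.cols[1];
        for (int r = 0; r < 10; r++) {
            int row = batch.size++;
            name.setVal(row, ("user-" + r).getBytes(StandardCharsets.UTF_8));
            age.vector[row] = 20 + r;
            // Flush the batch to the writer whenever it fills up.
            if (batch.size == batch.getMaxSize()) {
                writer.addRowBatch(batch);
                batch.reset();
            }
        }
        // Write any remaining rows and close the file.
        if (batch.size != 0) {
            writer.addRowBatch(batch);
        }
        writer.close();
    }
}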
Example 1: open
import org.apache.orc.OrcFile; // import the package/class this method depends on
@Override
public OrcWriter<T> open(Path path) {
    if (writerOptions == null) {
        writerOptions = OrcFile.writerOptions(configuration);
    }
    if (compressionKind != null) {
        writerOptions.compress(compressionKind);
    }
    if (bufferSize != 0) {
        writerOptions.bufferSize(bufferSize);
    }
    // Add the schema to the writer options.
    TypeDescription schema = getTypeDescription();
    writerOptions.setSchema(schema);
    try {
        writer = OrcFile.createWriter(path, writerOptions);
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
    vectorizedRowBatch = schema.createRowBatch(batchSize);
    specialCaseSetup();
    return this;
}
Example 2: JsonORCFileWriter
import org.apache.orc.OrcFile; // import the package/class this method depends on
public JsonORCFileWriter(LogFilePath logFilePath, CompressionCodec codec)
        throws IOException {
    Configuration conf = new Configuration();
    Path path = new Path(logFilePath.getLogFilePath());
    schema = schemaProvider.getSchema(logFilePath.getTopic(), logFilePath);
    List<TypeDescription> fieldTypes = schema.getChildren();
    converters = new JsonConverter[fieldTypes.size()];
    for (int c = 0; c < converters.length; ++c) {
        converters[c] = VectorColumnFiller.createConverter(fieldTypes.get(c));
    }
    writer = OrcFile.createWriter(path, OrcFile.writerOptions(conf)
            .compress(resolveCompression(codec))
            .setSchema(schema));
    batch = schema.createRowBatch();
}
Example 3: PentahoOrcRecordWriter
import org.apache.orc.OrcFile; // import the package/class this method depends on
public PentahoOrcRecordWriter( SchemaDescription schemaDescription, TypeDescription schema, String filePath,
                               Configuration conf ) {
    this.schemaDescription = schemaDescription;
    this.schema = schema;
    final AtomicInteger fieldNumber = new AtomicInteger(); // Mutable field count
    schemaDescription.forEach( field -> setOutputMeta( fieldNumber, field ) );
    outputRowMetaAndData = new RowMetaAndData( outputRowMeta, new Object[ fieldNumber.get() ] );
    try {
        writer = OrcFile.createWriter( new Path( filePath ),
                OrcFile.writerOptions( conf )
                        .setSchema( schema ) );
        batch = schema.createRowBatch();
    } catch ( IOException e ) {
        logger.error( e );
    }
    // Write the additional metadata for the fields
    new OrcMetaDataWriter( writer ).write( schemaDescription );
}
Example 4: initialize
import org.apache.orc.OrcFile; // import the package/class this method depends on
@Override
public void initialize(Map<String, Object> metaData) {
    try {
        Configuration conf = new Configuration();
        // conf.set(OrcConf.BLOOM_FILTER_COLUMNS.getAttribute(), "tags");
        processor = new OrcEntityProcessor(OrcFile.createWriter(new Path(filename),
                OrcFile.writerOptions(conf).setSchema(SCHEMA)), SCHEMA.createRowBatch());
    } catch (IOException e) {
        throw new OsmosisRuntimeException(e);
    }
}
Example 5: flush
import org.apache.orc.OrcFile; // import the package/class this method depends on
private boolean flush(BufferSegment segment, String path, TypeDescription schema)
{
    Configuration conf = new Configuration();
    try {
        Writer writer = OrcFile.createWriter(new Path(path),
                OrcFile.writerOptions(conf)
                        .setSchema(schema)
                        .stripeSize(orcFileStripeSize)
                        .bufferSize(orcFileBufferSize)
                        .blockSize(orcFileBlockSize)
                        .compress(CompressionKind.ZLIB)
                        .version(OrcFile.Version.V_0_12));
        VectorizedRowBatch batch = schema.createRowBatch();
        while (segment.hasNext()) {
            String[] contents = segment.getNext();
            int rowCount = batch.size++;
            // System.out.println("contents : message.getValues() : " + Arrays.toString(contents));
            System.out.println("contents.length : " + contents.length);
            for (int i = 0; i < contents.length; i++) {
                ((BytesColumnVector) batch.cols[i]).setVal(rowCount, contents[i].getBytes());
            }
            // Flush the batch to the writer once it is full.
            if (batch.size == batch.getMaxSize()) {
                writer.addRowBatch(batch);
                batch.reset();
            }
        }
        // Write any remaining rows, then close the writer after all rows are consumed.
        if (batch.size != 0) {
            writer.addRowBatch(batch);
            batch.reset();
        }
        writer.close();
        segment.setFilePath(path);
        System.out.println("path : " + path);
        return true;
    }
    catch (IOException e) {
        e.printStackTrace();
        return false;
    }
}
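As a quick way to check what a writer produced by OrcFile.createWriter actually wrote, the file can be read back with OrcFile.createReader. The snippet below is a minimal sketch, assuming a file such as the hypothetical /tmp/people.orc from the earlier sketch; it only prints the row count, the schema, and the size of each batch read.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.ql.exec.vector.VectorizedRowBatch;
import org.apache.orc.OrcFile;
import org.apache.orc.Reader;
import org.apache.orc.RecordReader;

public class MinimalOrcReadSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Reader reader = OrcFile.createReader(new Path("/tmp/people.orc"),
                OrcFile.readerOptions(conf));
        System.out.println("rows: " + reader.getNumberOfRows()
                + ", schema: " + reader.getSchema());
        RecordReader rows = reader.rows();
        VectorizedRowBatch batch = reader.getSchema().createRowBatch();
        while (rows.nextBatch(batch)) {
            // Each iteration fills the batch with up to batch.getMaxSize() rows.
            System.out.println("read a batch of " + batch.size + " rows");
        }
        rows.close();
    }
}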