本文整理汇总了Java中org.apache.avro.file.DataFileReader.close方法的典型用法代码示例。如果您正苦于以下问题:Java DataFileReader.close方法的具体用法?Java DataFileReader.close怎么用?Java DataFileReader.close使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类org.apache.avro.file.DataFileReader
的用法示例。
在下文中一共展示了DataFileReader.close方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: readAvroFile
import org.apache.avro.file.DataFileReader; //导入方法依赖的package包/类
/**
 * Reads binary Avro-encoded entities using the schema stored in the file
 * itself and prints each record to stdout.
 *
 * <p>Fix: the original used raw {@code GenericDatumReader}/{@code DataFileReader}
 * types and leaked the reader if {@code next()} threw; generics are now
 * parameterized and the reader is closed via try-with-resources.
 *
 * @param file Avro data file (container format, schema embedded in header)
 * @throws IOException if the file cannot be opened or decoded
 */
public static void readAvroFile(File file) throws IOException {
    GenericDatumReader<GenericData.Record> datum = new GenericDatumReader<>();
    try (DataFileReader<GenericData.Record> reader = new DataFileReader<>(file, datum)) {
        // Reuse one record instance across iterations to avoid per-row allocation.
        GenericData.Record record = new GenericData.Record(reader.getSchema());
        while (reader.hasNext()) {
            reader.next(record);
            System.out.println("Name " + record.get("name") + " on "
                    + record.get("Meetup_date") + " attending "
                    + record.get("going") + " organized by "
                    + record.get("organizer") + " on " + record.get("topics"));
        }
    }
}
示例2: MemberInfoDynDeser
import org.apache.avro.file.DataFileReader; //导入方法依赖的package包/类
/**
 * Dynamic deserialization: reads generic records back from an Avro data file
 * using a schema parsed from a resource file (no generated classes needed).
 *
 * <p>Fixes: {@code SpecificDatumReader} was used for {@link GenericRecord}
 * data — {@code GenericDatumReader} is the matching reader for generic
 * records; the reader also leaked on exception and is now closed via
 * try-with-resources.
 *
 * <p>NOTE(review): the input path is hard-coded to a developer machine —
 * should be made configurable by the caller.
 *
 * @throws IOException if the schema resource or data file cannot be read
 */
public void MemberInfoDynDeser() throws IOException {
    // 1. Parse the schema definition from the bundled .avsc resource.
    Parser parser = new Parser();
    Schema mSchema = parser.parse(this.getClass().getResourceAsStream("/Members.avsc"));
    // 2. Build the datum reader for generic (schema-driven) records.
    DatumReader<GenericRecord> mGr = new GenericDatumReader<>(mSchema);
    // 3. Read every record out of the serialized container file.
    try (DataFileReader<GenericRecord> mDfr =
            new DataFileReader<>(new File("/Users/a/Desktop/tmp/members.avro"), mGr)) {
        while (mDfr.hasNext()) {
            GenericRecord gr = mDfr.next();
            System.err.println("deser data:" + gr.toString());
        }
    }
    System.out.println("Dyn Builder Ser Start Complete.");
}
示例3: readWithDifferentSchema
import org.apache.avro.file.DataFileReader; //导入方法依赖的package包/类
/**
 * Reads binary Avro-encoded entities using a reader schema that differs from
 * the writer's schema (schema evolution / projection) and prints each record.
 *
 * <p>Fix: the original used raw generic types and leaked the reader when
 * {@code next()} threw; generics are parameterized and the reader is closed
 * via try-with-resources.
 *
 * @param file      Avro data file to read
 * @param newSchema the reader (expected) schema to resolve records against
 * @throws IOException if the file cannot be opened or decoded
 */
public static void readWithDifferentSchema(File file, Schema newSchema)
        throws IOException {
    GenericDatumReader<GenericData.Record> datum = new GenericDatumReader<>(newSchema);
    try (DataFileReader<GenericData.Record> reader = new DataFileReader<>(file, datum)) {
        // Record instance is shaped by the *reader* schema, not the writer's.
        GenericData.Record record = new GenericData.Record(newSchema);
        while (reader.hasNext()) {
            reader.next(record);
            System.out.println("Name " + record.get("name") + " on "
                    + record.get("Meetup_date") + " attending "
                    + record.get("attendance") + " organized by "
                    + record.get("organizer")
                    + " at " + record.get("location"));
        }
    }
}
示例4: getSchema
import org.apache.avro.file.DataFileReader; //导入方法依赖的package包/类
/**
 * Extracts the embedded schema and file metadata from an Avro file on HDFS
 * and packages them as a {@link DatasetJsonRecord}.
 *
 * <p>Fix: {@code sin} and {@code reader} leaked whenever any call between
 * open and close threw; both are now managed by try-with-resources
 * (declaration order guarantees the reader closes before the input). The
 * unused {@code record_count} local was removed.
 *
 * @param targetFilePath HDFS path of the Avro file
 * @return schema + file-status metadata for the dataset
 * @throws IOException if the file cannot be opened or its header read
 */
@Override
public DatasetJsonRecord getSchema(Path targetFilePath)
        throws IOException {
    System.out.println("avro file path : " + targetFilePath.toUri().getPath());
    try (SeekableInput sin = new FsInput(targetFilePath, fs.getConf());
         DataFileReader<GenericRecord> reader =
                 new DataFileReader<GenericRecord>(sin, new GenericDatumReader<GenericRecord>())) {
        // "avro.codec" may be absent (null) for uncompressed files.
        String codec = reader.getMetaString("avro.codec");
        String schemaString = reader.getSchema().toString();
        String storage = STORAGE_TYPE;
        String abstractPath = targetFilePath.toUri().getPath();
        FileStatus fstat = fs.getFileStatus(targetFilePath);
        return new DatasetJsonRecord(schemaString, abstractPath, fstat.getModificationTime(),
                fstat.getOwner(), fstat.getGroup(),
                fstat.getPermission().toString(), codec, storage, "");
    }
}
示例5: deserializeFromByte
import org.apache.avro.file.DataFileReader; //导入方法依赖的package包/类
/**
 * Deserializes the last object stored in an Avro-container byte array using
 * reflection-based decoding (schema taken from the container header).
 *
 * <p>Fix: the reader leaked if iteration threw; it is now closed via
 * try-with-resources. Debug output is preserved as in the original.
 *
 * @param byteArray Avro container bytes (header + data blocks)
 * @return the last record in the container, or {@code null} if it is empty
 * @throws IOException if the bytes are not a valid Avro container
 */
public static Object deserializeFromByte(byte[] byteArray)
        throws IOException {
    SeekableByteArrayInput sin = new SeekableByteArrayInput(byteArray);
    System.out.println("length of read input stream " + sin.length());
    DatumReader<?> reader2 = new ReflectDatumReader<>();
    try (DataFileReader<?> in = new DataFileReader<>(sin, reader2)) {
        System.out.println(in.getSchema());
        System.out.println(in.hasNext());
        Object returnObject = null;
        System.out.println(in.getSchema().getFullName());
        // Iterate to the end: only the final record is returned.
        while (in.hasNext()) {
            returnObject = in.next();
        }
        return returnObject;
    }
}
示例6: extractRecords
import org.apache.avro.file.DataFileReader; //导入方法依赖的package包/类
/**
 * Reads every record from an Avro data file into a list.
 *
 * <p>Fix: the original's {@code finally} block null-checked a reference that
 * could never be null (it was assigned before the {@code try}); replaced the
 * manual close with try-with-resources.
 *
 * @param file   where to read from
 * @param schema Avro records structure (reader schema)
 * @param <T>    generated {@link SpecificRecord} type stored in the file
 * @return all values from the file, in file order
 * @throws IOException if the file cannot be opened or decoded
 */
public static <T extends SpecificRecord> List<T> extractRecords(File file, Schema schema) throws IOException {
    DatumReader<T> reader = new SpecificDatumReader<>(schema);
    List<T> data = new ArrayList<>();
    try (DataFileReader<T> fileReader = new DataFileReader<>(file, reader)) {
        while (fileReader.hasNext()) {
            data.add(fileReader.next());
        }
    }
    return data;
}
示例7: getNextNonemptyReader
import org.apache.avro.file.DataFileReader; //导入方法依赖的package包/类
/**
 * Advances through the file iterator, skipping invalid files and files that
 * contain no records, and returns an open reader positioned at the first
 * record of the next non-empty valid file — or {@code null} when exhausted.
 *
 * <p>Empty files' readers are closed before moving on; the returned reader
 * is the caller's responsibility to close.
 *
 * @return an open, non-empty reader, or {@code null} if none remain
 * @throws IOException if a candidate file cannot be opened or probed
 */
private DataFileReader<T> getNextNonemptyReader() throws IOException {
    if (fileIterator == null) {
        return null;
    }
    while (fileIterator.hasNext()) {
        LocatedFileStatus candidate = fileIterator.next();
        if (!isValidFile(candidate)) {
            continue;
        }
        FileSystemPath candidatePath = new FileSystemPath(
                path.getFileSystem(), candidate.getPath());
        DataFileReader<T> candidateReader =
                getSingleFileReader(candidatePath, readerSchema);
        // Keep only files that contain at least one record.
        if (candidateReader.hasNext()) {
            return candidateReader;
        }
        candidateReader.close();
    }
    // All files exhausted.
    return null;
}
示例8: loadMembersToPurge
import org.apache.avro.file.DataFileReader; //导入方法依赖的package包/类
/**
 * Loads member IDs to purge from an Avro file into {@code membersToPurge}.
 *
 * <p>Fixes: the original read the {@code memberId} field twice with
 * inconsistent casts — first to {@code Integer} (which throws
 * {@code ClassCastException} when the Avro field is a {@code long}), then to
 * {@code Number}; the value is now read once as {@code Number}. The reader
 * also leaked on exception and is now closed via try-with-resources.
 *
 * @param filename Avro file containing a "memberId" column
 * @throws IOException          if the file cannot be opened or decoded
 * @throws NullPointerException if any record has a null memberId
 */
private void loadMembersToPurge(String filename) throws IOException
{
    // TODO: "memberId" column name should be configurable
    try (DataFileReader<GenericRecord> dataFileReader =
            createDataFileReader(filename, true))
    {
        while (dataFileReader.hasNext())
        {
            GenericRecord record = dataFileReader.next();
            Number memberId = (Number) record.get("memberId");
            if (memberId == null)
            {
                throw new NullPointerException("memberId is null");
            }
            membersToPurge.add(memberId.intValue());
        }
    }
}
示例9: testConversion
import org.apache.avro.file.DataFileReader; //导入方法依赖的package包/类
/**
 * Converts the first record of the bundled nested.avro fixture with
 * {@code AvroToRestJsonEntryConverter} and asserts the resulting REST entry
 * matches {@code expected} (path equality plus lenient JSON comparison).
 *
 * <p>Fix: {@code dataFileReader} leaked whenever an assertion or the
 * conversion threw; it is now closed via try-with-resources.
 *
 * @param expected            expected resource path and JSON payload
 * @param actualWorkUnitState work-unit state passed to the converter
 * @throws DataConversionException if the converter rejects the record
 * @throws IOException             if the fixture cannot be read
 * @throws JSONException           if the JSON comparison fails to parse
 */
private void testConversion(RestEntry<JsonObject> expected, WorkUnitState actualWorkUnitState)
        throws DataConversionException, IOException, JSONException {
    Schema schema = new Schema.Parser().parse(getClass().getResourceAsStream("/converter/nested.avsc"));
    GenericDatumReader<GenericRecord> datumReader = new GenericDatumReader<GenericRecord>(schema);
    File tmp = File.createTempFile(this.getClass().getSimpleName(), null);
    tmp.deleteOnExit();
    try {
        FileUtils.copyInputStreamToFile(getClass().getResourceAsStream("/converter/nested.avro"), tmp);
        try (DataFileReader<GenericRecord> dataFileReader = new DataFileReader<GenericRecord>(tmp, datumReader)) {
            GenericRecord avroRecord = dataFileReader.next();
            AvroToRestJsonEntryConverter converter = new AvroToRestJsonEntryConverter();
            RestEntry<JsonObject> actual = converter.convertRecord(null, avroRecord, actualWorkUnitState).iterator().next();
            Assert.assertEquals(actual.getResourcePath(), expected.getResourcePath());
            // Lenient comparison: extra fields in actual are tolerated.
            JSONAssert.assertEquals(expected.getRestEntryVal().toString(), actual.getRestEntryVal().toString(), false);
            converter.close();
        }
    } finally {
        if (tmp != null) {
            tmp.delete();
        }
    }
}
示例10: runOnPreview
import org.apache.avro.file.DataFileReader; //导入方法依赖的package包/类
/**
 * Opens an in-memory Avro container, captures its raw header bytes, and hands
 * the header plus the first record to {@code processor}.
 *
 * <p>The header length is taken from {@code previousSync()}: immediately after
 * opening, the reader's previous sync position is the end of the file header,
 * so {@code bits[0..headerLen)} is exactly the container header.
 *
 * @param bits      full Avro container file as a byte array
 * @param processor callback receiving the header, first record, and current
 *                  block count/size
 * @return whatever the processor produces
 * @throws IOException      if the bytes are not a readable Avro container
 * @throws RuntimeException if the container holds no records
 */
static <T> T runOnPreview(byte[] bits, AvroPreviewProcessor<T> processor) throws IOException {
DatumReader<GenericRecord> datumReader = new GenericDatumReader<GenericRecord>();
SeekableByteArrayInput sbai = new SeekableByteArrayInput(bits);
DataFileReader<GenericRecord> dataFileReader = null;
try {
dataFileReader = new DataFileReader<>(sbai, datumReader);
// End of the container header == previous sync point right after open.
int headerLen = (int) dataFileReader.previousSync();
byte[] header = Arrays.copyOf(bits, headerLen);
if (dataFileReader.hasNext()) {
GenericRecord gr = dataFileReader.next();
return processor.process(header, gr, dataFileReader.getBlockCount(), dataFileReader.getBlockSize());
} else {
throw new RuntimeException("Empty Avro file - cannot run preview! ");
}
} finally {
// Best-effort close: a failure here must not mask the real result/exception.
try { if (dataFileReader!=null) dataFileReader.close(); } catch (IOException safeToIgnore) {}
}
}
示例11: validateAvroFile
import org.apache.avro.file.DataFileReader; //导入方法依赖的package包/类
/**
 * Validates a Flume-written Avro file by reading every event back as a
 * {@link GenericRecord}, printing each "message" field, and asserting that
 * exactly 3 events were written.
 *
 * <p>Fix: the reader leaked when a read or the assertion threw; it is now
 * closed via try-with-resources.
 *
 * @param file Avro file produced by the test under validation
 * @throws IOException if the file cannot be opened or decoded
 */
public void validateAvroFile(File file) throws IOException {
    // read the events back using GenericRecord
    DatumReader<GenericRecord> reader = new GenericDatumReader<GenericRecord>();
    int numEvents = 0;
    try (DataFileReader<GenericRecord> fileReader =
            new DataFileReader<GenericRecord>(file, reader)) {
        GenericRecord record = new GenericData.Record(fileReader.getSchema());
        while (fileReader.hasNext()) {
            fileReader.next(record);
            String bodyStr = record.get("message").toString();
            System.out.println(bodyStr);
            numEvents++;
        }
    }
    Assert.assertEquals("Should have found a total of 3 events", 3, numEvents);
}
示例12: writeAvroFilesToStdout
import org.apache.avro.file.DataFileReader; //导入方法依赖的package包/类
/**
 * Reads the single MapReduce output part-file under {@code outputPath},
 * prints each record, and returns all records.
 *
 * <p>Fix: the reader leaked if iteration threw; it is now closed via
 * try-with-resources.
 *
 * @param job        completed job whose configuration locates the filesystem
 * @param outputPath job output directory (must contain exactly one part-*)
 * @return every record from the output file, in order
 * @throws IOException if the output cannot be listed or read
 */
public List<Object> writeAvroFilesToStdout(Job job, Path outputPath) throws IOException {
    List<Object> records = new ArrayList<Object>();
    // Check that the results from the MapReduce were as expected.
    FileSystem fileSystem = FileSystem.get(job.getConfiguration());
    FileStatus[] outputFiles = fileSystem.globStatus(outputPath.suffix("/part-*"));
    Assert.assertEquals(1, outputFiles.length);
    try (DataFileReader<Object> reader = new DataFileReader<Object>(
            new FsInput(outputFiles[0].getPath(), job.getConfiguration()),
            new ReflectDatumReader<Object>())) {
        for (Object record : reader) {
            records.add(record);
            System.out.println(record);
        }
    }
    return records;
}
示例13: assertOutputResults
import org.apache.avro.file.DataFileReader; //导入方法依赖的package包/类
/**
 * Reads the single MapReduce output part-file under {@code outputPath} and
 * asserts its records equal {@code expectedOutputs} in order.
 *
 * <p>Fix: the reader leaked if iteration or an assertion threw; it is now
 * closed via try-with-resources.
 *
 * @param job             completed job whose configuration locates the filesystem
 * @param outputPath      job output directory (must contain exactly one part-*)
 * @param expectedOutputs expected records, in order
 * @throws IOException if the output cannot be listed or read
 */
public void assertOutputResults(Job job, Path outputPath, Object[] expectedOutputs) throws IOException {
    List<Object> records = new ArrayList<Object>();
    // Check that the results from the MapReduce were as expected.
    FileSystem fileSystem = FileSystem.get(job.getConfiguration());
    FileStatus[] outputFiles = fileSystem.globStatus(outputPath.suffix("/part-*"));
    Assert.assertEquals(1, outputFiles.length);
    try (DataFileReader<Object> reader = new DataFileReader<Object>(
            new FsInput(outputFiles[0].getPath(), job.getConfiguration()),
            new ReflectDatumReader<Object>())) {
        for (Object record : reader) {
            records.add(record);
            System.out.println(record);
        }
    }
    assertArrayEquals(expectedOutputs, records.toArray());
}
示例14: getSchema
import org.apache.avro.file.DataFileReader; //导入方法依赖的package包/类
/**
 * Extracts the embedded schema and file metadata from an Avro file on HDFS
 * and packages them as a {@link DatasetJsonRecord}; returns {@code null} on
 * any failure (preserving the original contract callers rely on).
 *
 * <p>Fixes: {@code sin}/{@code reader} leaked whenever anything inside the
 * {@code try} threw before the manual closes; both are now managed by
 * try-with-resources. The failure log previously recorded only
 * {@code e.getMessage()} at INFO, dropping the stack trace — it now logs the
 * full exception at ERROR. The unused {@code record_count} local was removed.
 *
 * @param targetFilePath HDFS path of the Avro file
 * @return schema + file-status metadata, or {@code null} if extraction fails
 * @throws IOException declared for interface compatibility; failures are
 *                     currently caught and reported as {@code null}
 */
@Override
public DatasetJsonRecord getSchema(Path targetFilePath)
        throws IOException {
    LOG.info("avro file path : " + targetFilePath.toUri().getPath());
    try (SeekableInput sin = new FsInput(targetFilePath, fs.getConf());
         DataFileReader<GenericRecord> reader =
                 new DataFileReader<GenericRecord>(sin, new GenericDatumReader<GenericRecord>())) {
        // "avro.codec" may be absent (null) for uncompressed files.
        String codec = reader.getMetaString("avro.codec");
        String schemaString = reader.getSchema().toString();
        String storage = STORAGE_TYPE;
        String abstractPath = targetFilePath.toUri().getPath();
        System.out.println("the schema string is: " + schemaString);
        System.out.println("the abstract path is: " + abstractPath);
        FileStatus fstat = fs.getFileStatus(targetFilePath);
        DatasetJsonRecord datasetJsonRecord =
                new DatasetJsonRecord(schemaString, abstractPath, fstat.getModificationTime(),
                        fstat.getOwner(), fstat.getGroup(),
                        fstat.getPermission().toString(), codec, storage, "");
        LOG.info("Avro file datasetjsonrecorc get success, it is : " + datasetJsonRecord);
        return datasetJsonRecord;
    } catch (Exception e) {
        // Preserve the original best-effort contract: report and return null.
        LOG.error("AvroAnalyzer get datasetjson failure", e);
        return null;
    }
}
示例15: validateAvroFile
import org.apache.avro.file.DataFileReader; //导入方法依赖的package包/类
/**
 * Validates a Flume-written Avro file by reading every event back as a
 * {@link GenericRecord}, decoding each "body" field as UTF-8, printing it,
 * and asserting that exactly 3 events were written.
 *
 * <p>Fix: the reader leaked when a read, decode, or the assertion threw; it
 * is now closed via try-with-resources.
 *
 * @param file Avro file produced by the test under validation
 * @throws IOException if the file cannot be opened or decoded
 */
public void validateAvroFile(File file) throws IOException {
    // read the events back using GenericRecord
    DatumReader<GenericRecord> reader = new GenericDatumReader<GenericRecord>();
    int numEvents = 0;
    try (DataFileReader<GenericRecord> fileReader =
            new DataFileReader<GenericRecord>(file, reader)) {
        GenericRecord record = new GenericData.Record(fileReader.getSchema());
        while (fileReader.hasNext()) {
            fileReader.next(record);
            ByteBuffer body = (ByteBuffer) record.get("body");
            // Strict UTF-8 decode (throws on malformed input, unlike new String).
            CharsetDecoder decoder = Charsets.UTF_8.newDecoder();
            String bodyStr = decoder.decode(body).toString();
            System.out.println(bodyStr);
            numEvents++;
        }
    }
    Assert.assertEquals("Should have found a total of 3 events", 3, numEvents);
}