

Java FileReader Class Code Examples

This article collects typical usage examples of the Java class org.apache.avro.file.FileReader. If you are wondering what the FileReader class is for, how to use it, or where to find usage examples, the curated code samples below may help.


The FileReader class belongs to the org.apache.avro.file package. Fifteen code examples of the FileReader class are presented below, sorted by popularity by default.
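Before the collected examples, here is a minimal, self-contained sketch of the pattern most of them share: obtain a FileReader via DataFileReader.openReader(...), read the embedded schema, and iterate the records. This is an illustrative sketch only; the file name users.avro is a placeholder assumption and does not come from any of the projects below.

import java.io.File;
import java.io.IOException;

import org.apache.avro.file.DataFileReader;
import org.apache.avro.file.FileReader;
import org.apache.avro.generic.GenericDatumReader;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.io.DatumReader;

public class FileReaderQuickStart {

  public static void main(String[] args) throws IOException {
    // Placeholder path: point this at a real Avro container file.
    File avroFile = new File("users.avro");

    // A GenericDatumReader decodes records using the schema stored in the file.
    DatumReader<GenericRecord> datumReader = new GenericDatumReader<>();

    // DataFileReader.openReader returns the FileReader interface; it is
    // Closeable, so try-with-resources releases the underlying file handle.
    try (FileReader<GenericRecord> reader = DataFileReader.openReader(avroFile, datumReader)) {
      System.out.println("Schema: " + reader.getSchema());

      // FileReader is also Iterable, so records can be consumed with for-each.
      for (GenericRecord record : reader) {
        System.out.println(record);
      }
    }
  }
}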

Example 1: testDeserializeToSpecificType

import org.apache.avro.file.FileReader; // import the required package/class
/**
 * This test validates proper serialization with specific (generated POJO) types.
 */
@Test
public void testDeserializeToSpecificType() throws IOException {

	DatumReader<User> datumReader = new SpecificDatumReader<User>(userSchema);

	try (FileReader<User> dataFileReader = DataFileReader.openReader(testFile, datumReader)) {
		User rec = dataFileReader.next();

		// check if record has been read correctly
		assertNotNull(rec);
		assertEquals("name not equal", TEST_NAME, rec.get("name").toString());
		assertEquals("enum not equal", TEST_ENUM_COLOR.toString(), rec.get("type_enum").toString());

		// now serialize it with our framework:
		ExecutionConfig ec = new ExecutionConfig();
		TypeInformation<User> te = TypeExtractor.createTypeInfo(User.class);

		Assert.assertEquals(AvroTypeInfo.class, te.getClass());
		TypeSerializer<User> tser = te.createSerializer(ec);

		ByteArrayOutputStream out = new ByteArrayOutputStream();
		try (DataOutputViewStreamWrapper outView = new DataOutputViewStreamWrapper(out)) {
			tser.serialize(rec, outView);
		}

		User newRec;
		try (DataInputViewStreamWrapper inView = new DataInputViewStreamWrapper(
				new ByteArrayInputStream(out.toByteArray())))
		{
			newRec = tser.deserialize(inView);
		}

		// check if it is still the same
		assertNotNull(newRec);
		assertEquals("name not equal", TEST_NAME, newRec.getName().toString());
		assertEquals("enum not equal", TEST_ENUM_COLOR.toString(), newRec.getTypeEnum().toString());
	}
}
 
Developer: axbaretto, Project: flink, Lines: 42, Source file: AvroRecordInputFormatTest.java

Example 2: testRowLevelPolicy

import org.apache.avro.file.FileReader; // import the required package/class
@Test(groups = {"ignore"})
public void testRowLevelPolicy()
    throws Exception {
  State state = new State();
  state.setProp(ConfigurationKeys.ROW_LEVEL_POLICY_LIST, "gobblin.qualitychecker.TestRowLevelPolicy");
  state.setProp(ConfigurationKeys.ROW_LEVEL_POLICY_LIST_TYPE, "FAIL");

  RowLevelPolicyChecker checker =
      new RowLevelPolicyCheckerBuilderFactory().newPolicyCheckerBuilder(state, -1).build();
  RowLevelPolicyCheckResults results = new RowLevelPolicyCheckResults();

  FileReader<GenericRecord> fileReader = openFile(state);

  for (GenericRecord datum : fileReader) {
    Assert.assertTrue(checker.executePolicies(datum, results));
  }
}
 
Developer: Hanmourang, Project: Gobblin, Lines: 18, Source file: RowLevelQualityCheckerTest.java

Example 3: testWriteToErrFile

import org.apache.avro.file.FileReader; // import the required package/class
@Test(groups = {"ignore"})
public void testWriteToErrFile()
    throws Exception {
  State state = new State();
  state.setProp(ConfigurationKeys.ROW_LEVEL_POLICY_LIST, "gobblin.qualitychecker.TestRowLevelPolicyFail");
  state.setProp(ConfigurationKeys.ROW_LEVEL_POLICY_LIST_TYPE, "ERR_FILE");
  state.setProp(ConfigurationKeys.ROW_LEVEL_ERR_FILE, TestConstants.TEST_ERR_FILE);
  state.setProp(ConfigurationKeys.WRITER_FILE_SYSTEM_URI, TestConstants.TEST_FS_URI);

  RowLevelPolicyChecker checker =
      new RowLevelPolicyCheckerBuilderFactory().newPolicyCheckerBuilder(state, -1).build();
  RowLevelPolicyCheckResults results = new RowLevelPolicyCheckResults();

  FileReader<GenericRecord> fileReader = openFile(state);

  for (GenericRecord datum : fileReader) {
    Assert.assertFalse(checker.executePolicies(datum, results));
  }

  FileSystem fs = FileSystem.get(new URI(TestConstants.TEST_FS_URI), new Configuration());
  Path outputPath = new Path(TestConstants.TEST_ERR_FILE,
      state.getProp(ConfigurationKeys.ROW_LEVEL_POLICY_LIST).replaceAll("\\.", "-") + ".err");
  Assert.assertTrue(fs.exists(outputPath));
  fs.delete(new Path(TestConstants.TEST_ERR_FILE), true);
}
 
Developer: Hanmourang, Project: Gobblin, Lines: 26, Source file: RowLevelQualityCheckerTest.java

Example 4: getSchema

import org.apache.avro.file.FileReader; // import the required package/class
/**
 * Reads the schema embedded in the given Avro container file.
 *
 * @param file the Avro data file to inspect
 * @return the Schema stored in the file's header
 * @throws IOException if the file cannot be opened or read
 */
public Schema getSchema(File file) throws IOException {
	Schema schema = null;
	FileReader<IndexedRecord> fileReader = null;
	try {
		DatumReader<IndexedRecord> reader = new GenericDatumReader<>();
		fileReader = DataFileReader.openReader(file, reader);
		schema = fileReader.getSchema();
	}
	finally {
		if (fileReader != null) {
			fileReader.close();
		}
	}
	return schema;
}
 
Developer: conversant, Project: mara, Lines: 22, Source file: AvroUnitTestHelper.java
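Since org.apache.avro.file.FileReader implements java.io.Closeable, the helper above can also be written with try-with-resources instead of an explicit finally block. A minimal sketch of that variant, assuming the same imports and types as the original (it is not part of the mara project):

public Schema getSchema(File file) throws IOException {
  DatumReader<IndexedRecord> reader = new GenericDatumReader<>();
  // openReader still returns FileReader<IndexedRecord>; the reader is closed
  // automatically when the try block exits.
  try (FileReader<IndexedRecord> fileReader = DataFileReader.openReader(file, reader)) {
    return fileReader.getSchema();
  }
}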

Example 5: testRowLevelPolicy

import org.apache.avro.file.FileReader; // import the required package/class
@Test(groups = {"ignore"})
public void testRowLevelPolicy()
    throws Exception {
  State state = new State();
  state.setProp(ConfigurationKeys.ROW_LEVEL_POLICY_LIST, "org.apache.gobblin.qualitychecker.TestRowLevelPolicy");
  state.setProp(ConfigurationKeys.ROW_LEVEL_POLICY_LIST_TYPE, "FAIL");

  RowLevelPolicyChecker checker =
      new RowLevelPolicyCheckerBuilderFactory().newPolicyCheckerBuilder(state, -1).build();
  RowLevelPolicyCheckResults results = new RowLevelPolicyCheckResults();

  FileReader<GenericRecord> fileReader = openFile(state);

  for (GenericRecord datum : fileReader) {
    Assert.assertTrue(checker.executePolicies(datum, results));
  }
}
 
Developer: apache, Project: incubator-gobblin, Lines: 18, Source file: RowLevelQualityCheckerTest.java

Example 6: testWriteToErrFile

import org.apache.avro.file.FileReader; // import the required package/class
@Test(groups = {"ignore"})
public void testWriteToErrFile()
    throws Exception {
  State state = new State();
  state.setProp(ConfigurationKeys.ROW_LEVEL_POLICY_LIST, "org.apache.gobblin.qualitychecker.TestRowLevelPolicyFail");
  state.setProp(ConfigurationKeys.ROW_LEVEL_POLICY_LIST_TYPE, "ERR_FILE");
  state.setProp(ConfigurationKeys.ROW_LEVEL_ERR_FILE, TestConstants.TEST_ERR_FILE);
  state.setProp(ConfigurationKeys.WRITER_FILE_SYSTEM_URI, TestConstants.TEST_FS_URI);

  RowLevelPolicyChecker checker =
      new RowLevelPolicyCheckerBuilderFactory().newPolicyCheckerBuilder(state, -1).build();
  RowLevelPolicyCheckResults results = new RowLevelPolicyCheckResults();

  FileReader<GenericRecord> fileReader = openFile(state);

  for (GenericRecord datum : fileReader) {
    Assert.assertFalse(checker.executePolicies(datum, results));
  }

  FileSystem fs = FileSystem.get(new URI(TestConstants.TEST_FS_URI), new Configuration());
  Path outputPath = new Path(TestConstants.TEST_ERR_FILE,
      state.getProp(ConfigurationKeys.ROW_LEVEL_POLICY_LIST).replaceAll("\\.", "-") + ".err");
  Assert.assertTrue(fs.exists(outputPath));
  fs.delete(new Path(TestConstants.TEST_ERR_FILE), true);
}
 
Developer: apache, Project: incubator-gobblin, Lines: 26, Source file: RowLevelQualityCheckerTest.java

Example 7: readAndCheckResultsFromHdfs

import org.apache.avro.file.FileReader; // import the required package/class
private void readAndCheckResultsFromHdfs(RecordHeader header, List<TestLogData> testLogs) throws IOException {
  Path logsPath = new Path("/logs" + Path.SEPARATOR + applicationToken + Path.SEPARATOR + logSchemaVersion + Path.SEPARATOR + "data*");
  FileStatus[] statuses = fileSystem.globStatus(logsPath);
  List<TestLogData> resultTestLogs = new ArrayList<>();
  Schema wrapperSchema = RecordWrapperSchemaGenerator.generateRecordWrapperSchema(TestLogData.getClassSchema().toString());
  for (FileStatus status : statuses) {
    FileReader<GenericRecord> fileReader = null;
    try {
      SeekableInput input = new FsInput(status.getPath(), fileSystem.getConf());
      DatumReader<GenericRecord> datumReader = new SpecificDatumReader<>(wrapperSchema);
      fileReader = DataFileReader.openReader(input, datumReader);
      for (GenericRecord record : fileReader) {
        RecordHeader recordHeader = (RecordHeader) record.get(RecordWrapperSchemaGenerator.RECORD_HEADER_FIELD);
        Assert.assertEquals(header, recordHeader);
        TestLogData recordData = (TestLogData) record.get(RecordWrapperSchemaGenerator.RECORD_DATA_FIELD);
        resultTestLogs.add(recordData);
      }
    } finally {
      IOUtils.closeQuietly(fileReader);
    }
  }
  Assert.assertEquals(testLogs, resultTestLogs);
}
 
Developer: kaaproject, Project: kaa, Lines: 24, Source file: TestKaaHdfsSink.java

Example 8: decodePojo

import org.apache.avro.file.FileReader; // import the required package/class
private <T> T decodePojo(byte[] encodedData, Class type) {

    log.debug("Reflecting decode of {}, consider registering a converter", type);

    DatumReader<T> userDatumReader = RD.createDatumReader(RD.getSchema(type));

    try {
      FileReader<T> ts = DataFileReader.openReader(new SeekableByteArrayInput(encodedData), userDatumReader);
      T mine = (T) type.newInstance();

      return (T) ts.next(mine);
    } catch (IOException | IllegalAccessException | InstantiationException e) {
      e.printStackTrace();
    }

    return null;
  }
 
Developer: muoncore, Project: muon-java, Lines: 18, Source file: AvroCodec.java

Example 9: runTweetContainer

import org.apache.avro.file.FileReader; // import the required package/class
private void runTweetContainer(String morphlineConfigFile, String[] fieldNames) throws Exception {
  File file = new File(RESOURCES_DIR + "/test-documents/sample-statuses-20120906-141433-medium.avro");
  morphline = createMorphline(morphlineConfigFile);    
  for (int j = 0; j < 3; j++) { // also test reuse of objects and low level avro buffers
    Record record = new Record();
    byte[] body = Files.toByteArray(file);    
    record.put(Fields.ATTACHMENT_BODY, body);
    collector.reset();
    startSession();
    Notifications.notifyBeginTransaction(morphline);
    assertTrue(morphline.process(record));
    assertEquals(1, collector.getNumStartEvents());
    assertEquals(2104, collector.getRecords().size());
    
    FileReader<GenericData.Record> reader = new DataFileReader<>(file, new GenericDatumReader<GenericData.Record>());
    int i = 0;
    while (reader.hasNext()) {
      Record actual = collector.getRecords().get(i);
      GenericData.Record expected = reader.next();
      assertTweetEquals(expected, actual, fieldNames, i);
      i++;
    }    
    assertEquals(collector.getRecords().size(), i);
  }
}
 
Developer: cloudera, Project: cdk, Lines: 26, Source file: AvroMorphlineTest.java

Example 10: convert

import org.apache.avro.file.FileReader; // import the required package/class
@Override
public FixFile convert(TestRun testRun, FixFile ff) throws IOException {
    byte[] bytes = IOUtils.toByteArray(ff.getContent());
    if (bytes.length == 0) {
        return ff;
    }
    ByteArrayOutputStream os = new ByteArrayOutputStream();
    GenericDatumReader<Object> reader = new GenericDatumReader<>();
    FileReader<Object> fileReader =  DataFileReader.openReader(new SeekableByteArrayInput(bytes), reader);
    try {
        Schema schema = fileReader.getSchema();
        DatumWriter<Object> writer = new GenericDatumWriter<>(schema);
        JsonEncoder encoder = EncoderFactory.get().jsonEncoder(schema, os);

        for (Object datum : fileReader) {
            writer.write(datum, encoder);
        }
        encoder.flush();
    } finally {
        fileReader.close();
    }
    return new FixFile(new ByteArrayInputStream(os.toByteArray()));
}
 
Developer: collectivemedia, Project: celos, Lines: 24, Source file: AvroToJsonConverter.java

Example 11: getSchema

import org.apache.avro.file.FileReader; // import the required package/class
@Override
public Schema getSchema(Configuration conf, Path path) throws IOException {
  SeekableInput input = new FsInput(path, conf);
  DatumReader<Object> reader = new GenericDatumReader<>();
  FileReader<Object> fileReader = DataFileReader.openReader(input, reader);
  org.apache.avro.Schema schema = fileReader.getSchema();
  fileReader.close();
  return avroData.toConnectSchema(schema);
}
 
Developer: jiangxiluning, Project: kafka-connect-hdfs, Lines: 10, Source file: AvroFileReader.java

Example 12: readData

import org.apache.avro.file.FileReader; // import the required package/class
@Override
public Collection<Object> readData(Configuration conf, Path path) throws IOException {
  ArrayList<Object> collection = new ArrayList<>();
  SeekableInput input = new FsInput(path, conf);
  DatumReader<Object> reader = new GenericDatumReader<>();
  FileReader<Object> fileReader = DataFileReader.openReader(input, reader);
  for (Object object: fileReader) {
    collection.add(object);
  }
  fileReader.close();
  return collection;
}
 
Developer: jiangxiluning, Project: kafka-connect-hdfs, Lines: 13, Source file: AvroFileReader.java

Example 13: getAvroSchema

import org.apache.avro.file.FileReader; // import the required package/class
/**
 * Get the schema of AVRO files stored in a directory
 */
public static Schema getAvroSchema(Path path, Configuration conf)
    throws IOException {
  FileSystem fs = path.getFileSystem(conf);
  Path fileToTest;
  if (fs.isDirectory(path)) {
    FileStatus[] fileStatuses = fs.listStatus(path, new PathFilter() {
      @Override
      public boolean accept(Path p) {
        String name = p.getName();
        return !name.startsWith("_") && !name.startsWith(".");
      }
    });
    if (fileStatuses.length == 0) {
      return null;
    }
    fileToTest = fileStatuses[0].getPath();
  } else {
    fileToTest = path;
  }

  SeekableInput input = new FsInput(fileToTest, conf);
  DatumReader<GenericRecord> reader = new GenericDatumReader<GenericRecord>();
  FileReader<GenericRecord> fileReader = DataFileReader.openReader(input, reader);

  Schema result = fileReader.getSchema();
  fileReader.close();
  return result;
}
 
Developer: aliyun, Project: aliyun-maxcompute-data-collectors, Lines: 32, Source file: AvroUtil.java

Example 14: checkAvroFileForLine

import org.apache.avro.file.FileReader; // import the required package/class
private boolean checkAvroFileForLine(FileSystem fs, Path p, List<Integer> record)
    throws IOException {
  SeekableInput in = new FsInput(p, new Configuration());
  DatumReader<GenericRecord> datumReader = new GenericDatumReader<GenericRecord>();
  FileReader<GenericRecord> reader = DataFileReader.openReader(in, datumReader);
  reader.sync(0);

  while (reader.hasNext()) {
    if (valueMatches(reader.next(), record)) {
      return true;
    }
  }

  return false;
}
 
Developer: aliyun, Project: aliyun-maxcompute-data-collectors, Lines: 16, Source file: TestMerge.java

Example 15: testDeserializeToSpecificType

import org.apache.avro.file.FileReader; // import the required package/class
/**
 * This test validates proper serialization with specific (generated POJO) types.
 */
@Test
public void testDeserializeToSpecificType() throws IOException {

	DatumReader<User> datumReader = new SpecificDatumReader<User>(userSchema);

	try (FileReader<User> dataFileReader = DataFileReader.openReader(testFile, datumReader)) {
		User rec = dataFileReader.next();

		// check if record has been read correctly
		assertNotNull(rec);
		assertEquals("name not equal", TEST_NAME, rec.get("name").toString());
		assertEquals("enum not equal", TEST_ENUM_COLOR.toString(), rec.get("type_enum").toString());

		// now serialize it with our framework:
		ExecutionConfig ec = new ExecutionConfig();
		TypeInformation<User> te = TypeExtractor.createTypeInfo(User.class);

		Assert.assertEquals(AvroTypeInfo.class, te.getClass());
		TypeSerializer<User> tser = te.createSerializer(ec);

		ByteArrayOutputStream out = new ByteArrayOutputStream();
		try (DataOutputViewStreamWrapper outView = new DataOutputViewStreamWrapper(out)) {
			tser.serialize(rec, outView);
		}

		User newRec;
		try (DataInputViewStreamWrapper inView = new DataInputViewStreamWrapper(
				new ByteArrayInputStream(out.toByteArray()))) {
			newRec = tser.deserialize(inView);
		}

		// check if it is still the same
		assertNotNull(newRec);
		assertEquals("name not equal", TEST_NAME, newRec.getName().toString());
		assertEquals("enum not equal", TEST_ENUM_COLOR.toString(), newRec.getTypeEnum().toString());
	}
}
 
Developer: axbaretto, Project: flink, Lines: 41, Source file: AvroRecordInputFormatTest.java


Note: The org.apache.avro.file.FileReader examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets are selected from open-source projects contributed by their respective developers; copyright of the source code remains with the original authors. Please refer to the corresponding project's license before distributing or using the code; do not republish without permission.