

Java DataFileWriter.flush Method Code Examples

This article collects typical usage examples of the Java method org.apache.avro.file.DataFileWriter.flush. If you are wondering what DataFileWriter.flush does, how to call it, or what real-world code that uses it looks like, the curated snippets below should help. You can also explore other usages of the enclosing class, org.apache.avro.file.DataFileWriter.


The following presents 11 code examples of the DataFileWriter.flush method, sorted by popularity.
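Before turning to the collected examples, here is a minimal, self-contained sketch of the typical create / append / flush / close lifecycle of a DataFileWriter. The FlushSketch class name and the inline "User" schema are illustrative assumptions made for this article and are not taken from any of the projects below.

import java.io.ByteArrayOutputStream;
import java.io.IOException;

import org.apache.avro.Schema;
import org.apache.avro.file.DataFileWriter;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.GenericDatumWriter;
import org.apache.avro.generic.GenericRecord;

public class FlushSketch {
  public static void main(String[] args) throws IOException {
    // Hypothetical record schema with a single string field (assumption for this sketch).
    Schema schema = new Schema.Parser().parse(
        "{\"type\":\"record\",\"name\":\"User\",\"fields\":"
            + "[{\"name\":\"name\",\"type\":\"string\"}]}");

    ByteArrayOutputStream out = new ByteArrayOutputStream();
    DataFileWriter<GenericRecord> writer =
        new DataFileWriter<>(new GenericDatumWriter<GenericRecord>(schema));
    writer.create(schema, out);   // writes the Avro container header

    GenericRecord record = new GenericData.Record(schema);
    record.put("name", "alice");
    writer.append(record);        // buffers the record in the current block

    writer.flush();               // forces buffered data out to the underlying stream
    System.out.println("bytes written so far: " + out.size());

    writer.close();               // close() flushes any remaining data and finalizes the file
  }
}

Note that flush() only pushes buffered data to the underlying stream; close() must still be called to finish the file, which is why most of the examples below pair the two calls.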

Example 1: putRecords

import org.apache.avro.file.DataFileWriter; // import the package/class this method depends on
public static byte[] putRecords(Collection<SinkRecord> records, AvroData avroData) throws IOException {
  final DataFileWriter<Object> writer = new DataFileWriter<>(new GenericDatumWriter<>());
  ByteArrayOutputStream out = new ByteArrayOutputStream();
  Schema schema = null;
  for (SinkRecord record : records) {
    if (schema == null) {
      schema = record.valueSchema();
      org.apache.avro.Schema avroSchema = avroData.fromConnectSchema(schema);
      writer.create(avroSchema, out);
    }
    Object value = avroData.fromConnectData(schema, record.value());
    // AvroData wraps primitive types so their schema can be included. We need to unwrap
    // NonRecordContainers to just their value to properly handle these types
    if (value instanceof NonRecordContainer) {
      value = ((NonRecordContainer) value).getValue();
    }
    writer.append(value);
  }
  writer.flush();
  return out.toByteArray();
}
 
Developer: confluentinc; Project: kafka-connect-storage-cloud; Lines: 22; Source: AvroUtils.java

Example 2: serialise

import org.apache.avro.file.DataFileWriter; // import the package/class this method depends on
@Override
public byte[] serialise(final Object object) throws SerialisationException {
    Schema schema = ReflectData.get().getSchema(object.getClass());
    DatumWriter<Object> datumWriter = new ReflectDatumWriter<>(schema);
    DataFileWriter<Object> dataFileWriter = new DataFileWriter<>(datumWriter);
    ByteArrayOutputStream byteOut = new ByteArrayOutputStream();
    try {
        dataFileWriter.create(schema, byteOut);
        dataFileWriter.append(object);
        dataFileWriter.flush();
    } catch (final IOException e) {
        throw new SerialisationException("Unable to serialise given object of class: " + object.getClass().getName(), e);
    } finally {
        close(dataFileWriter);
    }
    return byteOut.toByteArray();
}
 
Developer: gchq; Project: Gaffer; Lines: 18; Source: AvroSerialiser.java

Example 3: testGenerateAvro3

import org.apache.avro.file.DataFileWriter; // import the package/class this method depends on
@Test
public void testGenerateAvro3() {
	try {
		Parser parser = new Schema.Parser();
		Schema peopleSchema = parser.parse(new File(getTestResource("people.avsc").toURI()));
		GenericDatumWriter<GenericRecord> datumWriter = new GenericDatumWriter<GenericRecord>(peopleSchema);
		DataFileWriter<GenericRecord> dfw = new DataFileWriter<GenericRecord>(datumWriter);
		File tempfile = File.createTempFile("karma-people", "avro");
		
		tempfile.deleteOnExit();
		dfw.create(peopleSchema, new FileOutputStream(tempfile));
		JSONArray array = new JSONArray(IOUtils.toString(new FileInputStream(new File(getTestResource("people.json").toURI()))));
		for(int i = 0; i < array.length(); i++)
		{
			dfw.append(generatePersonRecord(peopleSchema, array.getJSONObject(i)));
		}
		dfw.flush();
		dfw.close();
	} catch (Exception e) {
		logger.error("testGenerateAvro3 failed:", e);
		fail("Execption: " + e.getMessage());
	}
}
 
Developer: therelaxist; Project: spring-usc; Lines: 24; Source: TestAvroRDFGenerator.java

Example 4: createFileIfNotExists

import org.apache.avro.file.DataFileWriter; // import the package/class this method depends on
public static void createFileIfNotExists(BlockSchema fileSchema, String path) throws IOException
{
    Configuration conf = new JobConf();
    FileSystem fs = FileSystem.get(conf);
    if (fs.exists(new Path(path)))
        return;

    Schema avroSchema = convertFromBlockSchema("CUBERT_MV_RECORD", fileSchema);
    System.out.println("Creating avro file with schema = " + avroSchema);
    GenericDatumWriter<GenericRecord> datumWriter =
            new GenericDatumWriter<GenericRecord>(avroSchema);
    DataFileWriter<GenericRecord> writer =
            new DataFileWriter<GenericRecord>(datumWriter);

    FSDataOutputStream fout =
            FileSystem.create(fs,
                              new Path(path),
                              new FsPermission(FsAction.ALL,
                                               FsAction.READ_EXECUTE,
                                               FsAction.READ_EXECUTE));
    writer.create(avroSchema, fout);
    writer.flush();
    writer.close();

}
 
Developer: linkedin; Project: Cubert; Lines: 26; Source: AvroUtils.java

Example 5: serializeToByte

import org.apache.avro.file.DataFileWriter; // import the package/class this method depends on
public static byte[] serializeToByte(Object testObject) throws IOException {
	Schema schema = ReflectData.AllowNull.get().getSchema(
			testObject.getClass());
	ByteArrayOutputStream baos = new ByteArrayOutputStream();
	DatumWriter<Object> writer = new ReflectDatumWriter<Object>(
			Object.class);
	dataFileWriter = new DataFileWriter<Object>(writer);
	DataFileWriter<Object> out = dataFileWriter.setCodec(
			CodecFactory.deflateCodec(9)).create(schema, baos);
	out.append(testObject);
	out.flush();
	return baos.toByteArray();
}
 
Developer: QualiMaster; Project: Infrastructure; Lines: 14; Source: AvroSerializationHelper.java

Example 6: testIncorrectOffset

import org.apache.avro.file.DataFileWriter; // import the package/class this method depends on
@Test
public void testIncorrectOffset() throws Exception {
  File avroDataFile = SdcAvroTestUtil.createAvroDataFile();
  avroDataFile.delete();
  Schema schema = new Schema.Parser().parse(AVRO_SCHEMA);
  DatumWriter<GenericRecord> datumWriter = new GenericDatumWriter<>(schema);
  DataFileWriter<GenericRecord> dataFileWriter = new DataFileWriter<>(datumWriter);
  dataFileWriter.create(schema, avroDataFile);
  for (int i = 0; i < 5; i++) {
    GenericRecord r = new GenericData.Record(schema);
    r.put("name", NAMES[i % NAMES.length]);
    r.put("id", i);
    dataFileWriter.setSyncInterval(1073741824);
    dataFileWriter.append(r);
    dataFileWriter.sync();
  }
  dataFileWriter.flush();
  dataFileWriter.close();
  DataParserFactoryBuilder dataParserFactoryBuilder = new DataParserFactoryBuilder(getContext(),
    DataParserFormat.AVRO);
  DataParserFactory factory = dataParserFactoryBuilder
      .setMaxDataLen(1024 * 1024)
      .setOverRunLimit(1000 * 1000)
      .setConfig(SCHEMA_SOURCE_KEY, SOURCE)
      .build();
  DataParser dataParser = factory.getParser(avroDataFile, null);
  Map<String, Record> records = new HashMap<>();
  Record record;
  while((record = dataParser.parse()) != null) {
    records.put(dataParser.getOffset(), record);
  }
  Assert.assertEquals(String.valueOf(records), 5, records.size());
  Assert.assertEquals(0, records.get("141::1").get("/id").getValueAsInteger());
  Assert.assertEquals(1, records.get("166::1").get("/id").getValueAsInteger());
  Assert.assertEquals(2, records.get("190::1").get("/id").getValueAsInteger());
  Assert.assertEquals(3, records.get("215::1").get("/id").getValueAsInteger());
  Assert.assertEquals(4, records.get("239::1").get("/id").getValueAsInteger());
}
 
Developer: streamsets; Project: datacollector; Lines: 39; Source: TestAvroDataFileParser.java

Example 7: createAvroDataFile

import org.apache.avro.file.DataFileWriter; // import the package/class this method depends on
public static File createAvroDataFile() throws Exception {
  File f = new File(createTestDir(), "file-0.avro");
  Schema schema = new Schema.Parser().parse(AVRO_SCHEMA);
  GenericRecord boss = new GenericData.Record(schema);
  boss.put("name", "boss");
  boss.put("age", 60);
  boss.put("emails", ImmutableList.of("[email protected]", "[email protected]"));
  boss.put("boss", null);

  GenericRecord e3 = new GenericData.Record(schema);
  e3.put("name", "c");
  e3.put("age", 50);
  e3.put("emails", ImmutableList.of("[email protected]", "[email protected]company.com"));
  e3.put("boss", boss);

  GenericRecord e2 = new GenericData.Record(schema);
  e2.put("name", "b");
  e2.put("age", 40);
  e2.put("emails", ImmutableList.of("[email protected]", "[email protected]"));
  e2.put("boss", boss);

  GenericRecord e1 = new GenericData.Record(schema);
  e1.put("name", "a");
  e1.put("age", 30);
  e1.put("emails", ImmutableList.of("[email protected]", "[email protected]"));
  e1.put("boss", boss);

  DatumWriter<GenericRecord> datumWriter = new GenericDatumWriter<>(schema);
  DataFileWriter<GenericRecord> dataFileWriter = new DataFileWriter<>(datumWriter);
  dataFileWriter.create(schema, f);
  dataFileWriter.append(e1);
  dataFileWriter.append(e2);
  dataFileWriter.append(e3);

  dataFileWriter.flush();
  dataFileWriter.close();

  return f;
}
 
Developer: streamsets; Project: datacollector; Lines: 40; Source: SdcAvroTestUtil.java

Example 8: createAvroDataFile

import org.apache.avro.file.DataFileWriter; // import the package/class this method depends on
private File createAvroDataFile() throws Exception {
  File f = new File(createTestDir(), "file-0.avro");
  Schema schema = new Schema.Parser().parse(AVRO_SCHEMA);
  GenericRecord boss = new GenericData.Record(schema);
  boss.put("name", "boss");
  boss.put("age", 60);
  boss.put("emails", ImmutableList.of("[email protected]", "[email protected]"));
  boss.put("boss", null);

  GenericRecord e3 = new GenericData.Record(schema);
  e3.put("name", "c");
  e3.put("age", 50);
  e3.put("emails", ImmutableList.of("[email protected]", "[email protected]"));
  e3.put("boss", boss);

  GenericRecord e2 = new GenericData.Record(schema);
  e2.put("name", "b");
  e2.put("age", 40);
  e2.put("emails", ImmutableList.of("[email protected]", "[email protected]"));
  e2.put("boss", boss);

  GenericRecord e1 = new GenericData.Record(schema);
  e1.put("name", "a");
  e1.put("age", 30);
  e1.put("emails", ImmutableList.of("[email protected]", "[email protected]"));
  e1.put("boss", boss);

  DatumWriter<GenericRecord> datumWriter = new GenericDatumWriter<>(schema);
  DataFileWriter<GenericRecord> dataFileWriter = new DataFileWriter<>(datumWriter);
  dataFileWriter.create(schema, f);
  dataFileWriter.append(e1);
  dataFileWriter.append(e2);
  dataFileWriter.append(e3);

  dataFileWriter.flush();
  dataFileWriter.close();

  return f;
}
 
Developer: streamsets; Project: datacollector; Lines: 40; Source: TestAvroSpoolDirSource.java

Example 9: main

import org.apache.avro.file.DataFileWriter; // import the package/class this method depends on
public static void main(String[] args) throws IOException {

        AvscSchemaBuilder asb = new AvscSchemaBuilder(Metric.class);
        final DataFileWriter<Metric2> dataFileWriter = new DataFileWriter<Metric2>(new SpecificDatumWriter<Metric2>(asb.createSchema()));
        dataFileWriter.create(asb.createSchema(), new File("v1.avro"));
        Path p = Paths.get("v1.log");
        Files.readLines(p.toFile(), Charset.defaultCharset(), new LineProcessor<String>() {
            @Override
            public boolean processLine(String line) throws IOException {
                String[] vals = line.split(" ");
                Metric2 mc =
                        Metric2.builder().dataVersion(Integer.valueOf(vals[0])).salt(Integer.valueOf(vals[1])).applicationId(Long.valueOf(vals[2]))
                                .timeScope(Integer.valueOf(vals[3])).metricTypeId(Long.valueOf(vals[4])).metricId(Long.valueOf(vals[5])).time(Integer.valueOf(vals[6]))
                                .agentRunId(Long.valueOf(vals[7])).uuid(vals[8]).num1(Double.valueOf(vals[9])).num2(Double.valueOf(vals[10])).num3(Double.valueOf(vals[11]))
                                .num4(Double.valueOf(vals[12])).num5(Double.valueOf(vals[13])).num6(Double.valueOf(vals[14])).timestamp(Long.valueOf(vals[15])).build();
                dataFileWriter.append(mc);
                return true;
            }

            @Override
            public String getResult() {
                return null;
            }
        });
        dataFileWriter.flush();
        dataFileWriter.close();
    }
 
Developer: peiliping; Project: excalibur; Lines: 28; Source: AppTest.java

Example 10: main

import org.apache.avro.file.DataFileWriter; // import the package/class this method depends on
public static void main(String[] args) throws Exception {
	// generate only avro file
	if (args.length == 2) {
		ordersPath = args[0];
		outputOrderAvroPath = args[1];
		// Generate file for avro test
		DatumWriter<Order> orderDatumWriter = new SpecificDatumWriter<Order>(Order.class);
		DataFileWriter<Order> dataFileWriter = new DataFileWriter<Order>(orderDatumWriter);
		dataFileWriter.create(Order.getClassSchema(), new File(outputOrderAvroPath));
		Scanner s = new Scanner(new File(ordersPath));
		while (s.hasNextLine()) {
			@SuppressWarnings("resource")
			Scanner lineScanner = new Scanner(s.nextLine()).useDelimiter("\\|");

			Order o = new Order();
			o.setOOrderkey(lineScanner.nextInt());
			o.setOCustkey(lineScanner.nextInt());
			o.setOOrderstatus(lineScanner.next());
			o.setOTotalprice(lineScanner.nextFloat());
			o.setOOrderdate(lineScanner.next());
			o.setOOrderpriority(lineScanner.next());
			o.setOClerk(lineScanner.next());
			o.setOShipproprity(lineScanner.nextInt());
			o.setOComment(lineScanner.next());
			dataFileWriter.append(o);
			lineScanner.close();
		}
		dataFileWriter.flush();
		s.close();
		dataFileWriter.close();
		return;
	} else {
		System.err.println("Usage: <inputFilePath> <outputAvroPath>");
		System.exit(1);
	}
}
 
Developer: project-flink; Project: flink-perf; Lines: 37; Source: AvroFileGenerator.java

Example 11: convert

import org.apache.avro.file.DataFileWriter; // import the package/class this method depends on
@Override
public FixFile convert(TestRun tr, FixFile ff) throws Exception {
    Schema schema = new Schema.Parser().parse(schemaCreator.create(tr).getContent());
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    InputStream input = ff.getContent();
    DataFileWriter<Object> writer;
    try {
        DatumReader<Object> reader = new GenericDatumReader<>(schema);
        DataInputStream din = new DataInputStream(input);
        writer = new DataFileWriter<>(new GenericDatumWriter<>());
        writer.create(schema, baos);
        Decoder decoder = DecoderFactory.get().jsonDecoder(schema, din);
        Object datum;
        while (true) {
            try {
                datum = reader.read(null, decoder);
            } catch (EOFException eofe) {
                break;
            }
            writer.append(datum);
        }
        writer.flush();
    } finally {
        input.close();
    }
    return new FixFile(new ByteArrayInputStream(baos.toByteArray()));
}
 
Developer: collectivemedia; Project: celos; Lines: 28; Source: JsonToAvroConverter.java


Note: The org.apache.avro.file.DataFileWriter.flush examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by their respective developers; copyright remains with the original authors. Refer to each project's license before using or redistributing the code, and do not republish without permission.