本文整理汇总了Java中org.apache.flume.serialization.EventSerializer.write方法的典型用法代码示例。如果您正苦于以下问题:Java EventSerializer.write方法的具体用法?Java EventSerializer.write怎么用?Java EventSerializer.write使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类org.apache.flume.serialization.EventSerializer
的用法示例。
在下文中一共展示了EventSerializer.write方法的6个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: testWithNewline
import org.apache.flume.serialization.EventSerializer; //导入方法依赖的package包/类
@Test
public void testWithNewline() throws FileNotFoundException, IOException {
  Map<String, String> headers = new HashMap<String, String>();
  headers.put("message", "message1");
  // try-with-resources: the original leaked the stream (and then could not
  // delete the file on Windows) if a serializer call or assertion threw.
  try (OutputStream out = new FileOutputStream(testFile)) {
    CustomLastfmHeaderAndBodyTextEventSerializer.Builder builder =
        CustomLastfmHeaderAndBodyTextEventSerializer.builder();
    EventSerializer serializer = builder.build(new Context(), out);
    serializer.afterCreate(); // must call this when a file is newly created
    serializer.write(EventBuilder.withBody("messageBody", Charsets.UTF_8, headers));
    serializer.flush();
    serializer.beforeClose();
    out.flush();
  }
  // The serializer should have emitted the "message" header value and nothing else.
  try (BufferedReader reader = new BufferedReader(new FileReader(testFile))) {
    Assert.assertEquals("message1", reader.readLine());
    Assert.assertNull(reader.readLine());
  }
  FileUtils.forceDelete(testFile);
}
开发者ID:sequenceiq,项目名称:sequenceiq-samples,代码行数:23,代码来源:CustomLastfmHeaderAndBodyTextEventSerializerTest.java
示例2: testNoNewline
import org.apache.flume.serialization.EventSerializer; //导入方法依赖的package包/类
@Test
public void testNoNewline() throws FileNotFoundException, IOException {
  Map<String, String> headers = new HashMap<String, String>();
  headers.put("header1", "value1");
  // Configure the serializer not to append a newline after each event.
  Context context = new Context();
  context.put("appendNewline", "false");
  // try-with-resources so the stream is closed even if a write/assert throws.
  try (OutputStream out = new FileOutputStream(testFile)) {
    CustomLastfmHeaderAndBodyTextEventSerializer.Builder builder =
        CustomLastfmHeaderAndBodyTextEventSerializer.builder();
    // BUG FIX: the original passed a fresh `new Context()` here, silently
    // discarding the "appendNewline" setting configured above.
    EventSerializer serializer = builder.build(context, out);
    serializer.afterCreate(); // must call this when a file is newly created
    serializer.write(EventBuilder.withBody("event 1", Charsets.UTF_8, headers));
    serializer.write(EventBuilder.withBody("event 2", Charsets.UTF_8, headers));
    serializer.write(EventBuilder.withBody("event 3", Charsets.UTF_8, headers));
    serializer.flush();
    serializer.beforeClose();
    out.flush();
  }
  // Events without a "message" header produce no output lines at all.
  try (BufferedReader reader = new BufferedReader(new FileReader(testFile))) {
    Assert.assertNull(reader.readLine());
  }
  FileUtils.forceDelete(testFile);
}
开发者ID:sequenceiq,项目名称:sequenceiq-samples,代码行数:27,代码来源:CustomLastfmHeaderAndBodyTextEventSerializerTest.java
示例3: createAvroFile
import org.apache.flume.serialization.EventSerializer; //导入方法依赖的package包/类
/**
 * Serializes a few reflection-based Avro events to {@code file} using the
 * AvroEventSerializer, with the record schema supplied either inline as an
 * event header, via a per-event schema-URL header, or via a statically
 * configured schema URL.
 *
 * @param file               destination file for the serialized events
 * @param codec              Avro compression codec name, or {@code null} for none
 * @param useSchemaUrl       attach the schema URL as a per-event header
 * @param useStaticSchemaUrl configure the schema URL statically in the context
 * @throws IOException if serialization or any file operation fails
 */
public void createAvroFile(File file, String codec, boolean useSchemaUrl,
    boolean useStaticSchemaUrl) throws IOException {
  Context ctx = new Context();
  if (codec != null) {
    ctx.put("compressionCodec", codec);
  }
  // Minimal one-field record schema: { message: string }
  Schema schema = Schema.createRecord("myrecord", null, null, false);
  schema.setFields(Arrays.asList(new Schema.Field[]{
      new Schema.Field("message", Schema.create(Schema.Type.STRING), null, null)
  }));
  GenericRecordBuilder recordBuilder = new GenericRecordBuilder(schema);
  File schemaFile = null;
  try {
    if (useSchemaUrl || useStaticSchemaUrl) {
      schemaFile = File.createTempFile(getClass().getSimpleName(), ".avsc");
      Files.write(schema.toString(), schemaFile, Charsets.UTF_8);
    }
    if (useStaticSchemaUrl) {
      ctx.put(AvroEventSerializerConfigurationConstants.STATIC_SCHEMA_URL,
          schemaFile.toURI().toURL().toExternalForm());
    }
    // try-with-resources: the original leaked the stream if serialization threw.
    try (OutputStream out = new FileOutputStream(file)) {
      EventSerializer.Builder builder = new AvroEventSerializer.Builder();
      EventSerializer serializer = builder.build(ctx, out);
      serializer.afterCreate();
      for (int i = 0; i < 3; i++) {
        GenericRecord record = recordBuilder.set("message", "Hello " + i).build();
        Event event = EventBuilder.withBody(serializeAvro(record, schema));
        if (schemaFile == null && !useSchemaUrl) {
          // No schema file in play: embed the schema literally in the header.
          event.getHeaders().put(AvroEventSerializer.AVRO_SCHEMA_LITERAL_HEADER,
              schema.toString());
        } else if (useSchemaUrl) {
          event.getHeaders().put(AvroEventSerializer.AVRO_SCHEMA_URL_HEADER,
              schemaFile.toURI().toURL().toExternalForm());
        }
        serializer.write(event);
      }
      serializer.flush();
      serializer.beforeClose();
      out.flush();
    }
  } finally {
    // Always remove the temp schema file, even when serialization failed
    // (the original skipped cleanup on any exception).
    if (schemaFile != null) {
      schemaFile.delete();
    }
  }
}
示例4: test
import org.apache.flume.serialization.EventSerializer; //导入方法依赖的package包/类
@Test
public void test() throws FileNotFoundException, IOException {
  // Create the file and serialize a batch of generated Java-log events;
  // try-with-resources closes the stream even if a serializer call throws.
  try (OutputStream out = new FileOutputStream(testFile)) {
    String builderName = JavaLogAvroEventSerializer.Builder.class.getName();
    Context ctx = new Context();
    ctx.put("syncInterval", "4096");
    EventSerializer serializer =
        EventSerializerFactory.getInstance(builderName, ctx, out);
    serializer.afterCreate(); // must call this when a file is newly created
    for (Event e : generateJavaEvents()) {
      serializer.write(e);
    }
    serializer.flush();
    serializer.beforeClose();
    out.flush();
  }
  // Read the Avro container back and count the records.
  int numEvents = 0;
  DatumReader<GenericRecord> reader = new GenericDatumReader<GenericRecord>();
  try (DataFileReader<GenericRecord> fileReader =
      new DataFileReader<GenericRecord>(testFile, reader)) {
    GenericRecord record = new GenericData.Record(fileReader.getSchema());
    while (fileReader.hasNext()) {
      fileReader.next(record); // reuses `record` to avoid per-row allocation
      String datetime = record.get("datetime").toString();
      String classname = record.get("classname").toString();
      String message = record.get("message").toString();
      System.out.println(classname + ": " + message + " (at " + datetime + ")");
      numEvents++;
    }
  }
  Assert.assertEquals("Should have found a total of 4 events", 4, numEvents);
  FileUtils.forceDelete(testFile);
}
示例5: test
import org.apache.flume.serialization.EventSerializer; //导入方法依赖的package包/类
@Test
public void test() throws FileNotFoundException, IOException {
  // Create the file and serialize the generated syslog events;
  // try-with-resources closes the stream even if a serializer call throws.
  try (OutputStream out = new FileOutputStream(testFile)) {
    String builderName = SyslogAvroEventSerializer.Builder.class.getName();
    Context ctx = new Context();
    ctx.put("syncInterval", "4096");
    ctx.put("path", "src/test/resources/customerToHostsFile.txt");
    EventSerializer serializer =
        EventSerializerFactory.getInstance(builderName, ctx, out);
    serializer.afterCreate(); // must call this when a file is newly created
    for (Event e : generateSyslogEvents()) {
      serializer.write(e);
    }
    serializer.flush();
    serializer.beforeClose();
    out.flush();
  }
  // Read the Avro container back and count the records.
  int numEvents = 0;
  DatumReader<GenericRecord> reader = new GenericDatumReader<GenericRecord>();
  try (DataFileReader<GenericRecord> fileReader =
      new DataFileReader<GenericRecord>(testFile, reader)) {
    GenericRecord record = new GenericData.Record(fileReader.getSchema());
    while (fileReader.hasNext()) {
      fileReader.next(record); // reuses `record` to avoid per-row allocation
      String datetime = record.get("datetime").toString();
      String hostname = record.get("hostname").toString();
      @SuppressWarnings("unchecked") // Avro generic API returns Object
      Map<String, String> headers = (Map<String, String>) record.get("headers");
      String message = record.get("message").toString();
      System.out.println(hostname + " (" + headers + ")" + ": " + message);
      numEvents++;
    }
  }
  Assert.assertEquals("Should have found a total of 6 events", 6, numEvents);
  FileUtils.forceDelete(testFile);
}
示例6: test
import org.apache.flume.serialization.EventSerializer; //导入方法依赖的package包/类
@Test
public void test() throws FileNotFoundException, IOException {
  // Create the file and serialize the generated Apache access-log events;
  // try-with-resources closes the stream even if a serializer call throws.
  try (OutputStream out = new FileOutputStream(testFile)) {
    String builderName = ApacheLogAvroEventSerializer.Builder.class.getName();
    Context ctx = new Context();
    ctx.put("syncInterval", "4096");
    EventSerializer serializer =
        EventSerializerFactory.getInstance(builderName, ctx, out);
    serializer.afterCreate(); // must call this when a file is newly created
    for (Event e : generateApacheEvents()) {
      serializer.write(e);
    }
    serializer.flush();
    serializer.beforeClose();
    out.flush();
  }
  // Read the Avro container back, spot-check fields, and count the records.
  int numEvents = 0;
  DatumReader<GenericRecord> reader = new GenericDatumReader<GenericRecord>();
  try (DataFileReader<GenericRecord> fileReader =
      new DataFileReader<GenericRecord>(testFile, reader)) {
    GenericRecord record = new GenericData.Record(fileReader.getSchema());
    while (fileReader.hasNext()) {
      fileReader.next(record); // reuses `record` to avoid per-row allocation
      String ip = record.get("ip").toString();
      String uri = record.get("uri").toString();
      Integer statuscode = (Integer) record.get("statuscode");
      String original = record.get("original").toString();
      String connectionstatus = record.get("connectionstatus").toString();
      Assert.assertEquals("Ip should be 80.79.194.3", "80.79.194.3", ip);
      System.out.println("IP " + ip + " requested: " + uri + " with status code "
          + statuscode + " and connectionstatus: " + connectionstatus);
      System.out.println("Original logline: " + original);
      numEvents++;
    }
  }
  // BUG FIX: the message previously said "3 events" while asserting 2.
  Assert.assertEquals("Should have found a total of 2 events", 2, numEvents);
  FileUtils.forceDelete(testFile);
}