This article collects typical usage examples of the Java method org.apache.flume.Sink.start. If you are unsure how to use Sink.start, how it behaves, or what calling it looks like in practice, the curated code examples below may help. You can also explore further usage examples of its enclosing class, org.apache.flume.Sink.
The following presents 10 code examples of the Sink.start method, sorted by popularity by default.
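Before the individual examples, here is a minimal sketch of the lifecycle they all share: configure the sink, attach a channel, call start() before the first process() call, and call stop() when done. The pairing of LoggerSink with MemoryChannel, the class name SinkStartSketch, and the event body are illustrative assumptions only and are not taken from any of the examples below.

import org.apache.flume.Channel;
import org.apache.flume.Context;
import org.apache.flume.EventDeliveryException;
import org.apache.flume.Sink;
import org.apache.flume.Transaction;
import org.apache.flume.channel.MemoryChannel;
import org.apache.flume.conf.Configurables;
import org.apache.flume.event.EventBuilder;
import org.apache.flume.sink.LoggerSink;

public class SinkStartSketch {
  public static void main(String[] args) throws EventDeliveryException {
    Context context = new Context();
    Channel channel = new MemoryChannel();
    Configurables.configure(channel, context);

    Sink sink = new LoggerSink();            // any Sink implementation follows the same lifecycle
    Configurables.configure(sink, context);
    sink.setChannel(channel);
    sink.start();                            // must run before process()

    // Put one event into the channel inside a transaction, as the examples below do.
    Transaction tx = channel.getTransaction();
    tx.begin();
    channel.put(EventBuilder.withBody("hello flume".getBytes())); // illustrative payload
    tx.commit();
    tx.close();

    Sink.Status status = sink.process();     // READY when an event was drained, BACKOFF when the channel was empty
    System.out.println("process() returned " + status);
    sink.stop();
  }
}

As the examples show, tests typically assert on the Sink.Status returned by process() after start(): BACKOFF signals an empty channel, READY signals that the sink delivered data.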
Example 1: testEmptyChannel
import org.apache.flume.Sink; // the package/class this method depends on
@Test
public void testEmptyChannel() throws UnsupportedEncodingException, EventDeliveryException {
  Sink kafkaSink = new KafkaSink();
  Context context = prepareDefaultContext();
  Configurables.configure(kafkaSink, context);
  Channel memoryChannel = new MemoryChannel();
  Configurables.configure(memoryChannel, context);
  kafkaSink.setChannel(memoryChannel);
  kafkaSink.start();
  Sink.Status status = kafkaSink.process();
  if (status != Sink.Status.BACKOFF) {
    fail("Error Occurred");
  }
  assertNull(testUtil.getNextMessageFromConsumer(DEFAULT_TOPIC));
}
Example 2: prepareAndSend
import org.apache.flume.Sink; // the package/class this method depends on
private Sink.Status prepareAndSend(Context context, String msg)
    throws EventDeliveryException {
  Sink kafkaSink = new KafkaSink();
  Configurables.configure(kafkaSink, context);
  Channel memoryChannel = new MemoryChannel();
  Configurables.configure(memoryChannel, context);
  kafkaSink.setChannel(memoryChannel);
  kafkaSink.start();
  Transaction tx = memoryChannel.getTransaction();
  tx.begin();
  Event event = EventBuilder.withBody(msg.getBytes());
  memoryChannel.put(event);
  tx.commit();
  tx.close();
  return kafkaSink.process();
}
Example 3: start
import org.apache.flume.Sink; // the package/class this method depends on
@Override
public void start() {
  for (Sink s : sinkList) {
    s.start();
  }
  state = LifecycleState.START;
}
Example 4: testIRCSinkMissingSplitLineProperty
import org.apache.flume.Sink; // the package/class this method depends on
@Test
public void testIRCSinkMissingSplitLineProperty() {
  Sink ircSink = new IRCSink();
  ircSink.setName("IRC Sink - " + UUID.randomUUID().toString());
  Context context = new Context();
  context.put("hostname", "localhost");
  context.put("port", String.valueOf(ircServerPort));
  context.put("nick", "flume");
  context.put("password", "flume");
  context.put("user", "flume");
  context.put("name", "flume-dev");
  context.put("chan", "flume");
  context.put("splitchars", "false");
  Configurables.configure(ircSink, context);
  Channel memoryChannel = new MemoryChannel();
  Configurables.configure(memoryChannel, context);
  ircSink.setChannel(memoryChannel);
  ircSink.start();
  Transaction txn = memoryChannel.getTransaction();
  txn.begin();
  Event event = EventBuilder.withBody("Dummy Event".getBytes());
  memoryChannel.put(event);
  txn.commit();
  txn.close();
  try {
    Sink.Status status = ircSink.process();
    if (status == Sink.Status.BACKOFF) {
      fail("Error occurred");
    }
  } catch (EventDeliveryException eDelExcp) {
    // noop
  }
}
Example 5: testDefaultTopic
import org.apache.flume.Sink; // the package/class this method depends on
@Test
public void testDefaultTopic() {
  Sink kafkaSink = new KafkaSink();
  Context context = prepareDefaultContext();
  Configurables.configure(kafkaSink, context);
  Channel memoryChannel = new MemoryChannel();
  Configurables.configure(memoryChannel, context);
  kafkaSink.setChannel(memoryChannel);
  kafkaSink.start();
  String msg = "default-topic-test";
  Transaction tx = memoryChannel.getTransaction();
  tx.begin();
  Event event = EventBuilder.withBody(msg.getBytes());
  memoryChannel.put(event);
  tx.commit();
  tx.close();
  try {
    Sink.Status status = kafkaSink.process();
    if (status == Sink.Status.BACKOFF) {
      fail("Error Occurred");
    }
  } catch (EventDeliveryException ex) {
    // ignore
  }
  String fetchedMsg = new String((byte[]) testUtil.getNextMessageFromConsumer(DEFAULT_TOPIC)
      .message());
  assertEquals(msg, fetchedMsg);
}
Example 6: testTopicAndKeyFromHeader
import org.apache.flume.Sink; // the package/class this method depends on
@Test
public void testTopicAndKeyFromHeader() throws UnsupportedEncodingException {
  Sink kafkaSink = new KafkaSink();
  Context context = prepareDefaultContext();
  Configurables.configure(kafkaSink, context);
  Channel memoryChannel = new MemoryChannel();
  Configurables.configure(memoryChannel, context);
  kafkaSink.setChannel(memoryChannel);
  kafkaSink.start();
  String msg = "test-topic-and-key-from-header";
  Map<String, String> headers = new HashMap<String, String>();
  headers.put("topic", TestConstants.CUSTOM_TOPIC);
  headers.put("key", TestConstants.CUSTOM_KEY);
  Transaction tx = memoryChannel.getTransaction();
  tx.begin();
  Event event = EventBuilder.withBody(msg.getBytes(), headers);
  memoryChannel.put(event);
  tx.commit();
  tx.close();
  try {
    Sink.Status status = kafkaSink.process();
    if (status == Sink.Status.BACKOFF) {
      fail("Error Occurred");
    }
  } catch (EventDeliveryException ex) {
    // ignore
  }
  MessageAndMetadata fetchedMsg =
      testUtil.getNextMessageFromConsumer(TestConstants.CUSTOM_TOPIC);
  assertEquals(msg, new String((byte[]) fetchedMsg.message(), "UTF-8"));
  assertEquals(TestConstants.CUSTOM_KEY,
      new String((byte[]) fetchedMsg.key(), "UTF-8"));
}
Example 7: testBasic
import org.apache.flume.Sink; // the package/class this method depends on
@Test
public void testBasic() throws Exception {
  String consumerKey = System.getProperty("twitter.consumerKey");
  Assume.assumeNotNull(consumerKey);
  String consumerSecret = System.getProperty("twitter.consumerSecret");
  Assume.assumeNotNull(consumerSecret);
  String accessToken = System.getProperty("twitter.accessToken");
  Assume.assumeNotNull(accessToken);
  String accessTokenSecret = System.getProperty("twitter.accessTokenSecret");
  Assume.assumeNotNull(accessTokenSecret);
  Context context = new Context();
  context.put("consumerKey", consumerKey);
  context.put("consumerSecret", consumerSecret);
  context.put("accessToken", accessToken);
  context.put("accessTokenSecret", accessTokenSecret);
  context.put("maxBatchDurationMillis", "1000");
  TwitterSource source = new TwitterSource();
  source.configure(context);
  Map<String, String> channelContext = new HashMap<String, String>();
  channelContext.put("capacity", "1000000");
  channelContext.put("keep-alive", "0"); // for faster tests
  Channel channel = new MemoryChannel();
  Configurables.configure(channel, new Context(channelContext));
  Sink sink = new LoggerSink();
  sink.setChannel(channel);
  sink.start();
  DefaultSinkProcessor proc = new DefaultSinkProcessor();
  proc.setSinks(Collections.singletonList(sink));
  SinkRunner sinkRunner = new SinkRunner(proc);
  sinkRunner.start();
  ChannelSelector rcs = new ReplicatingChannelSelector();
  rcs.setChannels(Collections.singletonList(channel));
  ChannelProcessor chp = new ChannelProcessor(rcs);
  source.setChannelProcessor(chp);
  source.start();
  Thread.sleep(5000);
  source.stop();
  sinkRunner.stop();
  sink.stop();
}
Example 8: testAvroEvent
import org.apache.flume.Sink; // the package/class this method depends on
@SuppressWarnings("rawtypes")
@Test
public void testAvroEvent() throws IOException {
  Sink kafkaSink = new KafkaSink();
  Context context = prepareDefaultContext();
  context.put(AVRO_EVENT, "true");
  Configurables.configure(kafkaSink, context);
  Channel memoryChannel = new MemoryChannel();
  Configurables.configure(memoryChannel, context);
  kafkaSink.setChannel(memoryChannel);
  kafkaSink.start();
  String msg = "test-avro-event";
  Map<String, String> headers = new HashMap<String, String>();
  headers.put("topic", TestConstants.CUSTOM_TOPIC);
  headers.put("key", TestConstants.CUSTOM_KEY);
  headers.put(TestConstants.HEADER_1_KEY, TestConstants.HEADER_1_VALUE);
  Transaction tx = memoryChannel.getTransaction();
  tx.begin();
  Event event = EventBuilder.withBody(msg.getBytes(), headers);
  memoryChannel.put(event);
  tx.commit();
  tx.close();
  try {
    Sink.Status status = kafkaSink.process();
    if (status == Sink.Status.BACKOFF) {
      fail("Error Occurred");
    }
  } catch (EventDeliveryException ex) {
    // ignore
  }
  MessageAndMetadata fetchedMsg = testUtil.getNextMessageFromConsumer(TestConstants.CUSTOM_TOPIC);
  ByteArrayInputStream in = new ByteArrayInputStream((byte[]) fetchedMsg.message());
  BinaryDecoder decoder = DecoderFactory.get().directBinaryDecoder(in, null);
  SpecificDatumReader<AvroFlumeEvent> reader =
      new SpecificDatumReader<AvroFlumeEvent>(AvroFlumeEvent.class);
  AvroFlumeEvent avroevent = reader.read(null, decoder);
  String eventBody = new String(avroevent.getBody().array(), Charsets.UTF_8);
  Map<CharSequence, CharSequence> eventHeaders = avroevent.getHeaders();
  assertEquals(msg, eventBody);
  assertEquals(TestConstants.CUSTOM_KEY, new String((byte[]) fetchedMsg.key(), "UTF-8"));
  assertEquals(TestConstants.HEADER_1_VALUE,
      eventHeaders.get(new Utf8(TestConstants.HEADER_1_KEY)).toString());
  assertEquals(TestConstants.CUSTOM_KEY, eventHeaders.get(new Utf8("key")).toString());
}
Example 9: doPartitionErrors
import org.apache.flume.Sink; // the package/class this method depends on
/**
 * This function tests three scenarios:
 * 1. PartitionOption.VALIDBUTOUTOFRANGE: An integer partition is provided,
 *    however it exceeds the number of partitions available on the topic.
 *    Expected behaviour: ChannelException thrown.
 *
 * 2. PartitionOption.NOTSET: The partition header is not actually set.
 *    Expected behaviour: Exception is not thrown because the code avoids an NPE.
 *
 * 3. PartitionOption.NOTANUMBER: The partition header is set, but is not an Integer.
 *    Expected behaviour: ChannelException thrown.
 *
 * @param option
 * @throws Exception
 */
private void doPartitionErrors(PartitionOption option) throws Exception {
  Sink kafkaSink = new KafkaSink();
  Context context = prepareDefaultContext();
  context.put(KafkaSinkConstants.PARTITION_HEADER_NAME, "partition-header");
  Configurables.configure(kafkaSink, context);
  Channel memoryChannel = new MemoryChannel();
  Configurables.configure(memoryChannel, context);
  kafkaSink.setChannel(memoryChannel);
  kafkaSink.start();
  String topic = findUnusedTopic();
  createTopic(topic, 5);
  Transaction tx = memoryChannel.getTransaction();
  tx.begin();
  Map<String, String> headers = new HashMap<String, String>();
  headers.put("topic", topic);
  switch (option) {
    case VALIDBUTOUTOFRANGE:
      headers.put("partition-header", "9");
      break;
    case NOTSET:
      headers.put("wrong-header", "2");
      break;
    case NOTANUMBER:
      headers.put("partition-header", "not-a-number");
      break;
    default:
      break;
  }
  Event event = EventBuilder.withBody(String.valueOf(9).getBytes(), headers);
  memoryChannel.put(event);
  tx.commit();
  tx.close();
  Sink.Status status = kafkaSink.process();
  assertEquals(Sink.Status.READY, status);
  deleteTopic(topic);
}
Example 10: testBasic
import org.apache.flume.Sink; // the package/class this method depends on
@Test
public void testBasic() throws Exception {
  System.out.println(getTestTraceHead("[TwitterSourceTest.basic]")
      + "-------- Start source.");
  Context context = new Context();
  context.put("consumerKey", consumerKey);
  context.put("consumerSecret", consumerSecret);
  context.put("accessToken", accessToken);
  context.put("accessTokenSecret", accessTokenSecret);
  context.put("maxBatchDurationMillis", "1000");
  TwitterSource source = new TwitterSource();
  source.configure(context);
  Map<String, String> channelContext = new HashMap<String, String>();
  channelContext.put("capacity", "1000000");
  channelContext.put("keep-alive", "0"); // for faster tests
  Channel channel = new MemoryChannel();
  Configurables.configure(channel, new Context(channelContext));
  Sink sink = new LoggerSink();
  sink.setChannel(channel);
  sink.start();
  DefaultSinkProcessor proc = new DefaultSinkProcessor();
  proc.setSinks(Collections.singletonList(sink));
  SinkRunner sinkRunner = new SinkRunner(proc);
  sinkRunner.start();
  ChannelSelector rcs = new ReplicatingChannelSelector();
  rcs.setChannels(Collections.singletonList(channel));
  ChannelProcessor chp = new ChannelProcessor(rcs);
  source.setChannelProcessor(chp);
  try {
    source.start();
    Thread.sleep(500);
    source.stop();
    System.out.println(getTestTraceHead("[TwitterSourceTest.basic]")
        + "- OK - Twitter source started properly.");
  } catch (AssertionError e) {
    System.out.println(getTestTraceHead("[TwitterSourceTest.basic]")
        + "- FAIL - Twitter source could not start.");
    throw e;
  } // try catch
  sinkRunner.stop();
  sink.stop();
}