当前位置: 首页>>代码示例>>Java>>正文


Java FeederSpout类代码示例

本文整理汇总了Java中org.apache.storm.testing.FeederSpout的典型用法代码示例。如果您正苦于以下问题:Java FeederSpout类的具体用法?Java FeederSpout怎么用?Java FeederSpout使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。


FeederSpout类属于org.apache.storm.testing包,在下文中一共展示了FeederSpout类的9个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。

示例1: BasicTickTest

import org.apache.storm.testing.FeederSpout; //导入依赖的package包/类
@Test
public void BasicTickTest() throws IOException {
    System.out.println("==> Starting BasicTickTest");

    // Component / topology identifiers used throughout the test.
    final String feederId = "feeder.spout";
    final String tickBoltId = "tick.bolt";
    final String topologyName = "TestTopology";

    // Wire a hand-driven feeder spout straight into the stateful tick bolt.
    TopologyBuilder builder = new TopologyBuilder();
    FeederSpout feeder = TickBoltTest.createFeeder();
    builder.setSpout(feederId, feeder);
    SimpleStatefulTick tickBolt = new SimpleStatefulTick();
    builder.setBolt(tickBoltId, tickBolt).shuffleGrouping(feederId);

    LocalCluster cluster = new LocalCluster();
    cluster.submitTopology(topologyName, TestUtils.stormConfig(), builder.createTopology());

    // Give the topology a moment to spin up, then push one tuple through.
    Uninterruptibles.sleepUninterruptibly(1, TimeUnit.SECONDS);
    feeder.feed(Arrays.asList(new String[]{"key1", "msg1"}));

    // Wait long enough for several tick tuples to fire.
    Uninterruptibles.sleepUninterruptibly(6, TimeUnit.SECONDS);

    // TODO: this test isn't great .. the number of lines in the file from ticks could vary
    int expectedLines = 3;
    Assert.assertTrue("We should have at least " + expectedLines + " lines in the test file.",
            expectedLines <= tickBolt.tickFile.numLines());
    Assert.assertEquals(1, tickBolt.workFile.numLines());

    cluster.killTopology(topologyName);
    Utils.sleep(4 * 1000);
}
 
开发者ID:telstra,项目名称:open-kilda,代码行数:34,代码来源:TickBoltTest.java

示例2: main

import org.apache.storm.testing.FeederSpout; //导入依赖的package包/类
public static void main(String[] args) throws Exception {
//        if (!NimbusClient.isLocalOverride()) {
//            throw new IllegalStateException("This example only works in local mode.  "
//                    + "Run with storm local not storm jar");
//        }
        // Two hand-driven spouts: one emits (id, gender), the other (id, age).
        FeederSpout genderFeeder = new FeederSpout(new Fields("id", "gender"));
        FeederSpout ageFeeder = new FeederSpout(new Fields("id", "age"));

        TopologyBuilder topo = new TopologyBuilder();
        topo.setSpout("genderSpout", genderFeeder);
        topo.setSpout("ageSpout", ageFeeder);

        // Join 'gender' and 'age' records on the shared 'id' field.
        RealtimeJoinBolt joinBolt = new RealtimeJoinBolt(StreamKind.SOURCE)
                .select("genderSpout:id,ageSpout:id,gender,age")
                .from("genderSpout", 5, false)
                .outerJoin("ageSpout", Duration.ofSeconds(5), false, Cmp.equal("genderSpout:id", "ageSpout:id") )
                .withOutputStream("jstream");

        topo.setBolt("joiner", joinBolt)
                .fieldsGrouping("genderSpout", new Fields("id"))
                .fieldsGrouping("ageSpout", new Fields("id"));

        // Print whatever the join bolt emits on its "jstream" output stream.
        topo.setBolt("printer", new PrinterBolt() ).shuffleGrouping("joiner", "jstream");

        Config conf = new Config();
        StormSubmitter.submitTopologyWithProgressBar("join-example", conf, topo.createTopology());

        // Push the sample records through the running topology.
        generateGenderData(genderFeeder);
        generateAgeData(ageFeeder);
    }
 
开发者ID:hortonworks,项目名称:streamline,代码行数:33,代码来源:RTJoinExampleTopology.java

示例3: generateGenderData

import org.apache.storm.testing.FeederSpout; //导入依赖的package包/类
/** Feeds ten (id, gender) tuples into the spout; even ids are "male", odd ids "female". */
private static void generateGenderData(FeederSpout genderSpout) {
    for (int id = 0; id < 10; id++) {
        String gender = (id % 2 == 0) ? "male" : "female";
        genderSpout.feed(new Values(id, gender));
    }
}
 
开发者ID:hortonworks,项目名称:streamline,代码行数:13,代码来源:RTJoinExampleTopology.java

示例4: main

import org.apache.storm.testing.FeederSpout; //导入依赖的package包/类
public static void main(String[] args) {
  // Hand-fed sources producing (id, gender) and (id, age) tuples.
  FeederSpout genderFeeder = new FeederSpout(new Fields("id", "gender"));
  FeederSpout ageFeeder = new FeederSpout(new Fields("id", "age"));

  TopologyBuilder topo = new TopologyBuilder();
  topo.setSpout("gender", genderFeeder);
  topo.setSpout("age", ageFeeder);
  // Join the two streams on "id", emitting the combined (gender, age) record.
  topo.setBolt("join", new SingleJoinBolt(new Fields("gender", "age")))
      .fieldsGrouping("gender", new Fields("id"))
      .fieldsGrouping("age", new Fields("id"));

  Config conf = new Config();
  conf.setDebug(true);

  LocalCluster cluster = new LocalCluster();
  cluster.submitTopology("join-example", conf, topo.createTopology());

  // Ten gender records fed in ascending id order...
  for (int id = 0; id < 10; id++) {
    String gender = (id % 2 == 0) ? "male" : "female";
    genderFeeder.feed(new Values(id, gender));
  }

  // ...and ten age records fed in descending id order.
  for (int id = 9; id >= 0; id--) {
    ageFeeder.feed(new Values(id, id + 20));
  }

  Utils.sleep(2000);
  cluster.shutdown();
}
 
开发者ID:ziyunhx,项目名称:storm-net-adapter,代码行数:35,代码来源:SingleJoinExample.java

示例5: beforeMethod

import org.apache.storm.testing.FeederSpout; //导入依赖的package包/类
/**
 * TestNG per-test setup: wires a threshold-engine topology with hand-driven
 * feeder spouts (in place of real metric/event sources) and mocked
 * collaborators, then starts it via {@code startTopology()}.
 */
@BeforeMethod
public void beforeMethod() throws Exception {
  // Fixtures
  alarmDefinitionDAO = mock(AlarmDefinitionDAO.class);

  // Mocks
  alarmDAO = new MockAlarmDAO();

  // Bindings: reset the DI container so each test gets fresh bindings,
  // then bind the DAO instances created above.
  Injector.reset();
  Injector.registerModules(new AbstractModule() {
    protected void configure() {
      bind(AlarmDAO.class).toInstance(alarmDAO);
      bind(AlarmDefinitionDAO.class).toInstance(alarmDefinitionDAO);
    }
  });

  // Config
  ThresholdingConfiguration threshConfig = new ThresholdingConfiguration();
  threshConfig.alarmDelay = 1;
  threshConfig.sporadicMetricNamespaces = new HashSet<String>();
  Serialization.registerTarget(KafkaProducerConfiguration.class);

  // Kafka producer settings are deserialized from inline JSON rather than
  // built field by field; the broker address is never contacted because the
  // forwarder below is mocked.
  threshConfig.kafkaProducerConfig =
      Serialization
          .fromJson("{\"KafkaProducerConfiguration\":{\"topic\":\"alarm-state-transitions\",\"metadataBrokerList\":\"192.168.10.10:9092\",\"requestRequiredAcks\":1,\"requestTimeoutMs\":10000,\"producerType\":\"sync\",\"serializerClass\":\"kafka.serializer.StringEncoder\",\"keySerializerClass\":\"\",\"partitionerClass\":\"\",\"compressionCodec\":\"none\",\"compressedTopics\":\"\",\"messageSendMaxRetries\":3,\"retryBackoffMs\":100,\"topicMetadataRefreshIntervalMs\":600000,\"queueBufferingMaxMs\":5000,\"queueBufferingMaxMessages\":10000,\"queueEnqueueTimeoutMs\":-1,\"batchNumMessages\":200,\"sendBufferBytes\":102400,\"clientId\":\"Threshold_Engine\"}}");
  Config stormConfig = new Config();
  // Single task per component — presumably to keep tuple ordering
  // deterministic for the assertions; confirm against startTopology().
  stormConfig.setMaxTaskParallelism(1);
  metricSpout = new FeederSpout(new Fields(MetricSpout.FIELDS));
  eventSpout = new FeederSpout(new Fields("event"));
  alarmEventForwarder = mock(AlarmEventForwarder.class);
  Injector
      .registerModules(new TopologyModule(threshConfig, stormConfig, metricSpout, eventSpout));
  Injector.registerModules(new ProducerModule(alarmEventForwarder));

  // Evaluate alarm stats every 5 seconds
  System.setProperty(MetricAggregationBolt.TICK_TUPLE_SECONDS_KEY, "5");

  startTopology();
}
 
开发者ID:openstack,项目名称:monasca-thresh,代码行数:41,代码来源:ThresholdingEngineAlarmTest.java

示例6: createFeeder

import org.apache.storm.testing.FeederSpout; //导入依赖的package包/类
/** Builds the feeder spout used by the tick tests; tuples carry (key, message). */
public static FeederSpout createFeeder() {
    Fields schema = new Fields("key", "message");
    return new FeederSpout(schema);
}
 
开发者ID:telstra,项目名称:open-kilda,代码行数:4,代码来源:TickBoltTest.java

示例7: main

import org.apache.storm.testing.FeederSpout; //导入依赖的package包/类
public static void main(String[] args) throws Exception {
	// Hand-fed sources: (id, gender, hobbies) and (id, age).
	final FeederSpout genderFeeder = new FeederSpout(new Fields("id", "gender", "hobbies"));
	final FeederSpout ageFeeder = new FeederSpout(new Fields("id", "age"));

	Config conf = new Config();
	TopologyBuilder builder = new TopologyBuilder();

	//  only required to stabilize integration test
	conf.put(FlinkLocalCluster.SUBMIT_BLOCKING, true);
	// Wrap the feeders so the streams terminate cleanly once drained.
	final NullTerminatingSpout wrappedGender = new NullTerminatingSpout(genderFeeder);
	final NullTerminatingSpout wrappedAge = new NullTerminatingSpout(ageFeeder);

	builder.setSpout("gender", wrappedGender);
	builder.setSpout("age", wrappedAge);
	// Join both streams on "id" into a single (gender, age) record.
	builder.setBolt("join", new SingleJoinBolt(new Fields("gender", "age")))
		.fieldsGrouping("gender", new Fields("id"))
		.fieldsGrouping("age", new Fields("id"));

	// Emit the result: to a file sink when an output path is given,
	// otherwise to stdout.
	if (args.length > 0) {
		builder.setBolt("fileOutput", new BoltFileSink(args[0], new TupleOutputFormatter()))
			.shuffleGrouping("join");
	} else {
		builder.setBolt("print", new PrinterBolt()).shuffleGrouping("join");
	}

	String[] hobbies = new String[] {"reading", "biking", "travelling", "watching tv"};

	// Gender records in ascending id order; the hobby cycles through the list.
	for (int id = 0; id < 10; id++) {
		String gender = (id % 2 == 0) ? "male" : "female";
		genderFeeder.feed(new Values(id, gender, hobbies[id % hobbies.length]));
	}

	// Age records in descending id order.
	for (int id = 9; id >= 0; id--) {
		ageFeeder.feed(new Values(id, id + 20));
	}

	final FlinkLocalCluster cluster = FlinkLocalCluster.getLocalCluster();
	cluster.submitTopology("joinTopology", conf, FlinkTopology.createTopology(builder));
	cluster.shutdown();
}
 
开发者ID:axbaretto,项目名称:flink,代码行数:49,代码来源:SingleJoinExample.java

示例8: generateAgeData

import org.apache.storm.testing.FeederSpout; //导入依赖的package包/类
/** Feeds ten (id, age) tuples in descending id order; age is id + 20. */
private static void generateAgeData(FeederSpout ageSpout) {
    int id = 9;
    while (id >= 0) {
        ageSpout.feed(new Values(id, id + 20));
        id--;
    }
}
 
开发者ID:hortonworks,项目名称:streamline,代码行数:6,代码来源:RTJoinExampleTopology.java

示例9: befortMethod

import org.apache.storm.testing.FeederSpout; //导入依赖的package包/类
/**
 * TestNG per-test setup: builds alarm expressions/definitions (standard,
 * deterministic, and mixed), mocks the DAOs, and registers a threshold-engine
 * topology wired with hand-driven feeder spouts.
 *
 * NOTE(review): method name has a typo ("befortMethod"); TestNG discovers it
 * by the @BeforeMethod annotation, so the typo is harmless but worth fixing.
 */
@BeforeMethod
public void befortMethod() throws Exception {
  // Fixtures: one classic threshold expression, one fully deterministic
  // (log-count) expression, and one mixing both styles.
  final AlarmExpression expression =
      new AlarmExpression("max(cpu{id=5}) >= 3 or max(mem{id=5}) >= 5");
  final AlarmExpression expression2 = AlarmExpression.of(
    "count(log.error{id=5},deterministic) >= 1 OR count(log.warning{id=5},deterministic) >= 1"
  );
  final AlarmExpression expression3 = AlarmExpression.of(
    "max(cpu{id=5}) >= 3 AND count(log.warning{id=5},deterministic) >= 1"
  );

  // Metric definitions extracted from the sub-expressions above, in order.
  cpuMetricDef = expression.getSubExpressions().get(0).getMetricDefinition();
  memMetricDef = expression.getSubExpressions().get(1).getMetricDefinition();
  logErrorMetricDef = expression2.getSubExpressions().get(0).getMetricDefinition();
  logWarningMetricDef = expression2.getSubExpressions().get(1).getMetricDefinition();

  // A copy of the mem dimensions with one extra dimension added, used by
  // tests that need a metric that is a superset of memMetricDef.
  extraMemMetricDefDimensions = new HashMap<>(memMetricDef.dimensions);
  extraMemMetricDefDimensions.put("Group", "group A");

  // Alarm definitions pairing each expression with its test tenant/name.
  alarmDefinition =
      new AlarmDefinition(TEST_ALARM_TENANT_ID, TEST_ALARM_NAME,
          TEST_ALARM_DESCRIPTION, expression, "LOW", true, new ArrayList<String>());
  this.deterministicAlarmDefinition = new AlarmDefinition(
    DET_TEST_ALARM_TENANT_ID,
    DET_TEST_ALARM_NAME,
    DET_TEST_ALARM_DESCRIPTION,
    expression2,
    "LOW",
    true,
    new ArrayList<String>()
  );
  this.mixedAlarmDefinition = new AlarmDefinition(
    MIXED_TEST_ALARM_TENANT_ID,
    MIXED_TEST_ALARM_NAME,
    MIXED_TEST_ALARM_DESCRIPTION,
    expression3,
    "LOW",
    true,
    new ArrayList<String>()
  );

  // Mocks
  alarmDAO = mock(AlarmDAO.class);
  alarmDefinitionDAO = mock(AlarmDefinitionDAO.class);

  // Bindings: reset the DI container so each test starts clean, then bind
  // the mocked DAOs.
  Injector.reset();
  Injector.registerModules(new AbstractModule() {
    protected void configure() {
      bind(AlarmDAO.class).toInstance(alarmDAO);
      bind(AlarmDefinitionDAO.class).toInstance(alarmDefinitionDAO);
    }
  });

  // Config
  ThresholdingConfiguration threshConfig = new ThresholdingConfiguration();
  threshConfig.alarmDelay = 1;
  threshConfig.sporadicMetricNamespaces = new HashSet<String>();
  Serialization.registerTarget(KafkaProducerConfiguration.class);

  // Kafka producer settings come from inline JSON; the broker is never
  // contacted because the event forwarder below is mocked.
  threshConfig.kafkaProducerConfig =
      Serialization
          .fromJson("{\"KafkaProducerConfiguration\":{\"topic\":\"alarm-state-transitions\",\"metadataBrokerList\":\"192.168.10.10:9092\",\"requestRequiredAcks\":1,\"requestTimeoutMs\":10000,\"producerType\":\"sync\",\"serializerClass\":\"kafka.serializer.StringEncoder\",\"keySerializerClass\":\"\",\"partitionerClass\":\"\",\"compressionCodec\":\"none\",\"compressedTopics\":\"\",\"messageSendMaxRetries\":3,\"retryBackoffMs\":100,\"topicMetadataRefreshIntervalMs\":600000,\"queueBufferingMaxMs\":5000,\"queueBufferingMaxMessages\":10000,\"queueEnqueueTimeoutMs\":-1,\"batchNumMessages\":200,\"sendBufferBytes\":102400,\"clientId\":\"Threshold_Engine\"}}");
  Config stormConfig = new Config();
  // Single task per component — presumably for deterministic tuple routing
  // in assertions; confirm against the topology tests.
  stormConfig.setMaxTaskParallelism(1);
  metricSpout = new FeederSpout(new Fields(MetricSpout.FIELDS));
  eventSpout = new FeederSpout(new Fields("event"));
  alarmEventForwarder = mock(AlarmEventForwarder.class);
  Injector
      .registerModules(new TopologyModule(threshConfig, stormConfig, metricSpout, eventSpout));
  Injector.registerModules(new ProducerModule(alarmEventForwarder));

}
 
开发者ID:openstack,项目名称:monasca-thresh,代码行数:75,代码来源:ThresholdingEngineTest.java


注:本文中的org.apache.storm.testing.FeederSpout类示例由纯净天空整理自Github/MSDocs等开源代码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。