

Java DataFlowNodeLifeCycleControl.completeCreationAndActivateDataFlowNode Method Code Examples

This article collects typical usage examples of the Java method com.arjuna.databroker.data.core.DataFlowNodeLifeCycleControl.completeCreationAndActivateDataFlowNode. If you are wondering what DataFlowNodeLifeCycleControl.completeCreationAndActivateDataFlowNode does or how to call it, the curated examples below should help. You can also explore further usage examples of the enclosing class, com.arjuna.databroker.data.core.DataFlowNodeLifeCycleControl.


The following shows 15 code examples of DataFlowNodeLifeCycleControl.completeCreationAndActivateDataFlowNode, ordered by popularity.
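All 15 examples follow the same lifecycle pattern: construct the data-flow nodes, register and activate each one with completeCreationAndActivateDataFlowNode, wire providers to consumers, push data through, and finally remove the nodes. Below is a minimal sketch of that pattern, distilled from the examples that follow; it assumes the test helpers TestJEEDataFlowNodeLifeCycleControl, DummyDataSource and DummyDataSink from the arjuna-technologies plug-in test suites are on the classpath, the package paths of the connector interfaces are assumed, and the file name pushed through the pipeline is purely illustrative.

import java.io.File;
import java.util.Collections;
import java.util.UUID;

import org.junit.Test;

import com.arjuna.databroker.data.connector.ObservableDataProvider; // package path assumed
import com.arjuna.databroker.data.connector.ObserverDataConsumer;   // package path assumed
import com.arjuna.databroker.data.core.DataFlowNodeLifeCycleControl;

public class LifeCyclePatternSketchTest
{
    @Test
    public void lifeCyclePattern()
    {
        // Test implementation of the lifecycle control, as used in every example below.
        DataFlowNodeLifeCycleControl dataFlowNodeLifeCycleControl = new TestJEEDataFlowNodeLifeCycleControl();

        // Test doubles standing in for real data-flow nodes (taken from the plug-in test code).
        DummyDataSource dummyDataSource = new DummyDataSource("Dummy Data Source", Collections.<String, String>emptyMap());
        DummyDataSink   dummyDataSink   = new DummyDataSink("Dummy Data Sink", Collections.<String, String>emptyMap());

        // Register and activate each node under a freshly generated id; the third argument is
        // passed as null in all of the examples shown here.
        dataFlowNodeLifeCycleControl.completeCreationAndActivateDataFlowNode(UUID.randomUUID().toString(), dummyDataSource, null);
        dataFlowNodeLifeCycleControl.completeCreationAndActivateDataFlowNode(UUID.randomUUID().toString(), dummyDataSink, null);

        // Wire the source's provider to the sink's consumer for the payload type being exchanged.
        ((ObservableDataProvider<File>) dummyDataSource.getDataProvider(File.class)).addDataConsumer((ObserverDataConsumer<File>) dummyDataSink.getDataConsumer(File.class));

        // Push data through the pipeline (illustrative file name) ...
        dummyDataSource.sendData(new File("Example.dat"));

        // ... then tear the nodes down again once the test is finished.
        dataFlowNodeLifeCycleControl.removeDataFlowNode(dummyDataSource);
        dataFlowNodeLifeCycleControl.removeDataFlowNode(dummyDataSink);
    }
}

The examples differ mainly in which concrete source, processor, service, and sink types are plugged into this skeleton.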

Example 1: simpleInvocation

import com.arjuna.databroker.data.core.DataFlowNodeLifeCycleControl; // import the package/class this method depends on
@Test
public void simpleInvocation()
{
    DataFlowNodeLifeCycleControl dataFlowNodeLifeCycleControl = new TestJEEDataFlowNodeLifeCycleControl();

    String                     name                       = "XSSF Row To JSON Data Processor";
    Map<String, String>        properties                 = Collections.emptyMap();
    XSSFRowToJSONDataProcessor xssfRowToJSONDataProcessor = new XSSFRowToJSONDataProcessor(name, properties);

    dataFlowNodeLifeCycleControl.completeCreationAndActivateDataFlowNode(UUID.randomUUID().toString(), xssfRowToJSONDataProcessor, null);

    File file = new File("Test01.xlsx");

    ((ObserverDataConsumer<File>) xssfRowToJSONDataProcessor.getDataConsumer(File.class)).consume(null, file);

    dataFlowNodeLifeCycleControl.removeDataFlowNode(xssfRowToJSONDataProcessor);
}
 
Developer: arjuna-technologies, Project: Apache-POI_DataBroker_PlugIn, Lines: 18, Source: SimpleRowToJSONTest.java

Example 2: simplestConversion

import com.arjuna.databroker.data.core.DataFlowNodeLifeCycleControl; // import the package/class this method depends on
@Test
public void simplestConversion()
{
    DataFlowNodeLifeCycleControl dataFlowNodeLifeCycleControl = new TestJEEDataFlowNodeLifeCycleControl();

    DummyDataSource dummyDataSource        = new DummyDataSource("Dummy Data Source", Collections.<String, String>emptyMap());
    DataProcessor   shapeFileDataProcessor = new ShapeFileConverterDataProcessor("ShapeFile Converter Data Processor", Collections.<String, String>emptyMap());
    DummyDataSink   dummyDataSink          = new DummyDataSink("Dummy Data Sink", Collections.<String, String>emptyMap());

    dataFlowNodeLifeCycleControl.completeCreationAndActivateDataFlowNode(UUID.randomUUID().toString(), dummyDataSource, null);
    dataFlowNodeLifeCycleControl.completeCreationAndActivateDataFlowNode(UUID.randomUUID().toString(), shapeFileDataProcessor, null);
    dataFlowNodeLifeCycleControl.completeCreationAndActivateDataFlowNode(UUID.randomUUID().toString(), dummyDataSink, null);

    ((ObservableDataProvider<File>) dummyDataSource.getDataProvider(File.class)).addDataConsumer((ObserverDataConsumer<File>) shapeFileDataProcessor.getDataConsumer(File.class));
    ((ObservableDataProvider<String>) shapeFileDataProcessor.getDataProvider(String.class)).addDataConsumer((ObserverDataConsumer<String>) dummyDataSink.getDataConsumer(String.class));

    File testFile = new File("/tmp/Gully_point/Gully_point.shp");
    dummyDataSource.sendData(testFile);

    dataFlowNodeLifeCycleControl.removeDataFlowNode(dummyDataSource);
    dataFlowNodeLifeCycleControl.removeDataFlowNode(shapeFileDataProcessor);
    dataFlowNodeLifeCycleControl.removeDataFlowNode(dummyDataSink);

}
 
Developer: arjuna-technologies, Project: GeoAPI_DataBroker_PlugIn, Lines: 25, Source: ConverterTest.java

Example 3: simplestChain

import com.arjuna.databroker.data.core.DataFlowNodeLifeCycleControl; // import the package/class this method depends on
@Test
public void simplestChain()
{
    DataFlowNodeLifeCycleControl dataFlowNodeLifeCycleControl = new TestJEEDataFlowNodeLifeCycleControl();

    SimpleDataSource    simpleDataSource    = new SimpleDataSource("Simple Data Source", Collections.<String, String>emptyMap());
    SimpleDataProcessor simpleDataProcessor = new SimpleDataProcessor("Simple Data Processor", Collections.<String, String>emptyMap());
    SimpleDataSink      simpleDataSink      = new SimpleDataSink("Simple Data Sink", Collections.<String, String>emptyMap());

    dataFlowNodeLifeCycleControl.completeCreationAndActivateDataFlowNode(UUID.randomUUID().toString(), simpleDataSource, null);
    dataFlowNodeLifeCycleControl.completeCreationAndActivateDataFlowNode(UUID.randomUUID().toString(), simpleDataProcessor, null);
    dataFlowNodeLifeCycleControl.completeCreationAndActivateDataFlowNode(UUID.randomUUID().toString(), simpleDataSink, null);

    ((ObservableDataProvider<String>) simpleDataSource.getDataProvider(String.class)).addDataConsumer((ObserverDataConsumer<String>) simpleDataProcessor.getDataConsumer(String.class));
    ((ObservableDataProvider<String>) simpleDataProcessor.getDataProvider(String.class)).addDataConsumer((ObserverDataConsumer<String>) simpleDataSink.getDataConsumer(String.class));

    assertEquals("Simple DataSource count", 0, simpleDataSource.getCount());
    assertEquals("Simple DataProcessor count", 0, simpleDataProcessor.getCount());
    assertEquals("Simple DataSink count", 0, simpleDataSink.getCount());

    simpleDataSource.dummyGetData("Data Bundle 1");
    simpleDataSource.dummyGetData("Data Bundle 2");
    simpleDataSource.dummyGetData("Data Bundle 3");
    simpleDataSource.dummyGetData("Data Bundle 4");

    dataFlowNodeLifeCycleControl.removeDataFlowNode(simpleDataSource);
    dataFlowNodeLifeCycleControl.removeDataFlowNode(simpleDataProcessor);
    dataFlowNodeLifeCycleControl.removeDataFlowNode(simpleDataSink);

    assertArrayEquals("Unexpected history at DataSink", new String[]{"[Data Bundle 1]", "[Data Bundle 2]", "[Data Bundle 3]", "[Data Bundle 4]"}, simpleDataSink.getSentHistory().toArray());
    
    assertEquals("Simple DataSource count", 4, simpleDataSource.getCount());
    assertEquals("Simple DataProcessor count", 4, simpleDataProcessor.getCount());
    assertEquals("Simple DataSink count", 4, simpleDataSink.getCount());
}
 
Developer: arjuna-technologies, Project: Simple_DataBroker_PlugIn, Lines: 36, Source: ChainingTest.java

Example 4: createResourceAsString

import com.arjuna.databroker.data.core.DataFlowNodeLifeCycleControl; // import the package/class this method depends on
@Test
public void createResourceAsString()
{
    try
    {
        CKANAPIProperties ckanAPIProperties = new CKANAPIProperties("ckanapi.properties");

        if (! ckanAPIProperties.isLoaded())
        {
            logger.log(Level.INFO, "SKIPPING TEST 'AppendFileStoreCKANDataServiceTest.createResourceAsString', no propertiles file");
            return;
        }

        DataFlowNodeLifeCycleControl dataFlowNodeLifeCycleControl = new TestJEEDataFlowNodeLifeCycleControl();

        String              name       = "AppendFileStoreCKANDataService";
        Map<String, String> properties = new HashMap<String, String>();
        properties.put(AppendFileStoreCKANDataService.CKANROOTURL_PROPERTYNAME, ckanAPIProperties.getCKANRootURL());
        properties.put(AppendFileStoreCKANDataService.PACKAGEID_PROPERTYNAME, ckanAPIProperties.getPackageId());
        properties.put(AppendFileStoreCKANDataService.APIKEY_PROPERTYNAME, ckanAPIProperties.getAPIKey());

        DummyDataSource                dummyDataSource                = new DummyDataSource("Dummy Data Source", Collections.<String, String>emptyMap());
        AppendFileStoreCKANDataService appendFileStoreCKANDataService = new AppendFileStoreCKANDataService(name, properties);

        dataFlowNodeLifeCycleControl.completeCreationAndActivateDataFlowNode(UUID.randomUUID().toString(), dummyDataSource, null);
        dataFlowNodeLifeCycleControl.completeCreationAndActivateDataFlowNode(UUID.randomUUID().toString(), appendFileStoreCKANDataService, null);

        ((ObservableDataProvider<String>) dummyDataSource.getDataProvider(String.class)).addDataConsumer((ObserverDataConsumer<String>) appendFileStoreCKANDataService.getDataConsumer(String.class));

        dummyDataSource.sendData("Test Data, Test Text");

        dataFlowNodeLifeCycleControl.removeDataFlowNode(dummyDataSource);
        dataFlowNodeLifeCycleControl.removeDataFlowNode(appendFileStoreCKANDataService);
    }
    catch (Throwable throwable)
    {
        logger.log(Level.WARNING, "Problem in 'AppendFileStoreCKANDataServiceTest.createResourceAsString'", throwable);
        fail("Problem in 'AppendFileStoreCKANDataServiceTest.createResourceAsString': " + throwable);
    }
}
 
Developer: arjuna-technologies, Project: CKAN_DataBroker_PlugIn, Lines: 41, Source: AppendFileStoreCKANDataServiceTest.java

Example 5: createResourceAsBytes

import com.arjuna.databroker.data.core.DataFlowNodeLifeCycleControl; // import the package/class this method depends on
@Test
public void createResourceAsBytes()
{
    try
    {
        CKANAPIProperties ckanAPIProperties = new CKANAPIProperties("ckanapi.properties");

        if (! ckanAPIProperties.isLoaded())
        {
            logger.log(Level.INFO, "SKIPPING TEST 'AppendFileStoreCKANDataServiceTest.createResourceAsBytes', no propertiles file");
            return;
        }

        DataFlowNodeLifeCycleControl dataFlowNodeLifeCycleControl = new TestJEEDataFlowNodeLifeCycleControl();

        String              name       = "AppendFileStoreCKANDataService";
        Map<String, String> properties = new HashMap<String, String>();
        properties.put(AppendFileStoreCKANDataService.CKANROOTURL_PROPERTYNAME, ckanAPIProperties.getCKANRootURL());
        properties.put(AppendFileStoreCKANDataService.PACKAGEID_PROPERTYNAME, ckanAPIProperties.getPackageId());
        properties.put(AppendFileStoreCKANDataService.APIKEY_PROPERTYNAME, ckanAPIProperties.getAPIKey());

        DummyDataSource                dummyDataSource                = new DummyDataSource("Dummy Data Source", Collections.<String, String>emptyMap());
        AppendFileStoreCKANDataService appendFileStoreCKANDataService = new AppendFileStoreCKANDataService(name, properties);

        dataFlowNodeLifeCycleControl.completeCreationAndActivateDataFlowNode(UUID.randomUUID().toString(), dummyDataSource, null);
        dataFlowNodeLifeCycleControl.completeCreationAndActivateDataFlowNode(UUID.randomUUID().toString(), appendFileStoreCKANDataService, null);

        ((ObservableDataProvider<byte[]>) dummyDataSource.getDataProvider(byte[].class)).addDataConsumer((ObserverDataConsumer<byte[]>) appendFileStoreCKANDataService.getDataConsumer(byte[].class));

        dummyDataSource.sendData("Test Data, Test Text".getBytes());

        dataFlowNodeLifeCycleControl.removeDataFlowNode(dummyDataSource);
        dataFlowNodeLifeCycleControl.removeDataFlowNode(appendFileStoreCKANDataService);
    }
    catch (Throwable throwable)
    {
        logger.log(Level.WARNING, "Problem in 'AppendFileStoreCKANDataServiceTest.createResourceAsBytes'", throwable);
        fail("Problem in 'AppendFileStoreCKANDataServiceTest.createResourceAsBytes': " + throwable);
    }
}
 
Developer: arjuna-technologies, Project: CKAN_DataBroker_PlugIn, Lines: 41, Source: AppendFileStoreCKANDataServiceTest.java

Example 6: simpleInvocation

import com.arjuna.databroker.data.core.DataFlowNodeLifeCycleControl; // import the package/class this method depends on
@Test
public void simpleInvocation()
{
    DataFlowNodeLifeCycleControl dataFlowNodeLifeCycleControl = new TestJEEDataFlowNodeLifeCycleControl();

    AuthenticationProperties authenticationProperties = new AuthenticationProperties("authentication.properties");

    String              name       = "Twitter Data Source";
    Map<String, String> properties = new HashMap<String, String>();
    properties.put(TwitterDataSource.TWITTER_CONSUMERKEY_PROPERTYNAME,    authenticationProperties.getConsumerKey());
    properties.put(TwitterDataSource.TWITTER_CONSUMERSECRET_PROPERTYNAME, authenticationProperties.getConsumerSecret());
    properties.put(TwitterDataSource.TWITTER_TOKEN_PROPERTYNAME,          authenticationProperties.getToken());
    properties.put(TwitterDataSource.TWITTER_TOKENSECRET_PROPERTYNAME,    authenticationProperties.getTokenSecret());
    properties.put(TwitterDataSource.TWITTER_TRACKTERM_PROPERTYNAME,      "newcastle");
    properties.put(TwitterDataSource.POLLINTERVAL_PROPERTYNAME,           "5");

    TwitterDataSource  twitterDataSource  = new TwitterDataSource(name, properties);
    DummyDataProcessor dummyDataProcessor = new DummyDataProcessor("Dummy Data Processor", Collections.<String, String>emptyMap());

    dataFlowNodeLifeCycleControl.completeCreationAndActivateDataFlowNode(UUID.randomUUID().toString(), twitterDataSource, null);
    dataFlowNodeLifeCycleControl.completeCreationAndActivateDataFlowNode(UUID.randomUUID().toString(), dummyDataProcessor, null);

    ((ObservableDataProvider<String>) twitterDataSource.getDataProvider(String.class)).addDataConsumer((ObserverDataConsumer<String>) dummyDataProcessor.getDataConsumer(String.class));

    try
    {
        Thread.sleep(60000);
    }
    catch (InterruptedException interruptedException)
    {
        fail("Interrupted during sleep");
    }

    dataFlowNodeLifeCycleControl.removeDataFlowNode(dummyDataProcessor);

    assertTrue("Didn't receive any tweets", dummyDataProcessor.receivedData().size() > 0);
}
 
Developer: arjuna-technologies, Project: Twitter_DataBroker_PlugIn, Lines: 38, Source: SimpleTest.java

Example 7: simpleDecryptInvocation

import com.arjuna.databroker.data.core.DataFlowNodeLifeCycleControl; // import the package/class this method depends on
@SuppressWarnings("rawtypes")
    @Test
    public void simpleDecryptInvocation()
    {
        try
        {
            int character = 0;
            int index     = 0;

            File         spreadsheetFile        = new File("Test02.xlsx");
            byte[]       spreadsheetData        = new byte[(int) spreadsheetFile.length()];
            InputStream  spreadsheetInputStream = new FileInputStream(spreadsheetFile);
            index     = 0;
            character = spreadsheetInputStream.read();
            while (character != -1)
            {
                spreadsheetData[index] = (byte) character;
                character = spreadsheetInputStream.read();
                index++;
            }
            spreadsheetInputStream.close();

            File         csvFile        = new File("Test01_s.csv");
            byte[]       csvData        = new byte[(int) csvFile.length()];
            InputStream  csvInputStream = new FileInputStream(csvFile);
            index     = 0;
            character = csvInputStream.read();
            while (character != -1)
            {
                csvData[index] = (byte) character;
                character = csvInputStream.read();
                index++;
            }
            csvInputStream.close();

            DataFlowNodeLifeCycleControl     dataFlowNodeLifeCycleControl     = new TestJEEDataFlowNodeLifeCycleControl();
//            DataFlowNodeLinkLifeCycleControl dataFlowNodeLinkLifeCycleControl = new TestJEEDataFlowNodeLinkLifeCycleControl();

            String                            name                              = "XSSF Stream Sheet To CSV Data Processor";
            Map<String, String>               properties                        = Collections.emptyMap();

            DummyDataSource                   dummyDataSource                   = new DummyDataSource("Dummy Data Source", Collections.<String, String>emptyMap());
            XSSFStreamSheetToCSVDataProcessor xssfStreamSheetToCSVDataProcessor = new XSSFStreamSheetToCSVDataProcessor(name, properties);
            DummyDataSink                     dummyDataSink                     = new DummyDataSink("Dummy Data Sink", Collections.<String, String>emptyMap());

            dataFlowNodeLifeCycleControl.completeCreationAndActivateDataFlowNode(UUID.randomUUID().toString(), dummyDataSource, null);
            dataFlowNodeLifeCycleControl.completeCreationAndActivateDataFlowNode(UUID.randomUUID().toString(), xssfStreamSheetToCSVDataProcessor, null);
            dataFlowNodeLifeCycleControl.completeCreationAndActivateDataFlowNode(UUID.randomUUID().toString(), dummyDataSink, null);

            ((ObservableDataProvider<Map>) dummyDataSource.getDataProvider(Map.class)).addDataConsumer((ObserverDataConsumer<Map>) xssfStreamSheetToCSVDataProcessor.getDataConsumer(Map.class));
            ((ObservableDataProvider<Map>) xssfStreamSheetToCSVDataProcessor.getDataProvider(Map.class)).addDataConsumer((ObserverDataConsumer<Map>) dummyDataSink.getDataConsumer(Map.class));

            Map<String, Object> inputMap = new HashMap<String, Object>();
            inputMap.put("filename", "Test.xslx");
            inputMap.put("data", spreadsheetData);
            inputMap.put("password", "test");

            dummyDataSource.sendData(inputMap);

            List<Object> receivedData = dummyDataSink.receivedData();
            assertEquals("Unexpected received data", 1, receivedData.size());

            Map<String, Object> outputMap = (Map<String, Object>) receivedData.get(0);
            assertNotNull("Unexpected null received data", outputMap);
            assertEquals("Unexpected received map size", 3, outputMap.size());
            assertNotNull("Unexpected null 'filename' entry", outputMap.get("filename"));
            assertNotNull("Unexpected null 'data' entry", outputMap.get("data"));
            assertNotNull("Unexpected null 'resourceformat' entry", outputMap.get("resourceformat"));
            assertEquals("Unexpected value of 'filename' entry", "Test_1.csv", outputMap.get("filename"));
            assertArrayEquals("Unexpected value of 'data' entry", csvData, (byte[]) outputMap.get("data"));
            assertEquals("Unexpected value of 'resourceformat' entry", "csv", outputMap.get("resourceformat"));

            dataFlowNodeLifeCycleControl.removeDataFlowNode(dummyDataSource);
            dataFlowNodeLifeCycleControl.removeDataFlowNode(xssfStreamSheetToCSVDataProcessor);
            dataFlowNodeLifeCycleControl.removeDataFlowNode(dummyDataSink);
        }
        catch (IOException ioException)
        {
            fail("IO Exception");
        }
    }
 
Developer: arjuna-technologies, Project: Apache-POI_DataBroker_PlugIn, Lines: 82, Source: SimpleStreamEncryptedSheetToCSVTest.java

Example 8: simpleInvocation

import com.arjuna.databroker.data.core.DataFlowNodeLifeCycleControl; // import the package/class this method depends on
@SuppressWarnings("rawtypes")
    @Test
    public void simpleInvocation()
    {
        try
        {
            int character = 0;
            int index     = 0;

            File         spreadsheetFile        = new File("Test01.xlsx");
            byte[]       spreadsheetData        = new byte[(int) spreadsheetFile.length()];
            InputStream  spreadsheetInputStream = new FileInputStream(spreadsheetFile);
            index     = 0;
            character = spreadsheetInputStream.read();
            while (character != -1)
            {
                spreadsheetData[index] = (byte) character;
                character = spreadsheetInputStream.read();
                index++;
            }
            spreadsheetInputStream.close();

            File         csvFile        = new File("Test01_s.csv");
            byte[]       csvData        = new byte[(int) csvFile.length()];
            InputStream  csvInputStream = new FileInputStream(csvFile);
            index     = 0;
            character = csvInputStream.read();
            while (character != -1)
            {
                csvData[index] = (byte) character;
                character = csvInputStream.read();
                index++;
            }
            csvInputStream.close();

            DataFlowNodeLifeCycleControl     dataFlowNodeLifeCycleControl     = new TestJEEDataFlowNodeLifeCycleControl();
//            DataFlowNodeLinkLifeCycleControl dataFlowNodeLinkLifeCycleControl = new TestJEEDataFlowNodeLinkLifeCycleControl();

            String                            name                              = "XSSF Stream Sheet To CSV Data Processor";
            Map<String, String>               properties                        = Collections.emptyMap();

            DummyDataSource                   dummyDataSource                   = new DummyDataSource("Dummy Data Source", Collections.<String, String>emptyMap());
            XSSFStreamSheetToCSVDataProcessor xssfStreamSheetToCSVDataProcessor = new XSSFStreamSheetToCSVDataProcessor(name, properties);
            DummyDataSink                     dummyDataSink                     = new DummyDataSink("Dummy Data Sink", Collections.<String, String>emptyMap());

            dataFlowNodeLifeCycleControl.completeCreationAndActivateDataFlowNode(UUID.randomUUID().toString(), dummyDataSource, null);
            dataFlowNodeLifeCycleControl.completeCreationAndActivateDataFlowNode(UUID.randomUUID().toString(), xssfStreamSheetToCSVDataProcessor, null);
            dataFlowNodeLifeCycleControl.completeCreationAndActivateDataFlowNode(UUID.randomUUID().toString(), dummyDataSink, null);

            ((ObservableDataProvider<Map>) dummyDataSource.getDataProvider(Map.class)).addDataConsumer((ObserverDataConsumer<Map>) xssfStreamSheetToCSVDataProcessor.getDataConsumer(Map.class));
            ((ObservableDataProvider<Map>) xssfStreamSheetToCSVDataProcessor.getDataProvider(Map.class)).addDataConsumer((ObserverDataConsumer<Map>) dummyDataSink.getDataConsumer(Map.class));

            Map<String, Object> inputMap = new HashMap<String, Object>();
            inputMap.put("filename", "Test.xslx");
            inputMap.put("data", spreadsheetData);

            dummyDataSource.sendData(inputMap);

            List<Object> receivedData = dummyDataSink.receivedData();
            assertEquals("Unexpected received data", 1, receivedData.size());

            Map<String, Object> outputMap = (Map<String, Object>) receivedData.get(0);
            assertNotNull("Unexpected null received data", outputMap);
            assertEquals("Unexpected received map size", 3, outputMap.size());
            assertNotNull("Unexpected null 'filename' entry", outputMap.get("filename"));
            assertNotNull("Unexpected null 'data' entry", outputMap.get("data"));
            assertNotNull("Unexpected null 'resourceformat' entry", outputMap.get("resourceformat"));
            assertEquals("Unexpected value of 'filename' entry", "Test_1.csv", outputMap.get("filename"));
            assertArrayEquals("Unexpected value of 'data' entry", csvData, (byte[]) outputMap.get("data"));
            assertEquals("Unexpected value of 'resourceformat' entry", "csv", outputMap.get("resourceformat"));

            dataFlowNodeLifeCycleControl.removeDataFlowNode(dummyDataSource);
            dataFlowNodeLifeCycleControl.removeDataFlowNode(xssfStreamSheetToCSVDataProcessor);
            dataFlowNodeLifeCycleControl.removeDataFlowNode(dummyDataSink);
        }
        catch (IOException ioException)
        {
            fail("IO Exception");
        }
    }
 
Developer: arjuna-technologies, Project: Apache-POI_DataBroker_PlugIn, Lines: 81, Source: SimpleStreamSheetToCSVTest.java

Example 9: simpleInvocation

import com.arjuna.databroker.data.core.DataFlowNodeLifeCycleControl; // import the package/class this method depends on
@SuppressWarnings("rawtypes")
    @Test
    public void simpleInvocation()
    {
        try
        {
            File         spreadsheetFile        = new File("Test01.xlsx");
            byte[]       spreadsheetData        = new byte[(int) spreadsheetFile.length()];
            InputStream  spreadsheetInputStream = new FileInputStream(spreadsheetFile);
            int          character              = spreadsheetInputStream.read();
            int          index                  = 0;
            while (character != -1)
            {
                spreadsheetData[index] = (byte) character;
                character = spreadsheetInputStream.read();
                index++;
            }
            spreadsheetInputStream.close();

            File         csvFile        = new File("Test01_o.csv");
            byte[]       csvData        = new byte[(int) csvFile.length()];
            InputStream  csvInputStream = new FileInputStream(csvFile);
            index     = 0;
            character = csvInputStream.read();
            while (character != -1)
            {
                csvData[index] = (byte) character;
                character = csvInputStream.read();
                index++;
            }
            csvInputStream.close();

            DataFlowNodeLifeCycleControl dataFlowNodeLifeCycleControl = new TestJEEDataFlowNodeLifeCycleControl();
//            DataFlowNodeLinkLifeCycleControl dataFlowNodeLinkLifeCycleControl = new TestJEEDataFlowNodeLinkLifeCycleControl();

            String                      name                        = "XSSF Sheet To CSV Data Processor";
            Map<String, String>         properties                  = Collections.emptyMap();

            DummyDataSource             dummyDataSource             = new DummyDataSource("Dummy Data Source", Collections.<String, String>emptyMap());
            XSSFSheetToCSVDataProcessor xssfSheetToCSVDataProcessor = new XSSFSheetToCSVDataProcessor(name, properties);
            DummyDataSink               dummyDataSink               = new DummyDataSink("Dummy Data Sink", Collections.<String, String>emptyMap());

            dataFlowNodeLifeCycleControl.completeCreationAndActivateDataFlowNode(UUID.randomUUID().toString(), dummyDataSource, null);
            dataFlowNodeLifeCycleControl.completeCreationAndActivateDataFlowNode(UUID.randomUUID().toString(), xssfSheetToCSVDataProcessor, null);
            dataFlowNodeLifeCycleControl.completeCreationAndActivateDataFlowNode(UUID.randomUUID().toString(), dummyDataSink, null);

            ((ObservableDataProvider<Map>) dummyDataSource.getDataProvider(Map.class)).addDataConsumer((ObserverDataConsumer<Map>) xssfSheetToCSVDataProcessor.getDataConsumer(Map.class));
            ((ObservableDataProvider<Map>) xssfSheetToCSVDataProcessor.getDataProvider(Map.class)).addDataConsumer((ObserverDataConsumer<Map>) dummyDataSink.getDataConsumer(Map.class));

            Map<String, Object> inputMap = new HashMap<String, Object>();
            inputMap.put("filename", "Test.xslx");
            inputMap.put("data", spreadsheetData);

            dummyDataSource.sendData(inputMap);

            List<Object> receivedData = dummyDataSink.receivedData();
            assertEquals("Unexpected received data", 1, receivedData.size());

            Map<String, Object> outputMap = (Map<String, Object>) receivedData.get(0);
            assertNotNull("Unexpected null received data", outputMap);
            assertEquals("Unexpected received map size", 3, outputMap.size());
            assertNotNull("Unexpected null 'filename' entry", outputMap.get("filename"));
            assertNotNull("Unexpected null 'data' entry", outputMap.get("data"));
            assertNotNull("Unexpected null 'resourceformat' entry", outputMap.get("resourceformat"));
            assertEquals("Unexpected value of 'filename' entry", "Test_Sheet1.csv", outputMap.get("filename"));
            assertArrayEquals("Unexpected value of 'data' entry", csvData, (byte[]) outputMap.get("data"));
            assertEquals("Unexpected value of 'resourceformat' entry", "csv", outputMap.get("resourceformat"));

            dataFlowNodeLifeCycleControl.removeDataFlowNode(dummyDataSource);
            dataFlowNodeLifeCycleControl.removeDataFlowNode(xssfSheetToCSVDataProcessor);
            dataFlowNodeLifeCycleControl.removeDataFlowNode(dummyDataSink);
        }
        catch (IOException ioException)
        {
            fail("IO Exception");
        }
    }
 
Developer: arjuna-technologies, Project: Apache-POI_DataBroker_PlugIn, Lines: 78, Source: SimpleSheetToCSVTest.java

Example 10: fileScanner01

import com.arjuna.databroker.data.core.DataFlowNodeLifeCycleControl; // import the package/class this method depends on
@Test
public void fileScanner01()
{
    try
    {
        DataFlowNodeLifeCycleControl dataFlowNodeLifeCycleControl = new TestJEEDataFlowNodeLifeCycleControl();

        File testDirectory = createTemporaryDirectory("Scanner01");

        String              name       = "Polling File Change Data Source";
        Map<String, String> properties = new HashMap<String, String>();
        properties.put(PollingFileChangeDataSource.FILENAME_PROPERYNAME, testDirectory.toString() + File.separator + "Test02");
        properties.put(PollingFileChangeDataSource.POLLINTERVAL_PROPERYNAME, "1000");

        PollingFileChangeDataSource pollingFileChangeDataSource = new PollingFileChangeDataSource(name, properties);
        DummyDataSink               dummyDataSink               = new DummyDataSink("Dummy Data Sink", Collections.<String, String>emptyMap());

        dataFlowNodeLifeCycleControl.completeCreationAndActivateDataFlowNode(UUID.randomUUID().toString(), pollingFileChangeDataSource, null);
        dataFlowNodeLifeCycleControl.completeCreationAndActivateDataFlowNode(UUID.randomUUID().toString(), dummyDataSink, null);

        ((ObservableDataProvider<File>) pollingFileChangeDataSource.getDataProvider(File.class)).addDataConsumer((ObserverDataConsumer<File>) dummyDataSink.getDataConsumer(File.class));

        Thread.sleep(1000);

        File testFile1 = new File(testDirectory, "Test01");
        testFile1.createNewFile();
        Thread.sleep(1000);

        File testFile2 = new File(testDirectory, "Test02");
        testFile2.createNewFile();
        Thread.sleep(1000);

        File testFile3 = new File(testDirectory, "Test03");
        testFile3.createNewFile();
        Thread.sleep(1000);

        dataFlowNodeLifeCycleControl.removeDataFlowNode(pollingFileChangeDataSource);
        dataFlowNodeLifeCycleControl.removeDataFlowNode(dummyDataSink);

        assertEquals("Incorrect message number", 1, dummyDataSink.receivedData().size());
    }
    catch (Throwable throwable)
    {
        logger.log(Level.WARNING, "Problem in 'fileScanner01'", throwable);

        fail("Problem in 'fileScanner01': " + throwable);
    }
}
 
Developer: arjuna-technologies, Project: FileSystem_DataBroker_PlugIn, Lines: 49, Source: PollingFileChangeDataSourceTest.java

Example 11: fileScanner01

import com.arjuna.databroker.data.core.DataFlowNodeLifeCycleControl; // import the package/class this method depends on
@Test
public void fileScanner01()
{
    try
    {
        DataFlowNodeLifeCycleControl dataFlowNodeLifeCycleControl = new TestJEEDataFlowNodeLifeCycleControl();

        File testDirectory = createTemporaryDirectory("Scanner01");

        String              name       = "File Change Data Source";
        Map<String, String> properties = new HashMap<String, String>();
        properties.put(FileChangeDataSource.FILENAME_PROPERYNAME, testDirectory.toString() + File.separator + "Test02");

        FileChangeDataSource fileChangeDataSource = new FileChangeDataSource(name, properties);
        DummyDataSink        dummyDataSink        = new DummyDataSink("Dummy Data Sink", Collections.<String, String>emptyMap());

        dataFlowNodeLifeCycleControl.completeCreationAndActivateDataFlowNode(UUID.randomUUID().toString(), fileChangeDataSource, null);
        dataFlowNodeLifeCycleControl.completeCreationAndActivateDataFlowNode(UUID.randomUUID().toString(), dummyDataSink, null);

        ((ObservableDataProvider<File>) fileChangeDataSource.getDataProvider(File.class)).addDataConsumer((ObserverDataConsumer<File>) dummyDataSink.getDataConsumer(File.class));

        Thread.sleep(1000);

        File testFile1 = new File(testDirectory, "Test01");
        testFile1.createNewFile();
        Thread.sleep(1000);

        File testFile2 = new File(testDirectory, "Test02");
        testFile2.createNewFile();
        Thread.sleep(1000);

        File testFile3 = new File(testDirectory, "Test03");
        testFile3.createNewFile();
        Thread.sleep(1000);

        dataFlowNodeLifeCycleControl.removeDataFlowNode(fileChangeDataSource);
        dataFlowNodeLifeCycleControl.removeDataFlowNode(dummyDataSink);

        assertEquals("Incorrect message number", 1, dummyDataSink.receivedData().size());
    }
    catch (Throwable throwable)
    {
        logger.log(Level.WARNING, "Problem in 'fileScanner01'", throwable);

        fail("Problem in 'fileScanner01': " + throwable);
    }
}
 
Developer: arjuna-technologies, Project: FileSystem_DataBroker_PlugIn, Lines: 48, Source: FileChangeDataSourceTest.java

Example 12: directoryScanner01

import com.arjuna.databroker.data.core.DataFlowNodeLifeCycleControl; // import the package/class this method depends on
@Test
public void directoryScanner01()
{
    try
    {
        DataFlowNodeLifeCycleControl dataFlowNodeLifeCycleControl = new TestJEEDataFlowNodeLifeCycleControl();

        File testDirectory = createTemporaryDirectory("Scanner01");

        String              name       = "Directory Change Data Source";
        Map<String, String> properties = new HashMap<String, String>();
        properties.put(DirectoryChangeDataSource.DIRECTORYNAME_PROPERYNAME, testDirectory.toString());

        DirectoryChangeDataSource directoryChangeDataSource = new DirectoryChangeDataSource(name, properties);
        DummyDataSink             dummyDataSink             = new DummyDataSink("Dummy Data Sink", Collections.<String, String>emptyMap());

        dataFlowNodeLifeCycleControl.completeCreationAndActivateDataFlowNode(UUID.randomUUID().toString(), directoryChangeDataSource, null);
        dataFlowNodeLifeCycleControl.completeCreationAndActivateDataFlowNode(UUID.randomUUID().toString(), dummyDataSink, null);

        ((ObservableDataProvider<File>) directoryChangeDataSource.getDataProvider(File.class)).addDataConsumer((ObserverDataConsumer<File>) dummyDataSink.getDataConsumer(File.class));

        Thread.sleep(1000);

        File testFile1 = new File(testDirectory, "Test01");
        testFile1.createNewFile();
        Thread.sleep(1000);

        File testFile2 = new File(testDirectory, "Test02");
        testFile2.createNewFile();
        Thread.sleep(1000);

        File testFile3 = new File(testDirectory, "Test03");
        testFile3.createNewFile();
        Thread.sleep(1000);

        dataFlowNodeLifeCycleControl.removeDataFlowNode(directoryChangeDataSource);
        dataFlowNodeLifeCycleControl.removeDataFlowNode(dummyDataSink);

        assertEquals("Incorrect message number", 3, dummyDataSink.receivedData().size());
    }
    catch (Throwable throwable)
    {
        logger.log(Level.WARNING, "Problem in 'directoryScanner01'", throwable);
        fail("Problem in 'directoryScanner01': " + throwable);
    }
}
 
Developer: arjuna-technologies, Project: FileSystem_DataBroker_PlugIn, Lines: 47, Source: DirectoryChangeDataSourceTest.java

Example 13: createResourceAsString

import com.arjuna.databroker.data.core.DataFlowNodeLifeCycleControl; // import the package/class this method depends on
@Test
public void createResourceAsString()
{
    try
    {
        JSONWebServiceProperties jsonWebServiceProperties = new JSONWebServiceProperties("jsonwebservice.properties");

        if (! jsonWebServiceProperties.isLoaded())
        {
            logger.log(Level.INFO, "SKIPPING TEST 'createResourceAsString', no propertiles file");
            return;
        }

        DataFlowNodeLifeCycleControl dataFlowNodeLifeCycleControl = new TestJEEDataFlowNodeLifeCycleControl();

        String              name       = "PullJSONWebServiceDataSource";
        Map<String, String> properties = new HashMap<String, String>();
        properties.put(PullJSONWebServiceDataSource.SERVICEURL_PROPERTYNAME, jsonWebServiceProperties.getServiceURL());
        properties.put(PullJSONWebServiceDataSource.SCHEDULEDELAY_PROPERTYNAME, jsonWebServiceProperties.getScheduleDelay());
        properties.put(PullJSONWebServiceDataSource.SCHEDULEPERIOD_PROPERTYNAME, jsonWebServiceProperties.getSchedulePeriod());
        properties.put(PullJSONWebServiceDataSource.USERNAME_PROPERTYNAME, jsonWebServiceProperties.getUserName());
        properties.put(PullJSONWebServiceDataSource.PASSWORD_PROPERTYNAME, jsonWebServiceProperties.getPassword());

        PullJSONWebServiceDataSource pullJSONWebServiceDataSource = new PullJSONWebServiceDataSource(name, properties);
        DummyDataSink                dummyDataSink                = new DummyDataSink("Dummy Data Sink", Collections.<String, String>emptyMap());

        dataFlowNodeLifeCycleControl.completeCreationAndActivateDataFlowNode(UUID.randomUUID().toString(), pullJSONWebServiceDataSource, null);
        dataFlowNodeLifeCycleControl.completeCreationAndActivateDataFlowNode(UUID.randomUUID().toString(), dummyDataSink, null);

        ((ObservableDataProvider<String>) pullJSONWebServiceDataSource.getDataProvider(String.class)).addDataConsumer((ObserverDataConsumer<String>) dummyDataSink.getDataConsumer(String.class));

        Thread.sleep(10000);
        List<Object> receivedData = dummyDataSink.receivedData();

        dataFlowNodeLifeCycleControl.removeDataFlowNode(dummyDataSink);
        dataFlowNodeLifeCycleControl.removeDataFlowNode(pullJSONWebServiceDataSource);

        assertEquals("Unexpected received data number", 1, receivedData.size());
    }
    catch (Throwable throwable)
    {
        logger.log(Level.WARNING, "Problem in 'createResource'", throwable);
        fail("Problem in 'createResource': " + throwable);
    }
}
 
Developer: arjuna-technologies, Project: WebService_DataBroker_PlugIn, Lines: 46, Source: JSONWebServiceDataSourceTest.java

Example 14: fullConnectedChain

import com.arjuna.databroker.data.core.DataFlowNodeLifeCycleControl; // import the package/class this method depends on
@Test
public void fullConnectedChain()
{
    DataFlowNodeLifeCycleControl dataFlowNodeLifeCycleControl = new TestJEEDataFlowNodeLifeCycleControl();

    SimpleDataSource    simpleDataSource    = new SimpleDataSource("Simple Data Source", Collections.<String, String>emptyMap());
    SimpleDataProcessor simpleDataProcessor = new SimpleDataProcessor("Simple Data Processor", Collections.<String, String>emptyMap());
    SimpleDataSink      simpleDataSink1     = new SimpleDataSink("Simple Data Sink 1", Collections.<String, String>emptyMap());
    SimpleDataSink      simpleDataSink2     = new SimpleDataSink("Simple Data Sink 2", Collections.<String, String>emptyMap());
    SimpleDataSink      simpleDataSink3     = new SimpleDataSink("Simple Data Sink 3", Collections.<String, String>emptyMap());
    SimpleDataService   simpleDataService   = new SimpleDataService("Simple Data Service", Collections.<String, String>emptyMap());
    SimpleDataStore     simpleDataStore     = new SimpleDataStore("Simple Data Store", Collections.<String, String>emptyMap());

    dataFlowNodeLifeCycleControl.completeCreationAndActivateDataFlowNode(UUID.randomUUID().toString(), simpleDataSource, null);
    dataFlowNodeLifeCycleControl.completeCreationAndActivateDataFlowNode(UUID.randomUUID().toString(), simpleDataProcessor, null);
    dataFlowNodeLifeCycleControl.completeCreationAndActivateDataFlowNode(UUID.randomUUID().toString(), simpleDataSink1, null);
    dataFlowNodeLifeCycleControl.completeCreationAndActivateDataFlowNode(UUID.randomUUID().toString(), simpleDataSink2, null);
    dataFlowNodeLifeCycleControl.completeCreationAndActivateDataFlowNode(UUID.randomUUID().toString(), simpleDataSink3, null);
    dataFlowNodeLifeCycleControl.completeCreationAndActivateDataFlowNode(UUID.randomUUID().toString(), simpleDataService, null);
    dataFlowNodeLifeCycleControl.completeCreationAndActivateDataFlowNode(UUID.randomUUID().toString(), simpleDataStore, null);

    ((ObservableDataProvider<String>) simpleDataSource.getDataProvider(String.class)).addDataConsumer((ObserverDataConsumer<String>) simpleDataService.getDataConsumer(String.class));
    ((ObservableDataProvider<String>) simpleDataService.getDataProvider(String.class)).addDataConsumer((ObserverDataConsumer<String>) simpleDataSink1.getDataConsumer(String.class));

    ((ObservableDataProvider<String>) simpleDataSource.getDataProvider(String.class)).addDataConsumer((ObserverDataConsumer<String>) simpleDataProcessor.getDataConsumer(String.class));
    ((ObservableDataProvider<String>) simpleDataProcessor.getDataProvider(String.class)).addDataConsumer((ObserverDataConsumer<String>) simpleDataSink2.getDataConsumer(String.class));

    ((ObservableDataProvider<String>) simpleDataSource.getDataProvider(String.class)).addDataConsumer((ObserverDataConsumer<String>) simpleDataService.getDataConsumer(String.class));
    ((ObservableDataProvider<String>) simpleDataStore.getDataProvider(String.class)).addDataConsumer((ObserverDataConsumer<String>) simpleDataSink3.getDataConsumer(String.class));

    assertEquals("Simple DataSource count", 0, simpleDataSource.getCount());
    assertEquals("Simple DataProcessor count", 0, simpleDataProcessor.getCount());
    assertEquals("Simple DataSink 1 count", 0, simpleDataSink1.getCount());
    assertEquals("Simple DataSink 2 count", 0, simpleDataSink2.getCount());
    assertEquals("Simple DataSink 3 count", 0, simpleDataSink3.getCount());
    assertEquals("Simple DataService count", 0, simpleDataService.getCount());
    assertEquals("Simple DataStore count", 0, simpleDataStore.getCount());

    simpleDataService.dummyImport("Import Bundle 1");
    simpleDataSource.dummyGetData("Data Bundle 1");
    simpleDataStore.dummyQueryReport("Report Bundle 1");
    simpleDataService.dummyImport("Import Bundle 2");
    simpleDataSource.dummyGetData("Data Bundle 2");
    simpleDataStore.dummyQueryReport("Report Bundle 2");

    dataFlowNodeLifeCycleControl.removeDataFlowNode(simpleDataSource);
    dataFlowNodeLifeCycleControl.removeDataFlowNode(simpleDataProcessor);
    dataFlowNodeLifeCycleControl.removeDataFlowNode(simpleDataSink1);
    dataFlowNodeLifeCycleControl.removeDataFlowNode(simpleDataSink2);
    dataFlowNodeLifeCycleControl.removeDataFlowNode(simpleDataSink3);
    dataFlowNodeLifeCycleControl.removeDataFlowNode(simpleDataService);
    dataFlowNodeLifeCycleControl.removeDataFlowNode(simpleDataStore);

    assertArrayEquals("Unexpected history at DataSink 1", new String[]{"Import Bundle 1", "Import Bundle 2"}, simpleDataSink1.getSentHistory().toArray());
    assertArrayEquals("Unexpected history at DataSink 2", new String[]{"[Data Bundle 1]", "[Data Bundle 2]"}, simpleDataSink2.getSentHistory().toArray());
    assertArrayEquals("Unexpected history at DataSink 3", new String[]{"Report Bundle 1", "Report Bundle 2"}, simpleDataSink3.getSentHistory().toArray());

    assertEquals("Simple DataSource count", 2, simpleDataSource.getCount());
    assertEquals("Simple DataProcessor count", 2, simpleDataProcessor.getCount());
    assertEquals("Simple DataSink 1 count", 2, simpleDataSink1.getCount());
    assertEquals("Simple DataSink 2 count", 2, simpleDataSink2.getCount());
    assertEquals("Simple DataSink 3 count", 2, simpleDataSink3.getCount());
    assertEquals("Simple DataService count", 2, simpleDataService.getCount());
    assertEquals("Simple DataStore count", 2, simpleDataStore.getCount());
}
 
Developer: arjuna-technologies, Project: Simple_DataBroker_PlugIn, Lines: 66, Source: ChainingTest.java

Example 15: createResourceMap

import com.arjuna.databroker.data.core.DataFlowNodeLifeCycleControl; // import the package/class this method depends on
@SuppressWarnings("rawtypes")
@Test
public void createResourceMap()
{
    try
    {
        CKANAPIProperties ckanAPIProperties = new CKANAPIProperties("ckanapi.properties");

        if (! ckanAPIProperties.isLoaded())
        {
            logger.log(Level.INFO, "SKIPPING TEST 'AppendFileStoreCKANDataServiceTest.createResourceMap', no propertiles file");
            return;
        }

        DataFlowNodeLifeCycleControl dataFlowNodeLifeCycleControl = new TestJEEDataFlowNodeLifeCycleControl();

        String              name       = "AppendFileStoreCKANDataService";
        Map<String, String> properties = new HashMap<String, String>();
        properties.put(AppendFileStoreCKANDataService.CKANROOTURL_PROPERTYNAME, ckanAPIProperties.getCKANRootURL());
        properties.put(AppendFileStoreCKANDataService.PACKAGEID_PROPERTYNAME, ckanAPIProperties.getPackageId());
        properties.put(AppendFileStoreCKANDataService.APIKEY_PROPERTYNAME, ckanAPIProperties.getAPIKey());

        DummyDataSource                dummyDataSource                = new DummyDataSource("Dummy Data Source", Collections.<String, String>emptyMap());
        AppendFileStoreCKANDataService appendFileStoreCKANDataService = new AppendFileStoreCKANDataService(name, properties);

        dataFlowNodeLifeCycleControl.completeCreationAndActivateDataFlowNode(UUID.randomUUID().toString(), dummyDataSource, null);
        dataFlowNodeLifeCycleControl.completeCreationAndActivateDataFlowNode(UUID.randomUUID().toString(), appendFileStoreCKANDataService, null);

        ((ObservableDataProvider<Map>) dummyDataSource.getDataProvider(Map.class)).addDataConsumer((ObserverDataConsumer<Map>) appendFileStoreCKANDataService.getDataConsumer(Map.class));

        Map<String, Object> dataMap = new HashMap<String, Object>();
        dataMap.put("data", "Test Data, Test Text".getBytes());
        dataMap.put("filename", "filename");
        dataMap.put("resourcename", "resourcename");
        dataMap.put("resourceformat", "TEXT");
        dataMap.put("resourcedescription", "A description");
        dummyDataSource.sendData(dataMap);

        dataFlowNodeLifeCycleControl.removeDataFlowNode(dummyDataSource);
        dataFlowNodeLifeCycleControl.removeDataFlowNode(appendFileStoreCKANDataService);
    }
    catch (Throwable throwable)
    {
        logger.log(Level.WARNING, "Problem in 'AppendFileStoreCKANDataServiceTest.createResourceMap'", throwable);
        fail("Problem in 'AppendFileStoreCKANDataServiceTest.createResourceMap': " + throwable);
    }
}
 
Developer: arjuna-technologies, Project: CKAN_DataBroker_PlugIn, Lines: 48, Source: AppendFileStoreCKANDataServiceTest.java


Note: The com.arjuna.databroker.data.core.DataFlowNodeLifeCycleControl.completeCreationAndActivateDataFlowNode method examples in this article were compiled by 纯净天空 from GitHub, MSDocs, and other open-source code and documentation platforms. The snippets were selected from open-source projects contributed by various developers, and copyright remains with the original authors. For distribution and use, please refer to the corresponding project's license; do not republish without permission.