This page collects typical usage examples of the Java class com.arjuna.databroker.data.connector.ObserverDataConsumer. If you are wondering what ObserverDataConsumer is for, how to use it, or what working code looks like, the examples gathered here should help.
The ObserverDataConsumer class belongs to the com.arjuna.databroker.data.connector package. Fifteen code examples of the class are shown below, ordered by popularity by default.
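All of the examples below obtain ObserverDataConsumer instances from existing data-flow nodes via getDataConsumer(Class) and either feed them data directly with consume(...) or register them on an ObservableDataProvider with addDataConsumer(...). For orientation, here is a minimal, hypothetical implementation sketch; the method names, the provider parameter type, and the DataFlowNode accessor are inferred from how the class is called in the examples below rather than taken from the actual interface source, so treat the signatures as assumptions.

import java.util.logging.Level;
import java.util.logging.Logger;

import com.arjuna.databroker.data.DataFlowNode;                      // assumed package for DataFlowNode
import com.arjuna.databroker.data.connector.ObservableDataProvider;
import com.arjuna.databroker.data.connector.ObserverDataConsumer;

// Hypothetical consumer that simply logs each data item it is handed.
// The ObserverDataConsumer<T> methods shown here (consume, getDataFlowNode) are inferred
// from the example code on this page and may not match the real interface exactly.
public class LoggingDataConsumer<T> implements ObserverDataConsumer<T>
{
    private static final Logger logger = Logger.getLogger(LoggingDataConsumer.class.getName());

    private final DataFlowNode _dataFlowNode;

    public LoggingDataConsumer(DataFlowNode dataFlowNode)
    {
        _dataFlowNode = dataFlowNode;
    }

    @Override
    public DataFlowNode getDataFlowNode()
    {
        return _dataFlowNode;
    }

    // Called by an ObservableDataProvider when it produces an item (compare Example 8, produce).
    @Override
    public void consume(ObservableDataProvider<T> dataProvider, T data)
    {
        logger.log(Level.FINE, "LoggingDataConsumer.consume: " + data);
    }
}

Note that the examples on this page never implement the interface directly; they cast the object returned by getDataConsumer(Class) and wire it to a provider with addDataConsumer, as in Examples 2 and 12.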
Example 1: simpleInvocation
import com.arjuna.databroker.data.connector.ObserverDataConsumer; // import the required package/class
@Test
public void simpleInvocation()
{
    DataFlowNodeLifeCycleControl dataFlowNodeLifeCycleControl = new TestJEEDataFlowNodeLifeCycleControl();
    String name = "XSSF Row To JSON Data Processor";
    Map<String, String> properties = Collections.emptyMap();
    XSSFRowToJSONDataProcessor xssfRowToJSONDataProcessor = new XSSFRowToJSONDataProcessor(name, properties);
    dataFlowNodeLifeCycleControl.completeCreationAndActivateDataFlowNode(UUID.randomUUID().toString(), xssfRowToJSONDataProcessor, null);
    File file = new File("Test01.xlsx");
    ((ObserverDataConsumer<File>) xssfRowToJSONDataProcessor.getDataConsumer(File.class)).consume(null, file);
    dataFlowNodeLifeCycleControl.removeDataFlowNode(xssfRowToJSONDataProcessor);
}
Example 2: simplestConversion
import com.arjuna.databroker.data.connector.ObserverDataConsumer; // import the required package/class
@Test
public void simplestConversion()
{
    DataFlowNodeLifeCycleControl dataFlowNodeLifeCycleControl = new TestJEEDataFlowNodeLifeCycleControl();
    DummyDataSource dummyDataSource = new DummyDataSource("Dummy Data Source", Collections.<String, String>emptyMap());
    DataProcessor shapeFileDataProcessor = new ShapeFileConverterDataProcessor("ShapeFile Converter Data Processor", Collections.<String, String>emptyMap());
    DummyDataSink dummyDataSink = new DummyDataSink("Dummy Data Sink", Collections.<String, String>emptyMap());
    dataFlowNodeLifeCycleControl.completeCreationAndActivateDataFlowNode(UUID.randomUUID().toString(), dummyDataSource, null);
    dataFlowNodeLifeCycleControl.completeCreationAndActivateDataFlowNode(UUID.randomUUID().toString(), shapeFileDataProcessor, null);
    dataFlowNodeLifeCycleControl.completeCreationAndActivateDataFlowNode(UUID.randomUUID().toString(), dummyDataSink, null);
    ((ObservableDataProvider<File>) dummyDataSource.getDataProvider(File.class)).addDataConsumer((ObserverDataConsumer<File>) shapeFileDataProcessor.getDataConsumer(File.class));
    ((ObservableDataProvider<String>) shapeFileDataProcessor.getDataProvider(String.class)).addDataConsumer((ObserverDataConsumer<String>) dummyDataSink.getDataConsumer(String.class));
    File testFile = new File("/tmp/Gully_point/Gully_point.shp");
    dummyDataSource.sendData(testFile);
    dataFlowNodeLifeCycleControl.removeDataFlowNode(dummyDataSource);
    dataFlowNodeLifeCycleControl.removeDataFlowNode(shapeFileDataProcessor);
    dataFlowNodeLifeCycleControl.removeDataFlowNode(dummyDataSink);
}
Example 3: startupJob
import com.arjuna.databroker.data.connector.ObserverDataConsumer; // import the required package/class
@PostConstruct
public void startupJob()
{
    logger.log(Level.INFO, "StartupJob.startupJob");
    String name = "Test Job";
    Map<String, String> properties = new HashMap<String, String>();
    properties.put(BatchDataProcessor.JOBID_PROPERTYNAME, "testJob");
    BatchDataProcessor batchDataProcessor = new BatchDataProcessor(name, properties);
    ObserverDataConsumer<Object> dataConsumer = (ObserverDataConsumer<Object>) batchDataProcessor.getDataConsumer(Object.class);
    dataConsumer.consume(null, "Test Data");
}
Example 4: BatchDataProvider
import com.arjuna.databroker.data.connector.ObserverDataConsumer; // import the required package/class
public BatchDataProvider(DataFlowNode dataFlowNode)
{
    logger.log(Level.FINE, "BatchDataProvider.BatchDataProvider");
    _id = UUID.randomUUID().toString();
    _dataFlowNode = dataFlowNode;
    _dataConsumers = new LinkedList<ObserverDataConsumer<Object>>();
}
Example 5: getDataConsumers
import com.arjuna.databroker.data.connector.ObserverDataConsumer; // import the required package/class
@Override
public Collection<ObserverDataConsumer<Object>> getDataConsumers()
{
    logger.log(Level.FINE, "BatchDataProvider.getDataConsumers");
    return Collections.unmodifiableList(_dataConsumers);
}
Example 6: addDataConsumer
import com.arjuna.databroker.data.connector.ObserverDataConsumer; // import the required package/class
@Override
public void addDataConsumer(ObserverDataConsumer<Object> dataConsumer)
{
    logger.log(Level.FINE, "BatchDataProvider.addDataConsumer");
    _dataConsumers.add(dataConsumer);
}
Example 7: removeDataConsumer
import com.arjuna.databroker.data.connector.ObserverDataConsumer; // import the required package/class
@Override
public void removeDataConsumer(ObserverDataConsumer<Object> dataConsumer)
{
    logger.log(Level.FINE, "BatchDataProvider.removeDataConsumer");
    _dataConsumers.remove(dataConsumer);
}
Example 8: produce
import com.arjuna.databroker.data.connector.ObserverDataConsumer; // import the required package/class
@Override
public void produce(Object data)
{
    logger.log(Level.FINE, "BatchDataProvider.produce");
    for (ObserverDataConsumer<Object> dataConsumer: _dataConsumers)
        dataConsumer.consume(this, data);
}
Example 9: getDataFlowLinks
import com.arjuna.databroker.data.connector.ObserverDataConsumer; // import the required package/class
private List<DataFlowNodeLinkDTO> getDataFlowLinks(DataProvider<?> dataProducer)
{
    List<DataFlowNodeLinkDTO> dataFlowLinks = new LinkedList<DataFlowNodeLinkDTO>();
    if (dataProducer instanceof ObservableDataProvider<?>)
    {
        ObservableDataProvider<?> observableDataProvider = (ObservableDataProvider<?>) dataProducer;
        for (ObserverDataConsumer<?> dataConsumer: observableDataProvider.getDataConsumers())
            dataFlowLinks.add(new DataFlowNodeLinkDTO(dataProducer.getDataFlowNode().getName(), dataConsumer.getDataFlowNode().getName()));
    }
    return dataFlowLinks;
}
Example 10: DefaultObservableDataProvider
import com.arjuna.databroker.data.connector.ObserverDataConsumer; // import the required package/class
public DefaultObservableDataProvider(DataFlowNode dataFlowNode)
{
    logger.log(Level.FINE, "DefaultObservableDataProvider: " + dataFlowNode);
    _dataFlowNode = dataFlowNode;
    _dataConsumers = new LinkedList<ObserverDataConsumer<T>>();
}
Example 11: DummyObservableDataProvider
import com.arjuna.databroker.data.connector.ObserverDataConsumer; // import the required package/class
public DummyObservableDataProvider(DataFlowNode dataFlowNode)
{
    logger.log(Level.FINE, "DummyObservableDataProvider: " + dataFlowNode);
    _dataFlowNode = dataFlowNode;
    _dataConsumers = new LinkedList<ObserverDataConsumer<T>>();
}
Developer: arjuna-technologies | Project: TestSupport_DataBroker_Utilities | Lines: 8 | Source: DummyObservableDataProvider.java
Example 12: simplestChain
import com.arjuna.databroker.data.connector.ObserverDataConsumer; // import the required package/class
@Test
public void simplestChain()
{
    DataFlowNodeLifeCycleControl dataFlowNodeLifeCycleControl = new TestJEEDataFlowNodeLifeCycleControl();
    SimpleDataSource simpleDataSource = new SimpleDataSource("Simple Data Source", Collections.<String, String>emptyMap());
    SimpleDataProcessor simpleDataProcessor = new SimpleDataProcessor("Simple Data Processor", Collections.<String, String>emptyMap());
    SimpleDataSink simpleDataSink = new SimpleDataSink("Simple Data Sink", Collections.<String, String>emptyMap());
    dataFlowNodeLifeCycleControl.completeCreationAndActivateDataFlowNode(UUID.randomUUID().toString(), simpleDataSource, null);
    dataFlowNodeLifeCycleControl.completeCreationAndActivateDataFlowNode(UUID.randomUUID().toString(), simpleDataProcessor, null);
    dataFlowNodeLifeCycleControl.completeCreationAndActivateDataFlowNode(UUID.randomUUID().toString(), simpleDataSink, null);
    ((ObservableDataProvider<String>) simpleDataSource.getDataProvider(String.class)).addDataConsumer((ObserverDataConsumer<String>) simpleDataProcessor.getDataConsumer(String.class));
    ((ObservableDataProvider<String>) simpleDataProcessor.getDataProvider(String.class)).addDataConsumer((ObserverDataConsumer<String>) simpleDataSink.getDataConsumer(String.class));
    assertEquals("Simple DataSource count", 0, simpleDataSource.getCount());
    assertEquals("Simple DataProcessor count", 0, simpleDataProcessor.getCount());
    assertEquals("Simple DataSink count", 0, simpleDataSink.getCount());
    simpleDataSource.dummyGetData("Data Bundle 1");
    simpleDataSource.dummyGetData("Data Bundle 2");
    simpleDataSource.dummyGetData("Data Bundle 3");
    simpleDataSource.dummyGetData("Data Bundle 4");
    dataFlowNodeLifeCycleControl.removeDataFlowNode(simpleDataSource);
    dataFlowNodeLifeCycleControl.removeDataFlowNode(simpleDataProcessor);
    dataFlowNodeLifeCycleControl.removeDataFlowNode(simpleDataSink);
    assertArrayEquals("Unexpected history at DataSink", new String[]{"[Data Bundle 1]", "[Data Bundle 2]", "[Data Bundle 3]", "[Data Bundle 4]"}, simpleDataSink.getSentHistory().toArray());
    assertEquals("Simple DataSource count", 4, simpleDataSource.getCount());
    assertEquals("Simple DataProcessor count", 4, simpleDataProcessor.getCount());
    assertEquals("Simple DataSink count", 4, simpleDataSink.getCount());
}
Example 13: createResourceAsString
import com.arjuna.databroker.data.connector.ObserverDataConsumer; // import the required package/class
@Test
public void createResourceAsString()
{
    try
    {
        CKANAPIProperties ckanAPIProperties = new CKANAPIProperties("ckanapi.properties");
        if (! ckanAPIProperties.isLoaded())
        {
            logger.log(Level.INFO, "SKIPPING TEST 'AppendFileStoreCKANDataServiceTest.createResourceAsString', no properties file");
            return;
        }
        DataFlowNodeLifeCycleControl dataFlowNodeLifeCycleControl = new TestJEEDataFlowNodeLifeCycleControl();
        String name = "AppendFileStoreCKANDataService";
        Map<String, String> properties = new HashMap<String, String>();
        properties.put(AppendFileStoreCKANDataService.CKANROOTURL_PROPERTYNAME, ckanAPIProperties.getCKANRootURL());
        properties.put(AppendFileStoreCKANDataService.PACKAGEID_PROPERTYNAME, ckanAPIProperties.getPackageId());
        properties.put(AppendFileStoreCKANDataService.APIKEY_PROPERTYNAME, ckanAPIProperties.getAPIKey());
        DummyDataSource dummyDataSource = new DummyDataSource("Dummy Data Source", Collections.<String, String>emptyMap());
        AppendFileStoreCKANDataService appendFileStoreCKANDataService = new AppendFileStoreCKANDataService(name, properties);
        dataFlowNodeLifeCycleControl.completeCreationAndActivateDataFlowNode(UUID.randomUUID().toString(), dummyDataSource, null);
        dataFlowNodeLifeCycleControl.completeCreationAndActivateDataFlowNode(UUID.randomUUID().toString(), appendFileStoreCKANDataService, null);
        ((ObservableDataProvider<String>) dummyDataSource.getDataProvider(String.class)).addDataConsumer((ObserverDataConsumer<String>) appendFileStoreCKANDataService.getDataConsumer(String.class));
        dummyDataSource.sendData("Test Data, Test Text");
        dataFlowNodeLifeCycleControl.removeDataFlowNode(dummyDataSource);
        dataFlowNodeLifeCycleControl.removeDataFlowNode(appendFileStoreCKANDataService);
    }
    catch (Throwable throwable)
    {
        logger.log(Level.WARNING, "Problem in 'AppendFileStoreCKANDataServiceTest.createResourceAsString'", throwable);
        fail("Problem in 'AppendFileStoreCKANDataServiceTest.createResourceAsString': " + throwable);
    }
}
Developer: arjuna-technologies | Project: CKAN_DataBroker_PlugIn | Lines: 41 | Source: AppendFileStoreCKANDataServiceTest.java
Example 14: createResourceAsBytes
import com.arjuna.databroker.data.connector.ObserverDataConsumer; // import the required package/class
@Test
public void createResourceAsBytes()
{
    try
    {
        CKANAPIProperties ckanAPIProperties = new CKANAPIProperties("ckanapi.properties");
        if (! ckanAPIProperties.isLoaded())
        {
            logger.log(Level.INFO, "SKIPPING TEST 'AppendFileStoreCKANDataServiceTest.createResourceAsBytes', no properties file");
            return;
        }
        DataFlowNodeLifeCycleControl dataFlowNodeLifeCycleControl = new TestJEEDataFlowNodeLifeCycleControl();
        String name = "AppendFileStoreCKANDataService";
        Map<String, String> properties = new HashMap<String, String>();
        properties.put(AppendFileStoreCKANDataService.CKANROOTURL_PROPERTYNAME, ckanAPIProperties.getCKANRootURL());
        properties.put(AppendFileStoreCKANDataService.PACKAGEID_PROPERTYNAME, ckanAPIProperties.getPackageId());
        properties.put(AppendFileStoreCKANDataService.APIKEY_PROPERTYNAME, ckanAPIProperties.getAPIKey());
        DummyDataSource dummyDataSource = new DummyDataSource("Dummy Data Source", Collections.<String, String>emptyMap());
        AppendFileStoreCKANDataService appendFileStoreCKANDataService = new AppendFileStoreCKANDataService(name, properties);
        dataFlowNodeLifeCycleControl.completeCreationAndActivateDataFlowNode(UUID.randomUUID().toString(), dummyDataSource, null);
        dataFlowNodeLifeCycleControl.completeCreationAndActivateDataFlowNode(UUID.randomUUID().toString(), appendFileStoreCKANDataService, null);
        ((ObservableDataProvider<byte[]>) dummyDataSource.getDataProvider(byte[].class)).addDataConsumer((ObserverDataConsumer<byte[]>) appendFileStoreCKANDataService.getDataConsumer(byte[].class));
        dummyDataSource.sendData("Test Data, Test Text".getBytes());
        dataFlowNodeLifeCycleControl.removeDataFlowNode(dummyDataSource);
        dataFlowNodeLifeCycleControl.removeDataFlowNode(appendFileStoreCKANDataService);
    }
    catch (Throwable throwable)
    {
        logger.log(Level.WARNING, "Problem in 'AppendFileStoreCKANDataServiceTest.createResourceAsBytes'", throwable);
        fail("Problem in 'AppendFileStoreCKANDataServiceTest.createResourceAsBytes': " + throwable);
    }
}
Developer: arjuna-technologies | Project: CKAN_DataBroker_PlugIn | Lines: 41 | Source: AppendFileStoreCKANDataServiceTest.java
Example 15: simplestConversion
import com.arjuna.databroker.data.connector.ObserverDataConsumer; // import the required package/class
@Test
@Ignore
public void simplestConversion()
{
    ShapeFile2CSVConverterDataProcessor shapeFileDataProcessor = new ShapeFile2CSVConverterDataProcessor("ShapeFile Converter Data Processor", Collections.<String, String>emptyMap());
    ObserverDataConsumer<File> dataConsumer = (ObserverDataConsumer<File>) shapeFileDataProcessor.getDataConsumer(File.class);
    File testFile = new File("/tmp/test.shp");
    dataConsumer.consume(null, testFile);
}