This article collects typical usage examples of the Java class com.arjuna.databroker.data.connector.ObserverDataConsumer. If you are unsure what ObserverDataConsumer is for or how to use it, the curated class examples below should help.
The ObserverDataConsumer class belongs to the com.arjuna.databroker.data.connector package. Twenty code examples of the class are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Java code examples.
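Before the individual examples, here is a minimal sketch of the wiring pattern they all share: obtain an ObserverDataConsumer from the receiving node, register it on an ObservableDataProvider of the sending node, and push data through the chain. This is only a sketch based on the tests below; it assumes the test utilities they use (DummyDataSource, DummyDataProcessor, TestJEEDataFlowNodeLifeCycleControl and the DataFlowNodeLifeCycleControl interface) are on the classpath, their package imports are omitted because the listing does not show them, and the import of ObservableDataProvider assumes it lives in the same connector package as ObserverDataConsumer.
import java.util.Collections;
import java.util.UUID;
import com.arjuna.databroker.data.connector.ObservableDataProvider;
import com.arjuna.databroker.data.connector.ObserverDataConsumer;

public class ObserverDataConsumerSketch
{
    public static void main(String[] args)
    {
        // Lifecycle control used throughout the tests below (assumed to be on the classpath).
        DataFlowNodeLifeCycleControl lifeCycleControl = new TestJEEDataFlowNodeLifeCycleControl();

        // Create a source node and a processor node, as in the tests (a name plus an empty property map).
        DummyDataSource    source    = new DummyDataSource("Dummy Data Source", Collections.<String, String>emptyMap());
        DummyDataProcessor processor = new DummyDataProcessor("Dummy Data Processor", Collections.<String, String>emptyMap());
        lifeCycleControl.completeCreationAndActivateDataFlowNode(UUID.randomUUID().toString(), source, null);
        lifeCycleControl.completeCreationAndActivateDataFlowNode(UUID.randomUUID().toString(), processor, null);

        // Link the nodes: the provider side is observable, the consumer side is an observer.
        ((ObservableDataProvider<String>) source.getDataProvider(String.class))
            .addDataConsumer((ObserverDataConsumer<String>) processor.getDataConsumer(String.class));

        // Push a value through the chain; the provider delivers it to every registered consumer.
        source.sendData("Hello, DataBroker");

        // Tear the nodes down again.
        lifeCycleControl.removeDataFlowNode(source);
        lifeCycleControl.removeDataFlowNode(processor);
    }
}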
Example 1: simpleInvocation
import com.arjuna.databroker.data.connector.ObserverDataConsumer; // import the dependency package/class
@Test
public void simpleInvocation()
{
    DataFlowNodeLifeCycleControl dataFlowNodeLifeCycleControl = new TestJEEDataFlowNodeLifeCycleControl();
    String name = "XSSF Row To JSON Data Processor";
    Map<String, String> properties = Collections.emptyMap();
    XSSFRowToJSONDataProcessor xssfRowToJSONDataProcessor = new XSSFRowToJSONDataProcessor(name, properties);
    dataFlowNodeLifeCycleControl.completeCreationAndActivateDataFlowNode(UUID.randomUUID().toString(), xssfRowToJSONDataProcessor, null);
    File file = new File("Test01.xlsx");
    ((ObserverDataConsumer<File>) xssfRowToJSONDataProcessor.getDataConsumer(File.class)).consume(null, file);
    dataFlowNodeLifeCycleControl.removeDataFlowNode(xssfRowToJSONDataProcessor);
}
Author: arjuna-technologies, Project: Apache-POI_DataBroker_PlugIn, Lines: 18, Source: SimpleRowToJSONTest.java
Example 2: simplestConversion
import com.arjuna.databroker.data.connector.ObserverDataConsumer; // import the dependency package/class
@Test
public void simplestConversion()
{
    DataFlowNodeLifeCycleControl dataFlowNodeLifeCycleControl = new TestJEEDataFlowNodeLifeCycleControl();
    DummyDataSource dummyDataSource = new DummyDataSource("Dummy Data Source", Collections.<String, String>emptyMap());
    DataProcessor shapeFileDataProcessor = new ShapeFileConverterDataProcessor("ShapeFile Converter Data Processor", Collections.<String, String>emptyMap());
    DummyDataSink dummyDataSink = new DummyDataSink("Dummy Data Sink", Collections.<String, String>emptyMap());
    dataFlowNodeLifeCycleControl.completeCreationAndActivateDataFlowNode(UUID.randomUUID().toString(), dummyDataSource, null);
    dataFlowNodeLifeCycleControl.completeCreationAndActivateDataFlowNode(UUID.randomUUID().toString(), shapeFileDataProcessor, null);
    dataFlowNodeLifeCycleControl.completeCreationAndActivateDataFlowNode(UUID.randomUUID().toString(), dummyDataSink, null);
    ((ObservableDataProvider<File>) dummyDataSource.getDataProvider(File.class)).addDataConsumer((ObserverDataConsumer<File>) shapeFileDataProcessor.getDataConsumer(File.class));
    ((ObservableDataProvider<String>) shapeFileDataProcessor.getDataProvider(String.class)).addDataConsumer((ObserverDataConsumer<String>) dummyDataSink.getDataConsumer(String.class));
    File testFile = new File("/tmp/Gully_point/Gully_point.shp");
    dummyDataSource.sendData(testFile);
    dataFlowNodeLifeCycleControl.removeDataFlowNode(dummyDataSource);
    dataFlowNodeLifeCycleControl.removeDataFlowNode(shapeFileDataProcessor);
    dataFlowNodeLifeCycleControl.removeDataFlowNode(dummyDataSink);
}
Author: arjuna-technologies, Project: GeoAPI_DataBroker_PlugIn, Lines: 25, Source: ConverterTest.java
Example 3: startupJob
import com.arjuna.databroker.data.connector.ObserverDataConsumer; // import the dependency package/class
@PostConstruct
public void startupJob()
{
    logger.log(Level.INFO, "StartupJob.startupJob");
    String name = "Test Job";
    Map<String, String> properties = new HashMap<String, String>();
    properties.put(BatchDataProcessor.JOBID_PROPERTYNAME, "testJob");
    BatchDataProcessor batchDataProcessor = new BatchDataProcessor(name, properties);
    ObserverDataConsumer<Object> dataConsumer = (ObserverDataConsumer<Object>) batchDataProcessor.getDataConsumer(Object.class);
    dataConsumer.consume(null, "Test Data");
}
Author: arjuna-technologies, Project: JEEBatch_DataBroker_Support, Lines: 14, Source: StartupJob.java
Example 4: BatchDataProvider
import com.arjuna.databroker.data.connector.ObserverDataConsumer; // import the dependency package/class
public BatchDataProvider(DataFlowNode dataFlowNode)
{
    logger.log(Level.FINE, "BatchDataProvider.BatchDataProvider");
    _id = UUID.randomUUID().toString();
    _dataFlowNode = dataFlowNode;
    _dataConsumers = new LinkedList<ObserverDataConsumer<Object>>();
}
Author: arjuna-technologies, Project: JEEBatch_DataBroker_Support, Lines: 9, Source: BatchDataProvider.java
Example 5: getDataConsumers
import com.arjuna.databroker.data.connector.ObserverDataConsumer; // import the dependency package/class
@Override
public Collection<ObserverDataConsumer<Object>> getDataConsumers()
{
    logger.log(Level.FINE, "BatchDataProvider.getDataConsumers");
    return Collections.unmodifiableList(_dataConsumers);
}
Author: arjuna-technologies, Project: JEEBatch_DataBroker_Support, Lines: 8, Source: BatchDataProvider.java
Example 6: addDataConsumer
import com.arjuna.databroker.data.connector.ObserverDataConsumer; // import the dependency package/class
@Override
public void addDataConsumer(ObserverDataConsumer<Object> dataConsumer)
{
    logger.log(Level.FINE, "BatchDataProvider.addDataConsumer");
    _dataConsumers.add(dataConsumer);
}
Author: arjuna-technologies, Project: JEEBatch_DataBroker_Support, Lines: 8, Source: BatchDataProvider.java
Example 7: removeDataConsumer
import com.arjuna.databroker.data.connector.ObserverDataConsumer; // import the dependency package/class
@Override
public void removeDataConsumer(ObserverDataConsumer<Object> dataConsumer)
{
    logger.log(Level.FINE, "BatchDataProvider.removeDataConsumer");
    _dataConsumers.remove(dataConsumer);
}
Author: arjuna-technologies, Project: JEEBatch_DataBroker_Support, Lines: 8, Source: BatchDataProvider.java
Example 8: produce
import com.arjuna.databroker.data.connector.ObserverDataConsumer; // import the dependency package/class
@Override
public void produce(Object data)
{
    logger.log(Level.FINE, "BatchDataProvider.produce");
    for (ObserverDataConsumer<Object> dataConsumer: _dataConsumers)
        dataConsumer.consume(this, data);
}
Author: arjuna-technologies, Project: JEEBatch_DataBroker_Support, Lines: 9, Source: BatchDataProvider.java
Example 9: getDataFlowLinks
import com.arjuna.databroker.data.connector.ObserverDataConsumer; // import the dependency package/class
private List<DataFlowNodeLinkDTO> getDataFlowLinks(DataProvider<?> dataProducer)
{
    List<DataFlowNodeLinkDTO> dataFlowLinks = new LinkedList<DataFlowNodeLinkDTO>();
    if (dataProducer instanceof ObservableDataProvider<?>)
    {
        ObservableDataProvider<?> observableDataProvider = (ObservableDataProvider<?>) dataProducer;
        for (ObserverDataConsumer<?> dataConsumer: observableDataProvider.getDataConsumers())
            dataFlowLinks.add(new DataFlowNodeLinkDTO(dataProducer.getDataFlowNode().getName(), dataConsumer.getDataFlowNode().getName()));
    }
    return dataFlowLinks;
}
Author: RISBIC, Project: DataBroker, Lines: 14, Source: DataFlowWS.java
Example 10: DefaultObservableDataProvider
import com.arjuna.databroker.data.connector.ObserverDataConsumer; // import the dependency package/class
public DefaultObservableDataProvider(DataFlowNode dataFlowNode)
{
    logger.log(Level.FINE, "DefaultObservableDataProvider: " + dataFlowNode);
    _dataFlowNode = dataFlowNode;
    _dataConsumers = new LinkedList<ObserverDataConsumer<T>>();
}
Author: RISBIC, Project: DataBroker, Lines: 8, Source: DefaultObservableDataProvider.java
Example 11: DummyObservableDataProvider
import com.arjuna.databroker.data.connector.ObserverDataConsumer; // import the dependency package/class
public DummyObservableDataProvider(DataFlowNode dataFlowNode)
{
    logger.log(Level.FINE, "DefaultObservableDataProvider: " + dataFlowNode);
    _dataFlowNode = dataFlowNode;
    _dataConsumers = new LinkedList<ObserverDataConsumer<T>>();
}
Author: arjuna-technologies, Project: TestSupport_DataBroker_Utilities, Lines: 8, Source: DummyObservableDataProvider.java
Example 12: simplestChain
import com.arjuna.databroker.data.connector.ObserverDataConsumer; // import the dependency package/class
@Test
public void simplestChain()
{
    DataFlowNodeLifeCycleControl dataFlowNodeLifeCycleControl = new TestJEEDataFlowNodeLifeCycleControl();
    SimpleDataSource simpleDataSource = new SimpleDataSource("Simple Data Source", Collections.<String, String>emptyMap());
    SimpleDataProcessor simpleDataProcessor = new SimpleDataProcessor("Simple Data Processor", Collections.<String, String>emptyMap());
    SimpleDataSink simpleDataSink = new SimpleDataSink("Simple Data Sink", Collections.<String, String>emptyMap());
    dataFlowNodeLifeCycleControl.completeCreationAndActivateDataFlowNode(UUID.randomUUID().toString(), simpleDataSource, null);
    dataFlowNodeLifeCycleControl.completeCreationAndActivateDataFlowNode(UUID.randomUUID().toString(), simpleDataProcessor, null);
    dataFlowNodeLifeCycleControl.completeCreationAndActivateDataFlowNode(UUID.randomUUID().toString(), simpleDataSink, null);
    ((ObservableDataProvider<String>) simpleDataSource.getDataProvider(String.class)).addDataConsumer((ObserverDataConsumer<String>) simpleDataProcessor.getDataConsumer(String.class));
    ((ObservableDataProvider<String>) simpleDataProcessor.getDataProvider(String.class)).addDataConsumer((ObserverDataConsumer<String>) simpleDataSink.getDataConsumer(String.class));
    assertEquals("Simple DataSource count", 0, simpleDataSource.getCount());
    assertEquals("Simple DataProcessor count", 0, simpleDataProcessor.getCount());
    assertEquals("Simple DataSink count", 0, simpleDataSink.getCount());
    simpleDataSource.dummyGetData("Data Bundle 1");
    simpleDataSource.dummyGetData("Data Bundle 2");
    simpleDataSource.dummyGetData("Data Bundle 3");
    simpleDataSource.dummyGetData("Data Bundle 4");
    dataFlowNodeLifeCycleControl.removeDataFlowNode(simpleDataSource);
    dataFlowNodeLifeCycleControl.removeDataFlowNode(simpleDataProcessor);
    dataFlowNodeLifeCycleControl.removeDataFlowNode(simpleDataSink);
    assertArrayEquals("Unexpected history at DataSink", new String[]{"[Data Bundle 1]", "[Data Bundle 2]", "[Data Bundle 3]", "[Data Bundle 4]"}, simpleDataSink.getSentHistory().toArray());
    assertEquals("Simple DataSource count", 4, simpleDataSource.getCount());
    assertEquals("Simple DataProcessor count", 4, simpleDataProcessor.getCount());
    assertEquals("Simple DataSink count", 4, simpleDataSink.getCount());
}
Author: arjuna-technologies, Project: Simple_DataBroker_PlugIn, Lines: 36, Source: ChainingTest.java
Example 13: createResourceAsString
import com.arjuna.databroker.data.connector.ObserverDataConsumer; // import the dependency package/class
@Test
public void createResourceAsString()
{
    try
    {
        CKANAPIProperties ckanAPIProperties = new CKANAPIProperties("ckanapi.properties");
        if (! ckanAPIProperties.isLoaded())
        {
            logger.log(Level.INFO, "SKIPPING TEST 'AppendFileStoreCKANDataServiceTest.createResourceAsString', no propertiles file");
            return;
        }
        DataFlowNodeLifeCycleControl dataFlowNodeLifeCycleControl = new TestJEEDataFlowNodeLifeCycleControl();
        String name = "AppendFileStoreCKANDataService";
        Map<String, String> properties = new HashMap<String, String>();
        properties.put(AppendFileStoreCKANDataService.CKANROOTURL_PROPERTYNAME, ckanAPIProperties.getCKANRootURL());
        properties.put(AppendFileStoreCKANDataService.PACKAGEID_PROPERTYNAME, ckanAPIProperties.getPackageId());
        properties.put(AppendFileStoreCKANDataService.APIKEY_PROPERTYNAME, ckanAPIProperties.getAPIKey());
        DummyDataSource dummyDataSource = new DummyDataSource("Dummy Data Source", Collections.<String, String>emptyMap());
        AppendFileStoreCKANDataService appendFileStoreCKANDataService = new AppendFileStoreCKANDataService(name, properties);
        dataFlowNodeLifeCycleControl.completeCreationAndActivateDataFlowNode(UUID.randomUUID().toString(), dummyDataSource, null);
        dataFlowNodeLifeCycleControl.completeCreationAndActivateDataFlowNode(UUID.randomUUID().toString(), appendFileStoreCKANDataService, null);
        ((ObservableDataProvider<String>) dummyDataSource.getDataProvider(String.class)).addDataConsumer((ObserverDataConsumer<String>) appendFileStoreCKANDataService.getDataConsumer(String.class));
        dummyDataSource.sendData("Test Data, Test Text");
        dataFlowNodeLifeCycleControl.removeDataFlowNode(dummyDataSource);
        dataFlowNodeLifeCycleControl.removeDataFlowNode(appendFileStoreCKANDataService);
    }
    catch (Throwable throwable)
    {
        logger.log(Level.WARNING, "Problem in 'AppendFileStoreCKANDataServiceTest.createResourceAsString'", throwable);
        fail("Problem in 'AppendFileStoreCKANDataServiceTest.createResourceAsString': " + throwable);
    }
}
Author: arjuna-technologies, Project: CKAN_DataBroker_PlugIn, Lines: 41, Source: AppendFileStoreCKANDataServiceTest.java
Example 14: createResourceAsBytes
import com.arjuna.databroker.data.connector.ObserverDataConsumer; // import the dependency package/class
@Test
public void createResourceAsBytes()
{
    try
    {
        CKANAPIProperties ckanAPIProperties = new CKANAPIProperties("ckanapi.properties");
        if (! ckanAPIProperties.isLoaded())
        {
            logger.log(Level.INFO, "SKIPPING TEST 'AppendFileStoreCKANDataServiceTest.createResourceAsBytes', no propertiles file");
            return;
        }
        DataFlowNodeLifeCycleControl dataFlowNodeLifeCycleControl = new TestJEEDataFlowNodeLifeCycleControl();
        String name = "AppendFileStoreCKANDataService";
        Map<String, String> properties = new HashMap<String, String>();
        properties.put(AppendFileStoreCKANDataService.CKANROOTURL_PROPERTYNAME, ckanAPIProperties.getCKANRootURL());
        properties.put(AppendFileStoreCKANDataService.PACKAGEID_PROPERTYNAME, ckanAPIProperties.getPackageId());
        properties.put(AppendFileStoreCKANDataService.APIKEY_PROPERTYNAME, ckanAPIProperties.getAPIKey());
        DummyDataSource dummyDataSource = new DummyDataSource("Dummy Data Source", Collections.<String, String>emptyMap());
        AppendFileStoreCKANDataService appendFileStoreCKANDataService = new AppendFileStoreCKANDataService(name, properties);
        dataFlowNodeLifeCycleControl.completeCreationAndActivateDataFlowNode(UUID.randomUUID().toString(), dummyDataSource, null);
        dataFlowNodeLifeCycleControl.completeCreationAndActivateDataFlowNode(UUID.randomUUID().toString(), appendFileStoreCKANDataService, null);
        ((ObservableDataProvider<byte[]>) dummyDataSource.getDataProvider(byte[].class)).addDataConsumer((ObserverDataConsumer<byte[]>) appendFileStoreCKANDataService.getDataConsumer(byte[].class));
        dummyDataSource.sendData("Test Data, Test Text".getBytes());
        dataFlowNodeLifeCycleControl.removeDataFlowNode(dummyDataSource);
        dataFlowNodeLifeCycleControl.removeDataFlowNode(appendFileStoreCKANDataService);
    }
    catch (Throwable throwable)
    {
        logger.log(Level.WARNING, "Problem in 'AppendFileStoreCKANDataServiceTest.createResourceAsBytes'", throwable);
        fail("Problem in 'AppendFileStoreCKANDataServiceTest.createResourceAsBytes': " + throwable);
    }
}
Author: arjuna-technologies, Project: CKAN_DataBroker_PlugIn, Lines: 41, Source: AppendFileStoreCKANDataServiceTest.java
Example 15: simplestConversion
import com.arjuna.databroker.data.connector.ObserverDataConsumer; // import the dependency package/class
@Test
@Ignore
public void simplestConversion()
{
    ShapeFile2CSVConverterDataProcessor shapeFileDataProcessor = new ShapeFile2CSVConverterDataProcessor("ShapeFile Converter Data Processor", Collections.<String, String>emptyMap());
    ObserverDataConsumer<File> dataConsumer = (ObserverDataConsumer<File>) shapeFileDataProcessor.getDataConsumer(File.class);
    File testFile = new File("/tmp/test.shp");
    dataConsumer.consume(null, testFile);
}
Author: arjuna-technologies, Project: GeoTools_DataBroker_PlugIn, Lines: 12, Source: ConverterTest.java
Example 16: simpleInvocation
import com.arjuna.databroker.data.connector.ObserverDataConsumer; // import the dependency package/class
@Test
public void simpleInvocation()
{
    DataFlowNodeLifeCycleControl dataFlowNodeLifeCycleControl = new TestJEEDataFlowNodeLifeCycleControl();
    AuthenticationProperties authenticationProperties = new AuthenticationProperties("authentication.properties");
    String name = "Twitter Data Source";
    Map<String, String> properties = new HashMap<String, String>();
    properties.put(TwitterDataSource.TWITTER_CONSUMERKEY_PROPERTYNAME, authenticationProperties.getConsumerKey());
    properties.put(TwitterDataSource.TWITTER_CONSUMERSECRET_PROPERTYNAME, authenticationProperties.getConsumerSecret());
    properties.put(TwitterDataSource.TWITTER_TOKEN_PROPERTYNAME, authenticationProperties.getToken());
    properties.put(TwitterDataSource.TWITTER_TOKENSECRET_PROPERTYNAME, authenticationProperties.getTokenSecret());
    properties.put(TwitterDataSource.TWITTER_TRACKTERM_PROPERTYNAME, "newcastle");
    properties.put(TwitterDataSource.POLLINTERVAL_PROPERTYNAME, "5");
    TwitterDataSource twitterDataSource = new TwitterDataSource(name, properties);
    DummyDataProcessor dummyDataProcessor = new DummyDataProcessor("Dummy Data Processor", Collections.<String, String>emptyMap());
    dataFlowNodeLifeCycleControl.completeCreationAndActivateDataFlowNode(UUID.randomUUID().toString(), twitterDataSource, null);
    dataFlowNodeLifeCycleControl.completeCreationAndActivateDataFlowNode(UUID.randomUUID().toString(), dummyDataProcessor, null);
    ((ObservableDataProvider<String>) twitterDataSource.getDataProvider(String.class)).addDataConsumer((ObserverDataConsumer<String>) dummyDataProcessor.getDataConsumer(String.class));
    try
    {
        Thread.sleep(60000);
    }
    catch (InterruptedException interruptedException)
    {
        fail("Interrupted during sleep");
    }
    dataFlowNodeLifeCycleControl.removeDataFlowNode(dummyDataProcessor);
    assertTrue("Didn't receive any tweets", dummyDataProcessor.receivedData().size() > 0);
}
Author: arjuna-technologies, Project: Twitter_DataBroker_PlugIn, Lines: 38, Source: SimpleTest.java
Example 17: simpleDecryptInvocation
import com.arjuna.databroker.data.connector.ObserverDataConsumer; // import the dependency package/class
@SuppressWarnings("rawtypes")
@Test
public void simpleDecryptInvocation()
{
    try
    {
        int character = 0;
        int index = 0;
        File spreadsheetFile = new File("Test02.xlsx");
        byte[] spreadsheetData = new byte[(int) spreadsheetFile.length()];
        InputStream spreadsheetInputStream = new FileInputStream(spreadsheetFile);
        index = 0;
        character = spreadsheetInputStream.read();
        while (character != -1)
        {
            spreadsheetData[index] = (byte) character;
            character = spreadsheetInputStream.read();
            index++;
        }
        spreadsheetInputStream.close();
        File csvFile = new File("Test01_s.csv");
        byte[] csvData = new byte[(int) csvFile.length()];
        InputStream csvInputStream = new FileInputStream(csvFile);
        index = 0;
        character = csvInputStream.read();
        while (character != -1)
        {
            csvData[index] = (byte) character;
            character = csvInputStream.read();
            index++;
        }
        csvInputStream.close();
        DataFlowNodeLifeCycleControl dataFlowNodeLifeCycleControl = new TestJEEDataFlowNodeLifeCycleControl();
        // DataFlowNodeLinkLifeCycleControl dataFlowNodeLinkLifeCycleControl = new TestJEEDataFlowNodeLinkLifeCycleControl();
        String name = "XSSF Stream Sheet To CSV Data Processor";
        Map<String, String> properties = Collections.emptyMap();
        DummyDataSource dummyDataSource = new DummyDataSource("Dummy Data Source", Collections.<String, String>emptyMap());
        XSSFStreamSheetToCSVDataProcessor xssfStreamSheetToCSVDataProcessor = new XSSFStreamSheetToCSVDataProcessor(name, properties);
        DummyDataSink dummyDataSink = new DummyDataSink("Dummy Data Sink", Collections.<String, String>emptyMap());
        dataFlowNodeLifeCycleControl.completeCreationAndActivateDataFlowNode(UUID.randomUUID().toString(), dummyDataSource, null);
        dataFlowNodeLifeCycleControl.completeCreationAndActivateDataFlowNode(UUID.randomUUID().toString(), xssfStreamSheetToCSVDataProcessor, null);
        dataFlowNodeLifeCycleControl.completeCreationAndActivateDataFlowNode(UUID.randomUUID().toString(), dummyDataSink, null);
        ((ObservableDataProvider<Map>) dummyDataSource.getDataProvider(Map.class)).addDataConsumer((ObserverDataConsumer<Map>) xssfStreamSheetToCSVDataProcessor.getDataConsumer(Map.class));
        ((ObservableDataProvider<Map>) xssfStreamSheetToCSVDataProcessor.getDataProvider(Map.class)).addDataConsumer((ObserverDataConsumer<Map>) dummyDataSink.getDataConsumer(Map.class));
        Map<String, Object> inputMap = new HashMap<String, Object>();
        inputMap.put("filename", "Test.xslx");
        inputMap.put("data", spreadsheetData);
        inputMap.put("password", "test");
        dummyDataSource.sendData(inputMap);
        List<Object> receivedData = dummyDataSink.receivedData();
        assertEquals("Unexpected received data", 1, receivedData.size());
        Map<String, Object> outputMap = (Map<String, Object>) receivedData.get(0);
        assertNotNull("Unexpected null received data", outputMap);
        assertEquals("Unexpected received map size", 3, outputMap.size());
        assertNotNull("Unexpected null 'filename' entry", outputMap.get("filename"));
        assertNotNull("Unexpected null 'data' entry", outputMap.get("data"));
        assertNotNull("Unexpected null 'resourceformat' entry", outputMap.get("resourceformat"));
        assertEquals("Unexpected value of 'filename' entry", "Test_1.csv", outputMap.get("filename"));
        assertArrayEquals("Unexpected value of 'data' entry", csvData, (byte[]) outputMap.get("data"));
        assertEquals("Unexpected value of 'resourceformat' entry", "csv", outputMap.get("resourceformat"));
        dataFlowNodeLifeCycleControl.removeDataFlowNode(dummyDataSource);
        dataFlowNodeLifeCycleControl.removeDataFlowNode(xssfStreamSheetToCSVDataProcessor);
        dataFlowNodeLifeCycleControl.removeDataFlowNode(dummyDataSink);
    }
    catch (IOException ioException)
    {
        fail("IO Exception");
    }
}
Author: arjuna-technologies, Project: Apache-POI_DataBroker_PlugIn, Lines: 82, Source: SimpleStreamEncryptedSheetToCSVTest.java
Example 18: simpleInvocation
import com.arjuna.databroker.data.connector.ObserverDataConsumer; // import the dependency package/class
@SuppressWarnings("rawtypes")
@Test
public void simpleInvocation()
{
    try
    {
        int character = 0;
        int index = 0;
        File spreadsheetFile = new File("Test01.xlsx");
        byte[] spreadsheetData = new byte[(int) spreadsheetFile.length()];
        InputStream spreadsheetInputStream = new FileInputStream(spreadsheetFile);
        index = 0;
        character = spreadsheetInputStream.read();
        while (character != -1)
        {
            spreadsheetData[index] = (byte) character;
            character = spreadsheetInputStream.read();
            index++;
        }
        spreadsheetInputStream.close();
        File csvFile = new File("Test01_s.csv");
        byte[] csvData = new byte[(int) csvFile.length()];
        InputStream csvInputStream = new FileInputStream(csvFile);
        index = 0;
        character = csvInputStream.read();
        while (character != -1)
        {
            csvData[index] = (byte) character;
            character = csvInputStream.read();
            index++;
        }
        csvInputStream.close();
        DataFlowNodeLifeCycleControl dataFlowNodeLifeCycleControl = new TestJEEDataFlowNodeLifeCycleControl();
        // DataFlowNodeLinkLifeCycleControl dataFlowNodeLinkLifeCycleControl = new TestJEEDataFlowNodeLinkLifeCycleControl();
        String name = "XSSF Stream Sheet To CSV Data Processor";
        Map<String, String> properties = Collections.emptyMap();
        DummyDataSource dummyDataSource = new DummyDataSource("Dummy Data Source", Collections.<String, String>emptyMap());
        XSSFStreamSheetToCSVDataProcessor xssfStreamSheetToCSVDataProcessor = new XSSFStreamSheetToCSVDataProcessor(name, properties);
        DummyDataSink dummyDataSink = new DummyDataSink("Dummy Data Sink", Collections.<String, String>emptyMap());
        dataFlowNodeLifeCycleControl.completeCreationAndActivateDataFlowNode(UUID.randomUUID().toString(), dummyDataSource, null);
        dataFlowNodeLifeCycleControl.completeCreationAndActivateDataFlowNode(UUID.randomUUID().toString(), xssfStreamSheetToCSVDataProcessor, null);
        dataFlowNodeLifeCycleControl.completeCreationAndActivateDataFlowNode(UUID.randomUUID().toString(), dummyDataSink, null);
        ((ObservableDataProvider<Map>) dummyDataSource.getDataProvider(Map.class)).addDataConsumer((ObserverDataConsumer<Map>) xssfStreamSheetToCSVDataProcessor.getDataConsumer(Map.class));
        ((ObservableDataProvider<Map>) xssfStreamSheetToCSVDataProcessor.getDataProvider(Map.class)).addDataConsumer((ObserverDataConsumer<Map>) dummyDataSink.getDataConsumer(Map.class));
        Map<String, Object> inputMap = new HashMap<String, Object>();
        inputMap.put("filename", "Test.xslx");
        inputMap.put("data", spreadsheetData);
        dummyDataSource.sendData(inputMap);
        List<Object> receivedData = dummyDataSink.receivedData();
        assertEquals("Unexpected received data", 1, receivedData.size());
        Map<String, Object> outputMap = (Map<String, Object>) receivedData.get(0);
        assertNotNull("Unexpected null received data", outputMap);
        assertEquals("Unexpected received map size", 3, outputMap.size());
        assertNotNull("Unexpected null 'filename' entry", outputMap.get("filename"));
        assertNotNull("Unexpected null 'data' entry", outputMap.get("data"));
        assertNotNull("Unexpected null 'resourceformat' entry", outputMap.get("resourceformat"));
        assertEquals("Unexpected value of 'filename' entry", "Test_1.csv", outputMap.get("filename"));
        assertArrayEquals("Unexpected value of 'data' entry", csvData, (byte[]) outputMap.get("data"));
        assertEquals("Unexpected value of 'resourceformat' entry", "csv", outputMap.get("resourceformat"));
        dataFlowNodeLifeCycleControl.removeDataFlowNode(dummyDataSource);
        dataFlowNodeLifeCycleControl.removeDataFlowNode(xssfStreamSheetToCSVDataProcessor);
        dataFlowNodeLifeCycleControl.removeDataFlowNode(dummyDataSink);
    }
    catch (IOException ioException)
    {
        fail("IO Exception");
    }
}
Author: arjuna-technologies, Project: Apache-POI_DataBroker_PlugIn, Lines: 81, Source: SimpleStreamSheetToCSVTest.java
Example 19: simpleInvocation
import com.arjuna.databroker.data.connector.ObserverDataConsumer; // import the dependency package/class
@SuppressWarnings("rawtypes")
@Test
public void simpleInvocation()
{
    try
    {
        File spreadsheetFile = new File("Test01.xlsx");
        byte[] spreadsheetData = new byte[(int) spreadsheetFile.length()];
        InputStream spreadsheetInputStream = new FileInputStream(spreadsheetFile);
        int character = spreadsheetInputStream.read();
        int index = 0;
        while (character != -1)
        {
            spreadsheetData[index] = (byte) character;
            character = spreadsheetInputStream.read();
            index++;
        }
        spreadsheetInputStream.close();
        File csvFile = new File("Test01_o.csv");
        byte[] csvData = new byte[(int) csvFile.length()];
        InputStream csvInputStream = new FileInputStream(csvFile);
        index = 0;
        character = csvInputStream.read();
        while (character != -1)
        {
            csvData[index] = (byte) character;
            character = csvInputStream.read();
            index++;
        }
        csvInputStream.close();
        DataFlowNodeLifeCycleControl dataFlowNodeLifeCycleControl = new TestJEEDataFlowNodeLifeCycleControl();
        // DataFlowNodeLinkLifeCycleControl dataFlowNodeLinkLifeCycleControl = new TestJEEDataFlowNodeLinkLifeCycleControl();
        String name = "XSSF Sheet To CSV Data Processor";
        Map<String, String> properties = Collections.emptyMap();
        DummyDataSource dummyDataSource = new DummyDataSource("Dummy Data Source", Collections.<String, String>emptyMap());
        XSSFSheetToCSVDataProcessor xssfSheetToCSVDataProcessor = new XSSFSheetToCSVDataProcessor(name, properties);
        DummyDataSink dummyDataSink = new DummyDataSink("Dummy Data Sink", Collections.<String, String>emptyMap());
        dataFlowNodeLifeCycleControl.completeCreationAndActivateDataFlowNode(UUID.randomUUID().toString(), dummyDataSource, null);
        dataFlowNodeLifeCycleControl.completeCreationAndActivateDataFlowNode(UUID.randomUUID().toString(), xssfSheetToCSVDataProcessor, null);
        dataFlowNodeLifeCycleControl.completeCreationAndActivateDataFlowNode(UUID.randomUUID().toString(), dummyDataSink, null);
        ((ObservableDataProvider<Map>) dummyDataSource.getDataProvider(Map.class)).addDataConsumer((ObserverDataConsumer<Map>) xssfSheetToCSVDataProcessor.getDataConsumer(Map.class));
        ((ObservableDataProvider<Map>) xssfSheetToCSVDataProcessor.getDataProvider(Map.class)).addDataConsumer((ObserverDataConsumer<Map>) dummyDataSink.getDataConsumer(Map.class));
        Map<String, Object> inputMap = new HashMap<String, Object>();
        inputMap.put("filename", "Test.xslx");
        inputMap.put("data", spreadsheetData);
        dummyDataSource.sendData(inputMap);
        List<Object> receivedData = dummyDataSink.receivedData();
        assertEquals("Unexpected received data", 1, receivedData.size());
        Map<String, Object> outputMap = (Map<String, Object>) receivedData.get(0);
        assertNotNull("Unexpected null received data", outputMap);
        assertEquals("Unexpected received map size", 3, outputMap.size());
        assertNotNull("Unexpected null 'filename' entry", outputMap.get("filename"));
        assertNotNull("Unexpected null 'data' entry", outputMap.get("data"));
        assertNotNull("Unexpected null 'resourceformat' entry", outputMap.get("resourceformat"));
        assertEquals("Unexpected value of 'filename' entry", "Test_Sheet1.csv", outputMap.get("filename"));
        assertArrayEquals("Unexpected value of 'data' entry", csvData, (byte[]) outputMap.get("data"));
        assertEquals("Unexpected value of 'resourceformat' entry", "csv", outputMap.get("resourceformat"));
        dataFlowNodeLifeCycleControl.removeDataFlowNode(dummyDataSource);
        dataFlowNodeLifeCycleControl.removeDataFlowNode(xssfSheetToCSVDataProcessor);
        dataFlowNodeLifeCycleControl.removeDataFlowNode(dummyDataSink);
    }
    catch (IOException ioException)
    {
        fail("IO Exception");
    }
}
Author: arjuna-technologies, Project: Apache-POI_DataBroker_PlugIn, Lines: 78, Source: SimpleSheetToCSVTest.java
Example 20: fileScanner01
import com.arjuna.databroker.data.connector.ObserverDataConsumer; // import the dependency package/class
@Test
public void fileScanner01()
{
    try
    {
        DataFlowNodeLifeCycleControl dataFlowNodeLifeCycleControl = new TestJEEDataFlowNodeLifeCycleControl();
        File testDirectory = createTemporaryDirectory("Scanner01");
        String name = "Polling File Change Data Source";
        Map<String, String> properties = new HashMap<String, String>();
        properties.put(PollingFileChangeDataSource.FILENAME_PROPERYNAME, testDirectory.toString() + File.separator + "Test02");
        properties.put(PollingFileChangeDataSource.POLLINTERVAL_PROPERYNAME, "1000");
        PollingFileChangeDataSource pollingFileChangeDataSource = new PollingFileChangeDataSource(name, properties);
        DummyDataSink dummyDataSink = new DummyDataSink("Dummy Data Sink", Collections.<String, String>emptyMap());
        dataFlowNodeLifeCycleControl.completeCreationAndActivateDataFlowNode(UUID.randomUUID().toString(), pollingFileChangeDataSource, null);
        dataFlowNodeLifeCycleControl.completeCreationAndActivateDataFlowNode(UUID.randomUUID().toString(), dummyDataSink, null);
        ((ObservableDataProvider<File>) pollingFileChangeDataSource.getDataProvider(File.class)).addDataConsumer((ObserverDataConsumer<File>) dummyDataSink.getDataConsumer(File.class));
        Thread.sleep(1000);
        File testFile1 = new File(testDirectory, "Test01");
        testFile1.createNewFile();
        Thread.sleep(1000);
        File testFile2 = new File(testDirectory, "Test02");
        testFile2.createNewFile();
        Thread.sleep(1000);
        File testFile3 = new File(testDirectory, "Test03");
        testFile3.createNewFile();
        Thread.sleep(1000);
        dataFlowNodeLifeCycleControl.removeDataFlowNode(pollingFileChangeDataSource);
        dataFlowNodeLifeCycleControl.removeDataFlowNode(dummyDataSink);
        assertEquals("Incorrect message number", 1, dummyDataSink.receivedData().size());
    }
    catch (Throwable throwable)
    {
        logger.log(Level.WARNING, "Problem in 'fileScanner01'", throwable);
        fail("Problem in 'fileScanner01': " + throwable);
    }
}
Author: arjuna-technologies, Project: FileSystem_DataBroker_PlugIn, Lines: 49, Source: PollingFileChangeDataSourceTest.java
Note: The com.arjuna.databroker.data.connector.ObserverDataConsumer examples in this article were compiled from source-code and documentation platforms such as GitHub and MSDocs. The code snippets were selected from open-source projects contributed by many developers; copyright of the source code remains with the original authors. Please refer to the License of the corresponding project before distributing or using the code. Do not reproduce without permission.