本文整理汇总了Java中org.apache.metamodel.data.CachingDataSetHeader类的典型用法代码示例。如果您正苦于以下问题:Java CachingDataSetHeader类的具体用法?Java CachingDataSetHeader怎么用?Java CachingDataSetHeader使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。
CachingDataSetHeader类属于org.apache.metamodel.data包,在下文中一共展示了CachingDataSetHeader类的10个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: materializeMainSchemaTable
import org.apache.metamodel.data.CachingDataSetHeader; //导入依赖的package包/类
@Override
protected DataSet materializeMainSchemaTable(Table table, List<Column> columns, int maxRows) {
    // This data context serves exactly one table; anything else is a caller error.
    if (table != _table) {
        throw new IllegalArgumentException("Unknown table: " + table);
    }

    final List<SelectItem> selectItems = columns.stream().map(SelectItem::new).collect(Collectors.toList());
    final CachingDataSetHeader header = new CachingDataSetHeader(selectItems);

    // Project each backing value-array down to just the requested columns.
    final List<Row> rows = new ArrayList<Row>();
    for (final Object[] sourceValues : _values) {
        final Object[] projected = new Object[columns.size()];
        int index = 0;
        for (final Column column : columns) {
            projected[index++] = sourceValues[column.getColumnNumber()];
        }
        rows.add(new DefaultRow(header, projected));
    }

    return rows.isEmpty() ? new EmptyDataSet(selectItems) : new InMemoryDataSet(header, rows);
}
示例2: testAndFilterItem
import org.apache.metamodel.data.CachingDataSetHeader; //导入依赖的package包/类
public void testAndFilterItem() throws Exception {
    // Three conditions on the same VARCHAR column, combined with logical AND.
    final Column column = new MutableColumn("Col1", ColumnType.VARCHAR);
    final SelectItem selectItem = new SelectItem(column);

    final FilterItem startsWithFoo = new FilterItem(selectItem, OperatorType.LIKE, "foo%");
    final FilterItem endsWithBar = new FilterItem(selectItem, OperatorType.LIKE, "%bar");
    final FilterItem notFoobar = new FilterItem(selectItem, OperatorType.DIFFERENT_FROM, "foobar");
    final FilterItem conjunction = new FilterItem(LogicalOperator.AND, startsWithFoo, endsWithBar, notFoobar);

    assertEquals("(Col1 LIKE 'foo%' AND Col1 LIKE '%bar' AND Col1 <> 'foobar')", conjunction.toString());

    final CachingDataSetHeader header = new CachingDataSetHeader(Lists.newArrayList(selectItem));

    // Accepted: starts with "foo", ends with "bar", and is not exactly "foobar".
    assertTrue(conjunction.evaluate(new DefaultRow(header, new Object[] { "foo bar" })));
    assertTrue(conjunction.evaluate(new DefaultRow(header, new Object[] { "foosenbar" })));
    // Rejected: each fails at least one of the three conditions.
    assertFalse(conjunction.evaluate(new DefaultRow(header, new Object[] { "foo" })));
    assertFalse(conjunction.evaluate(new DefaultRow(header, new Object[] { "hello world" })));
    assertFalse(conjunction.evaluate(new DefaultRow(header, new Object[] { "foobar" })));
}
示例3: materializeMainSchemaTable
import org.apache.metamodel.data.CachingDataSetHeader; //导入依赖的package包/类
@Override
protected DataSet materializeMainSchemaTable(Table table, List<Column> columns, int maxRows) {
    if (_tableName.equals(table.getName())) {
        // Build a fixed four-row dataset over ALL of the table's columns, then
        // project it down to the requested columns via MetaModelHelper.
        final List<SelectItem> allSelectItems =
                table.getColumns().stream().map(SelectItem::new).collect(Collectors.toList());
        final DataSetHeader header = new CachingDataSetHeader(allSelectItems);

        final List<Row> rows = new ArrayList<Row>();
        rows.add(new DefaultRow(header, new Object[] { "1", "hello", "world" }, null));
        rows.add(new DefaultRow(header, new Object[] { "2", _value, "world" }, null));
        rows.add(new DefaultRow(header, new Object[] { "3", "hi", _value }, null));
        rows.add(new DefaultRow(header, new Object[] { "4", "yo", "world" }, null));

        final DataSet source = new InMemoryDataSet(header, rows);
        final List<SelectItem> requestedItems =
                columns.stream().map(SelectItem::new).collect(Collectors.toList());
        return MetaModelHelper.getSelection(requestedItems, source);
    } else if ("an_empty_table".equals(table.getName())) {
        // Special-cased empty table used by tests.
        return new EmptyDataSet(columns.stream().map(SelectItem::new).collect(Collectors.toList()));
    }
    throw new UnsupportedOperationException();
}
示例4: OutputDataStreamRowCollector
import org.apache.metamodel.data.CachingDataSetHeader; //导入依赖的package包/类
public OutputDataStreamRowCollector(final RowProcessingPublisher publisher, final List<SelectItem> selectItems,
        final ConsumeRowHandler consumeRowHandler) {
    _publisher = publisher;
    _consumeRowHandler = consumeRowHandler;
    // Every collected row shares one header, so use the caching variant to
    // avoid repeated select-item lookups.
    _dataSetHeader = new CachingDataSetHeader(selectItems);
    _rowCounter = new AtomicInteger();
}
示例5: materializeMainSchemaTable
import org.apache.metamodel.data.CachingDataSetHeader; //导入依赖的package包/类
/**
 * Materializes a table by iterating the underlying Jackcess (MS Access) table
 * and copying the requested column values into MetaModel rows.
 *
 * A negative {@code maxRows} means "no limit"; {@code maxRows == 0} yields an
 * empty result since the loop guard fails immediately.
 */
@Override
public DataSet materializeMainSchemaTable(Table table, List<Column> columns, int maxRows) {
    try {
        final com.healthmarketscience.jackcess.Table mdbTable = getDatabase().getTable(table.getName());
        final List<SelectItem> selectItems = columns.stream().map(SelectItem::new).collect(Collectors.toList());
        final DataSetHeader header = new CachingDataSetHeader(selectItems);

        // ArrayList instead of LinkedList: rows are only appended here and
        // iterated later, so the array-backed list is the better fit.
        final List<Row> data = new ArrayList<Row>();
        int rowNum = 0;
        final Iterator<com.healthmarketscience.jackcess.Row> it = mdbTable.iterator();
        while (it.hasNext() && (maxRows < 0 || rowNum < maxRows)) {
            rowNum++;
            final com.healthmarketscience.jackcess.Row valueMap = it.next();
            final Object[] values = new Object[columns.size()];
            for (int j = 0; j < columns.size(); j++) {
                // Look up by column name; absent keys yield null.
                values[j] = valueMap.get(columns.get(j).getName());
            }
            data.add(new DefaultRow(header, values));
        }
        return new InMemoryDataSet(header, data);
    } catch (Exception e) {
        // Wrap any Jackcess/IO failure in MetaModel's unchecked exception,
        // preserving the cause.
        throw new MetaModelException(e);
    }
}
示例6: materializeMainSchemaTable
import org.apache.metamodel.data.CachingDataSetHeader; //导入依赖的package包/类
@Override
protected DataSet materializeMainSchemaTable(Table table, List<Column> columns, int maxRows) {
    // Stream documents for this table and convert each into a row lazily.
    final DocumentConverter converter = _schemaBuilder.getDocumentConverter(table);
    final DocumentSource source = getDocumentSourceForTable(table.getName());

    final List<SelectItem> selectItems = columns.stream().map(SelectItem::new).collect(Collectors.toList());
    final DataSetHeader header = new CachingDataSetHeader(selectItems);

    final DataSet dataSet = new DocumentSourceDataSet(header, source, converter);
    // Only apply a row limit when a positive maxRows was requested.
    return maxRows > 0 ? new MaxRowsDataSet(dataSet, maxRows) : dataSet;
}
示例7: materializeMainSchemaTable
import org.apache.metamodel.data.CachingDataSetHeader; //导入依赖的package包/类
@Override
public DataSet materializeMainSchemaTable(Table table, List<Column> columnList, int maxRows) {
    loadSchema();

    final List<Object[]> tableData = _tableData.get(table.getName());
    if (tableData == null) {
        throw new IllegalStateException("No such table name: '" + table.getName() + "'. Valid table names are: "
                + _tableData.keySet());
    }

    final List<SelectItem> selectItems = columnList.stream().map(SelectItem::new).collect(Collectors.toList());
    final DataSetHeader header = new CachingDataSetHeader(selectItems);

    final List<Row> resultData = new ArrayList<Row>();
    // Countdown limit: a negative maxRows never reaches zero, i.e. "no limit".
    int remaining = maxRows;
    for (final Object[] sourceRow : tableData) {
        if (remaining == 0) {
            break;
        }
        remaining--;

        final Object[] dataValues = new Object[columnList.size()];
        for (int i = 0; i < columnList.size(); i++) {
            final int columnNumber = columnList.get(i).getColumnNumber();
            // Some rows may not contain values for all columns (attributes);
            // pad missing trailing values with null.
            dataValues[i] = (columnNumber < sourceRow.length) ? sourceRow[columnNumber] : null;
        }
        resultData.add(new DefaultRow(header, dataValues));
    }
    return new InMemoryDataSet(header, resultData);
}
示例8: nestedLoopJoin
import org.apache.metamodel.data.CachingDataSetHeader; //导入依赖的package包/类
/**
 * Executes a simple nested loop join. The innerLoopDs will be copied in an
 * in-memory dataset.
 */
public static InMemoryDataSet nestedLoopJoin(DataSet innerLoopDs, DataSet outerLoopDs,
        Iterable<FilterItem> filtersIterable) {
    final List<FilterItem> filters = new ArrayList<>();
    filtersIterable.forEach(filters::add);

    // Materialize the inner side once; the outer side is streamed row by row.
    final List<Row> innerRows = innerLoopDs.toRows();

    // Joint schema: outer select-items first, then inner ones.
    final List<SelectItem> allItems = new ArrayList<>(outerLoopDs.getSelectItems());
    allItems.addAll(innerLoopDs.getSelectItems());
    final Set<FilterItem> applicableFilters = applicableFilters(filters, allItems);
    final DataSetHeader jointHeader = new CachingDataSetHeader(allItems);

    final List<Row> resultRows = new ArrayList<>();
    for (final Row outerRow : outerLoopDs) {
        final Object[] outerValues = outerRow.getValues();
        for (final Row innerRow : innerRows) {
            final Object[] innerValues = innerRow.getValues();
            // Concatenate outer + inner values into one combined row.
            final Object[] joined = new Object[outerValues.length + innerValues.length];
            System.arraycopy(outerValues, 0, joined, 0, outerValues.length);
            System.arraycopy(innerValues, 0, joined, outerValues.length, innerValues.length);
            final Row joinedRow = new DefaultRow(jointHeader, joined);
            // Keep the row when no filters apply, or when every applicable one accepts it.
            if (applicableFilters.isEmpty() || applicableFilters.stream().allMatch(fi -> fi.accept(joinedRow))) {
                resultRows.add(joinedRow);
            }
        }
    }
    return new InMemoryDataSet(jointHeader, resultRows);
}
示例9: testInOperandEvaluate
import org.apache.metamodel.data.CachingDataSetHeader; //导入依赖的package包/类
public void testInOperandEvaluate() throws Exception {
    final SelectItem selectItem = new SelectItem(new MutableColumn("foo", ColumnType.VARCHAR, null, 1, null, null, true,
            null, false, null));
    // The IN operand is supplied as an array of allowed values.
    final Object operand = new String[] { "foo", "bar" };
    final FilterItem filterItem = new FilterItem(selectItem, OperatorType.IN, operand);
    final DataSetHeader header = new CachingDataSetHeader(Lists.newArrayList(selectItem));

    // Values present in the operand array evaluate to true.
    assertTrue(filterItem.evaluate(new DefaultRow(header, new Object[] { "foo" })));
    assertTrue(filterItem.evaluate(new DefaultRow(header, new Object[] { "bar" })));
    // "foobar" is not among the operand values.
    assertFalse(filterItem.evaluate(new DefaultRow(header, new Object[] { "foobar" })));
}
示例10: getLeftJoin
import org.apache.metamodel.data.CachingDataSetHeader; //导入依赖的package包/类
/**
 * Performs a left join (aka left outer join) operation on two datasets.
 *
 * Implementation note: the right dataset is fully materialized up front and
 * then cross-joined (with the on-conditions applied) against each left row
 * individually; left rows with no match are emitted padded with nulls.
 *
 * @param ds1 the left dataset
 * @param ds2 the right dataset
 * @param onConditions the conditions to join by
 * @return the left joined result dataset
 */
public static DataSet getLeftJoin(DataSet ds1, DataSet ds2, FilterItem[] onConditions) {
    if (ds1 == null) {
        throw new IllegalArgumentException("Left DataSet cannot be null");
    }
    if (ds2 == null) {
        throw new IllegalArgumentException("Right DataSet cannot be null");
    }
    List<SelectItem> si1 = ds1.getSelectItems();
    List<SelectItem> si2 = ds2.getSelectItems();
    // Result schema: left-hand select items followed by right-hand ones.
    List<SelectItem> selectItems = Stream.concat(si1.stream(), si2.stream()).collect(Collectors.toList());
    List<Row> resultRows = new ArrayList<Row>();
    // Materialize the right side once; it is re-iterated for every left row.
    List<Row> ds2data = readDataSetFull(ds2);
    if (ds2data.isEmpty()) {
        // no need to join, simply return a new view (with null values) on
        // the previous dataset.
        return getSelection(selectItems, ds1);
    }
    final DataSetHeader header = new CachingDataSetHeader(selectItems);
    while (ds1.next()) {
        // Construct a single-row dataset for making a carthesian product
        // against ds2 (the on-conditions filter that product down to matches).
        Row ds1row = ds1.getRow();
        List<Row> ds1rows = new ArrayList<Row>();
        ds1rows.add(ds1row);
        DataSet carthesianProduct =
                getCarthesianProduct(new DataSet[] { new InMemoryDataSet(new CachingDataSetHeader(si1), ds1rows),
                        new InMemoryDataSet(new CachingDataSetHeader(si2), ds2data) }, onConditions);
        List<Row> carthesianRows = readDataSetFull(carthesianProduct);
        if (carthesianRows.size() > 0) {
            // At least one right-hand match: keep every combined row.
            resultRows.addAll(carthesianRows);
        } else {
            // No match: emit the left row's values; the trailing right-hand
            // slots stay null because Object arrays are null-initialized.
            Object[] values = ds1row.getValues();
            Object[] row = new Object[selectItems.size()];
            System.arraycopy(values, 0, row, 0, values.length);
            resultRows.add(new DefaultRow(header, row));
        }
    }
    // The left dataset has been fully consumed by the loop above.
    ds1.close();
    if (resultRows.isEmpty()) {
        return new EmptyDataSet(selectItems);
    }
    return new InMemoryDataSet(header, resultRows);
}