本文整理汇总了Java中org.apache.metamodel.data.Row类的典型用法代码示例。如果您正苦于以下问题:Java Row类的具体用法?Java Row怎么用?Java Row使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。
Row类属于org.apache.metamodel.data包,在下文中一共展示了Row类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: insertScript
import org.apache.metamodel.data.Row; //导入依赖的package包/类
/**
 * Builds an {@link UpdateScript} that inserts every {@link Row} in the {@code row}
 * collection into {@code table}, marking the {@code requests} meter once per inserted row.
 *
 * Time-based columns are converted via {@code ElementParser.getDate(...)} before
 * insertion; all other values are passed through unchanged.
 *
 * Fixes: misspelled local ("insertScrript") and a {@code value} variable that was
 * hoisted outside both loops (risking a stale value carrying over between columns).
 */
private UpdateScript insertScript() {
    return new UpdateScript() {
        public void run(UpdateCallback callback) {
            RowInsertionBuilder insertBuilder = callback.insertInto(table);
            for (Row currentRow : row) {
                for (SelectItem selectItem : currentRow.getSelectItems()) {
                    // Time-based columns need explicit date parsing; others insert as-is.
                    final Object value;
                    if (selectItem.getColumn().getType().isTimeBased()) {
                        value = ElementParser.getDate(currentRow.getValue(selectItem));
                    } else {
                        value = currentRow.getValue(selectItem);
                    }
                    insertBuilder.value(selectItem.getColumn(), value);
                }
                requests.mark();
                insertBuilder.execute();
            }
        }
    };
}
示例2: insertRowIntoBlockingQueue
import org.apache.metamodel.data.Row; //导入依赖的package包/类
/**
 * Drains {@code dataset} into the blocking queue, marking the {@code requests}
 * meter per enqueued row.
 *
 * In horizontal mode only rows accepted by {@link #isKey} are forwarded; in
 * non-horizontal mode every row is forwarded. The original nested if/else
 * duplicated the put/mark pair; the condition collapses to
 * {@code !horizontal || isKey(row)} with identical behavior.
 *
 * @throws InterruptedException if blocked while putting into the queue
 */
private void insertRowIntoBlockingQueue(DataSet dataset) throws InterruptedException {
    hashedColumn = table.getColumnByName(columnName);
    requests = metrics.meter("read thread");
    while (dataset.next()) {
        Row row = dataset.getRow();
        // Vertical mode forwards every row; horizontal mode only forwards key rows.
        if (!horizontal || isKey(row)) {
            queue.put(row);
            requests.mark();
        }
    }
}
示例3: readComplexData
import org.apache.metamodel.data.Row; //导入依赖的package包/类
// Reads grouped data from the join table and, per distinct key value, runs a
// secondary query for the related foreign-key rows, bundling both into a
// ComplexData object pushed onto queueComplexData.
// NOTE(review): readDataSet presumably populates arrayData as a side effect —
// confirm; the System.out.println calls look like leftover debug output.
public void readComplexData() throws Exception {
Column primaryColumn = dataContext.getTableByQualifiedLabel(joinTable).getColumnByName(secondFkey);
System.out.println(dataContext.query().from(joinTable).select(primaryColumn).groupBy(primaryColumn).toQuery().toString());
// Outer query: one row per distinct value of primaryColumn.
DataSet dataSet = dataContext.query().from(joinTable).select(primaryColumn).groupBy(primaryColumn).execute();
while (dataSet.next()) {
// Fresh list per group; filled indirectly via readDataSet below.
arrayData = new ArrayList<Row>();
Object dataValue = dataSet.getRow().getValue(primaryColumn);
// Inner query: all foreign-key rows belonging to the current group value.
DataSet dataSetArray = dataContext.query().from(joinTable).select(forgeinKey).where(primaryColumn)
.eq(dataValue).execute();
System.out.println( dataContext.query().from(joinTable).select(forgeinKey).where(primaryColumn)
.eq(dataValue).toQuery().toSql());
readDataSet(dataSetArray);
// Pair the group row with its collected child rows and hand off to the consumer queue.
ComplexData complexDataObject = new ComplexData(dataSet.getRow(), arrayData);
queueComplexData.put(complexDataObject);
dataSetArray.close();
// dataContext.executeUpdate();
}
// Poison-pill signals downstream consumers that no more data will arrive.
insertPosion();
dataSet.close();
}
示例4: loadIntoMemory
import org.apache.metamodel.data.Row; //导入依赖的package包/类
/**
 * Loads the synonym table into memory and returns it as a
 * {@link SimpleSynonymCatalog} mapping each synonym to its master term.
 * Later rows silently overwrite earlier mappings for duplicate synonyms.
 */
public SimpleSynonymCatalog loadIntoMemory(final DatastoreConnection datastoreConnection) {
    final Column masterTermColumn = getMasterTermColumn(datastoreConnection);
    final Column[] synonymColumns = getSynonymColumns(datastoreConnection);
    final Map<String, String> synonymToMaster = new HashMap<>();
    final String tableName = masterTermColumn.getTable().getName();
    try (DataSet dataSet = datastoreConnection.getDataContext().query().from(tableName)
            .select(masterTermColumn).select(synonymColumns).execute()) {
        while (dataSet.next()) {
            final Row currentRow = dataSet.getRow();
            final String master = getMasterTerm(currentRow, masterTermColumn);
            for (final String synonym : getSynonyms(currentRow, synonymColumns)) {
                synonymToMaster.put(synonym, master);
            }
        }
    }
    return new SimpleSynonymCatalog(getName(), synonymToMaster);
}
示例5: materializeMainSchemaTable
import org.apache.metamodel.data.Row; //导入依赖的package包/类
/**
 * Returns a small in-memory stub data set (5 fixed rows) projected down to the
 * requested {@code columns} via sub-selection of each full-width row.
 *
 * Improvement: the five copy-pasted DefaultRow constructions are replaced by a
 * data-driven loop over a literal table.
 */
@Override
protected DataSet materializeMainSchemaTable(final Table table, final List<Column> columns, final int maxRows) {
    final SelectItem[] tableSelectItems = MetaModelHelper.createSelectItems(table.getColumns().toArray(new Column[0]));
    final SelectItem[] selectItems = MetaModelHelper.createSelectItems(columns.toArray(new Column[0]));
    final SimpleDataSetHeader header = new SimpleDataSetHeader(tableSelectItems);
    final SimpleDataSetHeader subSelectionHeader = new SimpleDataSetHeader(selectItems);
    final Object[][] stubData = {
            { 1, "hello" }, { 2, "there" }, { 3, "big" }, { 4, "wide" }, { 5, "world" } };
    final List<Row> rows = new ArrayList<>(stubData.length);
    for (final Object[] values : stubData) {
        rows.add(new DefaultRow(header, values).getSubSelection(subSelectionHeader));
    }
    // NOTE(review): maxRows is ignored — confirm no caller relies on truncation of this stub.
    return new InMemoryDataSet(rows);
}
示例6: getValues
import org.apache.metamodel.data.Row; //导入依赖的package包/类
/**
 * Queries the datastore for the distinct values of {@code _column} and returns
 * them as strings. A null database value is preserved as a null list element.
 */
@Override
public Collection<String> getValues() {
    try (final DatastoreConnection connection = _datastore.openConnection()) {
        final DataContext dataContext = connection.getDataContext();
        final Query query = dataContext.query().from(_column.getTable()).select(_column).toQuery();
        query.selectDistinct();
        try (final DataSet dataSet = dataContext.executeQuery(query)) {
            final List<String> result = new ArrayList<String>();
            while (dataSet.next()) {
                final Object rawValue = dataSet.getRow().getValue(0);
                result.add(rawValue == null ? null : rawValue.toString());
            }
            return result;
        }
    }
}
示例7: testGetRendererByHierarchyDistance
import org.apache.metamodel.data.Row; //导入依赖的package包/类
/**
 * Verifies that the renderer factory resolves the renderer closest in the
 * class hierarchy for each result type under the text rendering format.
 */
public void testGetRendererByHierarchyDistance() throws Exception {
    final ClasspathScanDescriptorProvider provider = new ClasspathScanDescriptorProvider().scanPackage(
            "org.eobjects.analyzer.result.renderer", true);
    final RendererFactory factory = new RendererFactory(
            new AnalyzerBeansConfigurationImpl().replace(provider));
    Renderer<?, ? extends CharSequence> renderer;
    renderer = factory.getRenderer(new NumberResult(1), TextRenderingFormat.class);
    assertEquals(ToStringTextRenderer.class, renderer.getClass());
    renderer = factory.getRenderer(new CrosstabResult(null), TextRenderingFormat.class);
    assertEquals(CrosstabTextRenderer.class, renderer.getClass());
    renderer = factory.getRenderer(new DataSetResult(new LinkedList<Row>()), TextRenderingFormat.class);
    assertEquals(MetricBasedResultTextRenderer.class, renderer.getClass());
}
示例8: getRow
import org.apache.metamodel.data.Row; //导入依赖的package包/类
/**
 * Converts the current Kettle row into a MetaModel {@link Row}, mapping each
 * header column to its Kettle field by name and converting values through the
 * field's value-meta. A null Kettle row yields a row of all-null values.
 * Conversion failures are rethrown as {@link MetaModelException}.
 */
@Override
public Row getRow() {
    final DataSetHeader header = getHeader();
    final Object[] convertedValues = new Object[header.size()];
    if (row != null) {
        for (int columnIndex = 0; columnIndex < header.size(); columnIndex++) {
            final Column column = header.getSelectItem(columnIndex).getColumn();
            // Kettle fields are located by column name, not position.
            final int sourceIndex = rowMeta.indexOfValue(column.getName());
            final Object rawValue = row[sourceIndex];
            try {
                final ValueMetaInterface valueMeta = rowMeta.getValueMeta(sourceIndex);
                convertedValues[columnIndex] = valueMeta.convertData(valueMeta, rawValue);
            } catch (KettleValueException e) {
                throw new MetaModelException(e);
            }
        }
    }
    return new DefaultRow(header, convertedValues);
}
示例9: execute
import org.apache.metamodel.data.Row; //导入依赖的package包/类
/**
 * Executes the row deletion: queries CouchDB for the _id/_rev of every document
 * matching the WHERE items (CouchDB requires the revision to delete), then
 * deletes each one through the connector.
 *
 * Improvement: the manual try/finally close is replaced with try-with-resources.
 */
@Override
public void execute() throws MetaModelException {
    final Table table = getTable();
    final List<FilterItem> whereItems = getWhereItems();
    final CouchDbConnector connector = _updateCallback.getConnector(table.getName());
    final CouchDbDataContext dataContext = _updateCallback.getDataContext();
    try (DataSet dataSet = dataContext.query().from(table)
            .select(CouchDbDataContext.FIELD_ID, CouchDbDataContext.FIELD_REV).where(whereItems).execute()) {
        while (dataSet.next()) {
            final Row row = dataSet.getRow();
            final String id = (String) row.getValue(0);
            final String revision = (String) row.getValue(1);
            connector.delete(id, revision);
        }
    }
}
示例10: deleteFrom
import org.apache.metamodel.data.Row; //导入依赖的package包/类
/**
 * Returns a deletion builder that removes matching rows from the in-memory POJO
 * table by scanning every row and evaluating all WHERE items against it.
 *
 * Fix: the original never closed the DataSet it opened; try-with-resources now
 * guarantees the resource is released even if evaluation throws.
 */
@Override
public RowDeletionBuilder deleteFrom(Table table) throws IllegalArgumentException, IllegalStateException,
        UnsupportedOperationException {
    return new AbstractRowDeletionBuilder(table) {
        @Override
        public void execute() throws MetaModelException {
            // Full scan: delete a row only when every WHERE item evaluates to true.
            try (DataSet dataSet = _dataContext.query().from(getTable()).select(getTable().getColumns()).execute()) {
                final PojoDataSet<?> pojoDataSet = (PojoDataSet<?>) dataSet;
                final List<FilterItem> whereItems = getWhereItems();
                while (pojoDataSet.next()) {
                    boolean delete = true;
                    final Row row = pojoDataSet.getRow();
                    for (FilterItem whereItem : whereItems) {
                        if (!whereItem.evaluate(row)) {
                            delete = false;
                            break;
                        }
                    }
                    if (delete) {
                        pojoDataSet.remove();
                    }
                }
            }
        }
    };
}
示例11: testJoin
import org.apache.metamodel.data.Row; //导入依赖的package包/类
/**
 * Joins employees to departments across two composite data contexts and checks
 * that the inner join yields exactly one row per employee.
 */
@Test
public void testJoin() {
    final Stopwatch stopwatch = Stopwatch.createStarted();
    final CompositeDataContext composite = new CompositeDataContext(this.emp_dcon, this.dep_dcon);
    final DataSet dataSet =
            composite.query().from("employee").innerJoin("department").on("dep_id", "id").selectAll().execute();
    int observedRows = 0;
    while (dataSet.next()) {
        Assert.assertNotNull(dataSet.getRow());
        observedRows++;
    }
    stopwatch.stop();
    logger.info("Test duration was {} ms", stopwatch.elapsed(TimeUnit.MILLISECONDS));
    Assert.assertEquals(employeeSize, observedRows);
}
示例12: testSelectiveJoin
import org.apache.metamodel.data.Row; //导入依赖的package包/类
/**
 * Same join as {@code testJoin} but filtered to department id 1; expects
 * exactly one row per employee of that department.
 */
@Test
public void testSelectiveJoin() {
    final Stopwatch stopwatch = Stopwatch.createStarted();
    final CompositeDataContext composite = new CompositeDataContext(this.emp_dcon, this.dep_dcon);
    final DataSet dataSet = composite.query().from("employee").innerJoin("department").on("dep_id", "id").selectAll()
            .where(composite.getTableByQualifiedLabel("department").getColumnByName("id")).eq(1).execute();
    int observedRows = 0;
    while (dataSet.next()) {
        Assert.assertNotNull(dataSet.getRow());
        observedRows++;
    }
    stopwatch.stop();
    logger.info("Test duration was {} ms", stopwatch.elapsed(TimeUnit.MILLISECONDS));
    Assert.assertEquals(employeesPerDepartment, observedRows);
}
示例13: testMaxAndOffset
import org.apache.metamodel.data.Row; //导入依赖的package包/类
/**
 * Exercises maxRows and firstRow (offset) against the Oracle HR.EMPLOYEES
 * sample schema, alone and in combination. Skips silently when the test
 * environment is not configured.
 */
public void testMaxAndOffset() throws Exception {
    if (!isConfigured()) {
        return;
    }
    final JdbcDataContext context = getDataContext();
    // maxRows by itself must cap the result size.
    final List<Row> limited =
            context.query().from("HR", "EMPLOYEES").select("EMPLOYEE_ID").maxRows(10).execute().toRows();
    assertEquals("Should limit size even without offset", 10, limited.size());
    // firstRow by itself skips rows but does not cap the size.
    final List<Row> offsetOnly =
            context.query().from("HR", "EMPLOYEES").select("EMPLOYEE_ID").orderBy("EMPLOYEE_ID").firstRow(5)
                    .execute().toRows();
    assertEquals("Should offset first row", new BigDecimal(104), offsetOnly.get(0).getValue(0));
    assertEquals("Should not limit size beyond offset", 103, offsetOnly.size());
    // Combining both: skip 19 rows, then return at most 20.
    final List<Row> offsetAndLimit =
            context.query().from("HR", "EMPLOYEES").select("EMPLOYEE_ID").maxRows(20).orderBy("EMPLOYEE_ID")
                    .firstRow(20).execute().toRows();
    assertEquals("Should offset first row", new BigDecimal(119), offsetAndLimit.get(0).getValue(0));
    assertEquals("Should not limit size", 20, offsetAndLimit.size());
}
示例14: testMaxAndOffset
import org.apache.metamodel.data.Row; //导入依赖的package包/类
/**
 * Exercises maxRows and firstRow (offset) against the SQL Server
 * Person.Person sample table, alone and in combination. Skips silently when
 * the test environment is not configured.
 */
public void testMaxAndOffset() throws Exception {
    if (!isConfigured()) {
        return;
    }
    final JdbcDataContext context = getDataContext();
    // maxRows by itself must cap the result size.
    final List<Row> limited =
            context.query().from("Person", "Person").select("BusinessEntityID").maxRows(10).execute().toRows();
    assertEquals("Should limit size even without offset or order by", 10, limited.size());
    // firstRow by itself skips rows but does not cap the size.
    final List<Row> offsetOnly =
            context.query().from("Person", "Person").select("BusinessEntityID").orderBy("BusinessEntityID")
                    .firstRow(5).execute().toRows();
    assertEquals("Should offset first row", 5, offsetOnly.get(0).getValue(0));
    assertEquals("Should not limit size beyond offset", 19968, offsetOnly.size());
    // Combining both: skip 19 rows, then return at most 20.
    final List<Row> offsetAndLimit =
            context.query().from("Person", "Person").select("BusinessEntityID").maxRows(20)
                    .orderBy("BusinessEntityID").firstRow(20).execute().toRows();
    assertEquals("Should offset first row", 20, offsetAndLimit.get(0).getValue(0));
    assertEquals("Should not limit size", 20, offsetAndLimit.size());
}
示例15: executePrimaryKeyLookupQuery
import org.apache.metamodel.data.Row; //导入依赖的package包/类
/**
 * Looks up a single HBase row by primary key via a Get operation and wraps the
 * result as an {@link HBaseRow} over the given select items. The HBase table
 * handle is always closed; an {@link IOException} is rethrown as
 * {@link IllegalStateException} with the key in the message.
 */
@Override
protected Row executePrimaryKeyLookupQuery(Table table, List<SelectItem> selectItems, Column primaryKeyColumn,
        Object keyValue) {
    final org.apache.hadoop.hbase.client.Table hTable = getHTable(table.getName());
    final Get lookup = new Get(ByteUtils.toBytes(keyValue));
    try {
        final Result result = hTable.get(lookup);
        final DataSetHeader header = new SimpleDataSetHeader(selectItems);
        return new HBaseRow(header, result);
    } catch (IOException e) {
        throw new IllegalStateException("Failed to execute HBase get operation with " + primaryKeyColumn.getName()
                + " = " + keyValue, e);
    } finally {
        FileHelper.safeClose(hTable);
    }
}