This article collects typical usage examples of the Java method org.pentaho.di.core.RowMetaAndData.getRowMeta. If you have been wondering what RowMetaAndData.getRowMeta does, how to use it, or where to find real-world examples of it, the curated method examples below should help. You can also read further about the enclosing class, org.pentaho.di.core.RowMetaAndData.

Below are 12 code examples of RowMetaAndData.getRowMeta, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Java code examples.
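As a quick orientation before the full examples, here is a minimal sketch of the basic pattern: a RowMetaAndData object pairs an Object[] of values (getData) with the metadata describing each field (getRowMeta). The field name "ID" and the class name GetRowMetaSketch are invented for this sketch; the RowMetaAndData, ValueMeta and RowMetaInterface calls are the same ones used in the examples below.

import org.pentaho.di.core.RowMetaAndData;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.core.row.ValueMeta;
import org.pentaho.di.core.row.ValueMetaInterface;

public class GetRowMetaSketch
{
    public static void main(String[] args) throws Exception
    {
        // Build a one-field row: an integer value named "ID" (field name chosen for this sketch only)
        RowMetaAndData row = new RowMetaAndData();
        row.addValue(new ValueMeta("ID", ValueMetaInterface.TYPE_INTEGER), Long.valueOf(42L));

        // getRowMeta() returns the row's metadata (field names, types, field count);
        // getData() returns the matching values as an Object[] in the same order.
        RowMetaInterface rowMeta = row.getRowMeta();
        Object[] rowData = row.getData();

        System.out.println(rowMeta.size());                    // number of fields: 1
        System.out.println(rowMeta.getValueMeta(0).getName()); // field name: ID
        System.out.println(rowMeta.getString(rowData, 0));     // the value rendered as a String
    }
}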
Example 1: getIDWithValue
import org.pentaho.di.core.RowMetaAndData; // import the package/class this method depends on
private synchronized long getIDWithValue(String tablename, String idfield, String lookupkey[], long key[]) throws KettleException
{
    RowMetaAndData par = new RowMetaAndData();
    String sql = "SELECT " + idfield + " FROM " + tablename + " ";

    for (int i = 0; i < lookupkey.length; i++)
    {
        if (i == 0)
            sql += "WHERE ";
        else
            sql += "AND ";
        par.addValue(new ValueMeta(lookupkey[i], ValueMetaInterface.TYPE_INTEGER), new Long(key[i]));
        sql += lookupkey[i] + " = ? ";
    }

    RowMetaAndData result = database.getOneRow(sql, par.getRowMeta(), par.getData());
    if (result != null && result.getRowMeta() != null && result.getData() != null && result.isNumeric(0))
        return result.getInteger(0, 0);

    return -1;
}
Example 2: processRow
import org.pentaho.di.core.RowMetaAndData; // import the package/class this method depends on
public boolean processRow(StepMetaInterface smi, StepDataInterface sdi) throws KettleException
{
    if (data.resultFilesList == null || getLinesRead() >= data.resultFilesList.size())
    {
        setOutputDone();
        return false;
    }

    ResultFile resultFile = (ResultFile) data.resultFilesList.get((int) getLinesRead());
    RowMetaAndData r = resultFile.getRow();

    data.outputRowMeta = r.getRowMeta();
    smi.getFields(data.outputRowMeta, getStepname(), null, null, this);

    incrementLinesRead();
    putRow(data.outputRowMeta, r.getData()); // copy row to possible alternate rowset(s).

    if (checkFeedback(getLinesRead()))
        logBasic(Messages.getString("FilesFromResult.Log.LineNumber") + getLinesRead()); //$NON-NLS-1$

    return true;
}
Example 3: getIDWithValue
import org.pentaho.di.core.RowMetaAndData; // import the package/class this method depends on
public synchronized ObjectId getIDWithValue(String tablename, String idfield, String lookupkey[], ObjectId key[]) throws KettleException
{
    RowMetaAndData par = new RowMetaAndData();
    String sql = "SELECT " + idfield + " FROM " + tablename + " ";

    for (int i = 0; i < lookupkey.length; i++)
    {
        if (i == 0)
            sql += "WHERE ";
        else
            sql += "AND ";
        par.addValue(new ValueMeta(lookupkey[i], ValueMetaInterface.TYPE_INTEGER), new LongObjectId(key[i]));
        sql += lookupkey[i] + " = ? ";
    }

    RowMetaAndData result = getOneRow(sql, par.getRowMeta(), par.getData());
    if (result != null && result.getRowMeta() != null && result.getData() != null && result.isNumeric(0)) {
        return new LongObjectId(result.getInteger(0, 0));
    }
    return null;
}
Developer: yintaoxue, Project: read-open-source-code, Lines: 21, Source file: KettleDatabaseRepositoryConnectionDelegate.java
Example 4: processRow
import org.pentaho.di.core.RowMetaAndData; // import the package/class this method depends on
public boolean processRow(StepMetaInterface smi, StepDataInterface sdi) throws KettleException
{
    Result previousResult = getTransMeta().getPreviousResult();

    if (previousResult == null || getLinesRead() >= previousResult.getRows().size())
    {
        setOutputDone();
        return false;
    }

    RowMetaAndData row = previousResult.getRows().get((int) getLinesRead());
    incrementLinesRead();

    data = (RowsFromResultData) sdi;

    // We don't get the meta-data from the previous steps (there aren't any) but from the previous transformation or job
    //
    data.outputRowMeta = row.getRowMeta();

    // copy row to possible alternate rowset(s).
    //
    putRow(data.outputRowMeta, row.getData());

    if (checkFeedback(getLinesRead()))
    {
        if (log.isBasic()) logBasic(Messages.getString("RowsFromResult.Log.LineNumber") + getLinesRead()); //$NON-NLS-1$
    }

    return true;
}
Example 5: init
import org.pentaho.di.core.RowMetaAndData; // import the package/class this method depends on
public boolean init(StepMetaInterface smi, StepDataInterface sdi)
{
    meta = (RowGeneratorMeta) smi;
    data = (RowGeneratorData) sdi;

    if (super.init(smi, sdi))
    {
        // Determine the number of rows to generate...
        data.rowLimit = Const.toLong(environmentSubstitute(meta.getRowLimit()), -1L);
        data.rowsWritten = 0L;

        if (data.rowLimit < 0L) // Unable to parse
        {
            logError(Messages.getString("RowGenerator.Wrong.RowLimit.Number"));
            return false; // fail
        }

        // Create a row (constants) with all the values in it...
        List<CheckResultInterface> remarks = new ArrayList<CheckResultInterface>(); // stores the errors...
        RowMetaAndData outputRow = buildRow(meta, remarks, getStepname());
        if (!remarks.isEmpty())
        {
            for (int i = 0; i < remarks.size(); i++)
            {
                CheckResult cr = (CheckResult) remarks.get(i);
                logError(cr.getText());
            }
            return false;
        }

        data.outputRowData = outputRow.getData();
        data.outputRowMeta = outputRow.getRowMeta();
        return true;
    }
    return false;
}
Example 6: showLastImpactAnalyses
import org.pentaho.di.core.RowMetaAndData; // import the package/class this method depends on
public void showLastImpactAnalyses(TransMeta transMeta) {
    if (transMeta == null)
        return;

    TransGraph transGraph = delegates.trans.findTransGraphOfTransformation(transMeta);
    if (transGraph == null)
        return;

    List<Object[]> rows = new ArrayList<Object[]>();
    RowMetaInterface rowMeta = null;
    for (int i = 0; i < transGraph.getImpact().size(); i++) {
        DatabaseImpact ii = (DatabaseImpact) transGraph.getImpact().get(i);
        RowMetaAndData row = ii.getRow();
        rowMeta = row.getRowMeta();
        rows.add(row.getData());
    }

    if (rows.size() > 0) {
        // Display all the rows...
        PreviewRowsDialog prd = new PreviewRowsDialog(shell, Variables.getADefaultVariableSpace(), SWT.NONE, "-", rowMeta, rows);
        prd.setTitleMessage(Messages.getString("Spoon.Dialog.ImpactAnalyses.Title"),    // "Impact analyses"
            Messages.getString("Spoon.Dialog.ImpactAnalyses.Message"));                 // "Result of analyses:"
        prd.open();
    } else {
        MessageBox mb = new MessageBox(shell, SWT.OK | SWT.ICON_INFORMATION);
        if (transGraph.isImpactFinished()) {
            // "As far as I can tell, this transformation has no impact on any database."
            mb.setMessage(Messages.getString("Spoon.Dialog.TransformationNoImpactOnDatabase.Message"));
        } else {
            // "Please run the impact analyses first on this transformation."
            mb.setMessage(Messages.getString("Spoon.Dialog.RunImpactAnalysesFirst.Message"));
        }
        mb.setText(Messages.getString("Spoon.Dialog.ImpactAnalyses.Title")); // "Impact analyses"
        mb.open();
    }
}
Example 7: getData
import org.pentaho.di.core.RowMetaAndData; // import the package/class this method depends on
/**
 * Copy information from the input buffer to the dialog fields.
 */
private void getData()
{
    for (int i = 0; i < buffer.size(); i++)
    {
        RowMetaAndData rowMetaAndData = buffer.get(i);
        RowMetaInterface rowMeta = rowMetaAndData.getRowMeta();
        Object[] rowData = rowMetaAndData.getData();

        for (int c = 0; c < rowMeta.size(); c++)
        {
            ValueMetaInterface v = rowMeta.getValueMeta(c);
            String show;
            try
            {
                if (v.isNumeric())
                {
                    show = v.getString(rowData[c]);
                }
                else
                {
                    show = v.getString(rowData[c]);
                }
            }
            catch (KettleValueException e)
            {
                show = "<conversion error>";
            }

            if (show != null)
                wFields.table.getItem(i).setText(c + 1, show);
        }
    }
    wFields.optWidth(true);
}
Example 8: processRow
import org.pentaho.di.core.RowMetaAndData; // import the package/class this method depends on
public boolean processRow(StepMetaInterface smi, StepDataInterface sdi) throws KettleException
{
    Result previousResult = getTransMeta().getPreviousResult();

    if (previousResult == null || getLinesRead() >= previousResult.getRows().size())
    {
        setOutputDone();
        return false;
    }

    RowMetaAndData row = previousResult.getRows().get((int) getLinesRead());
    incrementLinesRead();

    data = (RowsFromResultData) sdi;

    // We don't get the meta-data from the previous steps (there aren't any) but from the previous transformation or job
    //
    data.outputRowMeta = row.getRowMeta();

    // copy row to possible alternate rowset(s).
    //
    putRow(data.outputRowMeta, row.getData());

    if (checkFeedback(getLinesRead()))
    {
        if (log.isBasic()) logBasic(BaseMessages.getString(PKG, "RowsFromResult.Log.LineNumber") + getLinesRead()); //$NON-NLS-1$
    }

    return true;
}
Example 9: init
import org.pentaho.di.core.RowMetaAndData; // import the package/class this method depends on
public boolean init(StepMetaInterface smi, StepDataInterface sdi)
{
    meta = (RowGeneratorMeta) smi;
    data = (RowGeneratorData) sdi;

    if (super.init(smi, sdi))
    {
        // Determine the number of rows to generate...
        data.rowLimit = Const.toLong(environmentSubstitute(meta.getRowLimit()), -1L);
        data.rowsWritten = 0L;

        if (data.rowLimit < 0L) // Unable to parse
        {
            logError(BaseMessages.getString(PKG, "RowGenerator.Wrong.RowLimit.Number"));
            return false; // fail
        }

        // Create a row (constants) with all the values in it...
        List<CheckResultInterface> remarks = new ArrayList<CheckResultInterface>(); // stores the errors...
        RowMetaAndData outputRow = buildRow(meta, remarks, getStepname());
        if (!remarks.isEmpty())
        {
            for (int i = 0; i < remarks.size(); i++)
            {
                CheckResult cr = (CheckResult) remarks.get(i);
                logError(cr.getText());
            }
            return false;
        }

        data.outputRowData = outputRow.getData();
        data.outputRowMeta = outputRow.getRowMeta();
        return true;
    }
    return false;
}
Example 10: showLastImpactAnalyses
import org.pentaho.di.core.RowMetaAndData; // import the package/class this method depends on
public void showLastImpactAnalyses(TransMeta transMeta) {
    if (transMeta == null)
        return;

    TransGraph transGraph = delegates.trans.findTransGraphOfTransformation(transMeta);
    if (transGraph == null)
        return;

    List<Object[]> rows = new ArrayList<Object[]>();
    RowMetaInterface rowMeta = null;
    for (int i = 0; i < transGraph.getImpact().size(); i++) {
        DatabaseImpact ii = (DatabaseImpact) transGraph.getImpact().get(i);
        RowMetaAndData row = ii.getRow();
        rowMeta = row.getRowMeta();
        rows.add(row.getData());
    }

    if (rows.size() > 0) {
        // Display all the rows...
        PreviewRowsDialog prd = new PreviewRowsDialog(shell, Variables.getADefaultVariableSpace(), SWT.NONE, "-",
            rowMeta, rows);
        prd.setTitleMessage(
            BaseMessages.getString(PKG, "Spoon.Dialog.ImpactAnalyses.Title"),    // "Impact analyses"
            BaseMessages.getString(PKG, "Spoon.Dialog.ImpactAnalyses.Message")); // "Result of analyses:"
        prd.open();
    } else {
        MessageBox mb = new MessageBox(shell, SWT.OK | SWT.ICON_INFORMATION);
        if (transGraph.isImpactFinished()) {
            // "As far as I can tell, this transformation has no impact on any database."
            mb.setMessage(BaseMessages.getString(PKG, "Spoon.Dialog.TransformationNoImpactOnDatabase.Message"));
        } else {
            // "Please run the impact analyses first on this transformation."
            mb.setMessage(BaseMessages.getString(PKG, "Spoon.Dialog.RunImpactAnalysesFirst.Message"));
        }
        mb.setText(BaseMessages.getString(PKG, "Spoon.Dialog.ImpactAnalyses.Title")); // "Impact analyses"
        mb.open();
    }
}
Example 11: checkRows
import org.pentaho.di.core.RowMetaAndData; // import the package/class this method depends on
/**
 * Check the list; the list has to be sorted.
 */
public void checkRows(List<RowMetaAndData> rows, boolean ascending) throws Exception
{
    String prev_key1 = null, prev_key2 = null;
    int idx = 0;

    for (RowMetaAndData rm : rows) {
        Object[] r1 = rm.getData();
        RowMetaInterface rmi = rm.getRowMeta();

        String key1 = rmi.getString(r1, "KEY1", "");
        String key2 = rmi.getString(r1, "KEY2", "");

        if (prev_key1 != null && prev_key2 != null)
        {
            if (ascending)
            {
                if (prev_key1.compareTo(key1) == 0)
                {
                    if (prev_key2.compareTo(key2) > 0)
                    {
                        fail("error in sort");
                    }
                }
                else if (prev_key1.compareTo(key1) > 0)
                {
                    fail("error in sort");
                }
            }
            else
            {
                if (prev_key1.compareTo(key1) == 0)
                {
                    if (prev_key2.compareTo(key2) < 0)
                    {
                        fail("error in sort");
                    }
                }
                else if (prev_key1.compareTo(key1) < 0)
                {
                    fail("error in sort");
                }
            }
        }
        prev_key1 = key1;
        prev_key2 = key2;
        idx++;
    }

    if (idx != MAX_COUNT)
    {
        fail("less rows returned than expected: " + idx);
    }
}
Example 12: writeLogRecord
import org.pentaho.di.core.RowMetaAndData; // import the package/class this method depends on
public void writeLogRecord(LogTableInterface logTable, LogStatus status, Object subject, Object parent) throws KettleException {
    try {
        RowMetaAndData logRecord = logTable.getLogRecord(status, subject, parent);
        if (logRecord == null) return;

        boolean update = (logTable.getKeyField() != null) && !status.equals(LogStatus.START);

        String schemaTable = databaseMeta.getQuotedSchemaTableCombination(
            environmentSubstitute(logTable.getActualSchemaName()),
            environmentSubstitute(logTable.getActualTableName())
        );
        RowMetaInterface rowMeta = logRecord.getRowMeta();
        Object[] rowData = logRecord.getData();

        if (update) {
            RowMetaInterface updateRowMeta = new RowMeta();
            Object[] updateRowData = new Object[rowMeta.size()];
            ValueMetaInterface keyValueMeta = rowMeta.getValueMeta(0);
            StringBuffer sqlBuff = new StringBuffer(250);
            sqlBuff.append("UPDATE ").append(schemaTable).append(" SET ");

            for (int i = 1; i < rowMeta.size(); i++) // Without ID_JOB or ID_BATCH
            {
                ValueMetaInterface valueMeta = rowMeta.getValueMeta(i);
                if (i > 1) {
                    sqlBuff.append(", ");
                }
                sqlBuff.append(databaseMeta.quoteField(valueMeta.getName())).append("=? ");

                updateRowMeta.addValueMeta(valueMeta);
                updateRowData[i - 1] = rowData[i];
            }
            sqlBuff.append("WHERE ").append(databaseMeta.quoteField(keyValueMeta.getName())).append("=? ");

            updateRowMeta.addValueMeta(keyValueMeta);
            updateRowData[rowMeta.size() - 1] = rowData[0];

            String sql = sqlBuff.toString();
            execStatement(sql, updateRowMeta, updateRowData);
        } else {
            insertRow(environmentSubstitute(logTable.getActualSchemaName()), environmentSubstitute(logTable.getActualTableName()), logRecord.getRowMeta(), logRecord.getData());
        }
    } catch (Exception e) {
        throw new KettleDatabaseException("Unable to write log record to log table " + logTable.getActualTableName(), e);
    }
}