本文整理汇总了Java中org.pentaho.di.core.row.ValueMetaInterface.clone方法的典型用法代码示例。如果您正苦于以下问题:Java ValueMetaInterface.clone方法的具体用法?Java ValueMetaInterface.clone怎么用?Java ValueMetaInterface.clone使用的例子?那么, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类org.pentaho.di.core.row.ValueMetaInterface
的用法示例。
在下文中一共展示了ValueMetaInterface.clone方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: getModifyColumnStatement
import org.pentaho.di.core.row.ValueMetaInterface; //导入方法依赖的package包/类
/**
 * Generates the SQL needed to change the definition of an existing column.
 * A portable column rename is not available on all Oracle versions, so the
 * change is emulated: copy the data into a temporary column, recreate the
 * target column with the wanted definition, copy the data back, and drop
 * the temporary column.
 *
 * @param tablename   the table containing the column
 * @param v           the wanted column definition as a value
 * @param tk          the name of the technical key field
 * @param use_autoinc whether or not this field uses auto increment
 * @param pk          the name of the primary key field
 * @param semicolon   whether or not to add a semi-colon behind the statement
 * @return the SQL script that modifies the column in the specified table
 */
public String getModifyColumnStatement(String tablename, ValueMetaInterface v, String tk, boolean use_autoinc, String pk, boolean semicolon)
{
    // Work on a copy so the caller's value metadata stays untouched.
    ValueMetaInterface tempColumn = v.clone();

    // Keep at most 30 characters of the original name before appending the
    // suffix, keeping the temporary name within identifier length limits.
    int keep = Math.min(v.getName().length(), 30);
    tempColumn.setName(v.getName().substring(0, keep) + "_KTL"); // should always be less than 35

    StringBuilder sql = new StringBuilder();
    // 1) create the temporary column
    sql.append(getAddColumnStatement(tablename, tempColumn, tk, use_autoinc, pk, semicolon)).append(";").append(Const.CR);
    // 2) preserve the existing data in the temporary column
    sql.append("UPDATE ").append(tablename).append(" SET ").append(tempColumn.getName()).append("=").append(v.getName()).append(";").append(Const.CR);
    // 3) drop the old column
    sql.append(getDropColumnStatement(tablename, v, tk, use_autoinc, pk, semicolon)).append(";").append(Const.CR);
    // 4) recreate the column with the wanted definition
    sql.append(getAddColumnStatement(tablename, v, tk, use_autoinc, pk, semicolon)).append(";").append(Const.CR);
    // 5) copy the data back; this round trip avoids the RENAME clause,
    //    which is not supported on all Oracle versions
    sql.append("UPDATE ").append(tablename).append(" SET ").append(v.getName()).append("=").append(tempColumn.getName()).append(";").append(Const.CR);
    // 6) drop the temporary column
    sql.append(getDropColumnStatement(tablename, tempColumn, tk, use_autoinc, pk, semicolon));

    return sql.toString();
}
示例2: getFields
import org.pentaho.di.core.row.ValueMetaInterface; //导入方法依赖的package包/类
/**
 * Generates this step's output row layout: one value per configured
 * fixed-file input field, optionally stored as a lazy-conversion binary
 * string with the plain String layout attached as storage metadata.
 *
 * @param rowMeta  the row layout to append the field values to
 * @param origin   the step name recorded as origin on every value
 * @param info     informational row metas (unused here)
 * @param nextStep the next step (unused here)
 * @param space    variable space used to resolve the configured encoding
 * @throws KettleStepException declared by the interface; not thrown here
 */
public void getFields(RowMetaInterface rowMeta, String origin, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space) throws KettleStepException
{
    for (int i = 0; i < fieldDefinition.length; i++) {
        FixedFileInputField field = fieldDefinition[i];

        ValueMetaInterface valueMeta = new ValueMeta(field.getName(), field.getType());
        // FIX: the conversion mask was set twice in the original; once is enough.
        valueMeta.setConversionMask(field.getFormat());
        valueMeta.setTrimType(field.getTrimType());
        valueMeta.setLength(field.getLength());
        valueMeta.setPrecision(field.getPrecision());
        valueMeta.setDecimalSymbol(field.getDecimal());
        valueMeta.setGroupingSymbol(field.getGrouping());
        valueMeta.setCurrencySymbol(field.getCurrency());
        valueMeta.setStringEncoding(space.environmentSubstitute(encoding));
        if (lazyConversionActive) {
            valueMeta.setStorageType(ValueMetaInterface.STORAGE_TYPE_BINARY_STRING);
        }

        // In case we want to convert Strings: keep the non-lazy String layout
        // available as storage metadata next to the (possibly binary) value.
        ValueMetaInterface storageMetadata = valueMeta.clone();
        storageMetadata.setType(ValueMetaInterface.TYPE_STRING);
        storageMetadata.setStorageType(ValueMetaInterface.STORAGE_TYPE_NORMAL);
        valueMeta.setStorageMetadata(storageMetadata);

        valueMeta.setOrigin(origin);
        rowMeta.addValueMeta(valueMeta);
    }
}
示例3: getFields
import org.pentaho.di.core.row.ValueMetaInterface; //导入方法依赖的package包/类
@Override
public void getFields(RowMetaInterface row, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space) throws KettleStepException {
    // The field to flatten is mandatory.
    if (fieldName == null || fieldName.length() == 0) {
        throw new KettleStepException(Messages.getString("FlattenerMeta.Exception.FlattenFieldRequired")); //$NON-NLS-1$
    }

    int idx = row.indexOfValue(fieldName);
    if (idx < 0) {
        throw new KettleStepException(Messages.getString("FlattenerMeta.Exception.UnableToLocateFieldInInputFields", fieldName )); //$NON-NLS-1$ //$NON-NLS-2$
    }

    // Remove the key value; it is replaced below by one clone per target
    // field (there will be different entries for each output row).
    ValueMetaInterface keyMeta = row.getValueMeta(idx);
    row.removeValueMeta(idx);

    for (String target : targetField) {
        ValueMetaInterface flattened = keyMeta.clone();
        flattened.setName(target);
        flattened.setOrigin(name);
        row.addValueMeta(flattened);
    }
}
示例4: getModifyColumnStatement
import org.pentaho.di.core.row.ValueMetaInterface; //导入方法依赖的package包/类
/**
 * Generates the SQL needed to change the definition of an existing column.
 * The change is emulated by copying the data into a temporary column,
 * dropping and recreating the wanted column, then renaming the temporary
 * column back into place. Quoting on the original column name is preserved
 * on the temporary name.
 *
 * @param tablename   the table containing the column
 * @param v           the wanted column definition as a value
 * @param tk          the name of the technical key field
 * @param use_autoinc whether or not this field uses auto increment
 * @param pk          the name of the primary key field
 * @param semicolon   whether or not to add a semi-colon behind the statement
 * @return the SQL script that modifies the column in the specified table
 */
public String getModifyColumnStatement(String tablename, ValueMetaInterface v, String tk, boolean use_autoinc, String pk, boolean semicolon) {
    // Work on a copy so the caller's value metadata stays untouched.
    ValueMetaInterface tempColumn = v.clone();

    String baseName = v.getName();
    boolean quoted = baseName.startsWith(getStartQuote()) && baseName.endsWith(getEndQuote());
    if (quoted) {
        // Strip the quotes before appending the suffix...
        baseName = baseName.substring(1, baseName.length() - 1);
    }
    baseName += "_KTL";
    if (quoted) {
        // ...and restore them afterwards.
        baseName = getStartQuote() + baseName + getEndQuote();
    }
    tempColumn.setName(baseName);

    StringBuilder sql = new StringBuilder();
    // 1) create the temporary column
    sql.append(getAddColumnStatement(tablename, tempColumn, tk, use_autoinc, pk, semicolon)).append(";").append(Const.CR);
    // 2) preserve the existing data in the temporary column
    sql.append("UPDATE ").append(tablename).append(" SET ").append(tempColumn.getName()).append("=").append(v.getName()).append(";").append(Const.CR);
    // 3) drop the old column
    sql.append(getDropColumnStatement(tablename, v, tk, use_autoinc, pk, semicolon)).append(";").append(Const.CR);
    // 4) rename the temporary column to replace the removed column
    sql.append("ALTER TABLE ").append(tablename).append(" RENAME ").append(tempColumn.getName()).append(" TO ").append(v.getName()).append(";").append(Const.CR);

    return sql.toString();
}
示例5: getFields
import org.pentaho.di.core.row.ValueMetaInterface; //导入方法依赖的package包/类
@Override
public void getFields(RowMetaInterface row, String name, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space) throws KettleStepException {
    // The field to flatten is mandatory.
    if (fieldName == null || fieldName.length() == 0) {
        throw new KettleStepException(BaseMessages.getString(PKG, "FlattenerMeta.Exception.FlattenFieldRequired")); //$NON-NLS-1$
    }

    int idx = row.indexOfValue(fieldName);
    if (idx < 0) {
        throw new KettleStepException(BaseMessages.getString(PKG, "FlattenerMeta.Exception.UnableToLocateFieldInInputFields", fieldName )); //$NON-NLS-1$ //$NON-NLS-2$
    }

    // Remove the key value; it is replaced below by one clone per target
    // field (there will be different entries for each output row).
    ValueMetaInterface keyMeta = row.getValueMeta(idx);
    row.removeValueMeta(idx);

    for (String target : targetField) {
        ValueMetaInterface flattened = keyMeta.clone();
        flattened.setName(target);
        flattened.setOrigin(name);
        row.addValueMeta(flattened);
    }
}
示例6: getFields
import org.pentaho.di.core.row.ValueMetaInterface; //导入方法依赖的package包/类
/**
 * Generates this step's output row layout: the incoming layout is discarded
 * and replaced by one value per configured input field, plus optional
 * filename and row-number fields.
 *
 * @param rowMeta  the row layout to fill (cleared first — this step eats its input)
 * @param origin   the step name recorded as origin on every value
 * @param info     informational row metas (unused here)
 * @param nextStep the next step (unused here)
 * @param space    variable space used to resolve the configured encoding
 * @throws KettleStepException declared by the interface; not thrown here
 */
public void getFields(RowMetaInterface rowMeta, String origin, RowMetaInterface[] info,
    StepMeta nextStep, VariableSpace space) throws KettleStepException {
  rowMeta.clear(); // Start with a clean slate, eats the input
  for (int i = 0; i < inputFields.length; i++) {
    TextFileInputField field = inputFields[i];

    ValueMetaInterface valueMeta = new ValueMeta(field.getName(), field.getType());
    // FIX: the conversion mask was set twice in the original; once is enough.
    valueMeta.setConversionMask(field.getFormat());
    valueMeta.setLength(field.getLength());
    valueMeta.setPrecision(field.getPrecision());
    valueMeta.setDecimalSymbol(field.getDecimalSymbol());
    valueMeta.setGroupingSymbol(field.getGroupSymbol());
    valueMeta.setCurrencySymbol(field.getCurrencySymbol());
    valueMeta.setTrimType(field.getTrimType());
    if (lazyConversionActive) {
      valueMeta.setStorageType(ValueMetaInterface.STORAGE_TYPE_BINARY_STRING);
    }
    valueMeta.setStringEncoding(space.environmentSubstitute(encoding));

    // In case we want to convert Strings...
    // Using a copy of the valueMeta object means that the inner and outer
    // representation format is the same: preview shows the data the way it is
    // read, and this layout travels down the transformation with the metadata.
    ValueMetaInterface storageMetadata = valueMeta.clone();
    storageMetadata.setType(ValueMetaInterface.TYPE_STRING);
    storageMetadata.setStorageType(ValueMetaInterface.STORAGE_TYPE_NORMAL);
    storageMetadata.setLength(-1, -1); // string lengths are unknown in advance
    valueMeta.setStorageMetadata(storageMetadata);

    valueMeta.setOrigin(origin);
    rowMeta.addValueMeta(valueMeta);
  }
  // Optional extra field carrying the source file name.
  if (!Const.isEmpty(filenameField) && includingFilename) {
    ValueMetaInterface filenameMeta =
        new ValueMeta(filenameField, ValueMetaInterface.TYPE_STRING);
    filenameMeta.setOrigin(origin);
    if (lazyConversionActive) {
      filenameMeta.setStorageType(ValueMetaInterface.STORAGE_TYPE_BINARY_STRING);
      filenameMeta
          .setStorageMetadata(new ValueMeta(filenameField, ValueMetaInterface.TYPE_STRING));
    }
    rowMeta.addValueMeta(filenameMeta);
  }
  // Optional extra field carrying the row number.
  if (!Const.isEmpty(rowNumField)) {
    ValueMetaInterface rowNumMeta = new ValueMeta(rowNumField, ValueMetaInterface.TYPE_INTEGER);
    rowNumMeta.setLength(10);
    rowNumMeta.setOrigin(origin);
    rowMeta.addValueMeta(rowNumMeta);
  }
}
示例7: generateRandomRows
import org.pentaho.di.core.row.ValueMetaInterface; //导入方法依赖的package包/类
/**
* Generate some random rows to send to python in the case where a single variable (data frame) is being extracted
* and we want to try and determine the types of the output fields
*
* @param inputMeta incoming row meta
* @param r Random instance to use
* @return a list of randomly generated rows with types matching the incoming row types.
* @throws KettleException if a problem occurs
*/
protected static List<Object[]> generateRandomRows( RowMetaInterface inputMeta, Random r ) throws KettleException {
List<Object[]> rows = new ArrayList<Object[]>( NUM_RANDOM_ROWS );
// ValueMetaInterface numericVM = new ValueMeta( "num", ValueMetaInterface.TYPE_NUMBER ); //$NON-NLS-1$
// Shared numeric source metadata used as the "from" type when converting the
// random double into each column's concrete numeric type.
ValueMetaInterface numericVM = ValueMetaFactory.createValueMeta( "num", ValueMetaInterface.TYPE_NUMBER ); //$NON-NLS-1$
for ( int i = 0; i < NUM_RANDOM_ROWS; i++ ) {
Object[] currentRow = new Object[inputMeta.size()];
for ( int j = 0; j < inputMeta.size(); j++ ) {
ValueMetaInterface vm = inputMeta.getValueMeta( j );
// tempVM is a NORMAL-storage copy of the column metadata; it is used both to
// convert values and to re-encode them as binary strings when the original
// column uses lazy (binary-string) storage.
ValueMetaInterface tempVM = vm.clone();
tempVM.setStorageType( ValueMetaInterface.STORAGE_TYPE_NORMAL );
Object newVal;
double d = r.nextDouble();
// Generate a random value matching the declared type of column j, then store
// it either as a plain value (NORMAL storage) or as a binary string.
switch ( vm.getType() ) {
case ValueMetaInterface.TYPE_NUMBER:
case ValueMetaInterface.TYPE_INTEGER:
case ValueMetaInterface.TYPE_BIGNUMBER:
d *= 100.0;
newVal = d;
if ( vm.getStorageType() == ValueMetaInterface.STORAGE_TYPE_BINARY_STRING ) {
newVal = tempVM.convertData( numericVM, newVal );
}
currentRow[j] =
vm.getStorageType() == ValueMetaInterface.STORAGE_TYPE_NORMAL ? vm.convertData( numericVM, newVal ) :
tempVM.convertToBinaryStringStorageType( newVal );
break;
case ValueMetaInterface.TYPE_DATE:
// A date up to ~100 seconds in the future.
newVal = new Date( new Date().getTime() + (long) ( d * 100000 ) );
currentRow[j] =
vm.getStorageType() == ValueMetaInterface.STORAGE_TYPE_NORMAL ? newVal :
tempVM.convertToBinaryStringStorageType( newVal );
break;
case ValueMetaInterface.TYPE_TIMESTAMP:
newVal = new Timestamp( new Date().getTime() + (long) ( d * 100000 ) );
currentRow[j] =
vm.getStorageType() == ValueMetaInterface.STORAGE_TYPE_NORMAL ? newVal :
tempVM.convertToBinaryStringStorageType( newVal );
break;
case ValueMetaInterface.TYPE_BOOLEAN:
newVal = r.nextBoolean();
currentRow[j] =
vm.getStorageType() == ValueMetaInterface.STORAGE_TYPE_NORMAL ? newVal :
tempVM.convertToBinaryStringStorageType( newVal );
break;
default:
// Anything else (e.g. strings) gets one of two fixed values.
newVal = d < 0.5 ? "value1" : "value2";
currentRow[j] =
vm.getStorageType() == ValueMetaInterface.STORAGE_TYPE_NORMAL ? newVal :
tempVM.convertToBinaryStringStorageType( newVal );
}
}
rows.add( currentRow );
}
return rows;
}
示例8: getSQLStatements
import org.pentaho.di.core.row.ValueMetaInterface; //导入方法依赖的package包/类
/**
 * Returns the SQL (DDL) needed to create or adapt the target table so that
 * it matches the field layout this bulk loader will write.
 *
 * @param transMeta  transformation metadata, used for variable substitution
 * @param stepMeta   the step this statement belongs to
 * @param prev       the fields arriving at this step, or null
 * @param repository the repository (unused here)
 * @return a SQLStatement carrying the DDL, a null SQL when nothing is
 *         needed, or an error description
 * @throws KettleStepException if a mapped stream field is missing from the input
 */
public SQLStatement getSQLStatements(TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev, Repository repository) throws KettleStepException
{
    SQLStatement retval = new SQLStatement(stepMeta.getName(), databaseMeta, null); // default: nothing to do!

    if (databaseMeta == null)
    {
        retval.setError(BaseMessages.getString(PKG, "TeraDataBulkLoaderMeta.GetSQL.NoConnectionDefined"));
        return retval;
    }
    if (prev == null || prev.size() == 0)
    {
        retval.setError(BaseMessages.getString(PKG, "TeraDataBulkLoaderMeta.GetSQL.NotReceivingAnyFields"));
        return retval;
    }

    // Copy the incoming fields, renamed to their target column names.
    RowMetaInterface tableFields = new RowMeta();
    for (int i = 0; i < fieldTable.length; i++)
    {
        ValueMetaInterface v = prev.searchValueMeta(fieldStream[i]);
        if (v == null)
        {
            throw new KettleStepException("Unable to find field ["+fieldStream[i]+"] in the input rows");
        }
        ValueMetaInterface tableField = v.clone();
        tableField.setName(fieldTable[i]);
        tableFields.addValueMeta(tableField);
    }

    if (Const.isEmpty(tableName))
    {
        retval.setError(BaseMessages.getString(PKG, "TeraDataBulkLoaderMeta.GetSQL.NoTableDefinedOnConnection"));
        return retval;
    }

    Database db = new Database(loggingObject, databaseMeta);
    db.shareVariablesWith(transMeta);
    try
    {
        db.connect();
        String schemaTable = databaseMeta.getQuotedSchemaTableCombination(
            transMeta.environmentSubstitute(schemaName),
            transMeta.environmentSubstitute(tableName));
        String sql = db.getDDL(schemaTable, tableFields, null, false, null, true);
        // An empty DDL means the table is already up to date.
        retval.setSQL(Const.isEmpty(sql) ? null : sql);
    }
    catch (KettleException e)
    {
        retval.setError(BaseMessages.getString(PKG, "TeraDataBulkLoaderMeta.GetSQL.ErrorOccurred")+e.getMessage());
    }
    finally
    {
        // FIX: the original never disconnected, leaking a connection per call.
        db.disconnect();
    }
    return retval;
}
示例9: processRow
import org.pentaho.di.core.row.ValueMetaInterface; //导入方法依赖的package包/类
/**
 * Processes one incoming row: on the first row the output and insert row
 * layouts are prepared, then the row is written to the target table.
 *
 * @param smi step metadata (a TableOutputMeta)
 * @param sdi step data (a TableOutputData)
 * @return true while more rows may follow; false when input is exhausted or
 *         a write error stopped the step
 * @throws KettleException if an unrecoverable error occurs during setup
 */
public boolean processRow(StepMetaInterface smi, StepDataInterface sdi) throws KettleException
{
meta=(TableOutputMeta)smi;
data=(TableOutputData)sdi;
Object[] r=getRow(); // this also waits for a previous step to be finished.
if (r==null) // no more input to be expected...
{
return false;
}
// One-time setup on the first row: derive the output layout and, when
// explicit field mapping is requested, the insert layout.
if (first)
{
first=false;
data.outputRowMeta = getInputRowMeta().clone();
meta.getFields(data.outputRowMeta, getStepname(), null, null, this);
if ( ! meta.specifyFields() ) {
// Just take the input row
data.insertRowMeta = getInputRowMeta().clone();
}
else {
data.insertRowMeta = new RowMeta();
//
// Cache the position of the compare fields in Row row
//
data.valuenrs = new int[meta.getFieldDatabase().length];
for (int i=0;i<meta.getFieldDatabase().length;i++)
{
data.valuenrs[i]=getInputRowMeta().indexOfValue(meta.getFieldStream()[i]);
if (data.valuenrs[i]<0)
{
throw new KettleStepException(Messages.getString("TableOutput.Exception.FieldRequired",meta.getFieldStream()[i])); //$NON-NLS-1$
}
}
// Build the insert layout: each mapped stream field renamed to its
// database column name.
for (int i=0;i<meta.getFieldDatabase().length;i++)
{
ValueMetaInterface insValue = getInputRowMeta().searchValueMeta( meta.getFieldStream()[i]);
if ( insValue != null )
{
ValueMetaInterface insertValue = insValue.clone();
insertValue.setName(meta.getFieldDatabase()[i]);
data.insertRowMeta.addValueMeta( insertValue );
}
else {
throw new KettleStepException(Messages.getString("TableOutput.Exception.FailedToFindField", meta.getFieldStream()[i])); //$NON-NLS-1$
}
}
}
}
try
{
Object[] outputRowData = writeToTable(getInputRowMeta(), r);
if (outputRowData!=null)
{
putRow(data.outputRowMeta, outputRowData); // in case we want it go further...
incrementLinesOutput();
}
if (checkFeedback(getLinesRead()))
{
if(log.isBasic()) logBasic("linenr "+getLinesRead()); //$NON-NLS-1$
}
}
catch(KettleException e)
{
// A write error is fatal for this step: flag it and stop the transformation.
logError("Because of an error, this step can't continue: ", e);
setErrors(1);
stopAll();
setOutputDone(); // signal end to receiver(s)
return false;
}
return true;
}
示例10: getFields
import org.pentaho.di.core.row.ValueMetaInterface; //导入方法依赖的package包/类
/**
 * Generates this step's output row layout: the incoming layout is discarded
 * and replaced by one value per configured input field, plus optional
 * filename and row-number fields.
 *
 * @param rowMeta  the row layout to fill (cleared first — this step eats its input)
 * @param origin   the step name recorded as origin on every value
 * @param info     informational row metas (unused here)
 * @param nextStep the next step (unused here)
 * @param space    variable space used to resolve the configured encoding
 * @throws KettleStepException declared by the interface; not thrown here
 */
public void getFields(RowMetaInterface rowMeta, String origin, RowMetaInterface[] info, StepMeta nextStep, VariableSpace space) throws KettleStepException
{
    rowMeta.clear(); // Start with a clean slate, eats the input
    for (int i=0;i<inputFields.length;i++) {
        TextFileInputField field = inputFields[i];

        ValueMetaInterface valueMeta = new ValueMeta(field.getName(), field.getType());
        // FIX: the conversion mask was set twice in the original; once is enough.
        valueMeta.setConversionMask( field.getFormat() );
        valueMeta.setLength( field.getLength() );
        valueMeta.setPrecision( field.getPrecision() );
        valueMeta.setDecimalSymbol( field.getDecimalSymbol() );
        valueMeta.setGroupingSymbol( field.getGroupSymbol() );
        valueMeta.setCurrencySymbol( field.getCurrencySymbol() );
        valueMeta.setTrimType( field.getTrimType() );
        if (lazyConversionActive) {
            valueMeta.setStorageType(ValueMetaInterface.STORAGE_TYPE_BINARY_STRING);
        }
        valueMeta.setStringEncoding(space.environmentSubstitute(encoding));

        // In case we want to convert Strings...
        // Using a copy of the valueMeta object means that the inner and outer
        // representation format is the same: preview shows the data the way it
        // is read, and this layout travels down the transformation with the metadata.
        ValueMetaInterface storageMetadata = valueMeta.clone();
        storageMetadata.setType(ValueMetaInterface.TYPE_STRING);
        storageMetadata.setStorageType(ValueMetaInterface.STORAGE_TYPE_NORMAL);
        storageMetadata.setLength(-1,-1); // string lengths are unknown in advance
        valueMeta.setStorageMetadata(storageMetadata);

        valueMeta.setOrigin(origin);
        rowMeta.addValueMeta(valueMeta);
    }
    // Optional extra field carrying the source file name.
    if (!Const.isEmpty(filenameField) && includingFilename) {
        ValueMetaInterface filenameMeta = new ValueMeta(filenameField, ValueMetaInterface.TYPE_STRING);
        filenameMeta.setOrigin(origin);
        if (lazyConversionActive) {
            filenameMeta.setStorageType(ValueMetaInterface.STORAGE_TYPE_BINARY_STRING);
            filenameMeta.setStorageMetadata(new ValueMeta(filenameField, ValueMetaInterface.TYPE_STRING));
        }
        rowMeta.addValueMeta(filenameMeta);
    }
    // Optional extra field carrying the row number.
    if (!Const.isEmpty(rowNumField)) {
        ValueMetaInterface rowNumMeta = new ValueMeta(rowNumField, ValueMetaInterface.TYPE_INTEGER);
        rowNumMeta.setLength(10);
        rowNumMeta.setOrigin(origin);
        rowMeta.addValueMeta(rowNumMeta);
    }
}
示例11: processRow
import org.pentaho.di.core.row.ValueMetaInterface; //导入方法依赖的package包/类
/**
 * Processes one row for the MySQL bulk loader: on the first row the field
 * indexes and per-field formatting metadata are cached and the client
 * process is started; every row is then written to the bulk output and
 * forwarded downstream.
 *
 * @param smi step metadata (a MySQLBulkLoaderMeta)
 * @param sdi step data (a MySQLBulkLoaderData)
 * @return true while more rows may follow; false on end of input or error
 * @throws KettleException declared by the interface; errors are caught below
 */
public boolean processRow(StepMetaInterface smi, StepDataInterface sdi) throws KettleException
{
meta=(MySQLBulkLoaderMeta)smi;
data=(MySQLBulkLoaderData)sdi;
try
{
Object[] r=getRow(); // Get row from input rowset & set row busy!
if (r==null) // no more input to be expected...
{
setOutputDone();
closeOutput();
return false;
}
if (first)
{
first=false;
// Cache field indexes.
//
data.keynrs = new int[meta.getFieldStream().length];
for (int i=0;i<data.keynrs.length;i++) {
data.keynrs[i] = getInputRowMeta().indexOfValue(meta.getFieldStream()[i]);
}
// Pick the formatting metadata used to render each field in the bulk output:
// dates/timestamps and numbers get the configured bulk format where requested.
data.bulkFormatMeta = new ValueMetaInterface[data.keynrs.length];
for (int i=0;i<data.keynrs.length;i++) {
ValueMetaInterface sourceMeta = getInputRowMeta().getValueMeta(data.keynrs[i]);
if (sourceMeta.isDate()) {
if (meta.getFieldFormatType()[i]==MySQLBulkLoaderMeta.FIELD_FORMAT_TYPE_DATE) {
data.bulkFormatMeta[i] = data.bulkDateMeta.clone();
} else if (meta.getFieldFormatType()[i]==MySQLBulkLoaderMeta.FIELD_FORMAT_TYPE_TIMESTAMP) {
data.bulkFormatMeta[i] = data.bulkTimestampMeta.clone(); // default to timestamp
}
} else if (sourceMeta.isNumeric() && meta.getFieldFormatType()[i]==MySQLBulkLoaderMeta.FIELD_FORMAT_TYPE_NUMBER) {
data.bulkFormatMeta[i] = data.bulkNumberMeta.clone();
}
// Fall back to the source metadata itself, except for binary-string storage
// (lazy conversion), which is written out as-is.
if (data.bulkFormatMeta[i]==null && !sourceMeta.isStorageBinaryString()) {
data.bulkFormatMeta[i] = sourceMeta.clone();
}
}
// execute the client statement...
//
execute(meta);
}
// Every nr of rows we re-start the bulk load process to allow indexes etc to fit into the MySQL server memory
// Performance could degrade if we don't do this.
//
if (data.bulkSize>0 && getLinesOutput()>0 && (getLinesOutput()%data.bulkSize)==0) {
closeOutput();
executeLoadCommand();
}
writeRowToBulk(getInputRowMeta(), r);
putRow(getInputRowMeta(), r);
incrementLinesOutput();
return true;
}
catch(Exception e)
{
// Any failure is fatal for this step: flag it and stop the transformation.
logError(Messages.getString("MySQLBulkLoader.Log.ErrorInStep"), e); //$NON-NLS-1$
setErrors(1);
stopAll();
setOutputDone(); // signal end to receiver(s)
return false;
}
}
示例12: getSQLStatements
import org.pentaho.di.core.row.ValueMetaInterface; //导入方法依赖的package包/类
/**
 * Returns the SQL (DDL) needed to create or adapt the target table so that
 * it matches the field layout this bulk loader will write.
 *
 * @param transMeta transformation metadata, used for variable substitution
 * @param stepMeta  the step this statement belongs to
 * @param prev      the fields arriving at this step, or null
 * @return a SQLStatement carrying the DDL, a null SQL when nothing is
 *         needed, or an error description
 * @throws KettleStepException if a mapped stream field is missing from the input
 */
public SQLStatement getSQLStatements(TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev) throws KettleStepException
{
    SQLStatement retval = new SQLStatement(stepMeta.getName(), databaseMeta, null); // default: nothing to do!

    if (databaseMeta == null)
    {
        retval.setError(BaseMessages.getString(PKG, "LucidDBBulkLoaderMeta.GetSQL.NoConnectionDefined")); //$NON-NLS-1$
        return retval;
    }
    if (prev == null || prev.size() == 0)
    {
        retval.setError(BaseMessages.getString(PKG, "LucidDBBulkLoaderMeta.GetSQL.NotReceivingAnyFields")); //$NON-NLS-1$
        return retval;
    }

    // Copy the incoming fields, renamed to their target column names.
    RowMetaInterface tableFields = new RowMeta();
    for (int i = 0; i < fieldTable.length; i++)
    {
        ValueMetaInterface v = prev.searchValueMeta(fieldStream[i]);
        if (v == null)
        {
            throw new KettleStepException("Unable to find field ["+fieldStream[i]+"] in the input rows");
        }
        ValueMetaInterface tableField = v.clone();
        tableField.setName(fieldTable[i]);
        tableFields.addValueMeta(tableField);
    }

    if (Const.isEmpty(tableName))
    {
        retval.setError(BaseMessages.getString(PKG, "LucidDBBulkLoaderMeta.GetSQL.NoTableDefinedOnConnection")); //$NON-NLS-1$
        return retval;
    }

    Database db = new Database(loggingObject, databaseMeta);
    db.shareVariablesWith(transMeta);
    try
    {
        db.connect();
        String schemaTable = databaseMeta.getQuotedSchemaTableCombination(
            transMeta.environmentSubstitute(schemaName),
            transMeta.environmentSubstitute(tableName));
        String sql = db.getDDL(schemaTable, tableFields, null, false, null, true);
        // An empty DDL means the table is already up to date.
        retval.setSQL(Const.isEmpty(sql) ? null : sql);
    }
    catch (KettleException e)
    {
        retval.setError(BaseMessages.getString(PKG, "LucidDBBulkLoaderMeta.GetSQL.ErrorOccurred")+e.getMessage()); //$NON-NLS-1$
    }
    finally
    {
        // FIX: the original never disconnected, leaking a connection per call.
        db.disconnect();
    }
    return retval;
}
示例13: processRow
import org.pentaho.di.core.row.ValueMetaInterface; //导入方法依赖的package包/类
/**
 * Processes one incoming row: on the first row the output and insert row
 * layouts are prepared, then the row is written to the target table.
 *
 * @param smi step metadata (a TableOutputMeta)
 * @param sdi step data (a TableOutputData)
 * @return true while more rows may follow; false when input is exhausted or
 *         a write error stopped the step
 * @throws KettleException if an unrecoverable error occurs during setup
 */
public boolean processRow(StepMetaInterface smi, StepDataInterface sdi) throws KettleException
{
meta=(TableOutputMeta)smi;
data=(TableOutputData)sdi;
Object[] r=getRow(); // this also waits for a previous step to be finished.
if (r==null) // no more input to be expected...
{
return false;
}
// One-time setup on the first row: derive the output layout and, when
// explicit field mapping is requested, the insert layout.
if (first)
{
first=false;
data.outputRowMeta = getInputRowMeta().clone();
meta.getFields(data.outputRowMeta, getStepname(), null, null, this);
if ( ! meta.specifyFields() ) {
// Just take the input row
data.insertRowMeta = getInputRowMeta().clone();
}
else {
data.insertRowMeta = new RowMeta();
//
// Cache the position of the compare fields in Row row
//
data.valuenrs = new int[meta.getFieldDatabase().length];
for (int i=0;i<meta.getFieldDatabase().length;i++)
{
data.valuenrs[i]=getInputRowMeta().indexOfValue(meta.getFieldStream()[i]);
if (data.valuenrs[i]<0)
{
throw new KettleStepException(BaseMessages.getString(PKG, "TableOutput.Exception.FieldRequired",meta.getFieldStream()[i])); //$NON-NLS-1$
}
}
// Build the insert layout: each mapped stream field renamed to its
// database column name.
for (int i=0;i<meta.getFieldDatabase().length;i++)
{
ValueMetaInterface insValue = getInputRowMeta().searchValueMeta( meta.getFieldStream()[i]);
if ( insValue != null )
{
ValueMetaInterface insertValue = insValue.clone();
insertValue.setName(meta.getFieldDatabase()[i]);
data.insertRowMeta.addValueMeta( insertValue );
}
else {
throw new KettleStepException(BaseMessages.getString(PKG, "TableOutput.Exception.FailedToFindField", meta.getFieldStream()[i])); //$NON-NLS-1$
}
}
}
}
try
{
Object[] outputRowData = writeToTable(getInputRowMeta(), r);
if (outputRowData!=null)
{
putRow(data.outputRowMeta, outputRowData); // in case we want it go further...
incrementLinesOutput();
}
if (checkFeedback(getLinesRead()))
{
if(log.isBasic()) logBasic("linenr "+getLinesRead()); //$NON-NLS-1$
}
}
catch(KettleException e)
{
// A write error is fatal for this step: flag it and stop the transformation.
logError("Because of an error, this step can't continue: ", e);
setErrors(1);
stopAll();
setOutputDone(); // signal end to receiver(s)
return false;
}
return true;
}
示例14: getSQLStatements
import org.pentaho.di.core.row.ValueMetaInterface; //导入方法依赖的package包/类
/**
 * Returns the SQL (DDL) needed to create or adapt the target table so that
 * it matches the field layout this bulk loader will write.
 *
 * @param transMeta transformation metadata, used for variable substitution
 * @param stepMeta  the step this statement belongs to
 * @param prev      the fields arriving at this step, or null
 * @return a SQLStatement carrying the DDL, a null SQL when nothing is
 *         needed, or an error description
 * @throws KettleStepException if a mapped stream field is missing from the input
 */
public SQLStatement getSQLStatements(TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev) throws KettleStepException
{
    SQLStatement retval = new SQLStatement(stepMeta.getName(), databaseMeta, null); // default: nothing to do!

    if (databaseMeta == null)
    {
        retval.setError(BaseMessages.getString(PKG, "GPBulkLoaderMeta.GetSQL.NoConnectionDefined")); //$NON-NLS-1$
        return retval;
    }
    if (prev == null || prev.size() == 0)
    {
        retval.setError(BaseMessages.getString(PKG, "GPBulkLoaderMeta.GetSQL.NotReceivingAnyFields")); //$NON-NLS-1$
        return retval;
    }

    // Copy the incoming fields, renamed to their target column names.
    RowMetaInterface tableFields = new RowMeta();
    for (int i = 0; i < fieldTable.length; i++)
    {
        ValueMetaInterface v = prev.searchValueMeta(fieldStream[i]);
        if (v == null)
        {
            throw new KettleStepException("Unable to find field ["+fieldStream[i]+"] in the input rows");
        }
        ValueMetaInterface tableField = v.clone();
        tableField.setName(fieldTable[i]);
        tableFields.addValueMeta(tableField);
    }

    if (Const.isEmpty(tableName))
    {
        retval.setError(BaseMessages.getString(PKG, "GPBulkLoaderMeta.GetSQL.NoTableDefinedOnConnection")); //$NON-NLS-1$
        return retval;
    }

    Database db = new Database(loggingObject, databaseMeta);
    db.shareVariablesWith(transMeta);
    try
    {
        db.connect();
        String schemaTable = databaseMeta.getQuotedSchemaTableCombination(
            transMeta.environmentSubstitute(schemaName),
            transMeta.environmentSubstitute(tableName));
        String sql = db.getDDL(schemaTable, tableFields, null, false, null, true);
        // An empty DDL means the table is already up to date.
        retval.setSQL(Const.isEmpty(sql) ? null : sql);
    }
    catch (KettleException e)
    {
        retval.setError(BaseMessages.getString(PKG, "GPBulkLoaderMeta.GetSQL.ErrorOccurred")+e.getMessage()); //$NON-NLS-1$
    }
    finally
    {
        // FIX: the original never disconnected, leaking a connection per call.
        db.disconnect();
    }
    return retval;
}
示例15: getSQLStatements
import org.pentaho.di.core.row.ValueMetaInterface; //导入方法依赖的package包/类
/**
 * Returns the SQL (DDL) needed to create or adapt the target table so that
 * it matches the field layout this bulk loader will write.
 *
 * @param transMeta transformation metadata, used for variable substitution
 * @param stepMeta  the step this statement belongs to
 * @param prev      the fields arriving at this step, or null
 * @return a SQLStatement carrying the DDL, a null SQL when nothing is
 *         needed, or an error description
 * @throws KettleStepException if a mapped stream field is missing from the input
 */
public SQLStatement getSQLStatements(TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev) throws KettleStepException
{
    SQLStatement retval = new SQLStatement(stepMeta.getName(), databaseMeta, null); // default: nothing to do!

    if (databaseMeta == null)
    {
        retval.setError(BaseMessages.getString(PKG, "OraBulkLoaderMeta.GetSQL.NoConnectionDefined")); //$NON-NLS-1$
        return retval;
    }
    if (prev == null || prev.size() == 0)
    {
        retval.setError(BaseMessages.getString(PKG, "OraBulkLoaderMeta.GetSQL.NotReceivingAnyFields")); //$NON-NLS-1$
        return retval;
    }

    // Copy the incoming fields, renamed to their target column names.
    RowMetaInterface tableFields = new RowMeta();
    for (int i = 0; i < fieldTable.length; i++)
    {
        ValueMetaInterface v = prev.searchValueMeta(fieldStream[i]);
        if (v == null)
        {
            throw new KettleStepException("Unable to find field ["+fieldStream[i]+"] in the input rows");
        }
        ValueMetaInterface tableField = v.clone();
        tableField.setName(fieldTable[i]);
        tableFields.addValueMeta(tableField);
    }

    if (Const.isEmpty(tableName))
    {
        retval.setError(BaseMessages.getString(PKG, "OraBulkLoaderMeta.GetSQL.NoTableDefinedOnConnection")); //$NON-NLS-1$
        return retval;
    }

    Database db = new Database(loggingObject, databaseMeta);
    db.shareVariablesWith(transMeta);
    try
    {
        db.connect();
        String schemaTable = databaseMeta.getQuotedSchemaTableCombination(
            transMeta.environmentSubstitute(schemaName),
            transMeta.environmentSubstitute(tableName));
        String sql = db.getDDL(schemaTable, tableFields, null, false, null, true);
        // An empty DDL means the table is already up to date.
        retval.setSQL(Const.isEmpty(sql) ? null : sql);
    }
    catch (KettleException e)
    {
        retval.setError(BaseMessages.getString(PKG, "OraBulkLoaderMeta.GetSQL.ErrorOccurred")+e.getMessage()); //$NON-NLS-1$
    }
    finally
    {
        // FIX: the original never disconnected, leaking a connection per call.
        db.disconnect();
    }
    return retval;
}