This article collects typical usage examples of the Java method org.pentaho.di.core.database.DatabaseMeta.findDatabase. If you are wondering what DatabaseMeta.findDatabase does, how to call it, or what real-world usages look like, the curated examples below should help. You can also look at the containing class, org.pentaho.di.core.database.DatabaseMeta, for further usage examples.
The sections below show 15 code examples of DatabaseMeta.findDatabase, sorted by popularity by default.
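Before diving into the examples, here is a minimal sketch of the basic call pattern. It is only an illustration: the connection named "my_connection" and its settings are placeholders, not taken from any example below. findDatabase walks the supplied list of connection definitions and returns the DatabaseMeta whose name matches, or null when nothing matches, so callers normally check the result before using it. Several examples below use the other overload, which looks a connection up by its numeric repository ID instead of by its name.
import java.util.ArrayList;
import java.util.List;
import org.pentaho.di.core.database.DatabaseMeta;
// Build a list of shared connections; in the examples below this list is
// normally passed in by the transformation or job being loaded.
List<DatabaseMeta> databases = new ArrayList<DatabaseMeta>();
databases.add( new DatabaseMeta( "my_connection", "MYSQL", "Native", "localhost", "test", "3306", "user", "password" ) ); // placeholder settings
// Resolve a connection by name; findDatabase returns null when no match is found.
DatabaseMeta databaseMeta = DatabaseMeta.findDatabase( databases, "my_connection" );
if ( databaseMeta == null ) {
// handle the missing connection instead of hitting a NullPointerException later
System.out.println( "Connection 'my_connection' is not defined" );
}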
Example 1: loadXML
import org.pentaho.di.core.database.DatabaseMeta; // import the package/class that the method depends on
public void loadXML( Node entryNode, List<DatabaseMeta> databases, List<SlaveServer> slaveServers, Repository rep ) throws KettleXMLException {
try {
super.loadXML( entryNode, databases, slaveServers );
String dbname = XMLHandler.getTagValue( entryNode, CONNECTION );
databaseMeta = DatabaseMeta.findDatabase( databases, dbname );
setManagementAction( XMLHandler.getTagValue( entryNode, MANAGEMENT_ACTION ) );
setReplace( "Y".equalsIgnoreCase( XMLHandler.getTagValue( entryNode, REPLACE ) ) );
setFailIfExists( "Y".equalsIgnoreCase( XMLHandler.getTagValue( entryNode, FAIL_IF_EXISTS ) ) );
setWarehouseName( XMLHandler.getTagValue( entryNode, WAREHOUSE_NAME ) );
setWarehouseSize( XMLHandler.getTagValue( entryNode, WAREHOUSE_SIZE ) );
setWarehouseType( XMLHandler.getTagValue( entryNode, WAREHOUSE_TYPE ) );
setMaxClusterCount( XMLHandler.getTagValue( entryNode, MAX_CLUSTER_COUNT ) );
setMinClusterCount( XMLHandler.getTagValue( entryNode, MIN_CLUSTER_COUNT ) );
setAutoSuspend( XMLHandler.getTagValue( entryNode, AUTO_SUSPEND ) );
setAutoResume( "Y".equalsIgnoreCase( XMLHandler.getTagValue( entryNode, AUTO_RESUME ) ) );
setInitiallySuspended( "Y".equalsIgnoreCase( XMLHandler.getTagValue( entryNode, INITIALLY_SUSPENDED ) ) );
setResourceMonitor( XMLHandler.getTagValue( entryNode, RESOURCE_MONITOR ) );
setComment( XMLHandler.getTagValue( entryNode, COMMENT ) );
setFailIfNotExists( "Y".equalsIgnoreCase( XMLHandler.getTagValue( entryNode, FAIL_IF_NOT_EXISTS ) ) );
} catch ( KettleXMLException dbe ) {
throw new KettleXMLException( BaseMessages.getString( PKG, "SnowflakeWarehouseManager.Error.Exception.UnableLoadXML" ), dbe );
}
}
Example 2: loadXML
import org.pentaho.di.core.database.DatabaseMeta; // import the package/class that the method depends on
public void loadXML(Node entrynode, List<DatabaseMeta> databases, List<SlaveServer> slaveServers, Repository rep) throws KettleXMLException
{
try
{
super.loadXML(entrynode, databases, slaveServers);
String dbname = XMLHandler.getTagValue(entrynode, "connection");
connection = DatabaseMeta.findDatabase(databases, dbname);
schemaname =XMLHandler.getTagValue(entrynode, "schemaname");
tablename =XMLHandler.getTagValue(entrynode, "tablename");
successCondition = getSucessConditionByCode(Const.NVL(XMLHandler.getTagValue(entrynode, "success_condition"), ""));
rowsCountValue = Const.NVL(XMLHandler.getTagValue(entrynode,"rows_count_value"), "0");
iscustomSQL = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "is_custom_sql"));
isUseVars = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "is_usevars"));
customSQL =XMLHandler.getTagValue(entrynode, "custom_sql");
isAddRowsResult = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "add_rows_result"));
maximumTimeout = XMLHandler.getTagValue(entrynode, "maximum_timeout");
checkCycleTime = XMLHandler.getTagValue(entrynode, "check_cycle_time");
successOnTimeout = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "success_on_timeout"));
isClearResultList = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "clear_result_rows"));
}
catch(KettleException e)
{
throw new KettleXMLException(Messages.getString("JobEntryWaitForSQL.UnableLoadXML"),e);
}
}
Example 3: TransDependency
import org.pentaho.di.core.database.DatabaseMeta; // import the package/class that the method depends on
public TransDependency(Repository rep, long id_dependency, List<DatabaseMeta> databases) throws KettleException
{
try
{
setID(id_dependency);
RowMetaAndData r = rep.getTransDependency(id_dependency);
if (r!=null)
{
long id_connection = r.getInteger("ID_DATABASE", 0); //$NON-NLS-1$
db = DatabaseMeta.findDatabase(databases, id_connection);
tablename = r.getString("TABLE_NAME", null); //$NON-NLS-1$
fieldname = r.getString("FIELD_NAME", null); //$NON-NLS-1$
}
}
catch(KettleException dbe)
{
throw new KettleException(Messages.getString("TransDependency.Exception.UnableToLoadTransformationDependency")+id_dependency, dbe); //$NON-NLS-1$
}
}
Example 4: loadXML
import org.pentaho.di.core.database.DatabaseMeta; // import the package/class that the method depends on
public void loadXML(Node entrynode, List<DatabaseMeta> databases, List<SlaveServer> slaveServers, Repository rep) throws KettleXMLException
{
try
{
super.loadXML(entrynode, databases, slaveServers);
schemaname = XMLHandler.getTagValue(entrynode, "schemaname");
tablename = XMLHandler.getTagValue(entrynode, "tablename");
filename = XMLHandler.getTagValue(entrynode, "filename");
separator = XMLHandler.getTagValue(entrynode, "separator");
enclosed = XMLHandler.getTagValue(entrynode, "enclosed");
escaped = XMLHandler.getTagValue(entrynode, "escaped");
linestarted = XMLHandler.getTagValue(entrynode, "linestarted");
lineterminated = XMLHandler.getTagValue(entrynode, "lineterminated");
replacedata = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "replacedata"));
ignorelines = XMLHandler.getTagValue(entrynode, "ignorelines");
listattribut = XMLHandler.getTagValue(entrynode, "listattribut");
localinfile = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "localinfile"));
prorityvalue = Const.toInt(XMLHandler.getTagValue(entrynode, "prorityvalue"), -1);
String dbname = XMLHandler.getTagValue(entrynode, "connection");
addfiletoresult = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "addfiletoresult"));
connection = DatabaseMeta.findDatabase(databases, dbname);
}
catch(KettleException e)
{
throw new KettleXMLException("Unable to load job entry of type 'Mysql bulk load' from XML node", e);
}
}
Example 5: readData
import org.pentaho.di.core.database.DatabaseMeta; // import the package/class that the method depends on
private void readData(Node stepnode,List<? extends SharedObjectInterface> databases)
throws KettleXMLException
{
try
{
String csize;
int nrkeys;
String con = XMLHandler.getTagValue(stepnode, "connection"); //$NON-NLS-1$
databaseMeta = DatabaseMeta.findDatabase(databases, con);
csize = XMLHandler.getTagValue(stepnode, "commit"); //$NON-NLS-1$
commitSize = Const.toInt(csize, 0);
schemaName = XMLHandler.getTagValue(stepnode, "lookup", "schema"); //$NON-NLS-1$ //$NON-NLS-2$
tableName = XMLHandler.getTagValue(stepnode, "lookup", "table"); //$NON-NLS-1$ //$NON-NLS-2$
Node lookup = XMLHandler.getSubNode(stepnode, "lookup"); //$NON-NLS-1$
nrkeys = XMLHandler.countNodes(lookup, "key"); //$NON-NLS-1$
allocate(nrkeys);
for (int i=0;i<nrkeys;i++)
{
Node knode = XMLHandler.getSubNodeByNr(lookup, "key", i); //$NON-NLS-1$
keyStream [i] = XMLHandler.getTagValue(knode, "name"); //$NON-NLS-1$
keyLookup [i] = XMLHandler.getTagValue(knode, "field"); //$NON-NLS-1$
keyCondition[i] = XMLHandler.getTagValue(knode, "condition"); //$NON-NLS-1$
if (keyCondition[i]==null) keyCondition[i]="="; //$NON-NLS-1$
keyStream2 [i] = XMLHandler.getTagValue(knode, "name2"); //$NON-NLS-1$
}
}
catch(Exception e)
{
throw new KettleXMLException(Messages.getString("DeleteMeta.Exception.UnableToReadStepInfoFromXML"), e); //$NON-NLS-1$
}
}
Example 6: loadXML
import org.pentaho.di.core.database.DatabaseMeta; // import the package/class that the method depends on
public void loadXML(Node entrynode, List<DatabaseMeta> databases, List<SlaveServer> slaveServers, Repository rep) throws KettleXMLException
{
try
{
super.loadXML(entrynode, databases, slaveServers);
String dbname = XMLHandler.getTagValue(entrynode, "connection");
this.connection = DatabaseMeta.findDatabase(databases, dbname);
this.argFromPrevious = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "arg_from_previous"));
Node fields = XMLHandler.getSubNode(entrynode, "fields"); //$NON-NLS-1$
// How many field arguments?
int nrFields = XMLHandler.countNodes(fields, "field"); //$NON-NLS-1$
this.arguments = new String[nrFields];
this.schemaname = new String[nrFields];
// Read them all...
for (int i = 0; i < nrFields; i++) {
Node fnode = XMLHandler.getSubNodeByNr(fields, "field", i); //$NON-NLS-1$
this.arguments[i] = XMLHandler.getTagValue(fnode, "name"); //$NON-NLS-1$
this.schemaname[i] = XMLHandler.getTagValue(fnode, "schemaname"); //$NON-NLS-1$
}
}
catch(KettleException e)
{
throw new KettleXMLException(BaseMessages.getString(PKG, "JobEntryTruncateTables.UnableLoadXML"),e);
}
}
Example 7: readData
import org.pentaho.di.core.database.DatabaseMeta; // import the package/class that the method depends on
private void readData(Node stepnode, List<? extends SharedObjectInterface> databases)
throws KettleXMLException
{
try
{
String con = XMLHandler.getTagValue(stepnode, "connection"); //$NON-NLS-1$
databaseMeta = DatabaseMeta.findDatabase(databases, con);
bufferSize = XMLHandler.getTagValue(stepnode, "buffer_size"); //$NON-NLS-1$
schemaName = XMLHandler.getTagValue(stepnode, "schema"); //$NON-NLS-1$
tableName = XMLHandler.getTagValue(stepnode, "table"); //$NON-NLS-1$
mClientPath = XMLHandler.getTagValue(stepnode, "mclient_path"); //$NON-NLS-1$
logFile = XMLHandler.getTagValue(stepnode, "log_file"); //$NON-NLS-1$
encoding = XMLHandler.getTagValue(stepnode, "encoding"); //$NON-NLS-1$
truncate = "Y".equals(XMLHandler.getTagValue(stepnode, "truncate")); //$NON-NLS-1$
autoSchema = "Y".equals(XMLHandler.getTagValue(stepnode, "auto_schema")); //$NON-NLS-1$
autoStringWidths = "Y".equals(XMLHandler.getTagValue(stepnode, "auto_string_widths")); //$NON-NLS-1$
int nrvalues = XMLHandler.countNodes(stepnode, "mapping"); //$NON-NLS-1$
allocate(nrvalues);
for (int i=0;i<nrvalues;i++)
{
Node vnode = XMLHandler.getSubNodeByNr(stepnode, "mapping", i); //$NON-NLS-1$
fieldTable[i] = XMLHandler.getTagValue(vnode, "stream_name"); //$NON-NLS-1$
fieldStream[i] = XMLHandler.getTagValue(vnode, "field_name"); //$NON-NLS-1$
if (fieldStream[i]==null) fieldStream[i]=fieldTable[i]; // default: the same name!
fieldFormatOk[i] = "Y".equalsIgnoreCase(XMLHandler.getTagValue(vnode, "field_format_ok")); //$NON-NLS-1$
}
}
catch(Exception e)
{
throw new KettleXMLException(BaseMessages.getString(PKG, "MonetDBBulkLoaderMeta.Exception.UnableToReadStepInfoFromXML"), e); //$NON-NLS-1$
}
}
Example 8: readData
import org.pentaho.di.core.database.DatabaseMeta; // import the package/class that the method depends on
private void readData(Node stepnode,List<? extends SharedObjectInterface> databases)
throws KettleXMLException
{
try
{
String csize;
int nrkeys;
String con = XMLHandler.getTagValue(stepnode, "connection"); //$NON-NLS-1$
databaseMeta = DatabaseMeta.findDatabase(databases, con);
csize = XMLHandler.getTagValue(stepnode, "commit"); //$NON-NLS-1$
commitSize = Const.toInt(csize, 0);
schemaName = XMLHandler.getTagValue(stepnode, "lookup", "schema"); //$NON-NLS-1$ //$NON-NLS-2$
tableName = XMLHandler.getTagValue(stepnode, "lookup", "table"); //$NON-NLS-1$ //$NON-NLS-2$
Node lookup = XMLHandler.getSubNode(stepnode, "lookup"); //$NON-NLS-1$
nrkeys = XMLHandler.countNodes(lookup, "key"); //$NON-NLS-1$
allocate(nrkeys);
for (int i=0;i<nrkeys;i++)
{
Node knode = XMLHandler.getSubNodeByNr(lookup, "key", i); //$NON-NLS-1$
keyStream [i] = XMLHandler.getTagValue(knode, "name"); //$NON-NLS-1$
keyLookup [i] = XMLHandler.getTagValue(knode, "field"); //$NON-NLS-1$
keyCondition[i] = XMLHandler.getTagValue(knode, "condition"); //$NON-NLS-1$
if (keyCondition[i]==null) keyCondition[i]="="; //$NON-NLS-1$
keyStream2 [i] = XMLHandler.getTagValue(knode, "name2"); //$NON-NLS-1$
}
}
catch(Exception e)
{
throw new KettleXMLException(BaseMessages.getString(PKG, "DeleteMeta.Exception.UnableToReadStepInfoFromXML"), e); //$NON-NLS-1$
}
}
Example 9: loadRep
import org.pentaho.di.core.database.DatabaseMeta; // import the package/class that the method depends on
public void loadRep(Repository rep, long id_jobentry, List<DatabaseMeta> databases, List<SlaveServer> slaveServers)
throws KettleException
{
try
{
super.loadRep(rep, id_jobentry, databases, slaveServers);
schemaname = rep.getJobEntryAttributeString(id_jobentry, "schemaname");
tablename = rep.getJobEntryAttributeString(id_jobentry, "tablename");
filename = rep.getJobEntryAttributeString(id_jobentry, "filename");
separator = rep.getJobEntryAttributeString(id_jobentry, "separator");
enclosed = rep.getJobEntryAttributeString(id_jobentry, "enclosed");
lineterminated = rep.getJobEntryAttributeString(id_jobentry, "lineterminated");
limitlines = rep.getJobEntryAttributeString(id_jobentry, "limitlines");
listcolumn = rep.getJobEntryAttributeString(id_jobentry, "listcolumn");
highpriority=rep.getJobEntryAttributeBoolean(id_jobentry, "highpriority");
optionenclosed=rep.getJobEntryAttributeBoolean(id_jobentry, "optionenclosed");
outdumpvalue=(int) rep.getJobEntryAttributeInteger(id_jobentry, "outdumpvalue");
iffileexists=(int) rep.getJobEntryAttributeInteger(id_jobentry, "iffileexists");
addfiletoresult=rep.getJobEntryAttributeBoolean(id_jobentry, "addfiletoresult");
long id_db = rep.getJobEntryAttributeInteger(id_jobentry, "id_database");
if (id_db>0)
{
connection = DatabaseMeta.findDatabase(databases, id_db);
}
else
{
// This is where we normally end up; the previous lines are for backward compatibility.
connection = DatabaseMeta.findDatabase(databases, rep.getJobEntryAttributeString(id_jobentry, "connection"));
}
}
catch(KettleDatabaseException dbe)
{
throw new KettleException("Unable to load job entry of type 'table exists' from the repository for id_jobentry="+id_jobentry, dbe);
}
}
Example 10: getTargetDatabase
import org.pentaho.di.core.database.DatabaseMeta; // import the package/class that the method depends on
public DatabaseMeta getTargetDatabase()
{
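// wTargetDB is the dialog's list of available connections; when exactly one
// entry is selected, resolve that name back to its DatabaseMeta definition.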
if (wTargetDB.getSelection().length==1)
{
String targetDbName = wTargetDB.getSelection()[0];
return DatabaseMeta.findDatabase(databases, targetDbName);
}
return null;
}
Example 11: readData
import org.pentaho.di.core.database.DatabaseMeta; // import the package/class that the method depends on
private void readData(Node stepnode, List<? extends SharedObjectInterface> databases)
throws KettleXMLException
{
try
{
valuename = XMLHandler.getTagValue(stepnode, "valuename"); //$NON-NLS-1$
useDatabase = "Y".equalsIgnoreCase(XMLHandler.getTagValue(stepnode, "use_database")); //$NON-NLS-1$ //$NON-NLS-2$
String conn = XMLHandler.getTagValue(stepnode, "connection"); //$NON-NLS-1$
database = DatabaseMeta.findDatabase(databases, conn);
schemaName = XMLHandler.getTagValue(stepnode, "schema"); //$NON-NLS-1$
sequenceName = XMLHandler.getTagValue(stepnode, "seqname"); //$NON-NLS-1$
useCounter = "Y".equalsIgnoreCase(XMLHandler.getTagValue(stepnode, "use_counter")); //$NON-NLS-1$ //$NON-NLS-2$
counterName = XMLHandler.getTagValue(stepnode, "counter_name"); //$NON-NLS-1$
startAt = XMLHandler.getTagValue(stepnode, "start_at"); //$NON-NLS-1$
incrementBy = XMLHandler.getTagValue(stepnode, "increment_by"); //$NON-NLS-1$
maxValue = XMLHandler.getTagValue(stepnode, "max_value"); //$NON-NLS-1$
// TODO startAt = Const.toLong(XMLHandler.getTagValue(stepnode, "start_at"), 1); //$NON-NLS-1$
// incrementBy = Const.toLong(XMLHandler.getTagValue(stepnode, "increment_by"), 1); //$NON-NLS-1$
// maxValue = Const.toLong(XMLHandler.getTagValue(stepnode, "max_value"), 999999999L); //$NON-NLS-1$
}
catch(Exception e)
{
throw new KettleXMLException(BaseMessages.getString(PKG, "AddSequenceMeta.Exception.ErrorLoadingStepInfo"), e); //$NON-NLS-1$
}
}
Example 12: getSourceDatabase
import org.pentaho.di.core.database.DatabaseMeta; // import the package/class that the method depends on
public DatabaseMeta getSourceDatabase()
{
if (wSourceDB.getSelection().length==1)
{
String sourceDbName = wSourceDB.getSelection()[0];
return DatabaseMeta.findDatabase(databases, sourceDbName);
}
return null;
}
Example 13: readRep
import org.pentaho.di.core.database.DatabaseMeta; // import the package/class that the method depends on
public void readRep(Repository rep, long id_step, List<DatabaseMeta> databases, Map<String, Counter> counters)
throws KettleException
{
try
{
long id_connection = rep.getStepAttributeInteger(id_step, "id_connection"); //$NON-NLS-1$
databaseMeta = DatabaseMeta.findDatabase( databases, id_connection);
maxErrors = (int)rep.getStepAttributeInteger(id_step, "errors"); //$NON-NLS-1$
bufferSize = rep.getStepAttributeString(id_step, "buffer_size"); //$NON-NLS-1$
schemaName = rep.getStepAttributeString(id_step, "schema"); //$NON-NLS-1$
tableName = rep.getStepAttributeString(id_step, "table"); //$NON-NLS-1$
encoding = rep.getStepAttributeString(id_step, "encoding"); //$NON-NLS-1$
fifoDirectory = rep.getStepAttributeString(id_step, "fifo_directory"); //$NON-NLS-1$
fifoServerName = rep.getStepAttributeString(id_step, "fifo_server_name"); //$NON-NLS-1$
int nrvalues = rep.countNrStepAttributes(id_step, "stream_name"); //$NON-NLS-1$
allocate(nrvalues);
for (int i=0;i<nrvalues;i++)
{
fieldTable[i] = rep.getStepAttributeString(id_step, i, "stream_name"); //$NON-NLS-1$
fieldStream[i] = rep.getStepAttributeString(id_step, i, "field_name"); //$NON-NLS-1$
if (fieldStream[i]==null) fieldStream[i]=fieldTable[i];
fieldFormatOk[i] = rep.getStepAttributeBoolean(id_step, i, "field_format_ok"); //$NON-NLS-1$
}
}
catch(Exception e)
{
throw new KettleException(Messages.getString("LucidDBBulkLoaderMeta.Exception.UnexpectedErrorReadingStepInfoFromRepository"), e); //$NON-NLS-1$
}
}
Example 14: readData
import org.pentaho.di.core.database.DatabaseMeta; // import the package/class that the method depends on
private void readData(Node stepnode, List<DatabaseMeta> databases)
throws KettleXMLException
{
try
{
String con = XMLHandler.getTagValue(stepnode, "connection"); //$NON-NLS-1$
databaseMeta = DatabaseMeta.findDatabase(databases, con);
sql = XMLHandler.getTagValue(stepnode, "sql"); //$NON-NLS-1$
outerJoin = "Y".equalsIgnoreCase(XMLHandler.getTagValue(stepnode, "outer_join")); //$NON-NLS-1$ //$NON-NLS-2$
replacevars = "Y".equalsIgnoreCase(XMLHandler.getTagValue(stepnode, "replace_vars"));
rowLimit = Const.toInt(XMLHandler.getTagValue(stepnode, "rowlimit"), 0); //$NON-NLS-1$
Node param = XMLHandler.getSubNode(stepnode, "parameter"); //$NON-NLS-1$
int nrparam = XMLHandler.countNodes(param, "field"); //$NON-NLS-1$
allocate(nrparam);
for (int i=0;i<nrparam;i++)
{
Node pnode = XMLHandler.getSubNodeByNr(param, "field", i); //$NON-NLS-1$
parameterField [i] = XMLHandler.getTagValue(pnode, "name"); //$NON-NLS-1$
String ptype = XMLHandler.getTagValue(pnode, "type"); //$NON-NLS-1$
parameterType [i] = ValueMeta.getType(ptype);
}
}
catch(Exception e)
{
throw new KettleXMLException(BaseMessages.getString(PKG, "DatabaseJoinMeta.Exception.UnableToLoadStepInfo"), e); //$NON-NLS-1$
}
}
Example 15: readData
import org.pentaho.di.core.database.DatabaseMeta; // import the package/class that the method depends on
/**
* Reads the XML data to get the step metadata
*
* @param stepNode The XML node for the step
* @throws KettleXMLException
*/
private void readData( Node stepNode, List<? extends SharedObjectInterface> databases ) throws KettleXMLException {
try {
databaseMeta = DatabaseMeta.findDatabase( databases, XMLHandler.getTagValue( stepNode, CONNECTION ) );
targetSchema = XMLHandler.getTagValue( stepNode, TARGET_SCHEMA );
targetTable = XMLHandler.getTagValue( stepNode, TARGET_TABLE );
locationType = XMLHandler.getTagValue( stepNode, LOCATION_TYPE );
stageName = XMLHandler.getTagValue( stepNode, STAGE_NAME );
workDirectory = XMLHandler.getTagValue( stepNode, WORK_DIRECTORY );
onError = XMLHandler.getTagValue( stepNode, ON_ERROR );
errorLimit = XMLHandler.getTagValue( stepNode, ERROR_LIMIT );
splitSize = XMLHandler.getTagValue( stepNode, SPLIT_SIZE );
removeFiles = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepNode, REMOVE_FILES ) );
dataType = XMLHandler.getTagValue( stepNode, DATA_TYPE );
trimWhitespace = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepNode, TRIM_WHITESPACE ) );
nullIf = XMLHandler.getTagValue( stepNode, NULL_IF );
errorColumnMismatch = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepNode, ERROR_COLUMN_MISMATCH ) );
stripNull = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepNode, STRIP_NULL ) );
ignoreUtf8 = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepNode, IGNORE_UTF_8 ) );
allowDuplicateElements = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepNode, ALLOW_DUPLICATE_ELEMENT ) );
enableOctal = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepNode, ENABLE_OCTAL ) );
specifyFields = "Y".equalsIgnoreCase( XMLHandler.getTagValue( stepNode, SPECIFY_FIELDS ) );
jsonField = XMLHandler.getTagValue( stepNode, JSON_FIELD );
Node fields = XMLHandler.getSubNode( stepNode, FIELDS );
int nrfields = XMLHandler.countNodes( fields, FIELD );
allocate( nrfields );
for ( int i = 0; i < nrfields; i++ ) {
Node fnode = XMLHandler.getSubNodeByNr( fields, FIELD, i );
snowflakeBulkLoaderFields[i] = new SnowflakeBulkLoaderField();
snowflakeBulkLoaderFields[i].setStreamField( XMLHandler.getTagValue( fnode, STREAM_FIELD ) );
snowflakeBulkLoaderFields[i].setTableField( XMLHandler.getTagValue( fnode, TABLE_FIELD ) );
}
} catch ( Exception e ) {
throw new KettleXMLException( "Unable to load step info from XML", e );
}
}