本文整理汇总了Java中org.pentaho.di.core.Result.getRows方法的典型用法代码示例。如果您正苦于以下问题:Java Result.getRows方法的具体用法?Java Result.getRows怎么用?Java Result.getRows使用的例子?那么, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类org.pentaho.di.core.Result
的用法示例。
在下文中一共展示了Result.getRows方法的10个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: execute
import org.pentaho.di.core.Result; //导入方法依赖的package包/类
/**
 * Bulk-loads one or more flat files into MS Access tables.
 * <p>
 * Source file/folder, wildcard, delimiter, target database and target table
 * come either from the previous job entry's result rows (when
 * {@code is_args_from_previous} is set) or from the configured arrays.
 *
 * @param previousResult the result of the previous job entry execution; also
 *                       receives this entry's counters and final result flag
 * @param nr             the job entry number
 * @param rep            the repository (unused in the visible code)
 * @param parentJob      the parent job, polled for stop requests
 * @return the updated result object
 */
public Result execute(Result previousResult, int nr, Repository rep, Job parentJob)
{
LogWriter log = LogWriter.getInstance();
Result result = previousResult;
List<RowMetaAndData> rows = result.getRows();
RowMetaAndData resultRow = null;
// Pessimistic default; flipped to true at the end if getSuccessStatus() holds.
result.setResult( false );
NrErrors=0;
NrSuccess=0;
NrFilesToProcess=0;
continueProcessing=true;
// NOTE(review): presumably caps the number of files processed (consumed by
// helpers not visible here, e.g. via continueProcessing) — verify.
limitFiles=Const.toInt(environmentSubstitute(getLimit()),10);
// Get source and destination files, also wildcard
String vsourceFilefolder[] = source_filefolder;
String vsourceWildcard[] = source_wildcard;
String vsourceDelimiter[] = delimiter;
String targetDb[] = target_Db;
String targetTable[] = target_table;
try
{
if (is_args_from_previous){
if (log.isDetailed())
log.logDetailed(toString(), Messages.getString("JobEntryMSAccessBulkLoad.Log.ArgFromPrevious.Found",(rows!=null?rows.size():0)+ ""));
}
if (is_args_from_previous && rows!=null){
// One row per load: field 0 = file/folder, 1 = wildcard, 2 = delimiter,
// 3 = target database, 4 = target table.
for (int iteration=0;iteration<rows.size() && !parentJob.isStopped() && continueProcessing;iteration++) {
resultRow = rows.get(iteration);
// Get source and destination file names, also wildcard
String vSourceFileFolder_previous = resultRow.getString(0,null);
String vSourceWildcard_previous = resultRow.getString(1,null);
String vDelimiter_previous = resultRow.getString(2,null);
String vTargetDb_previous = resultRow.getString(3,null);
String vTargetTable_previous = resultRow.getString(4,null);
processOneRow(log, vSourceFileFolder_previous, vSourceWildcard_previous,vDelimiter_previous,
vTargetDb_previous, vTargetTable_previous, parentJob,result);
}
}
else if(vsourceFilefolder!=null && targetDb!=null && targetTable!=null)
{
// Configured arrays: one entry per file to load.
for (int i=0;i<vsourceFilefolder.length && !parentJob.isStopped() && continueProcessing;i++)
{
// get real values
String realSourceFileFolder=environmentSubstitute(vsourceFilefolder[i]);
String realSourceWildcard=environmentSubstitute(vsourceWildcard[i]);
String realSourceDelimiter=environmentSubstitute(vsourceDelimiter[i]);
String realTargetDb=environmentSubstitute(targetDb[i]);
String realTargetTable=environmentSubstitute(targetTable[i]);
processOneRow(log, realSourceFileFolder, realSourceWildcard,realSourceDelimiter,
realTargetDb, realTargetTable, parentJob,result);
}
}
}
catch ( Exception e ){
// Any unexpected failure counts as one error; processing stops here.
incrErrors();
log.logError(toString(), Messages.getString("JobEntryMSAccessBulkLoad.UnexpectedError",e.getMessage()));
}
// Success Condition
result.setNrErrors(NrErrors);
result.setNrLinesInput(NrFilesToProcess);
result.setNrLinesWritten(NrSuccess);
if(getSuccessStatus()) result.setResult(true);
displayResults(log);
return result;
}
示例2: execute
import org.pentaho.di.core.Result; //导入方法依赖的package包/类
/**
 * Truncates one or more database tables. Table/schema name pairs are taken
 * either from the previous job entry's result rows (when
 * {@code argFromPrevious} is set, field 0 = table, field 1 = schema) or from
 * the configured {@code arguments}/{@code schemaname} arrays.
 *
 * @param previousResult the result of the previous job entry execution; also
 *                       receives this entry's error count and result flag
 * @param nr             the job entry number
 * @param rep            the repository (unused in the visible code)
 * @param parentJob      the parent job, polled for stop requests
 * @return the updated result object
 */
public Result execute(Result previousResult, int nr, Repository rep, Job parentJob)
{
    LogWriter log = LogWriter.getInstance();
    Result result = previousResult;
    List<RowMetaAndData> rows = result.getRows();
    RowMetaAndData resultRow = null;
    result.setResult(true);
    NrErrors = 0;
    continueProcess = true;
    NrSuccess = 0;
    if (argFromPrevious) {
        if (log.isDetailed())
            log.logDetailed(toString(), Messages.getString("JobEntryTruncateTables.FoundPreviousRows", String.valueOf((rows != null ? rows.size() : 0)))); //$NON-NLS-1$
        // FIX: guard against a null row list before calling size() (was a potential NPE).
        if (rows == null || rows.size() == 0) return result;
    }
    if (connection != null)
    {
        Database db = new Database(connection);
        db.shareVariablesWith(this);
        try
        {
            db.connect();
            if (argFromPrevious && rows != null) // Copy the input row to the (command line) arguments
            {
                for (int iteration = 0; iteration < rows.size() && !parentJob.isStopped() && continueProcess; iteration++) {
                    resultRow = rows.get(iteration);
                    // Get values from previous result
                    String tablename_previous = resultRow.getString(0, null);
                    String schemaname_previous = resultRow.getString(1, null);
                    if (!Const.isEmpty(tablename_previous))
                    {
                        if (log.isDetailed())
                            log.logDetailed(toString(), Messages.getString("JobEntryTruncateTables.ProcessingRow", tablename_previous, schemaname_previous)); //$NON-NLS-1$
                        // FIX: actually truncate the table; the original only logged the
                        // row and never acted on it, unlike the arguments branch below.
                        if (TruncateTables(log, tablename_previous, schemaname_previous, db))
                            updateSuccess();
                        else
                            updateErrors();
                    } else {
                        log.logError(toString(), Messages.getString("JobEntryTruncateTables.RowEmpty")); //$NON-NLS-1$
                    }
                }
            } else if (arguments != null) {
                for (int i = 0; i < arguments.length && !parentJob.isStopped() && continueProcess; i++) {
                    String realTablename = environmentSubstitute(arguments[i]);
                    String realSchemaname = environmentSubstitute(schemaname[i]);
                    if (!Const.isEmpty(realTablename)) {
                        if (log.isDetailed())
                            log.logDetailed(toString(), Messages.getString("JobEntryTruncateTables.ProcessingArg", arguments[i], schemaname[i])); //$NON-NLS-1$
                        // let's truncate table
                        if (TruncateTables(log, realTablename, realSchemaname, db))
                            updateSuccess();
                        else
                            updateErrors();
                    } else {
                        log.logError(toString(), Messages.getString("JobEntryTruncateTables.ArgEmpty", arguments[i], schemaname[i])); //$NON-NLS-1$
                    }
                }
            }
        }
        catch (Exception dbe) {
            result.setNrErrors(1);
            log.logError(toString(), Messages.getString("JobEntryTruncateTables.Error.RunningEntry", dbe.getMessage()));
        } finally {
            // db is always non-null here; the redundant null check was removed.
            db.disconnect();
        }
    }
    else {
        result.setNrErrors(1);
        log.logError(toString(), Messages.getString("JobEntryTruncateTables.NoDbConnection"));
    }
    result.setNrErrors(NrErrors);
    result.setResult(NrErrors == 0);
    return result;
}
示例3: execute
import org.pentaho.di.core.Result; //导入方法依赖的package包/类
/**
 * Adds file names to the job result's file list. Name/mask pairs come either
 * from the previous entry's result rows or from the configured
 * {@code arguments}/{@code filemasks} arrays.
 *
 * @param result    the result to extend; its file list may first be cleared
 * @param nr        the job entry number
 * @param rep       the repository (unused in the visible code)
 * @param parentJob the parent job, polled for stop requests
 * @return the updated result object
 */
public Result execute(Result result, int nr, Repository rep, Job parentJob) throws KettleException {
    LogWriter logger = LogWriter.getInstance();
    List<RowMetaAndData> previousRows = result.getRows();
    int failureCount = 0;
    result.setResult(true);

    if (deleteallbefore) {
        // Wipe the currently collected result files before adding new ones.
        int fileCount = result.getResultFiles().size();
        if (logger.isBasic()) {
            logger.logBasic(toString(), Messages.getString("JobEntryAddResultFilenames.log.FilesFound", "" + fileCount));
        }
        result.getResultFiles().clear();
        if (logger.isDetailed()) {
            logger.logDetailed(toString(), Messages.getString("JobEntryAddResultFilenames.log.DeletedFiles", "" + fileCount));
        }
    }

    if (argFromPrevious && logger.isDetailed()) {
        logger.logDetailed(toString(), Messages.getString("JobEntryAddResultFilenames.FoundPreviousRows", String.valueOf((previousRows != null ? previousRows.size() : 0)))); //$NON-NLS-1$
    }

    if (argFromPrevious && previousRows != null) {
        // Each incoming row supplies a file/folder name (field 0) and a wildcard (field 1).
        int rowIndex = 0;
        while (rowIndex < previousRows.size() && !parentJob.isStopped()) {
            RowMetaAndData row = previousRows.get(rowIndex);
            String folder = row.getString(0, null);
            String mask = row.getString(1, null);
            if (logger.isDetailed()) {
                logger.logDetailed(toString(), Messages.getString("JobEntryAddResultFilenames.ProcessingRow", folder, mask)); //$NON-NLS-1$
            }
            if (!ProcessFile(folder, mask, parentJob, result, logger)) {
                failureCount++;
            }
            rowIndex++;
        }
    } else if (arguments != null) {
        for (int idx = 0; idx < arguments.length && !parentJob.isStopped(); idx++) {
            if (logger.isDetailed()) {
                logger.logDetailed(toString(), Messages.getString("JobEntryAddResultFilenames.ProcessingArg", arguments[idx], filemasks[idx])); //$NON-NLS-1$
            }
            if (!ProcessFile(arguments[idx], filemasks[idx], parentJob, result, logger)) {
                failureCount++;
            }
        }
    }

    if (failureCount > 0) {
        result.setResult(false);
        result.setNrErrors(failureCount);
    }
    return result;
}
示例4: execute
import org.pentaho.di.core.Result; //导入方法依赖的package包/类
/**
 * Deletes one or more folders. Folder names come either from the previous
 * entry's result rows (field 0) or from the configured {@code arguments}
 * array. Stops early when the success condition is broken.
 *
 * @param result    the result to update with error/deleted counters
 * @param nr        the job entry number
 * @param rep       the repository (unused in the visible code)
 * @param parentJob the parent job, polled for stop requests
 * @return the updated result object
 */
public Result execute(Result result, int nr, Repository rep, Job parentJob) throws KettleException {
LogWriter log = LogWriter.getInstance();
List<RowMetaAndData> rows = result.getRows();
RowMetaAndData resultRow = null;
// Pessimistic defaults; overwritten by the real counters at the end.
result.setNrErrors(1);
result.setResult(false);
NrErrors=0;
NrSuccess=0;
successConditionBroken=false;
successConditionBrokenExit=false;
// NOTE(review): presumably the error threshold consulted by updateErrors()
// (not visible here) to set successConditionBroken — verify.
limitFolders=Const.toInt(environmentSubstitute(getLimitFolders()),10);
if (argFromPrevious) {
if(log.isDetailed())
log.logDetailed(toString(), Messages.getString("JobEntryDeleteFolders.FoundPreviousRows", String.valueOf((rows != null ? rows.size() : 0)))); //$NON-NLS-1$
}
if (argFromPrevious && rows != null){
for (int iteration = 0; iteration < rows.size() && !parentJob.isStopped(); iteration++) {
// Bail out as soon as the success condition is broken, reporting the
// counters gathered so far.
if(successConditionBroken){
log.logError(toString(), Messages.getString("JobEntryDeleteFolders.Error.SuccessConditionbroken",""+NrErrors));
result.setNrErrors(NrErrors);
result.setNrLinesDeleted(NrSuccess);
return result;
}
resultRow = rows.get(iteration);
// Field 0 of each incoming row holds the folder to delete.
String args_previous = resultRow.getString(0, null);
if(!Const.isEmpty(args_previous)){
if(deleteFolder(args_previous)){
updateSuccess();
}else {
updateErrors();
}
}else{
// empty filename !
log.logError(toString(), Messages.getString("JobEntryDeleteFolders.Error.EmptyLine"));
}
}
} else if (arguments != null) {
for (int i = 0; i < arguments.length && !parentJob.isStopped(); i++) {
if(successConditionBroken)
{
log.logError(toString(), Messages.getString("JobEntryDeleteFolders.Error.SuccessConditionbroken",""+NrErrors));
result.setNrErrors(NrErrors);
result.setNrLinesDeleted(NrSuccess);
return result;
}
// Resolve environment variables in the configured folder name.
String realfilename=environmentSubstitute(arguments[i]);
if(!Const.isEmpty(realfilename))
{
if(deleteFolder(realfilename)){
updateSuccess();
}else {
updateErrors();
}
}else{
// empty filename !
log.logError(toString(), Messages.getString("JobEntryDeleteFolders.Error.EmptyLine"));
}
}
}
// Summary logging of error/success counters.
if(log.isDetailed()){
log.logDetailed(toString(), "=======================================");
log.logDetailed(toString(), Messages.getString("JobEntryDeleteFolders.Log.Info.NrError","" + NrErrors));
log.logDetailed(toString(), Messages.getString("JobEntryDeleteFolders.Log.Info.NrDeletedFolders","" + NrSuccess));
log.logDetailed(toString(), "=======================================");
}
result.setNrErrors(NrErrors);
result.setNrLinesDeleted(NrSuccess);
if(getSuccessStatus()) result.setResult(true);
return result;
}
示例5: execute
import org.pentaho.di.core.Result; //导入方法依赖的package包/类
/**
 * Deletes files matching name/mask pairs. Pairs come either from the
 * previous entry's result rows or from the configured
 * {@code arguments}/{@code filemasks} arrays.
 *
 * @param result    the result to update with the outcome and error count
 * @param nr        the job entry number
 * @param rep       the repository (unused in the visible code)
 * @param parentJob the parent job, polled for stop requests
 * @return the updated result object
 */
public Result execute(Result result, int nr, Repository rep, Job parentJob) throws KettleException {
    LogWriter logger = LogWriter.getInstance();
    List<RowMetaAndData> previousRows = result.getRows();
    int failedCount = 0;
    // Start pessimistic; corrected below when no deletion failed.
    result.setResult(false);
    result.setNrErrors(1);

    if (argFromPrevious && logger.isDetailed()) {
        logger.logDetailed(toString(), Messages.getString("JobEntryDeleteFiles.FoundPreviousRows", String.valueOf((previousRows != null ? previousRows.size() : 0)))); //$NON-NLS-1$
    }

    if (argFromPrevious && previousRows != null) {
        // One row per delete request: field 0 = file/folder, field 1 = wildcard.
        for (int rowNr = 0; rowNr < previousRows.size() && !parentJob.isStopped(); rowNr++) {
            RowMetaAndData row = previousRows.get(rowNr);
            String fileName = row.getString(0, null);
            String fileMask = row.getString(1, null);
            if (logger.isDetailed()) {
                logger.logDetailed(toString(), Messages.getString("JobEntryDeleteFiles.ProcessingRow", fileName, fileMask)); //$NON-NLS-1$
            }
            if (!ProcessFile(fileName, fileMask, parentJob)) {
                failedCount++;
            }
        }
    } else if (arguments != null) {
        for (int argNr = 0; argNr < arguments.length && !parentJob.isStopped(); argNr++) {
            if (logger.isDetailed()) {
                logger.logDetailed(toString(), Messages.getString("JobEntryDeleteFiles.ProcessingArg", arguments[argNr], filemasks[argNr])); //$NON-NLS-1$
            }
            if (!ProcessFile(arguments[argNr], filemasks[argNr], parentJob)) {
                failedCount++;
            }
        }
    }

    if (failedCount == 0) {
        result.setResult(true);
        result.setNrErrors(0);
    } else {
        result.setNrErrors(failedCount);
        result.setResult(false);
    }
    return result;
}
示例6: execute
import org.pentaho.di.core.Result; //导入方法依赖的package包/类
/**
 * Bulk-loads one or more flat files into MS Access tables (newer job-entry
 * API variant: logging goes through the entry's own log channel).
 * <p>
 * Source file/folder, wildcard, delimiter, target database and target table
 * come either from the previous job entry's result rows (when
 * {@code is_args_from_previous} is set) or from the configured arrays.
 *
 * @param previousResult the result of the previous job entry execution; also
 *                       receives this entry's counters and final result flag
 * @param nr             the job entry number
 * @return the updated result object
 */
public Result execute(Result previousResult, int nr)
{
Result result = previousResult;
List<RowMetaAndData> rows = result.getRows();
RowMetaAndData resultRow = null;
// Pessimistic default; flipped to true at the end if getSuccessStatus() holds.
result.setResult( false );
NrErrors=0;
NrSuccess=0;
NrFilesToProcess=0;
continueProcessing=true;
// NOTE(review): presumably caps the number of files processed (consumed by
// helpers not visible here, e.g. via continueProcessing) — verify.
limitFiles=Const.toInt(environmentSubstitute(getLimit()),10);
// Get source and destination files, also wildcard
String vsourceFilefolder[] = source_filefolder;
String vsourceWildcard[] = source_wildcard;
String vsourceDelimiter[] = delimiter;
String targetDb[] = target_Db;
String targetTable[] = target_table;
try
{
if (is_args_from_previous){
if (log.isDetailed())
logDetailed(BaseMessages.getString(PKG, "JobEntryMSAccessBulkLoad.Log.ArgFromPrevious.Found",(rows!=null?rows.size():0)+ ""));
}
if (is_args_from_previous && rows!=null){
// One row per load: field 0 = file/folder, 1 = wildcard, 2 = delimiter,
// 3 = target database, 4 = target table.
for (int iteration=0;iteration<rows.size() && !parentJob.isStopped() && continueProcessing;iteration++) {
resultRow = rows.get(iteration);
// Get source and destination file names, also wildcard
String vSourceFileFolder_previous = resultRow.getString(0,null);
String vSourceWildcard_previous = resultRow.getString(1,null);
String vDelimiter_previous = resultRow.getString(2,null);
String vTargetDb_previous = resultRow.getString(3,null);
String vTargetTable_previous = resultRow.getString(4,null);
processOneRow(vSourceFileFolder_previous, vSourceWildcard_previous,vDelimiter_previous,
vTargetDb_previous, vTargetTable_previous, parentJob,result);
}
}
else if(vsourceFilefolder!=null && targetDb!=null && targetTable!=null)
{
// Configured arrays: one entry per file to load.
for (int i=0;i<vsourceFilefolder.length && !parentJob.isStopped() && continueProcessing;i++)
{
// get real values
String realSourceFileFolder=environmentSubstitute(vsourceFilefolder[i]);
String realSourceWildcard=environmentSubstitute(vsourceWildcard[i]);
String realSourceDelimiter=environmentSubstitute(vsourceDelimiter[i]);
String realTargetDb=environmentSubstitute(targetDb[i]);
String realTargetTable=environmentSubstitute(targetTable[i]);
processOneRow(realSourceFileFolder, realSourceWildcard,realSourceDelimiter,
realTargetDb, realTargetTable, parentJob,result);
}
}
}
catch ( Exception e ){
// Any unexpected failure counts as one error; processing stops here.
incrErrors();
logError(BaseMessages.getString(PKG, "JobEntryMSAccessBulkLoad.UnexpectedError",e.getMessage()));
}
// Success Condition
result.setNrErrors(NrErrors);
result.setNrLinesInput(NrFilesToProcess);
result.setNrLinesWritten(NrSuccess);
if(getSuccessStatus()) result.setResult(true);
displayResults();
return result;
}
示例7: execute
import org.pentaho.di.core.Result; //导入方法依赖的package包/类
/**
 * Adds file names to the job result's file list (newer job-entry API
 * variant). Name/mask pairs come either from the previous entry's result
 * rows or from the configured {@code arguments}/{@code filemasks} arrays.
 *
 * @param result the result to extend; its file list may first be cleared
 * @param nr     the job entry number
 * @return the updated result object
 */
public Result execute(Result result, int nr) throws KettleException {
    List<RowMetaAndData> previousRows = result.getRows();
    int failureCount = 0;
    result.setResult(true);

    if (deleteallbefore) {
        // Drop everything already collected before adding the new entries.
        int existing = result.getResultFiles().size();
        if (log.isBasic()) {
            logBasic(BaseMessages.getString(PKG, "JobEntryAddResultFilenames.log.FilesFound", "" + existing));
        }
        result.getResultFiles().clear();
        if (log.isDetailed()) {
            logDetailed(BaseMessages.getString(PKG, "JobEntryAddResultFilenames.log.DeletedFiles", "" + existing));
        }
    }

    if (argFromPrevious && log.isDetailed()) {
        logDetailed(BaseMessages.getString(PKG, "JobEntryAddResultFilenames.FoundPreviousRows", String.valueOf((previousRows != null ? previousRows.size() : 0)))); //$NON-NLS-1$
    }

    if (argFromPrevious && previousRows != null) {
        // Each incoming row supplies a file/folder name (field 0) and a wildcard (field 1).
        int rowNr = 0;
        while (rowNr < previousRows.size() && !parentJob.isStopped()) {
            RowMetaAndData row = previousRows.get(rowNr);
            String folder = row.getString(0, null);
            String mask = row.getString(1, null);
            if (log.isDetailed()) {
                logDetailed(BaseMessages.getString(PKG, "JobEntryAddResultFilenames.ProcessingRow", folder, mask)); //$NON-NLS-1$
            }
            if (!processFile(folder, mask, parentJob, result)) {
                failureCount++;
            }
            rowNr++;
        }
    } else if (arguments != null) {
        for (int idx = 0; idx < arguments.length && !parentJob.isStopped(); idx++) {
            if (log.isDetailed()) {
                logDetailed(BaseMessages.getString(PKG, "JobEntryAddResultFilenames.ProcessingArg", arguments[idx], filemasks[idx])); //$NON-NLS-1$
            }
            if (!processFile(arguments[idx], filemasks[idx], parentJob, result)) {
                failureCount++;
            }
        }
    }

    if (failureCount > 0) {
        result.setResult(false);
        result.setNrErrors(failureCount);
    }
    return result;
}
示例8: execute
import org.pentaho.di.core.Result; //导入方法依赖的package包/类
/**
 * Deletes one or more folders (newer job-entry API variant). Folder names
 * come either from the previous entry's result rows (field 0) or from the
 * configured {@code arguments} array. Stops early when the success
 * condition is broken.
 *
 * @param result the result to update with error/deleted counters
 * @param nr     the job entry number
 * @return the updated result object
 */
public Result execute(Result result, int nr) throws KettleException {
List<RowMetaAndData> rows = result.getRows();
RowMetaAndData resultRow = null;
// Pessimistic defaults; overwritten by the real counters at the end.
result.setNrErrors(1);
result.setResult(false);
NrErrors=0;
NrSuccess=0;
successConditionBroken=false;
successConditionBrokenExit=false;
// NOTE(review): presumably the error threshold consulted by updateErrors()
// (not visible here) to set successConditionBroken — verify.
limitFolders=Const.toInt(environmentSubstitute(getLimitFolders()),10);
if (argFromPrevious) {
if(log.isDetailed())
logDetailed(BaseMessages.getString(PKG, "JobEntryDeleteFolders.FoundPreviousRows", String.valueOf((rows != null ? rows.size() : 0)))); //$NON-NLS-1$
}
if (argFromPrevious && rows != null){
for (int iteration = 0; iteration < rows.size() && !parentJob.isStopped(); iteration++) {
// Bail out as soon as the success condition is broken, reporting the
// counters gathered so far.
if(successConditionBroken){
logError(BaseMessages.getString(PKG, "JobEntryDeleteFolders.Error.SuccessConditionbroken",""+NrErrors));
result.setNrErrors(NrErrors);
result.setNrLinesDeleted(NrSuccess);
return result;
}
resultRow = rows.get(iteration);
// Field 0 of each incoming row holds the folder to delete.
String args_previous = resultRow.getString(0, null);
if(!Const.isEmpty(args_previous)){
if(deleteFolder(args_previous)){
updateSuccess();
}else {
updateErrors();
}
}else{
// empty filename !
logError(BaseMessages.getString(PKG, "JobEntryDeleteFolders.Error.EmptyLine"));
}
}
} else if (arguments != null) {
for (int i = 0; i < arguments.length && !parentJob.isStopped(); i++) {
if(successConditionBroken)
{
logError(BaseMessages.getString(PKG, "JobEntryDeleteFolders.Error.SuccessConditionbroken",""+NrErrors));
result.setNrErrors(NrErrors);
result.setNrLinesDeleted(NrSuccess);
return result;
}
// Resolve environment variables in the configured folder name.
String realfilename=environmentSubstitute(arguments[i]);
if(!Const.isEmpty(realfilename))
{
if(deleteFolder(realfilename)){
updateSuccess();
}else {
updateErrors();
}
}else{
// empty filename !
logError(BaseMessages.getString(PKG, "JobEntryDeleteFolders.Error.EmptyLine"));
}
}
}
// Summary logging of error/success counters.
if(log.isDetailed()){
logDetailed("=======================================");
logDetailed(BaseMessages.getString(PKG, "JobEntryDeleteFolders.Log.Info.NrError","" + NrErrors));
logDetailed(BaseMessages.getString(PKG, "JobEntryDeleteFolders.Log.Info.NrDeletedFolders","" + NrSuccess));
logDetailed("=======================================");
}
result.setNrErrors(NrErrors);
result.setNrLinesDeleted(NrSuccess);
if(getSuccessStatus()) result.setResult(true);
return result;
}
示例9: execute
import org.pentaho.di.core.Result; //导入方法依赖的package包/类
/**
 * Deletes files matching name/mask pairs (newer job-entry API variant).
 * Pairs come either from the previous entry's result rows or from the
 * configured {@code arguments}/{@code filemasks} arrays.
 *
 * @param result the result to update with the outcome and error count
 * @param nr     the job entry number
 * @return the updated result object
 */
public Result execute(Result result, int nr) throws KettleException {
    List<RowMetaAndData> previousRows = result.getRows();
    int failedCount = 0;
    // Start pessimistic; corrected below when no deletion failed.
    result.setResult(false);
    result.setNrErrors(1);

    if (argFromPrevious && log.isDetailed()) {
        logDetailed(BaseMessages.getString(PKG, "JobEntryDeleteFiles.FoundPreviousRows", String.valueOf((previousRows != null ? previousRows.size() : 0)))); //$NON-NLS-1$
    }

    if (argFromPrevious && previousRows != null) {
        // One row per delete request: field 0 = file/folder, field 1 = wildcard.
        int rowNr = 0;
        while (rowNr < previousRows.size() && !parentJob.isStopped()) {
            RowMetaAndData row = previousRows.get(rowNr);
            String fileName = row.getString(0, null);
            String fileMask = row.getString(1, null);
            if (log.isDetailed()) {
                logDetailed(BaseMessages.getString(PKG, "JobEntryDeleteFiles.ProcessingRow", fileName, fileMask)); //$NON-NLS-1$
            }
            if (!ProcessFile(fileName, fileMask, parentJob)) {
                failedCount++;
            }
            rowNr++;
        }
    } else if (arguments != null) {
        for (int argNr = 0; argNr < arguments.length && !parentJob.isStopped(); argNr++) {
            if (log.isDetailed()) {
                logDetailed(BaseMessages.getString(PKG, "JobEntryDeleteFiles.ProcessingArg", arguments[argNr], filemasks[argNr])); //$NON-NLS-1$
            }
            if (!ProcessFile(arguments[argNr], filemasks[argNr], parentJob)) {
                failedCount++;
            }
        }
    }

    if (failedCount == 0) {
        result.setResult(true);
        result.setNrErrors(0);
    } else {
        result.setNrErrors(failedCount);
        result.setResult(false);
    }
    return result;
}
示例10: getLogRecord
import org.pentaho.di.core.Result; //导入方法依赖的package包/类
/**
 * Calculates all the field values required for one job-entry log record.
 *
 * @param status  the log status to use (not consumed by the visible field
 *                mapping — kept for interface compatibility)
 * @param subject the object to log (a JobEntryCopy), or null to produce a
 *                row whose enabled fields all carry null values
 * @param parent  the parent Job the job entry belongs to
 * @return the populated row, or null when subject is neither null nor a
 *         JobEntryCopy
 */
public RowMetaAndData getLogRecord(LogStatus status, Object subject, Object parent) {
    if (subject == null || subject instanceof JobEntryCopy) {
        JobEntryCopy jobEntryCopy = (JobEntryCopy) subject; // null-safe: casting null is allowed
        Job parentJob = (Job) parent;
        RowMetaAndData row = new RowMetaAndData();
        for (LogTableField field : fields) {
            if (field.isEnabled()) {
                Object value = null;
                if (subject != null) {
                    JobEntryInterface jobEntry = jobEntryCopy.getEntry();
                    JobTracker jobTracker = parentJob.getJobTracker();
                    JobTracker entryTracker = jobTracker.findJobTracker(jobEntryCopy);
                    JobEntryResult jobEntryResult = null;
                    if (entryTracker != null) {
                        jobEntryResult = entryTracker.getJobEntryResult();
                    }
                    Result result = null;
                    if (jobEntryResult != null) {
                        result = jobEntryResult.getResult();
                    }
                    // FIX: use Long.valueOf/Boolean.valueOf instead of the
                    // deprecated boxing constructors new Long(..)/new Boolean(..).
                    switch (ID.valueOf(field.getId())) {
                        case ID_BATCH: value = Long.valueOf(parentJob.getBatchId()); break;
                        case CHANNEL_ID: value = jobEntry.getLogChannel().getLogChannelId(); break;
                        case LOG_DATE: value = new Date(); break;
                        case JOBNAME: value = parentJob.getJobname(); break;
                        case JOBENTRYNAME: value = jobEntry.getName(); break;
                        case LINES_READ: value = Long.valueOf(result != null ? result.getNrLinesRead() : 0); break;
                        case LINES_WRITTEN: value = Long.valueOf(result != null ? result.getNrLinesWritten() : 0); break;
                        case LINES_UPDATED: value = Long.valueOf(result != null ? result.getNrLinesUpdated() : 0); break;
                        case LINES_INPUT: value = Long.valueOf(result != null ? result.getNrLinesInput() : 0); break;
                        case LINES_OUTPUT: value = Long.valueOf(result != null ? result.getNrLinesOutput() : 0); break;
                        case LINES_REJECTED: value = Long.valueOf(result != null ? result.getNrLinesRejected() : 0); break;
                        case ERRORS: value = Long.valueOf(result != null ? result.getNrErrors() : 0); break;
                        case RESULT: value = Boolean.valueOf(result != null ? result.getResult() : false); break;
                        case NR_RESULT_FILES: value = Long.valueOf(result != null && result.getResultFiles() != null ? result.getResultFiles().size() : 0); break;
                        case NR_RESULT_ROWS: value = Long.valueOf(result != null && result.getRows() != null ? result.getRows().size() : 0); break;
                        case LOG_FIELD:
                            if (result != null) {
                                value = result.getLogText();
                            }
                            break;
                        case COPY_NR: value = Long.valueOf(jobEntryCopy.getNr()); break;
                    }
                }
                row.addValue(field.getFieldName(), field.getDataType(), value);
                // Apply the configured column length to the value meta just added.
                row.getRowMeta().getValueMeta(row.size() - 1).setLength(field.getLength());
            }
        }
        return row;
    } else {
        return null;
    }
}