This article collects typical usage examples of the Java method org.pentaho.di.job.Job.isStopped. If you are unsure what Job.isStopped does, how to call it, or what real-world uses look like, the curated code examples below should help. You can also look further into the enclosing class, org.pentaho.di.job.Job.
In total, 15 code examples of Job.isStopped are shown below, ordered roughly by popularity.
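All of the examples share one pattern: a long-running loop inside a job entry includes !parentJob.isStopped() in its loop condition, so the work stops promptly when the user cancels the parent job. The following is a minimal sketch of that pattern, not taken from the Pentaho source; the StoppableLoopExample class and processAll method are hypothetical names, and only Job.isStopped() itself is the real Pentaho API being illustrated.

import org.pentaho.di.job.Job;

// Hypothetical helper showing the recurring Job.isStopped() polling pattern
public class StoppableLoopExample {

    /**
     * Process a batch of items, checking the parent job between items so the
     * loop exits early when the user stops the job.
     *
     * @return the number of items actually processed before stopping
     */
    public static int processAll(String[] items, Job parentJob) {
        int processed = 0;
        for (int i = 0; i < items.length && !parentJob.isStopped(); i++) {
            // ... real per-item work would go here ...
            processed++;
        }
        return processed;
    }
}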
Example 1: GetFiles
import org.pentaho.di.job.Job; // import the package/class this method depends on
/**
* Copy files from a directory on the remote host to the local host.
*
* @param sourceLocation the source directory on the remote host
* @param targetLocation the target directory on the local host
* @param sftpClient an instance of SFTPv3Client that makes an SFTP client connection over SSH-2
* @param pattern the wildcard pattern that remote file names must match to be copied
* @param parentJob the parent job, checked via isStopped() so the copy loop can be interrupted
* @throws Exception if the remote directory cannot be listed or a file cannot be copied
*/
@SuppressWarnings("unchecked")
private void GetFiles(String sourceLocation, String targetLocation,
SFTPv3Client sftpClient,Pattern pattern, Job parentJob) throws Exception
{
String sourceFolder=".";
if (!Const.isEmpty(sourceLocation))
sourceFolder=sourceLocation + FTPUtils.FILE_SEPARATOR;
else
sourceFolder+=FTPUtils.FILE_SEPARATOR;
Vector<SFTPv3DirectoryEntry> filelist = sftpClient.ls(sourceFolder);
if(filelist!=null)
{
Iterator<SFTPv3DirectoryEntry> iterator = filelist.iterator();
while (iterator.hasNext() && !parentJob.isStopped())
{
SFTPv3DirectoryEntry dirEntry = iterator.next();
if (dirEntry == null) continue;
if (dirEntry.filename.equals(".")
|| dirEntry.filename.equals("..") || isDirectory(sftpClient, sourceFolder+dirEntry.filename))
continue;
if(getFileWildcard(dirEntry.filename,pattern))
{
// Copy file from remote host
copyFile(sourceFolder + dirEntry.filename, targetLocation + FTPUtils.FILE_SEPARATOR + dirEntry.filename, sftpClient);
}
}
}
}
Example 2: execute
import org.pentaho.di.job.Job; // import the package/class this method depends on
public Result execute(Result previousResult, int nr, Repository rep, Job parentJob) throws KettleJobException
{
Result result = previousResult;
if (isStart())
{
try {
long sleepTime = getNextExecutionTime();
if (sleepTime>0) {
parentJob.getLog().logBasic(parentJob.getJobname(), "Sleeping: " + (sleepTime/1000/60) + " minutes (sleep time="+sleepTime+")");
long totalSleep = 0L;
while (totalSleep<sleepTime && !parentJob.isStopped()) {
Thread.sleep(1000L);
totalSleep+=1000L;
}
}
} catch (InterruptedException e) {
throw new KettleJobException(e);
}
result = previousResult;
result.setResult( true );
}
else
if (isDummy())
{
result = previousResult;
}
return result;
}
Example 3: processOneFile
import org.pentaho.di.job.Job; // import the package/class this method depends on
private boolean processOneFile(LogWriter log, Result result,Job parentJob,
FileObject fileObject,String realTargetdirectory,
String realWildcard,String realWildcardExclude, FileObject movetodir,String realMovetodirectory,
String realWildcardSource)
{
boolean retval=false;
try{
if(fileObject.getType().equals(FileType.FILE))
{
// We have to unzip one zip file
if(!unzipFile(log, fileObject, realTargetdirectory,realWildcard,
realWildcardExclude,result, parentJob, fileObject, movetodir,realMovetodirectory))
updateErrors();
else
updateSuccess();
}else
{
// Folder..let's see wildcard
FileObject[] children = fileObject.getChildren();
for (int i=0; i<children.length && !parentJob.isStopped(); i++)
{
if(successConditionBroken){
if(!successConditionBrokenExit){
log.logError(toString(), Messages.getString("JobUnZip.Error.SuccessConditionbroken",""+NrErrors));
successConditionBrokenExit=true;
}
return false;
}
// Get only file!
if (!children[i].getType().equals(FileType.FOLDER))
{
boolean unzip=true;
String filename=children[i].getName().getPath();
Pattern patternSource = null;
if (!Const.isEmpty(realWildcardSource))
patternSource = Pattern.compile(realWildcardSource);
// First see if the file matches the regular expression!
if (patternSource!=null)
{
Matcher matcher = patternSource.matcher(filename);
unzip = matcher.matches();
}
if(unzip)
{
if(!unzipFile(log,children[i],realTargetdirectory,realWildcard,
realWildcardExclude,result, parentJob, fileObject,movetodir,
realMovetodirectory))
updateErrors();
else
updateSuccess();
}
}
}
}
}catch(Exception e)
{
updateErrors();
log.logError(toString(), Messages.getString("JobUnZip.Error.Label",e.getMessage()));
}finally
{
if ( fileObject != null )
{
try {
fileObject.close();
}catch ( IOException ex ) {};
}
}
return retval;
}
Example 4: execute
import org.pentaho.di.job.Job; // import the package/class this method depends on
public Result execute(Result previousResult, int nr, Repository rep, Job parentJob)
{
LogWriter log = LogWriter.getInstance();
Result result = previousResult;
result.setResult(false);
if(previousResult!=null)
{
try
{
int size=previousResult.getResultFiles().size();
if(log.isBasic())
log.logBasic(toString(),Messages.getString("JobEntryDeleteResultFilenames.log.FilesFound",""+size));
if(!specifywildcard)
{
// Delete all files
previousResult.getResultFiles().clear();
if(log.isDetailed()) log.logDetailed(toString(),Messages.getString("JobEntryDeleteResultFilenames.log.DeletedFiles",""+size));
}
else
{
List <ResultFile> resultFiles = result.getResultFilesList();
if (resultFiles != null && resultFiles.size() > 0)
{
for (Iterator <ResultFile> it = resultFiles.iterator(); it.hasNext() && !parentJob.isStopped();)
{
ResultFile resultFile = (ResultFile) it.next();
FileObject file = resultFile.getFile();
if (file != null && file.exists())
{
if(CheckFileWildcard(file.getName().getBaseName(), environmentSubstitute(wildcard),true)
&& !CheckFileWildcard(file.getName().getBaseName(), environmentSubstitute(wildcardexclude),false))
{
// Remove file from result files list
result.getResultFiles().remove(resultFile.getFile().toString());
if(log.isDetailed()) log.logDetailed(toString(),Messages.getString("JobEntryDeleteResultFilenames.log.DeletedFile",file.toString()));
}
}
}
}
}
result.setResult(true);
}
catch(Exception e)
{
log.logError(toString(), Messages.getString("JobEntryDeleteResultFilenames.Error",e.toString()));
}
}
return result;
}
Example 5: processOneRow
import org.pentaho.di.job.Job; // import the package/class this method depends on
private boolean processOneRow(LogWriter log, String sourceFileFolder, String SourceWildcard,
String Delimiter,String targetDb, String targetTable, Job parentJob,Result result)
{
boolean retval=false;
try{
File sourcefilefolder=new File(sourceFileFolder);
if(!sourcefilefolder.exists())
{
log.logError(toString(),Messages.getString("JobEntryMSAccessBulkLoad.Error.CanNotFindFile",sourceFileFolder));
return retval;
}
if(sourcefilefolder.isFile())
{
// source is a file
retval=importFile(sourceFileFolder, Delimiter, targetDb,targetTable,log,result,parentJob);
}else if(sourcefilefolder.isDirectory())
{
// source is a folder
File[] listFiles=sourcefilefolder.listFiles();
int nrFiles=listFiles.length;
if(nrFiles>0)
{
// let's fetch children...
for(int i=0;i<nrFiles && !parentJob.isStopped() && continueProcessing;i++)
{
File child=listFiles[i];
String childFullName=child.getAbsolutePath();
if(child.isFile())
{
if(Const.isEmpty(SourceWildcard)){
retval=importFile(childFullName, Delimiter, targetDb,targetTable,log,result,parentJob);
}else{
if(GetFileWildcard(childFullName,SourceWildcard)){
retval=importFile(childFullName, Delimiter, targetDb,targetTable,log,result,parentJob);
}
}
}else {
// let's run process for this folder
if(include_subfolders){
processOneRow(log, childFullName, SourceWildcard,Delimiter, targetDb,
targetTable,parentJob,result);
}
}
}
}else
{
log.logBasic(toString(), Messages.getString("JobEntryMSAccessBulkLoad.Log.FolderEmpty",sourceFileFolder));
}
}else
log.logError(toString(), Messages.getString("JobEntryMSAccessBulkLoad.Log.UnknowType",sourceFileFolder));
}catch(Exception e){
log.logError(toString(), e.getMessage());
incrErrors();
}
return retval;
}
Example 6: execute
import org.pentaho.di.job.Job; // import the package/class this method depends on
public Result execute(Result previousResult, int nr, Repository rep, Job parentJob)
{
LogWriter log = LogWriter.getInstance();
Result result = previousResult;
List<RowMetaAndData> rows = result.getRows();
RowMetaAndData resultRow = null;
result.setResult( false );
NrErrors=0;
NrSuccess=0;
NrFilesToProcess=0;
continueProcessing=true;
limitFiles=Const.toInt(environmentSubstitute(getLimit()),10);
// Get source and destination files, also wildcard
String vsourceFilefolder[] = source_filefolder;
String vsourceWildcard[] = source_wildcard;
String vsourceDelimiter[] = delimiter;
String targetDb[] = target_Db;
String targetTable[] = target_table;
try
{
if (is_args_from_previous){
if (log.isDetailed())
log.logDetailed(toString(), Messages.getString("JobEntryMSAccessBulkLoad.Log.ArgFromPrevious.Found",(rows!=null?rows.size():0)+ ""));
}
if (is_args_from_previous && rows!=null){
for (int iteration=0;iteration<rows.size() && !parentJob.isStopped() && continueProcessing;iteration++) {
resultRow = rows.get(iteration);
// Get source and destination file names, also wildcard
String vSourceFileFolder_previous = resultRow.getString(0,null);
String vSourceWildcard_previous = resultRow.getString(1,null);
String vDelimiter_previous = resultRow.getString(2,null);
String vTargetDb_previous = resultRow.getString(3,null);
String vTargetTable_previous = resultRow.getString(4,null);
processOneRow(log, vSourceFileFolder_previous, vSourceWildcard_previous,vDelimiter_previous,
vTargetDb_previous, vTargetTable_previous, parentJob,result);
}
}
else if(vsourceFilefolder!=null && targetDb!=null && targetTable!=null)
{
for (int i=0;i<vsourceFilefolder.length && !parentJob.isStopped() && continueProcessing;i++)
{
// get real values
String realSourceFileFolder=environmentSubstitute(vsourceFilefolder[i]);
String realSourceWildcard=environmentSubstitute(vsourceWildcard[i]);
String realSourceDelimiter=environmentSubstitute(vsourceDelimiter[i]);
String realTargetDb=environmentSubstitute(targetDb[i]);
String realTargetTable=environmentSubstitute(targetTable[i]);
processOneRow(log, realSourceFileFolder, realSourceWildcard,realSourceDelimiter,
realTargetDb, realTargetTable, parentJob,result);
}
}
}
catch ( Exception e ){
incrErrors();
log.logError(toString(), Messages.getString("JobEntryMSAccessBulkLoad.UnexpectedError",e.getMessage()));
}
// Success Condition
result.setNrErrors(NrErrors);
result.setNrLinesInput(NrFilesToProcess);
result.setNrLinesWritten(NrSuccess);
if(getSuccessStatus()) result.setResult(true);
displayResults(log);
return result;
}
Example 7: execute
import org.pentaho.di.job.Job; // import the package/class this method depends on
public Result execute(Result previousResult, int nr, Repository rep, Job parentJob)
{
LogWriter log = LogWriter.getInstance();
Result result = previousResult;
List<RowMetaAndData> rows = result.getRows();
RowMetaAndData resultRow = null;
result.setResult(true);
NrErrors=0;
continueProcess=true;
NrSuccess=0;
if (argFromPrevious) {
if(log.isDetailed())
log.logDetailed(toString(), Messages.getString("JobEntryTruncateTables.FoundPreviousRows", String.valueOf((rows != null ? rows.size() : 0)))); //$NON-NLS-1$
if(rows.size()==0) return result;
}
if (connection!=null)
{
Database db = new Database(connection);
db.shareVariablesWith(this);
try
{
db.connect();
if (argFromPrevious && rows != null) // Copy the input row to the (command line) arguments
{
for (int iteration = 0; iteration < rows.size() && !parentJob.isStopped() && continueProcess; iteration++) {
resultRow = rows.get(iteration);
// Get values from previous result
String tablename_previous = resultRow.getString(0, null);
String schemaname_previous = resultRow.getString(1, null);
if(!Const.isEmpty(tablename_previous))
{
	if(log.isDetailed())
		log.logDetailed(toString(), Messages.getString("JobEntryTruncateTables.ProcessingRow", tablename_previous, schemaname_previous)); //$NON-NLS-1$
	// let's truncate table (mirrors the arguments branch below)
	if(TruncateTables(log, tablename_previous, schemaname_previous, db))
		updateSuccess();
	else
		updateErrors();
}else{
	log.logError(toString(), Messages.getString("JobEntryTruncateTables.RowEmpty")); //$NON-NLS-1$
}
}
}else if (arguments!=null) {
for (int i = 0; i < arguments.length && !parentJob.isStopped() && continueProcess; i++) {
String realTablename = environmentSubstitute(arguments[i]);
String realSchemaname = environmentSubstitute(schemaname[i]);
if(!Const.isEmpty(realTablename)) {
if(log.isDetailed())
log.logDetailed(toString(), Messages.getString("JobEntryTruncateTables.ProcessingArg", arguments[i], schemaname[i])); //$NON-NLS-1$
// let's truncate table
if(TruncateTables(log, realTablename, realSchemaname, db))
updateSuccess();
else
updateErrors();
}else{
log.logError(toString(), Messages.getString("JobEntryTruncateTables.ArgEmpty", arguments[i], schemaname[i])); //$NON-NLS-1$
}
}
}
}
catch(Exception dbe){
result.setNrErrors(1);
log.logError(toString(), Messages.getString("JobEntryTruncateTables.Error.RunningEntry",dbe.getMessage()));
}finally{
if(db!=null) db.disconnect();
}
}
else {
result.setNrErrors(1);
log.logError(toString(),Messages.getString("JobEntryTruncateTables.NoDbConnection"));
}
result.setNrErrors(NrErrors);
result.setResult(NrErrors==0);
return result;
}
Example 8: execute
import org.pentaho.di.job.Job; // import the package/class this method depends on
public Result execute(Result result, int nr, Repository rep, Job parentJob) throws KettleException {
LogWriter log = LogWriter.getInstance();
result.setResult(false);
result.setNrErrors(1);
try{
if (variableName != null) {
for (int i = 0; i < variableName.length && !parentJob.isStopped(); i++) {
String varname=variableName[i];
String value=variableValue[i];
if(replaceVars){
varname=environmentSubstitute(varname);
value=environmentSubstitute(value);
}
// OK, where do we set this value...
switch(getVariableType()[i]) {
case VARIABLE_TYPE_JVM: {
System.setProperty(varname, value);
setVariable(varname, value);
Job parentjob=parentJob;
while (parentjob!=null) {
parentjob.setVariable(varname, value);
parentjob = parentjob.getParentJob(); // walk up the parent job chain
}
}
break;
case VARIABLE_TYPE_ROOT_JOB: {
// set variable in this job entry
setVariable(varname, value);
Job rootJob = parentJob;
while (rootJob!=null)
{
rootJob.setVariable(varname, value);
rootJob = rootJob.getParentJob();
}
}
break;
case VARIABLE_TYPE_CURRENT_JOB: {
setVariable(varname, value);
if (parentJob!=null)
parentJob.setVariable(varname, value);
else
throw new KettleJobException(Messages.getString("JobEntrySetVariables.Error.UnableSetVariableCurrentJob",varname));
}
break;
case VARIABLE_TYPE_PARENT_JOB: {
setVariable(varname, value);
if (parentJob!=null) {
parentJob.setVariable(varname, value);
Job gpJob = parentJob.getParentJob();
if (gpJob!=null)
gpJob.setVariable(varname, value);
else
throw new KettleJobException(Messages.getString("JobEntrySetVariables.Error.UnableSetVariableParentJob",varname));
}
else {
throw new KettleJobException(Messages.getString("JobEntrySetVariables.Error.UnableSetVariableCurrentJob",varname));
}
}
break;
}
result.setResult(true);
result.setNrErrors(0);
// ok we can process this line
if(log.isDetailed())
log.logDetailed(toString(), Messages.getString("JobEntrySetVariables.Log.SetVariableToValue", varname, value)); //$NON-NLS-1$
}
}
}catch(Exception e)
{
log.logError(toString(),Messages.getString("JobEntrySetVariables.UnExcpectedError",e.getMessage()));
}
return result;
}
Example 9: execute
import org.pentaho.di.job.Job; // import the package/class this method depends on
public Result execute(Result result, int nr, Repository rep, Job parentJob) throws KettleException {
LogWriter log = LogWriter.getInstance();
List<RowMetaAndData> rows = result.getRows();
RowMetaAndData resultRow = null;
int nrErrFiles = 0;
result.setResult(true);
if(deleteallbefore)
{
// clear result filenames
int size=result.getResultFiles().size();
if(log.isBasic()) log.logBasic(toString(),Messages.getString("JobEntryAddResultFilenames.log.FilesFound",""+size));
result.getResultFiles().clear();
if(log.isDetailed()) log.logDetailed(toString(),Messages.getString("JobEntryAddResultFilenames.log.DeletedFiles",""+size));
}
if (argFromPrevious)
{
if(log.isDetailed())
log.logDetailed(toString(), Messages.getString("JobEntryAddResultFilenames.FoundPreviousRows", String.valueOf((rows != null ? rows.size() : 0)))); //$NON-NLS-1$
}
if (argFromPrevious && rows != null) // Copy the input row to the (command line) arguments
{
for (int iteration = 0; iteration < rows.size() && !parentJob.isStopped(); iteration++) {
resultRow = rows.get(iteration);
// Get values from previous result
String filefolder_previous = resultRow.getString(0,null);
String fmasks_previous = resultRow.getString(1,null);
// ok we can process this file/folder
if(log.isDetailed()) log.logDetailed(toString(), Messages.getString("JobEntryAddResultFilenames.ProcessingRow", filefolder_previous, fmasks_previous)); //$NON-NLS-1$
if (!ProcessFile(filefolder_previous, fmasks_previous,parentJob,result, log)) {
nrErrFiles++;
}
}
} else if (arguments != null) {
for (int i = 0; i < arguments.length && !parentJob.isStopped(); i++) {
// ok we can process this file/folder
if(log.isDetailed()) log.logDetailed(toString(), Messages.getString("JobEntryAddResultFilenames.ProcessingArg", arguments[i], filemasks[i])); //$NON-NLS-1$
if (!ProcessFile(arguments[i], filemasks[i],parentJob,result, log)) {
nrErrFiles++;
}
}
}
if (nrErrFiles>0)
{
result.setResult(false);
result.setNrErrors(nrErrFiles);
}
return result;
}
Example 10: execute
import org.pentaho.di.job.Job; // import the package/class this method depends on
public Result execute(Result result, int nr, Repository rep, Job parentJob) throws KettleException {
LogWriter log = LogWriter.getInstance();
List<RowMetaAndData> rows = result.getRows();
RowMetaAndData resultRow = null;
result.setNrErrors(1);
result.setResult(false);
NrErrors=0;
NrSuccess=0;
successConditionBroken=false;
successConditionBrokenExit=false;
limitFolders=Const.toInt(environmentSubstitute(getLimitFolders()),10);
if (argFromPrevious) {
if(log.isDetailed())
log.logDetailed(toString(), Messages.getString("JobEntryDeleteFolders.FoundPreviousRows", String.valueOf((rows != null ? rows.size() : 0)))); //$NON-NLS-1$
}
if (argFromPrevious && rows != null){
for (int iteration = 0; iteration < rows.size() && !parentJob.isStopped(); iteration++) {
if(successConditionBroken){
log.logError(toString(), Messages.getString("JobEntryDeleteFolders.Error.SuccessConditionbroken",""+NrErrors));
result.setNrErrors(NrErrors);
result.setNrLinesDeleted(NrSuccess);
return result;
}
resultRow = rows.get(iteration);
String args_previous = resultRow.getString(0, null);
if(!Const.isEmpty(args_previous)){
if(deleteFolder(args_previous)){
updateSuccess();
}else {
updateErrors();
}
}else{
// empty filename !
log.logError(toString(), Messages.getString("JobEntryDeleteFolders.Error.EmptyLine"));
}
}
} else if (arguments != null) {
for (int i = 0; i < arguments.length && !parentJob.isStopped(); i++) {
if(successConditionBroken)
{
log.logError(toString(), Messages.getString("JobEntryDeleteFolders.Error.SuccessConditionbroken",""+NrErrors));
result.setNrErrors(NrErrors);
result.setNrLinesDeleted(NrSuccess);
return result;
}
String realfilename=environmentSubstitute(arguments[i]);
if(!Const.isEmpty(realfilename))
{
if(deleteFolder(realfilename)){
updateSuccess();
}else {
updateErrors();
}
}else{
// empty filename !
log.logError(toString(), Messages.getString("JobEntryDeleteFolders.Error.EmptyLine"));
}
}
}
if(log.isDetailed()){
log.logDetailed(toString(), "=======================================");
log.logDetailed(toString(), Messages.getString("JobEntryDeleteFolders.Log.Info.NrError","" + NrErrors));
log.logDetailed(toString(), Messages.getString("JobEntryDeleteFolders.Log.Info.NrDeletedFolders","" + NrSuccess));
log.logDetailed(toString(), "=======================================");
}
result.setNrErrors(NrErrors);
result.setNrLinesDeleted(NrSuccess);
if(getSuccessStatus()) result.setResult(true);
return result;
}
Example 11: execute
import org.pentaho.di.job.Job; // import the package/class this method depends on
public Result execute(Result previousResult, int nr, Repository rep, Job parentJob)
{
LogWriter log = LogWriter.getInstance();
Result result = previousResult;
result.setResult( false );
int missingfiles=0;
if (arguments != null)
{
for (int i = 0; i < arguments.length && !parentJob.isStopped(); i++)
{
FileObject file =null;
try
{
String realFilefoldername = environmentSubstitute(arguments[i]);
file = KettleVFS.getFileObject(realFilefoldername);
if (file.exists() && file.isReadable())
{
if(log.isDetailed())
log.logDetailed(toString(), Messages.getString("JobEntryFilesExist.File_Exists", realFilefoldername)); //$NON-NLS-1$
}
else
{
missingfiles ++;
result.setNrErrors(missingfiles);
if(log.isDetailed())
log.logDetailed(toString(), Messages.getString("JobEntryFilesExist.File_Does_Not_Exist", realFilefoldername)); //$NON-NLS-1$
}
}
catch (IOException e)
{
missingfiles ++;
result.setNrErrors(missingfiles);
log.logError(toString(), Messages.getString("JobEntryFilesExist.ERROR_0004_IO_Exception", e.toString())); //$NON-NLS-1$
}
finally
{
if (file != null) {try {file.close();} catch (IOException ex) {};}
}
}
}
if(missingfiles==0)
result.setResult(true);
return result;
}
Example 12: execute
import org.pentaho.di.job.Job; // import the package/class this method depends on
public Result execute(Result result, int nr, Repository rep, Job parentJob) throws KettleException {
LogWriter log = LogWriter.getInstance();
List<RowMetaAndData> rows = result.getRows();
RowMetaAndData resultRow = null;
int NrErrFiles = 0;
result.setResult(false);
result.setNrErrors(1);
if (argFromPrevious) {
if(log.isDetailed())
log.logDetailed(toString(), Messages.getString("JobEntryDeleteFiles.FoundPreviousRows", String.valueOf((rows != null ? rows.size() : 0)))); //$NON-NLS-1$
}
if (argFromPrevious && rows != null) // Copy the input row to the (command line) arguments
{
for (int iteration = 0; iteration < rows.size() && !parentJob.isStopped(); iteration++) {
resultRow = rows.get(iteration);
String args_previous = resultRow.getString(0, null);
String fmasks_previous = resultRow.getString(1, null);
// ok we can process this file/folder
if(log.isDetailed())
log.logDetailed(toString(), Messages.getString("JobEntryDeleteFiles.ProcessingRow", args_previous, fmasks_previous)); //$NON-NLS-1$
if (!ProcessFile(args_previous, fmasks_previous,parentJob)) {
NrErrFiles++;
}
}
} else if (arguments != null) {
for (int i = 0; i < arguments.length && !parentJob.isStopped(); i++) {
// ok we can process this file/folder
if(log.isDetailed())
log.logDetailed(toString(), Messages.getString("JobEntryDeleteFiles.ProcessingArg", arguments[i], filemasks[i])); //$NON-NLS-1$
if (!ProcessFile(arguments[i], filemasks[i],parentJob)) {
NrErrFiles++;
}
}
}
if (NrErrFiles==0)
{
result.setResult(true);
result.setNrErrors(0);
}else
{
result.setNrErrors(NrErrFiles);
result.setResult(false);
}
return result;
}
Example 13: execute
import org.pentaho.di.job.Job; // import the package/class this method depends on
public Result execute(Result previousResult, int nr, Repository rep, Job parentJob)
{
LogWriter log = LogWriter.getInstance();
Result result = previousResult;
result.setResult(false);
result.setNrErrors(1);
int nrexistcolums=0;
int nrnotexistcolums=0;
if(Const.isEmpty(tablename))
{
log.logError(toString(), Messages.getString("JobEntryColumnsExist.Error.TablenameEmpty"));
return result;
}
if(arguments == null)
{
log.logError(toString(), Messages.getString("JobEntryColumnsExist.Error.ColumnameEmpty"));
return result;
}
if (connection!=null)
{
Database db = new Database(connection);
try
{
String realSchemaname = environmentSubstitute(schemaname);
String realTablename = environmentSubstitute(tablename);
if(!Const.isEmpty(realSchemaname))
realTablename = db.getDatabaseMeta().getQuotedSchemaTableCombination(realSchemaname, realTablename);
else
realTablename = db.getDatabaseMeta().quoteField(realTablename);
db.connect();
if (db.checkTableExists(realTablename))
{
if(log.isDetailed()) log.logDetailed(toString(), Messages.getString("JobEntryColumnsExist.Log.TableExists",realTablename));
for (int i = 0; i < arguments.length && !parentJob.isStopped(); i++)
{
String realColumnname = environmentSubstitute(arguments[i]);
realColumnname=db.getDatabaseMeta().quoteField(realColumnname);
if (db.checkColumnExists(realColumnname,realTablename))
{
if(log.isDetailed()) log.logDetailed(toString(), Messages.getString("JobEntryColumnsExist.Log.ColumnExists",realColumnname,realTablename));
nrexistcolums++;
}else
{
log.logError(toString(), Messages.getString("JobEntryColumnsExist.Log.ColumnNotExists",realColumnname,realTablename));
nrnotexistcolums++;
}
}
}
else
{
log.logError(toString(), Messages.getString("JobEntryColumnsExist.Log.TableNotExists",realTablename));
}
}
catch(KettleDatabaseException dbe)
{
log.logError(toString(), Messages.getString("JobEntryColumnsExist.Error.UnexpectedError",dbe.getMessage()));
}finally
{
if(db!=null) try{db.disconnect();}catch(Exception e){};
}
}
else
{
log.logError(toString(), Messages.getString("JobEntryColumnsExist.Error.NoDbConnection"));
}
result.setEntryNr(nrnotexistcolums);
result.setNrLinesWritten(nrexistcolums);
if(nrnotexistcolums==0) result.setResult(true);
return result;
}
Example 14: processOneFile
import org.pentaho.di.job.Job; // import the package/class this method depends on
private boolean processOneFile(Result result,Job parentJob,
FileObject fileObject,String realTargetdirectory,
String realWildcard,String realWildcardExclude, FileObject movetodir,String realMovetodirectory,
String realWildcardSource)
{
boolean retval=false;
try{
if(fileObject.getType().equals(FileType.FILE))
{
// We have to unzip one zip file
if(!unzipFile(fileObject, realTargetdirectory,realWildcard,
realWildcardExclude,result, parentJob, fileObject, movetodir,realMovetodirectory))
updateErrors();
else
updateSuccess();
}else
{
// Folder..let's see wildcard
FileObject[] children = fileObject.getChildren();
for (int i=0; i<children.length && !parentJob.isStopped(); i++)
{
if(successConditionBroken){
if(!successConditionBrokenExit){
logError(BaseMessages.getString(PKG, "JobUnZip.Error.SuccessConditionbroken",""+NrErrors));
successConditionBrokenExit=true;
}
return false;
}
// Get only file!
if (!children[i].getType().equals(FileType.FOLDER))
{
boolean unzip=true;
String filename=children[i].getName().getPath();
Pattern patternSource = null;
if (!Const.isEmpty(realWildcardSource))
patternSource = Pattern.compile(realWildcardSource);
// First see if the file matches the regular expression!
if (patternSource!=null)
{
Matcher matcher = patternSource.matcher(filename);
unzip = matcher.matches();
}
if(unzip)
{
if(!unzipFile(children[i],realTargetdirectory,realWildcard,
realWildcardExclude,result, parentJob, fileObject,movetodir,
realMovetodirectory))
updateErrors();
else
updateSuccess();
}
}
}
}
}catch(Exception e)
{
updateErrors();
logError(BaseMessages.getString(PKG, "JobUnZip.Error.Label",e.getMessage()));
}finally
{
if ( fileObject != null )
{
try {
fileObject.close();
}catch ( IOException ex ) {};
}
}
return retval;
}
Example 15: processOneRow
import org.pentaho.di.job.Job; // import the package/class this method depends on
private boolean processOneRow(String sourceFileFolder, String SourceWildcard,
String Delimiter,String targetDb, String targetTable, Job parentJob,Result result)
{
boolean retval=false;
try{
File sourcefilefolder=new File(sourceFileFolder);
if(!sourcefilefolder.exists())
{
logError(BaseMessages.getString(PKG, "JobEntryMSAccessBulkLoad.Error.CanNotFindFile",sourceFileFolder));
return retval;
}
if(sourcefilefolder.isFile())
{
// source is a file
retval=importFile(sourceFileFolder, Delimiter, targetDb, targetTable, result, parentJob);
}else if(sourcefilefolder.isDirectory())
{
// source is a folder
File[] listFiles=sourcefilefolder.listFiles();
int nrFiles=listFiles.length;
if(nrFiles>0)
{
// let's fetch children...
for(int i=0;i<nrFiles && !parentJob.isStopped() && continueProcessing;i++)
{
File child=listFiles[i];
String childFullName=child.getAbsolutePath();
if(child.isFile())
{
if(Const.isEmpty(SourceWildcard)){
retval=importFile(childFullName, Delimiter, targetDb, targetTable, result, parentJob);
}else{
if(GetFileWildcard(childFullName,SourceWildcard)){
retval=importFile(childFullName, Delimiter, targetDb, targetTable, result, parentJob);
}
}
}else {
// let's run process for this folder
if(include_subfolders){
processOneRow(childFullName, SourceWildcard, Delimiter, targetDb, targetTable, parentJob, result);
}
}
}
}else
{
logBasic(BaseMessages.getString(PKG, "JobEntryMSAccessBulkLoad.Log.FolderEmpty",sourceFileFolder));
}
}else
logError(BaseMessages.getString(PKG, "JobEntryMSAccessBulkLoad.Log.UnknowType",sourceFileFolder));
}catch(Exception e){
logError(e.getMessage());
incrErrors();
}
return retval;
}