This page collects and organizes typical usage examples of the Java method org.pentaho.di.repository.Repository.getJobEntryAttributeInteger. If you are wondering what Repository.getJobEntryAttributeInteger does, how to call it, or where to find working code, the curated examples below should help. You can also look at the containing class, org.pentaho.di.repository.Repository, for broader context.
Below are 15 code examples of Repository.getJobEntryAttributeInteger, sorted by popularity by default.
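Before the individual examples, here is the basic call pattern they all share: getJobEntryAttributeInteger returns a long for the named job-entry attribute, which callers typically narrow to int. The class below is a minimal, hypothetical sketch (the class name, the timeout field and the "timeout" attribute are illustrative; the loadRep signature and the cast mirror the examples that follow).

import java.util.List;

import org.pentaho.di.cluster.SlaveServer;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.repository.ObjectId;
import org.pentaho.di.repository.Repository;

public class JobEntryExample {
    private int timeout;

    public void loadRep(Repository rep, ObjectId id_jobentry, List<DatabaseMeta> databases,
        List<SlaveServer> slaveServers) throws KettleException {
        // The repository stores integer attributes as longs; narrow to int on the way out.
        timeout = (int) rep.getJobEntryAttributeInteger(id_jobentry, "timeout");
    }

    public int getTimeout() {
        return timeout;
    }
}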
Example 1: loadRep
import org.pentaho.di.repository.Repository; // import the package/class this method depends on
public void loadRep(Repository rep, ObjectId id_jobentry, List<DatabaseMeta> databases, List<SlaveServer> slaveServers) throws KettleException
{
try
{
start = rep.getJobEntryAttributeBoolean(id_jobentry, "start");
dummy = rep.getJobEntryAttributeBoolean(id_jobentry, "dummy");
repeat = rep.getJobEntryAttributeBoolean(id_jobentry, "repeat");
schedulerType = (int)rep.getJobEntryAttributeInteger(id_jobentry, "schedulerType");
intervalSeconds = (int)rep.getJobEntryAttributeInteger(id_jobentry, "intervalSeconds");
intervalMinutes = (int)rep.getJobEntryAttributeInteger(id_jobentry, "intervalMinutes");
hour = (int)rep.getJobEntryAttributeInteger(id_jobentry, "hour");
minutes = (int)rep.getJobEntryAttributeInteger(id_jobentry, "minutes");
weekDay = (int)rep.getJobEntryAttributeInteger(id_jobentry, "weekDay");
dayOfMonth = (int)rep.getJobEntryAttributeInteger(id_jobentry, "dayOfMonth");
}
catch(KettleDatabaseException dbe)
{
throw new KettleException("Unable to load job entry of type 'special' from the repository for id_jobentry="+id_jobentry, dbe);
}
}
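For context, the saveRep counterpart that writes these attributes back would look roughly like the sketch below. It belongs in the same class as Example 1; the id_job parameter, the saveJobEntryAttribute overloads and getObjectId() are assumptions based on the standard Repository/JobEntryBase API rather than code copied from this class.

public void saveRep(Repository rep, ObjectId id_job) throws KettleException {
    try {
        rep.saveJobEntryAttribute(id_job, getObjectId(), "start", start);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "dummy", dummy);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "repeat", repeat);
        // Integer attributes are stored as longs; getJobEntryAttributeInteger reads
        // them back and the loadRep above narrows them to int.
        rep.saveJobEntryAttribute(id_job, getObjectId(), "schedulerType", schedulerType);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "intervalSeconds", intervalSeconds);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "intervalMinutes", intervalMinutes);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "hour", hour);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "minutes", minutes);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "weekDay", weekDay);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "dayOfMonth", dayOfMonth);
    } catch (KettleDatabaseException dbe) {
        throw new KettleException("Unable to save job entry of type 'special' to the repository for id_job=" + id_job, dbe);
    }
}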
Example 2: loadRep
import org.pentaho.di.repository.Repository; // import the package/class this method depends on
public void loadRep(Repository rep, ObjectId id_jobentry, List<DatabaseMeta> databases, List<SlaveServer> slaveServers) throws KettleException {
try {
zipFilename = rep.getJobEntryAttributeString(id_jobentry, "zipfilename");
compressionrate = (int) rep.getJobEntryAttributeInteger(id_jobentry, "compressionrate");
ifzipfileexists = (int) rep.getJobEntryAttributeInteger(id_jobentry, "ifzipfileexists");
afterzip = (int) rep.getJobEntryAttributeInteger(id_jobentry, "afterzip");
wildcard = rep.getJobEntryAttributeString(id_jobentry, "wildcard");
wildcardexclude = rep.getJobEntryAttributeString(id_jobentry, "wildcardexclude");
sourcedirectory = rep.getJobEntryAttributeString(id_jobentry, "sourcedirectory");
movetodirectory = rep.getJobEntryAttributeString(id_jobentry, "movetodirectory");
addfiletoresult = rep.getJobEntryAttributeBoolean(id_jobentry, "addfiletoresult");
isfromprevious = rep.getJobEntryAttributeBoolean(id_jobentry, "isfromprevious");
createparentfolder = rep.getJobEntryAttributeBoolean(id_jobentry, "createparentfolder");
adddate = rep.getJobEntryAttributeBoolean(id_jobentry, "adddate");
addtime = rep.getJobEntryAttributeBoolean(id_jobentry, "addtime");
SpecifyFormat = rep.getJobEntryAttributeBoolean(id_jobentry, "SpecifyFormat");
date_time_format = rep.getJobEntryAttributeString(id_jobentry, "date_time_format");
createMoveToDirectory = rep.getJobEntryAttributeBoolean(id_jobentry, "createMoveToDirectory");
includingSubFolders=rep.getJobEntryAttributeBoolean(id_jobentry, "include_subfolders");
} catch (KettleException dbe) {
throw new KettleException(BaseMessages.getString(PKG, "JobEntryZipFile.UnableLoadJobEntryRep", "" + id_jobentry), dbe);
}
}
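At run time, flags such as adddate, addtime, SpecifyFormat and date_time_format loaded above are usually combined into a date/time suffix for the target filename. The helper below is a hypothetical illustration of that convention (requires java.text.SimpleDateFormat and java.util.Date), not the actual JobEntryZipFile implementation.

private String buildDateTimeSuffix(Date now) {
    // An explicit format wins over the simple adddate/addtime switches.
    if (SpecifyFormat && !Const.isEmpty(date_time_format)) {
        return "_" + new SimpleDateFormat(date_time_format).format(now);
    }
    String suffix = "";
    if (adddate) {
        suffix += "_" + new SimpleDateFormat("yyyyMMdd").format(now);
    }
    if (addtime) {
        suffix += "_" + new SimpleDateFormat("HHmmssSSS").format(now);
    }
    return suffix;
}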
Example 3: loadRep
import org.pentaho.di.repository.Repository; // import the package/class this method depends on
public void loadRep(Repository rep, long id_jobentry, List<DatabaseMeta> databases, List<SlaveServer> slaveServers)
throws KettleException
{
try
{
super.loadRep(rep, id_jobentry, databases, slaveServers);
start = rep.getJobEntryAttributeBoolean(id_jobentry, "start");
dummy = rep.getJobEntryAttributeBoolean(id_jobentry, "dummy");
repeat = rep.getJobEntryAttributeBoolean(id_jobentry, "repeat");
schedulerType = (int)rep.getJobEntryAttributeInteger(id_jobentry, "schedulerType");
intervalSeconds = (int)rep.getJobEntryAttributeInteger(id_jobentry, "intervalSeconds");
intervalMinutes = (int)rep.getJobEntryAttributeInteger(id_jobentry, "intervalMinutes");
hour = (int)rep.getJobEntryAttributeInteger(id_jobentry, "hour");
minutes = (int)rep.getJobEntryAttributeInteger(id_jobentry, "minutes");
weekDay = (int)rep.getJobEntryAttributeInteger(id_jobentry, "weekDay");
dayOfMonth = (int)rep.getJobEntryAttributeInteger(id_jobentry, "dayOfMonth");
}
catch(KettleDatabaseException dbe)
{
throw new KettleException("Unable to load job entry of type 'special' from the repository for id_jobentry="+id_jobentry, dbe);
}
}
Example 4: loadRep
import org.pentaho.di.repository.Repository; // import the package/class this method depends on
public void loadRep(Repository rep, long id_jobentry, List<DatabaseMeta> databases, List<SlaveServer> slaveServers)
throws KettleException
{
try
{
super.loadRep(rep, id_jobentry, databases, slaveServers);
zipFilename = rep.getJobEntryAttributeString(id_jobentry, "zipfilename");
afterunzip=(int) rep.getJobEntryAttributeInteger(id_jobentry, "afterunzip");
wildcard = rep.getJobEntryAttributeString(id_jobentry, "wildcard");
wildcardexclude = rep.getJobEntryAttributeString(id_jobentry, "wildcardexclude");
targetdirectory = rep.getJobEntryAttributeString(id_jobentry, "targetdirectory");
movetodirectory = rep.getJobEntryAttributeString(id_jobentry, "movetodirectory");
addfiletoresult=rep.getJobEntryAttributeBoolean(id_jobentry, "addfiletoresult");
isfromprevious=rep.getJobEntryAttributeBoolean(id_jobentry, "isfromprevious");
adddate=rep.getJobEntryAttributeBoolean(id_jobentry, "adddate");
addtime=rep.getJobEntryAttributeBoolean(id_jobentry, "addtime");
SpecifyFormat=rep.getJobEntryAttributeBoolean(id_jobentry, "SpecifyFormat");
date_time_format = rep.getJobEntryAttributeString(id_jobentry, "date_time_format");
rootzip=rep.getJobEntryAttributeBoolean(id_jobentry, "rootzip");
createfolder=rep.getJobEntryAttributeBoolean(id_jobentry, "createfolder");
nr_limit=rep.getJobEntryAttributeString(id_jobentry, "nr_limit");
wildcardSource=rep.getJobEntryAttributeString(id_jobentry, "wildcardSource");
success_condition = rep.getJobEntryAttributeString(id_jobentry, "success_condition");
if(Const.isEmpty(success_condition)) success_condition=SUCCESS_IF_NO_ERRORS;
iffileexist = getIfFileExistsInt(rep.getJobEntryAttributeString(id_jobentry,"iffileexists") );
createMoveToDirectory=rep.getJobEntryAttributeBoolean(id_jobentry, "create_move_to_directory");
}
catch(KettleException dbe)
{
throw new KettleException("Unable to load job entry of type 'unzip' from the repository for id_jobentry="+id_jobentry, dbe);
}
}
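The iffileexist value above is recovered from a stored code string via getIfFileExistsInt. A lookup of that kind typically looks like the sketch below; the code array and the default of 0 are hypothetical placeholders, not the class's real definitions.

// Hypothetical codes for illustration; the real class defines its own constants.
private static final String[] IF_FILE_EXISTS_CODES = { "skip", "overwrite", "unique_name", "fail" };

private int getIfFileExistsInt(String code) {
    if (Const.isEmpty(code)) {
        return 0; // nothing stored: fall back to the first behaviour
    }
    for (int i = 0; i < IF_FILE_EXISTS_CODES.length; i++) {
        if (IF_FILE_EXISTS_CODES[i].equalsIgnoreCase(code)) {
            return i;
        }
    }
    return 0;
}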
Example 5: loadRep
import org.pentaho.di.repository.Repository; // import the package/class this method depends on
public void loadRep(Repository rep, long id_jobentry, List<DatabaseMeta> databases, List<SlaveServer> slaveServers)
throws KettleException
{
try
{
super.loadRep(rep, id_jobentry, databases, slaveServers);
schemaname = rep.getJobEntryAttributeString(id_jobentry, "schemaname");
tablename = rep.getJobEntryAttributeString(id_jobentry, "tablename");
filename = rep.getJobEntryAttributeString(id_jobentry, "filename");
separator = rep.getJobEntryAttributeString(id_jobentry, "separator");
enclosed = rep.getJobEntryAttributeString(id_jobentry, "enclosed");
escaped = rep.getJobEntryAttributeString(id_jobentry, "escaped");
linestarted = rep.getJobEntryAttributeString(id_jobentry, "linestarted");
lineterminated = rep.getJobEntryAttributeString(id_jobentry, "lineterminated");
replacedata = rep.getJobEntryAttributeBoolean(id_jobentry, "replacedata");
ignorelines = rep.getJobEntryAttributeString(id_jobentry, "ignorelines");
listattribut = rep.getJobEntryAttributeString(id_jobentry, "listattribut");
localinfile = rep.getJobEntryAttributeBoolean(id_jobentry, "localinfile");
prorityvalue =(int) rep.getJobEntryAttributeInteger(id_jobentry, "prorityvalue");
addfiletoresult=rep.getJobEntryAttributeBoolean(id_jobentry, "addfiletoresult");
long id_db = rep.getJobEntryAttributeInteger(id_jobentry, "id_database");
if (id_db>0)
{
connection = DatabaseMeta.findDatabase(databases, id_db);
}
else
{
// This is where we normally end up; the previous lines are for backward compatibility.
connection = DatabaseMeta.findDatabase(databases, rep.getJobEntryAttributeString(id_jobentry, "connection"));
}
}
catch(KettleDatabaseException dbe)
{
throw new KettleException("Unable to load job entry of type 'Mysql bulk load' from the repository for id_jobentry="+id_jobentry, dbe);
}
}
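Examples 5, 8 and 10 repeat the same backward-compatibility dance: read the legacy numeric id_database attribute first, then fall back to the connection name. Factored out, it could look like this sketch, which only reuses the calls already shown above (newer code replaces the whole pattern with loadDatabaseMetaFromJobEntryAttribute, as Example 14 shows).

private DatabaseMeta resolveConnection(Repository rep, long id_jobentry, List<DatabaseMeta> databases)
    throws KettleException {
    // Legacy repositories stored the numeric database id directly on the job entry.
    long id_db = rep.getJobEntryAttributeInteger(id_jobentry, "id_database");
    if (id_db > 0) {
        return DatabaseMeta.findDatabase(databases, id_db);
    }
    // Current repositories store the connection name; this is the normal path.
    return DatabaseMeta.findDatabase(databases, rep.getJobEntryAttributeString(id_jobentry, "connection"));
}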
Example 6: loadRep
import org.pentaho.di.repository.Repository; // import the package/class this method depends on
public void loadRep(Repository rep, long id_jobentry, List<DatabaseMeta> databases, List<SlaveServer> slaveServers)
throws KettleException
{
try
{
super.loadRep(rep, id_jobentry, databases, slaveServers);
serverName = rep.getJobEntryAttributeString(id_jobentry, "servername");
int intServerPort = (int)rep.getJobEntryAttributeInteger(id_jobentry, "serverport");
serverPort = rep.getJobEntryAttributeString(id_jobentry, "serverport"); // backward compatible.
if (intServerPort>0 && Const.isEmpty(serverPort)) serverPort = Integer.toString(intServerPort);
userName = rep.getJobEntryAttributeString(id_jobentry, "username");
password = Encr.decryptPasswordOptionallyEncrypted( rep.getJobEntryAttributeString(id_jobentry, "password") );
sftpDirectory = rep.getJobEntryAttributeString(id_jobentry, "sftpdirectory");
localDirectory = rep.getJobEntryAttributeString(id_jobentry, "localdirectory");
wildcard = rep.getJobEntryAttributeString(id_jobentry, "wildcard");
remove = rep.getJobEntryAttributeBoolean(id_jobentry, "remove");
copyprevious = rep.getJobEntryAttributeBoolean(id_jobentry, "copyprevious");
addFilenameResut = rep.getJobEntryAttributeBoolean(id_jobentry, "addFilenameResut");
}
catch(KettleException dbe)
{
throw new KettleException("Unable to load job entry of type 'SFTPPUT' from the repository for id_jobentry="+id_jobentry, dbe);
}
}
Example 7: loadRep
import org.pentaho.di.repository.Repository; // import the package/class this method depends on
public void loadRep(Repository rep, ObjectId id_jobentry, List<DatabaseMeta> databases, List<SlaveServer> slaveServers) throws KettleException
{
try
{
maximumTimeout = rep.getJobEntryAttributeString(id_jobentry, "maximumTimeout"); //$NON-NLS-1$
scaleTime = (int) rep.getJobEntryAttributeInteger(id_jobentry, "scaletime"); //$NON-NLS-1$
} catch (KettleDatabaseException dbe)
{
throw new KettleException(BaseMessages.getString(PKG, "JobEntryDelay.UnableToLoadFromRepo.Label") //$NON-NLS-1$
+ id_jobentry, dbe);
}
}
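The scaletime code read above selects the time unit applied to maximumTimeout. How such a code is typically turned into a millisecond multiplier is sketched below; the 0/1/2 mapping is an assumption for illustration, not necessarily the exact constants used by JobEntryDelay.

private long getMultiple(int scaleTimeCode) {
    switch (scaleTimeCode) {
        case 0:
            return 1000L;             // assumed: seconds
        case 1:
            return 60L * 1000L;       // assumed: minutes
        case 2:
            return 60L * 60L * 1000L; // assumed: hours
        default:
            return 1000L;
    }
}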
Example 8: loadRep
import org.pentaho.di.repository.Repository; // import the package/class this method depends on
public void loadRep(Repository rep, long id_jobentry, List<DatabaseMeta> databases, List<SlaveServer> slaveServers)
throws KettleException
{
try
{
super.loadRep(rep, id_jobentry, databases, slaveServers);
long id_db = rep.getJobEntryAttributeInteger(id_jobentry, "id_database");
if (id_db>0)
{
connection = DatabaseMeta.findDatabase(databases, id_db);
}
else
{
// This is where we normally end up; the previous lines are for backward compatibility.
connection = DatabaseMeta.findDatabase(databases, rep.getJobEntryAttributeString(id_jobentry, "connection"));
}
schemaname = rep.getJobEntryAttributeString(id_jobentry, "schemaname");
tablename = rep.getJobEntryAttributeString(id_jobentry, "tablename");
successCondition = getSuccessConditionByCode(Const.NVL(rep.getJobEntryAttributeString(id_jobentry, "success_condition"), ""));
limit = rep.getJobEntryAttributeString(id_jobentry, "limit");
iscustomSQL = rep.getJobEntryAttributeBoolean(id_jobentry, "is_custom_sql");
isUseVars = rep.getJobEntryAttributeBoolean(id_jobentry, "is_usevars");
isAddRowsResult = rep.getJobEntryAttributeBoolean(id_jobentry, "add_rows_result");
isClearResultList = rep.getJobEntryAttributeBoolean(id_jobentry, "clear_result_rows");
customSQL = rep.getJobEntryAttributeString(id_jobentry, "custom_sql");
}
catch(KettleDatabaseException dbe)
{
throw new KettleException(Messages.getString("JobEntryEvalTableContent.UnableLoadRep",""+id_jobentry), dbe);
}
}
Example 9: loadRep
import org.pentaho.di.repository.Repository; // import the package/class this method depends on
public void loadRep(Repository rep, ObjectId id_jobentry, List<DatabaseMeta> databases, List<SlaveServer> slaveServers) throws KettleException
{
try
{
serverName = rep.getJobEntryAttributeString(id_jobentry, "servername");
int intServerPort = (int)rep.getJobEntryAttributeInteger(id_jobentry, "serverport");
serverPort = rep.getJobEntryAttributeString(id_jobentry, "serverport"); // backward compatible.
if (intServerPort>0 && Const.isEmpty(serverPort)) serverPort = Integer.toString(intServerPort);
userName = rep.getJobEntryAttributeString(id_jobentry, "username");
password = Encr.decryptPasswordOptionallyEncrypted( rep.getJobEntryAttributeString(id_jobentry, "password") );
remoteDirectory = rep.getJobEntryAttributeString(id_jobentry, "remoteDirectory");
localDirectory = rep.getJobEntryAttributeString(id_jobentry, "localDirectory");
wildcard = rep.getJobEntryAttributeString(id_jobentry, "wildcard");
binaryMode = rep.getJobEntryAttributeBoolean(id_jobentry, "binary");
timeout = (int)rep.getJobEntryAttributeInteger(id_jobentry, "timeout");
remove = rep.getJobEntryAttributeBoolean(id_jobentry, "remove");
onlyPuttingNewFiles = rep.getJobEntryAttributeBoolean(id_jobentry, "only_new");
activeConnection = rep.getJobEntryAttributeBoolean(id_jobentry, "active");
proxyHost = rep.getJobEntryAttributeString(id_jobentry, "proxy_host"); //$NON-NLS-1$
proxyPort = rep.getJobEntryAttributeString(id_jobentry, "proxy_port"); //$NON-NLS-1$
proxyUsername = rep.getJobEntryAttributeString(id_jobentry, "proxy_username"); //$NON-NLS-1$
proxyPassword = rep.getJobEntryAttributeString(id_jobentry, "proxy_password"); //$NON-NLS-1$
connectionType = FTPSConnection.getConnectionTypeByCode(Const.NVL(rep.getJobEntryAttributeString(id_jobentry,"connection_type"), ""));
}
catch(KettleException dbe)
{
throw new KettleException(BaseMessages.getString(PKG, "JobFTPSPUT.UnableToLoadFromRepo", String.valueOf(id_jobentry)), dbe);
}
}
Example 10: loadRep
import org.pentaho.di.repository.Repository; // import the package/class this method depends on
public void loadRep(Repository rep, long id_jobentry, List<DatabaseMeta> databases, List<SlaveServer> slaveServers)
throws KettleException
{
try
{
super.loadRep(rep, id_jobentry, databases, slaveServers);
schemaname = rep.getJobEntryAttributeString(id_jobentry, "schemaname");
tablename = rep.getJobEntryAttributeString(id_jobentry, "tablename");
filename = rep.getJobEntryAttributeString(id_jobentry, "filename");
separator = rep.getJobEntryAttributeString(id_jobentry, "separator");
enclosed = rep.getJobEntryAttributeString(id_jobentry, "enclosed");
lineterminated = rep.getJobEntryAttributeString(id_jobentry, "lineterminated");
limitlines = rep.getJobEntryAttributeString(id_jobentry, "limitlines");
listcolumn = rep.getJobEntryAttributeString(id_jobentry, "listcolumn");
highpriority=rep.getJobEntryAttributeBoolean(id_jobentry, "highpriority");
optionenclosed=rep.getJobEntryAttributeBoolean(id_jobentry, "optionenclosed");
outdumpvalue=(int) rep.getJobEntryAttributeInteger(id_jobentry, "outdumpvalue");
iffileexists=(int) rep.getJobEntryAttributeInteger(id_jobentry, "iffileexists");
addfiletoresult=rep.getJobEntryAttributeBoolean(id_jobentry, "addfiletoresult");
long id_db = rep.getJobEntryAttributeInteger(id_jobentry, "id_database");
if (id_db>0)
{
connection = DatabaseMeta.findDatabase(databases, id_db);
}
else
{
// This is where we normally end up; the previous lines are for backward compatibility.
connection = DatabaseMeta.findDatabase(databases, rep.getJobEntryAttributeString(id_jobentry, "connection"));
}
}
catch(KettleDatabaseException dbe)
{
throw new KettleException("Unable to load job entry of type 'table exists' from the repository for id_jobentry="+id_jobentry, dbe);
}
}
Example 11: loadRep
import org.pentaho.di.repository.Repository; // import the package/class this method depends on
public void loadRep(Repository rep, long id_jobentry, List<DatabaseMeta> databases, List<SlaveServer> slaveServers) throws KettleException
{
try
{
super.loadRep(rep, id_jobentry, databases, slaveServers);
protocol = rep.getJobEntryAttributeString(id_jobentry, "protocol");
port = rep.getJobEntryAttributeString(id_jobentry, "port");
serverName = rep.getJobEntryAttributeString(id_jobentry, "servername");
userName = rep.getJobEntryAttributeString(id_jobentry, "username");
password = rep.getJobEntryAttributeString(id_jobentry, "password");
ftpDirectory = rep.getJobEntryAttributeString(id_jobentry, "ftpdirectory");
wildcard = rep.getJobEntryAttributeString(id_jobentry, "wildcard");
timeout = (int)rep.getJobEntryAttributeInteger(id_jobentry, "timeout");
activeConnection = rep.getJobEntryAttributeBoolean(id_jobentry, "active");
copyprevious = rep.getJobEntryAttributeBoolean(id_jobentry, "copyprevious");
useproxy = rep.getJobEntryAttributeBoolean(id_jobentry, "useproxy");
proxyHost = rep.getJobEntryAttributeString(id_jobentry, "proxy_host"); //$NON-NLS-1$
proxyPort = rep.getJobEntryAttributeString(id_jobentry, "proxy_port"); //$NON-NLS-1$
proxyUsername = rep.getJobEntryAttributeString(id_jobentry, "proxy_username"); //$NON-NLS-1$
proxyPassword = rep.getJobEntryAttributeString(id_jobentry, "proxy_password"); //$NON-NLS-1$
publicpublickey = rep.getJobEntryAttributeBoolean(id_jobentry, "publicpublickey");
keyFilename = rep.getJobEntryAttributeString(id_jobentry, "keyfilename");
keyFilePass = rep.getJobEntryAttributeString(id_jobentry, "keyfilepass");
nr_limit_success = rep.getJobEntryAttributeString(id_jobentry, "nr_limit_success");
success_condition = rep.getJobEntryAttributeString(id_jobentry, "success_condition");
}
catch(KettleException dbe)
{
throw new KettleException("Unable to load job entry of type 'ftp' from the repository for id_jobentry="+id_jobentry, dbe);
}
}
Example 12: loadRep
import org.pentaho.di.repository.Repository; // import the package/class this method depends on
public void loadRep(Repository rep, long id_jobentry, List<DatabaseMeta> databases, List<SlaveServer> slaveServers) throws KettleException
{
try
{
super.loadRep(rep, id_jobentry, databases, slaveServers);
serverName = rep.getJobEntryAttributeString(id_jobentry, "servername");
int intServerPort = (int)rep.getJobEntryAttributeInteger(id_jobentry, "serverport");
serverPort = rep.getJobEntryAttributeString(id_jobentry, "serverport"); // backward compatible.
if (intServerPort>0 && Const.isEmpty(serverPort)) serverPort = Integer.toString(intServerPort);
userName = rep.getJobEntryAttributeString(id_jobentry, "username");
password = rep.getJobEntryAttributeString(id_jobentry, "password");
remoteDirectory = rep.getJobEntryAttributeString(id_jobentry, "remoteDirectory");
localDirectory = rep.getJobEntryAttributeString(id_jobentry, "localDirectory");
wildcard = rep.getJobEntryAttributeString(id_jobentry, "wildcard");
binaryMode = rep.getJobEntryAttributeBoolean(id_jobentry, "binary");
timeout = (int)rep.getJobEntryAttributeInteger(id_jobentry, "timeout");
remove = rep.getJobEntryAttributeBoolean(id_jobentry, "remove");
onlyPuttingNewFiles = rep.getJobEntryAttributeBoolean(id_jobentry, "only_new");
activeConnection = rep.getJobEntryAttributeBoolean(id_jobentry, "active");
controlEncoding = rep.getJobEntryAttributeString(id_jobentry, "control_encoding");
if ( controlEncoding == null )
{
// if we couldn't retrieve an encoding, assume it's an old instance and
// put in the encoding used before v 2.4.0
controlEncoding = LEGACY_CONTROL_ENCODING;
}
proxyHost = rep.getJobEntryAttributeString(id_jobentry, "proxy_host"); //$NON-NLS-1$
proxyPort = rep.getJobEntryAttributeString(id_jobentry, "proxy_port"); //$NON-NLS-1$
proxyUsername = rep.getJobEntryAttributeString(id_jobentry, "proxy_username"); //$NON-NLS-1$
proxyPassword = rep.getJobEntryAttributeString(id_jobentry, "proxy_password"); //$NON-NLS-1$
}
catch(KettleException dbe)
{
throw new KettleException(Messages.getString("JobFTPPUT.UnableToLoadFromRepo", String.valueOf(id_jobentry)), dbe);
}
}
Example 13: loadRep
import org.pentaho.di.repository.Repository; // import the package/class this method depends on
public void loadRep(Repository rep, ObjectId id_jobentry, List<DatabaseMeta> databases, List<SlaveServer> slaveServers) throws KettleException
{
try
{
xmlfilename = rep.getJobEntryAttributeString(id_jobentry, "xmlfilename");
xslfilename = rep.getJobEntryAttributeString(id_jobentry, "xslfilename");
outputfilename = rep.getJobEntryAttributeString(id_jobentry, "outputfilename");
iffileexists=(int) rep.getJobEntryAttributeInteger(id_jobentry, "iffileexists");
addfiletoresult=rep.getJobEntryAttributeBoolean(id_jobentry, "addfiletoresult");
filenamesfromprevious=rep.getJobEntryAttributeBoolean(id_jobentry, "filenamesfromprevious");
xsltfactory = rep.getJobEntryAttributeString(id_jobentry, "xsltfactory");
if(xsltfactory==null) xsltfactory=FACTORY_JAXP;
int nrparams = rep.countNrJobEntryAttributes(id_jobentry, "param_name"); //$NON-NLS-1$
int nroutputprops = rep.countNrJobEntryAttributes(id_jobentry, "output_property_name"); //$NON-NLS-1$
allocate(nrparams, nroutputprops);
for (int i = 0; i < nrparams; i++)
{
parameterField[i] = rep.getJobEntryAttributeString(id_jobentry, i, "param_field"); //$NON-NLS-1$
parameterName[i] = rep.getJobEntryAttributeString(id_jobentry, i, "param_name"); //$NON-NLS-1$
}
for (int i = 0; i < nroutputprops; i++)
{
outputPropertyName[i] = rep.getJobEntryAttributeString(id_jobentry, i, "output_property_name"); //$NON-NLS-1$
outputPropertyValue[i] = rep.getJobEntryAttributeString(id_jobentry, i, "output_property_value"); //$NON-NLS-1$
}
}
catch(KettleException dbe)
{
throw new KettleException("Unable to load job entry of type 'xslt' from the repository for id_jobentry="+id_jobentry, dbe);
}
}
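Example 13 reads indexed attributes through countNrJobEntryAttributes and the (id, nr, code) getter overload. The matching write side would loop over the arrays with the indexed saveJobEntryAttribute overload, roughly as sketched here; the id_job parameter and overload signature are assumed from the standard Repository interface.

// Inside a saveRep(Repository rep, ObjectId id_job) of the same class:
for (int i = 0; i < parameterName.length; i++) {
    rep.saveJobEntryAttribute(id_job, getObjectId(), i, "param_field", parameterField[i]);
    rep.saveJobEntryAttribute(id_job, getObjectId(), i, "param_name", parameterName[i]);
}
for (int i = 0; i < outputPropertyName.length; i++) {
    rep.saveJobEntryAttribute(id_job, getObjectId(), i, "output_property_name", outputPropertyName[i]);
    rep.saveJobEntryAttribute(id_job, getObjectId(), i, "output_property_value", outputPropertyValue[i]);
}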
Example 14: loadRep
import org.pentaho.di.repository.Repository; // import the package/class this method depends on
public void loadRep(Repository rep, ObjectId id_jobentry, List<DatabaseMeta> databases, List<SlaveServer> slaveServers)
throws KettleException
{
try
{
schemaname = rep.getJobEntryAttributeString(id_jobentry, "schemaname");
tablename = rep.getJobEntryAttributeString(id_jobentry, "tablename");
filename = rep.getJobEntryAttributeString(id_jobentry, "filename");
separator = rep.getJobEntryAttributeString(id_jobentry, "separator");
enclosed = rep.getJobEntryAttributeString(id_jobentry, "enclosed");
escaped = rep.getJobEntryAttributeString(id_jobentry, "escaped");
linestarted = rep.getJobEntryAttributeString(id_jobentry, "linestarted");
lineterminated = rep.getJobEntryAttributeString(id_jobentry, "lineterminated");
replacedata = rep.getJobEntryAttributeBoolean(id_jobentry, "replacedata");
ignorelines = rep.getJobEntryAttributeString(id_jobentry, "ignorelines");
listattribut = rep.getJobEntryAttributeString(id_jobentry, "listattribut");
localinfile = rep.getJobEntryAttributeBoolean(id_jobentry, "localinfile");
prorityvalue =(int) rep.getJobEntryAttributeInteger(id_jobentry, "prorityvalue");
addfiletoresult=rep.getJobEntryAttributeBoolean(id_jobentry, "addfiletoresult");
connection = rep.loadDatabaseMetaFromJobEntryAttribute(id_jobentry, "connection", "id_database", databases);
}
catch(KettleDatabaseException dbe)
{
throw new KettleException("Unable to load job entry of type 'Mysql bulk load' from the repository for id_jobentry="+id_jobentry, dbe);
}
}
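Note how Example 14 replaces the manual id/name fallback of Examples 5 and 10 with a single loadDatabaseMetaFromJobEntryAttribute call. The symmetric save call would be along these lines (method name and signature assumed from the standard Repository interface):

// Stores both the connection name and its id, so either lookup path works on load.
rep.saveDatabaseMetaJobEntryAttribute(id_job, getObjectId(), "connection", "id_database", connection);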
Example 15: loadRep
import org.pentaho.di.repository.Repository; // import the package/class this method depends on
public void loadRep(Repository rep, ObjectId id_jobentry, List<DatabaseMeta> databases, List<SlaveServer> slaveServers) throws KettleException
{
try
{
port = rep.getJobEntryAttributeString(id_jobentry, "port");
serverName = rep.getJobEntryAttributeString(id_jobentry, "servername");
userName = rep.getJobEntryAttributeString(id_jobentry, "username");
password = Encr.decryptPasswordOptionallyEncrypted( rep.getJobEntryAttributeString(id_jobentry, "password") );
ftpDirectory = rep.getJobEntryAttributeString(id_jobentry, "ftpdirectory");
targetDirectory = rep.getJobEntryAttributeString(id_jobentry, "targetdirectory");
wildcard = rep.getJobEntryAttributeString(id_jobentry, "wildcard");
binaryMode = rep.getJobEntryAttributeBoolean(id_jobentry, "binary");
timeout = (int)rep.getJobEntryAttributeInteger(id_jobentry, "timeout");
remove = rep.getJobEntryAttributeBoolean(id_jobentry, "remove");
onlyGettingNewFiles = rep.getJobEntryAttributeBoolean(id_jobentry, "only_new");
activeConnection = rep.getJobEntryAttributeBoolean(id_jobentry, "active");
controlEncoding = rep.getJobEntryAttributeString(id_jobentry, "control_encoding");
if ( controlEncoding == null )
{
// if we couldn't retrieve an encoding, assume it's an old instance and
// put in the encoding used before v 2.4.0
controlEncoding = LEGACY_CONTROL_ENCODING;
}
movefiles = rep.getJobEntryAttributeBoolean(id_jobentry, "movefiles");
movetodirectory = rep.getJobEntryAttributeString(id_jobentry, "movetodirectory");
adddate=rep.getJobEntryAttributeBoolean(id_jobentry, "adddate");
addtime=rep.getJobEntryAttributeBoolean(id_jobentry, "addtime");
SpecifyFormat=rep.getJobEntryAttributeBoolean(id_jobentry, "SpecifyFormat");
date_time_format = rep.getJobEntryAttributeString(id_jobentry, "date_time_format");
AddDateBeforeExtension=rep.getJobEntryAttributeBoolean(id_jobentry, "AddDateBeforeExtension");
String addToResult = rep.getJobEntryAttributeString(id_jobentry, "add_to_result_filenames");
if(Const.isEmpty(addToResult))
isaddresult = true;
else
isaddresult = rep.getJobEntryAttributeBoolean(id_jobentry, "add_to_result_filenames");
createmovefolder=rep.getJobEntryAttributeBoolean(id_jobentry, "createmovefolder");
proxyHost = rep.getJobEntryAttributeString(id_jobentry, "proxy_host"); //$NON-NLS-1$
proxyPort = rep.getJobEntryAttributeString(id_jobentry, "proxy_port"); //$NON-NLS-1$
proxyUsername = rep.getJobEntryAttributeString(id_jobentry, "proxy_username"); //$NON-NLS-1$
proxyPassword = Encr.decryptPasswordOptionallyEncrypted(rep.getJobEntryAttributeString(id_jobentry, "proxy_password")); //$NON-NLS-1$
socksProxyHost = rep.getJobEntryAttributeString(id_jobentry, "socksproxy_host"); //$NON-NLS-1$
socksProxyPort = rep.getJobEntryAttributeString(id_jobentry, "socksproxy_port"); //$NON-NLS-1$
socksProxyUsername = rep.getJobEntryAttributeString(id_jobentry, "socksproxy_username"); //$NON-NLS-1$
socksProxyPassword = Encr.decryptPasswordOptionallyEncrypted(rep.getJobEntryAttributeString(id_jobentry, "socksproxy_password")); //$NON-NLS-1$
SifFileExists = rep.getJobEntryAttributeString(id_jobentry, "ifFileExists");
if(Const.isEmpty(SifFileExists))
{
ifFileExists=ifFileExistsSkip;
}else
{
if(SifFileExists.equals(SifFileExistsCreateUniq))
ifFileExists=ifFileExistsCreateUniq;
else if(SifFileExists.equals(SifFileExistsFail))
ifFileExists=ifFileExistsFail;
else
ifFileExists=ifFileExistsSkip;
}
nr_limit = rep.getJobEntryAttributeString(id_jobentry, "nr_limit");
success_condition = Const.NVL(rep.getJobEntryAttributeString(id_jobentry, "success_condition"), SUCCESS_IF_NO_ERRORS);
}
catch(KettleException dbe)
{
throw new KettleException("Unable to load job entry of type 'ftp' from the repository for id_jobentry="+id_jobentry, dbe);
}
}
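Because every loadRep above talks only to the Repository getters, the logic is easy to unit-test with a mocked Repository. The sketch below uses JUnit 4 and Mockito against the minimal JobEntryExample sketch shown after the introduction, so it stays independent of any particular Kettle class; note that getJobEntryAttributeInteger must be stubbed with a long.

import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import org.junit.Assert;
import org.junit.Test;
import org.pentaho.di.repository.ObjectId;
import org.pentaho.di.repository.Repository;

public class JobEntryExampleLoadRepTest {

    @Test
    public void loadsTimeoutFromRepository() throws Exception {
        Repository rep = mock(Repository.class);
        ObjectId id = mock(ObjectId.class);

        // Stub only what the code under test reads; the getter returns long.
        when(rep.getJobEntryAttributeInteger(id, "timeout")).thenReturn(120L);

        JobEntryExample entry = new JobEntryExample();
        entry.loadRep(rep, id, null, null);

        Assert.assertEquals(120, entry.getTimeout());
    }
}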