本文整理汇总了Java中org.pentaho.di.core.logging.LogLevel.getLogLevelForCode方法的典型用法代码示例。如果您正苦于以下问题:Java LogLevel.getLogLevelForCode方法的具体用法?Java LogLevel.getLogLevelForCode怎么用?Java LogLevel.getLogLevelForCode使用的例子?那么, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类org.pentaho.di.core.logging.LogLevel
的用法示例。
在下文中一共展示了LogLevel.getLogLevelForCode方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: loadXML
import org.pentaho.di.core.logging.LogLevel; //导入方法依赖的package包/类
/**
 * Restores this "Write To Log" job entry from its XML representation.
 *
 * @param entrynode    XML node holding this entry's serialized settings
 * @param databases    available database connections (consumed by the superclass)
 * @param slaveServers available slave servers (consumed by the superclass)
 * @param rep          repository handle (not used by this entry)
 * @throws KettleXMLException if any part of the node cannot be read
 */
public void loadXML(Node entrynode, List<DatabaseMeta> databases, List<SlaveServer> slaveServers, Repository rep)
    throws KettleXMLException {
  try {
    super.loadXML(entrynode, databases, slaveServers);
    logsubject = XMLHandler.getTagValue(entrynode, "logsubject");
    logmessage = XMLHandler.getTagValue(entrynode, "logmessage");
    // Map the persisted level code back onto a LogLevel constant.
    String levelCode = XMLHandler.getTagValue(entrynode, "loglevel");
    entryLogLevel = LogLevel.getLogLevelForCode(levelCode);
  } catch (Exception e) {
    throw new KettleXMLException(BaseMessages.getString(PKG, "WriteToLog.Error.UnableToLoadFromXML.Label"), e);
  }
}
示例2: loadRep
import org.pentaho.di.core.logging.LogLevel; //导入方法依赖的package包/类
/**
 * Restores this "Write To Log" job entry from the repository.
 *
 * @param rep          repository to read the attributes from
 * @param id_jobentry  id of this job entry inside the repository
 * @param databases    available database connections (not used by this entry)
 * @param slaveServers available slave servers (not used by this entry)
 * @throws KettleException if the repository read fails
 */
public void loadRep(Repository rep, ObjectId id_jobentry, List<DatabaseMeta> databases, List<SlaveServer> slaveServers) throws KettleException {
  try {
    logsubject = rep.getJobEntryAttributeString(id_jobentry, "logsubject");
    logmessage = rep.getJobEntryAttributeString(id_jobentry, "logmessage");
    // Map the persisted level code back onto a LogLevel constant.
    String levelCode = rep.getJobEntryAttributeString(id_jobentry, "loglevel");
    entryLogLevel = LogLevel.getLogLevelForCode(levelCode);
  } catch (KettleDatabaseException dbe) {
    throw new KettleException(BaseMessages.getString(PKG, "WriteToLog.Error.UnableToLoadFromRepository.Label") + id_jobentry, dbe);
  }
}
示例3: loadXML
import org.pentaho.di.core.logging.LogLevel; //导入方法依赖的package包/类
/**
 * Reads the "shell" job entry settings from an XML node.
 *
 * @param entrynode    XML node holding this entry's serialized settings
 * @param databases    available database connections (consumed by the superclass)
 * @param slaveServers available slave servers (consumed by the superclass)
 * @param rep          repository handle (not used here)
 * @throws KettleXMLException if the node cannot be parsed
 */
public void loadXML(Node entrynode, List<DatabaseMeta> databases, List<SlaveServer> slaveServers,
    Repository rep) throws KettleXMLException {
  try {
    super.loadXML(entrynode, databases, slaveServers);

    setFileName(XMLHandler.getTagValue(entrynode, "filename"));
    setWorkDirectory(XMLHandler.getTagValue(entrynode, "work_directory"));

    // Boolean options are serialized as "Y"/"N" strings.
    argFromPrevious = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "arg_from_previous"));
    execPerRow = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "exec_per_row"));
    setLogfile = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "set_logfile"));
    setAppendLogfile = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "set_append_logfile"));
    addDate = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "add_date"));
    addTime = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "add_time"));

    logfile = XMLHandler.getTagValue(entrynode, "logfile");
    logext = XMLHandler.getTagValue(entrynode, "logext");
    logFileLevel = LogLevel.getLogLevelForCode(XMLHandler.getTagValue(entrynode, "loglevel"));

    insertScript = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "insertScript"));
    script = XMLHandler.getTagValue(entrynode, "script");

    // Arguments live in sequentially numbered tags (argument0, argument1, ...).
    // Legacy layout: determine the count by probing until a tag is absent.
    // THIS IS A VERY BAD WAY OF READING/SAVING AS IT MAKES THE XML "DUBIOUS". DON'T REUSE IT.
    int argCount = 0;
    while (XMLHandler.getTagValue(entrynode, "argument" + argCount) != null) {
      argCount++;
    }
    arguments = new String[argCount];
    for (int idx = 0; idx < argCount; idx++) {
      arguments[idx] = XMLHandler.getTagValue(entrynode, "argument" + idx);
    }
  } catch (KettleException e) {
    throw new KettleXMLException("Unable to load job entry of type 'shell' from XML node", e);
  }
}
示例4: loadRep
import org.pentaho.di.core.logging.LogLevel; //导入方法依赖的package包/类
/**
 * Reads the "shell" job entry settings from the repository.
 *
 * @param rep          repository to read from
 * @param id_jobentry  id of this job entry inside the repository
 * @param databases    available database connections (not used here)
 * @param slaveServers available slave servers (not used here)
 * @throws KettleException if the repository read fails
 */
public void loadRep(Repository rep, ObjectId id_jobentry, List<DatabaseMeta> databases,
    List<SlaveServer> slaveServers) throws KettleException {
  try {
    setFileName(rep.getJobEntryAttributeString(id_jobentry, "file_name"));
    setWorkDirectory(rep.getJobEntryAttributeString(id_jobentry, "work_directory"));

    argFromPrevious = rep.getJobEntryAttributeBoolean(id_jobentry, "arg_from_previous");
    execPerRow = rep.getJobEntryAttributeBoolean(id_jobentry, "exec_per_row");
    setLogfile = rep.getJobEntryAttributeBoolean(id_jobentry, "set_logfile");
    setAppendLogfile = rep.getJobEntryAttributeBoolean(id_jobentry, "set_append_logfile");
    addDate = rep.getJobEntryAttributeBoolean(id_jobentry, "add_date");
    addTime = rep.getJobEntryAttributeBoolean(id_jobentry, "add_time");

    logfile = rep.getJobEntryAttributeString(id_jobentry, "logfile");
    logext = rep.getJobEntryAttributeString(id_jobentry, "logext");
    logFileLevel = LogLevel.getLogLevelForCode(rep.getJobEntryAttributeString(id_jobentry, "loglevel"));

    insertScript = rep.getJobEntryAttributeBoolean(id_jobentry, "insertScript");
    script = rep.getJobEntryAttributeString(id_jobentry, "script");

    // The repository knows the argument count up front, unlike the XML format.
    int argCount = rep.countNrJobEntryAttributes(id_jobentry, "argument");
    arguments = new String[argCount];
    for (int idx = 0; idx < argCount; idx++) {
      arguments[idx] = rep.getJobEntryAttributeString(id_jobentry, idx, "argument");
    }
  } catch (KettleDatabaseException dbe) {
    throw new KettleException(
        "Unable to load job entry of type 'shell' from the repository with id_jobentry=" + id_jobentry, dbe);
  }
}
示例5: loadSettings
import org.pentaho.di.core.logging.LogLevel; //导入方法依赖的package包/类
/**
 * Applies the persisted user preferences (log level, log filter, log buffer size,
 * database cache flag) to the current session.
 */
public void loadSettings() {
  // Resolve the stored level code and propagate it to both the application-wide
  // default and this instance's own log channel.
  LogLevel storedLevel = LogLevel.getLogLevelForCode(props.getLogLevel());
  log.setLogLevel(storedLevel);
  DefaultLogLevel.setLogLevel(storedLevel);

  LogWriter.getInstance().setFilter(props.getLogFilter());
  CentralLogStore.getAppender().setMaxNrLines(props.getMaxNrLinesInLog());

  // transMeta.setMaxUndo(props.getMaxUndo());
  DBCache.getInstance().setActive(props.useDBCache());
}
示例6: loadRep
import org.pentaho.di.core.logging.LogLevel; //导入方法依赖的package包/类
/**
 * Reads the "shell" job entry settings from the repository (metastore-aware variant).
 *
 * @param rep          repository to read from
 * @param metaStore    metastore handle (not used here)
 * @param id_jobentry  id of this job entry inside the repository
 * @param databases    available database connections (not used here)
 * @param slaveServers available slave servers (not used here)
 * @throws KettleException if the repository read fails
 */
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
  List<SlaveServer> slaveServers ) throws KettleException {
  try {
    setFileName( rep.getJobEntryAttributeString( id_jobentry, "file_name" ) );
    setWorkDirectory( rep.getJobEntryAttributeString( id_jobentry, "work_directory" ) );

    argFromPrevious = rep.getJobEntryAttributeBoolean( id_jobentry, "arg_from_previous" );
    execPerRow = rep.getJobEntryAttributeBoolean( id_jobentry, "exec_per_row" );
    setLogfile = rep.getJobEntryAttributeBoolean( id_jobentry, "set_logfile" );
    setAppendLogfile = rep.getJobEntryAttributeBoolean( id_jobentry, "set_append_logfile" );
    addDate = rep.getJobEntryAttributeBoolean( id_jobentry, "add_date" );
    addTime = rep.getJobEntryAttributeBoolean( id_jobentry, "add_time" );

    logfile = rep.getJobEntryAttributeString( id_jobentry, "logfile" );
    logext = rep.getJobEntryAttributeString( id_jobentry, "logext" );
    logFileLevel = LogLevel.getLogLevelForCode( rep.getJobEntryAttributeString( id_jobentry, "loglevel" ) );

    insertScript = rep.getJobEntryAttributeBoolean( id_jobentry, "insertScript" );
    script = rep.getJobEntryAttributeString( id_jobentry, "script" );

    // Size the argument array via allocate(), then fill it from the numbered attributes.
    int argCount = rep.countNrJobEntryAttributes( id_jobentry, "argument" );
    allocate( argCount );
    for ( int idx = 0; idx < argCount; idx++ ) {
      arguments[idx] = rep.getJobEntryAttributeString( id_jobentry, idx, "argument" );
    }
  } catch ( KettleDatabaseException dbe ) {
    throw new KettleException( "Unable to load job entry of type 'shell' from the repository with id_jobentry="
      + id_jobentry, dbe );
  }
}
示例7: loadXML
import org.pentaho.di.core.logging.LogLevel; //导入方法依赖的package包/类
/**
 * Reads the "shell" job entry settings from an XML node (metastore-aware variant).
 *
 * @param entrynode    XML node holding this entry's serialized settings
 * @param databases    available database connections (consumed by the superclass)
 * @param slaveServers available slave servers (consumed by the superclass)
 * @param rep          repository handle (not used here)
 * @param metaStore    metastore handle (not used here)
 * @throws KettleXMLException if the node cannot be parsed
 */
public void loadXML( Node entrynode, List<DatabaseMeta> databases, List<SlaveServer> slaveServers,
  Repository rep, IMetaStore metaStore ) throws KettleXMLException {
  try {
    super.loadXML( entrynode, databases, slaveServers );

    setFileName( XMLHandler.getTagValue( entrynode, "filename" ) );
    setWorkDirectory( XMLHandler.getTagValue( entrynode, "work_directory" ) );

    // Boolean options are serialized as "Y"/"N" strings.
    argFromPrevious = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "arg_from_previous" ) );
    execPerRow = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "exec_per_row" ) );
    setLogfile = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "set_logfile" ) );
    setAppendLogfile = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "set_append_logfile" ) );
    addDate = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "add_date" ) );
    addTime = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "add_time" ) );

    logfile = XMLHandler.getTagValue( entrynode, "logfile" );
    logext = XMLHandler.getTagValue( entrynode, "logext" );
    logFileLevel = LogLevel.getLogLevelForCode( XMLHandler.getTagValue( entrynode, "loglevel" ) );

    insertScript = "Y".equalsIgnoreCase( XMLHandler.getTagValue( entrynode, "insertScript" ) );
    script = XMLHandler.getTagValue( entrynode, "script" );

    // Arguments live in sequentially numbered tags (argument0, argument1, ...).
    // THIS IS A VERY BAD WAY OF READING/SAVING AS IT MAKES THE XML "DUBIOUS". DON'T REUSE IT.
    int argCount = 0;
    while ( XMLHandler.getTagValue( entrynode, "argument" + argCount ) != null ) {
      argCount++;
    }
    allocate( argCount );
    for ( int idx = 0; idx < argCount; idx++ ) {
      arguments[idx] = XMLHandler.getTagValue( entrynode, "argument" + idx );
    }
  } catch ( KettleException e ) {
    throw new KettleXMLException( "Unable to load job entry of type 'shell' from XML node", e );
  }
}
示例8: loadSettings
import org.pentaho.di.core.logging.LogLevel; //导入方法依赖的package包/类
/**
 * Applies the persisted user preferences (log level, log buffer size,
 * database cache flag) to the current session.
 */
public void loadSettings() {
  // Resolve the stored level code and propagate it to both the application-wide
  // default and this instance's own log channel.
  LogLevel storedLevel = LogLevel.getLogLevelForCode( props.getLogLevel() );
  log.setLogLevel( storedLevel );
  DefaultLogLevel.setLogLevel( storedLevel );

  KettleLogStore.getAppender().setMaxNrLines( props.getMaxNrLinesInLog() );

  // transMeta.setMaxUndo(props.getMaxUndo());
  DBCache.getInstance().setActive( props.useDBCache() );
}
示例9: loadXML
import org.pentaho.di.core.logging.LogLevel; //导入方法依赖的package包/类
/**
 * Reads this "job" job entry's settings from an XML node: how the target job is
 * located, logging options, remote-execution options, the numbered argument list
 * and the named parameter list.
 *
 * @param entrynode    XML node holding this entry's serialized settings
 * @param databases    available database connections (consumed by the superclass)
 * @param slaveServers available slave servers (consumed by the superclass)
 * @param rep          repository handle (not used in this method)
 * @throws KettleXMLException if the node cannot be parsed
 */
public void loadXML(Node entrynode, List<DatabaseMeta> databases, List<SlaveServer> slaveServers, Repository rep) throws KettleXMLException {
try {
super.loadXML(entrynode, databases, slaveServers);
// How the target job is referenced (e.g. by filename vs. repository object id).
String method = XMLHandler.getTagValue(entrynode, "specification_method");
specificationMethod = ObjectLocationSpecificationMethod.getSpecificationMethodByCode(method);
String jobId = XMLHandler.getTagValue(entrynode, "job_object_id");
jobObjectId = Const.isEmpty(jobId) ? null : new StringObjectId(jobId);
filename = XMLHandler.getTagValue(entrynode, "filename");
jobname = XMLHandler.getTagValue(entrynode, "jobname");
// Backward compatibility check for object specification
//
checkObjectLocationSpecificationMethod();
// Boolean options are serialized as "Y"/"N" strings.
argFromPrevious = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "arg_from_previous"));
paramsFromPrevious = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "params_from_previous"));
execPerRow = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "exec_per_row"));
setLogfile = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "set_logfile"));
addDate = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "add_date"));
addTime = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "add_time"));
logfile = XMLHandler.getTagValue(entrynode, "logfile");
logext = XMLHandler.getTagValue(entrynode, "logext");
logFileLevel = LogLevel.getLogLevelForCode(XMLHandler.getTagValue(entrynode, "loglevel"));
setAppendLogfile = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "set_append_logfile"));
remoteSlaveServerName = XMLHandler.getTagValue(entrynode, "slave_server_name");
passingExport = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "pass_export"));
directory = XMLHandler.getTagValue(entrynode, "directory");
createParentFolder = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "create_parent_folder"));
// Absent "wait_until_finished" tag defaults to waiting (true).
String wait = XMLHandler.getTagValue(entrynode, "wait_until_finished");
if (Const.isEmpty(wait))
waitingToFinish = true;
else
waitingToFinish = "Y".equalsIgnoreCase(wait);
followingAbortRemotely = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "follow_abort_remote"));
// How many arguments? Probe the numbered tags (argument0, argument1, ...) until one is missing.
int argnr = 0;
while (XMLHandler.getTagValue(entrynode, "argument" + argnr) != null)
argnr++;
arguments = new String[argnr];
// Read them all... This is a very BAD way to do it by the way. Sven
// Boden.
for (int a = 0; a < argnr; a++) {
arguments[a] = XMLHandler.getTagValue(entrynode, "argument" + a);
}
// Named parameters live in a nested <parameters> element; a missing
// "pass_all_parameters" tag defaults to true.
Node parametersNode = XMLHandler.getSubNode(entrynode, "parameters"); //$NON-NLS-1$
String passAll = XMLHandler.getTagValue(parametersNode, "pass_all_parameters");
passingAllParameters = Const.isEmpty(passAll) || "Y".equalsIgnoreCase(passAll);
int nrParameters = XMLHandler.countNodes(parametersNode, "parameter"); //$NON-NLS-1$
parameters = new String[nrParameters];
parameterFieldNames = new String[nrParameters];
parameterValues = new String[nrParameters];
for (int i = 0; i < nrParameters; i++) {
Node knode = XMLHandler.getSubNodeByNr(parametersNode, "parameter", i); //$NON-NLS-1$
parameters[i] = XMLHandler.getTagValue(knode, "name"); //$NON-NLS-1$
parameterFieldNames[i] = XMLHandler.getTagValue(knode, "stream_name"); //$NON-NLS-1$
parameterValues[i] = XMLHandler.getTagValue(knode, "value"); //$NON-NLS-1$
}
} catch (KettleXMLException xe) {
throw new KettleXMLException("Unable to load 'job' job entry from XML node", xe);
}
}
示例10: loadRep
import org.pentaho.di.core.logging.LogLevel; //导入方法依赖的package包/类
/**
 * Loads this "job" job entry from the repository: target-job location, logging
 * options, remote-execution options, the numbered argument list and the named
 * parameter list.
 *
 * @param rep          repository to read the attributes from
 * @param id_jobentry  id of this job entry inside the repository
 * @param databases    available database connections (not used in this method)
 * @param slaveServers available slave servers (not used in this method)
 * @throws KettleException if the repository read fails
 */
public void loadRep(Repository rep, ObjectId id_jobentry, List<DatabaseMeta> databases, List<SlaveServer> slaveServers) throws KettleException {
try {
// How the target job is referenced (e.g. by filename vs. repository object id).
String method = rep.getJobEntryAttributeString(id_jobentry, "specification_method");
specificationMethod = ObjectLocationSpecificationMethod.getSpecificationMethodByCode(method);
String jobId = rep.getJobEntryAttributeString(id_jobentry, "job_object_id");
jobObjectId = Const.isEmpty(jobId) ? null : new StringObjectId(jobId);
jobname = rep.getJobEntryAttributeString(id_jobentry, "name");
directory = rep.getJobEntryAttributeString(id_jobentry, "dir_path");
filename = rep.getJobEntryAttributeString(id_jobentry, "file_name");
// Backward compatibility check for object specification
//
checkObjectLocationSpecificationMethod();
argFromPrevious = rep.getJobEntryAttributeBoolean(id_jobentry, "arg_from_previous");
paramsFromPrevious = rep.getJobEntryAttributeBoolean(id_jobentry, "params_from_previous");
execPerRow = rep.getJobEntryAttributeBoolean(id_jobentry, "exec_per_row");
setLogfile = rep.getJobEntryAttributeBoolean(id_jobentry, "set_logfile");
addDate = rep.getJobEntryAttributeBoolean(id_jobentry, "add_date");
addTime = rep.getJobEntryAttributeBoolean(id_jobentry, "add_time");
logfile = rep.getJobEntryAttributeString(id_jobentry, "logfile");
logext = rep.getJobEntryAttributeString(id_jobentry, "logext");
logFileLevel = LogLevel.getLogLevelForCode(rep.getJobEntryAttributeString(id_jobentry, "loglevel"));
setAppendLogfile = rep.getJobEntryAttributeBoolean(id_jobentry, "set_append_logfile");
remoteSlaveServerName = rep.getJobEntryAttributeString(id_jobentry, "slave_server_name");
passingExport = rep.getJobEntryAttributeBoolean(id_jobentry, "pass_export");
// Missing attribute defaults to true (wait for the sub-job to finish).
waitingToFinish = rep.getJobEntryAttributeBoolean(id_jobentry, "wait_until_finished", true);
followingAbortRemotely = rep.getJobEntryAttributeBoolean(id_jobentry, "follow_abort_remote");
createParentFolder = rep.getJobEntryAttributeBoolean(id_jobentry, "create_parent_folder");
// How many arguments?
int argnr = rep.countNrJobEntryAttributes(id_jobentry, "argument");
arguments = new String[argnr];
// Read all arguments ...
for (int a = 0; a < argnr; a++) {
arguments[a] = rep.getJobEntryAttributeString(id_jobentry, a, "argument");
}
// How many arguments?
// (actually: how many named parameters)
int parameternr = rep.countNrJobEntryAttributes(id_jobentry, "parameter_name");
parameters = new String[parameternr];
parameterFieldNames = new String[parameternr];
parameterValues = new String[parameternr];
// Read all parameters ...
for (int a = 0; a < parameternr; a++) {
parameters[a] = rep.getJobEntryAttributeString(id_jobentry, a, "parameter_name");
parameterFieldNames[a] = rep.getJobEntryAttributeString(id_jobentry, a, "parameter_stream_name");
parameterValues[a] = rep.getJobEntryAttributeString(id_jobentry, a, "parameter_value");
}
// Missing attribute defaults to true (pass all parameters down).
passingAllParameters = rep.getJobEntryAttributeBoolean(id_jobentry, "pass_all_parameters", true);
} catch (KettleDatabaseException dbe) {
throw new KettleException("Unable to load job entry of type 'job' from the repository with id_jobentry=" + id_jobentry, dbe);
}
}
示例11: loadXML
import org.pentaho.di.core.logging.LogLevel; //导入方法依赖的package包/类
/**
 * Reads this "trans" (transformation) job entry's settings from an XML node:
 * how the target transformation is located, result/row clearing flags, logging
 * options, clustering and remote-execution options, the numbered argument list
 * and the named parameter list.
 *
 * @param entrynode    XML node holding this entry's serialized settings
 * @param databases    available database connections (consumed by the superclass)
 * @param slaveServers available slave servers (consumed by the superclass)
 * @param rep          repository handle (not used in this method)
 * @throws KettleXMLException if the node cannot be parsed
 */
public void loadXML(Node entrynode, List<DatabaseMeta> databases, List<SlaveServer> slaveServers, Repository rep) throws KettleXMLException {
try {
super.loadXML(entrynode, databases, slaveServers);
// How the target transformation is referenced (e.g. by filename vs. repository object id).
String method = XMLHandler.getTagValue(entrynode, "specification_method");
specificationMethod = ObjectLocationSpecificationMethod.getSpecificationMethodByCode(method);
String transId = XMLHandler.getTagValue(entrynode, "trans_object_id");
transObjectId = Const.isEmpty(transId) ? null : new StringObjectId(transId);
filename = XMLHandler.getTagValue(entrynode, "filename");
transname = XMLHandler.getTagValue(entrynode, "transname");
directory = XMLHandler.getTagValue(entrynode, "directory");
// Backward compatibility check for object specification
//
checkObjectLocationSpecificationMethod();
// Boolean options are serialized as "Y"/"N" strings.
argFromPrevious = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "arg_from_previous"));
paramsFromPrevious = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "params_from_previous"));
execPerRow = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "exec_per_row"));
clearResultRows = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "clear_rows"));
clearResultFiles = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "clear_files"));
setLogfile = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "set_logfile"));
addDate = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "add_date"));
addTime = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "add_time"));
logfile = XMLHandler.getTagValue(entrynode, "logfile");
logext = XMLHandler.getTagValue(entrynode, "logext");
logFileLevel = LogLevel.getLogLevelForCode(XMLHandler.getTagValue(entrynode, "loglevel"));
clustering = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "cluster"));
createParentFolder = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "create_parent_folder"));
remoteSlaveServerName = XMLHandler.getTagValue(entrynode, "slave_server_name");
setAppendLogfile = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "set_append_logfile"));
// Absent "wait_until_finished" tag defaults to waiting (true).
String wait = XMLHandler.getTagValue(entrynode, "wait_until_finished");
if (Const.isEmpty(wait))
waitingToFinish = true;
else
waitingToFinish = "Y".equalsIgnoreCase(wait);
followingAbortRemotely = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "follow_abort_remote"));
// How many arguments? Probe the numbered tags (argument0, argument1, ...) until one is missing.
int argnr = 0;
while (XMLHandler.getTagValue(entrynode, "argument" + argnr) != null)
argnr++;
arguments = new String[argnr];
// Read them all...
for (int a = 0; a < argnr; a++) {
arguments[a] = XMLHandler.getTagValue(entrynode, "argument" + a);
}
// Named parameters live in a nested <parameters> element; a missing
// "pass_all_parameters" tag defaults to true.
Node parametersNode = XMLHandler.getSubNode(entrynode, "parameters"); //$NON-NLS-1$
String passAll = XMLHandler.getTagValue(parametersNode, "pass_all_parameters");
passingAllParameters = Const.isEmpty(passAll) || "Y".equalsIgnoreCase(passAll);
int nrParameters = XMLHandler.countNodes(parametersNode, "parameter"); //$NON-NLS-1$
parameters = new String[nrParameters];
parameterFieldNames = new String[nrParameters];
parameterValues = new String[nrParameters];
for (int i = 0; i < nrParameters; i++) {
Node knode = XMLHandler.getSubNodeByNr(parametersNode, "parameter", i); //$NON-NLS-1$
parameters[i] = XMLHandler.getTagValue(knode, "name"); //$NON-NLS-1$
parameterFieldNames[i] = XMLHandler.getTagValue(knode, "stream_name"); //$NON-NLS-1$
parameterValues[i] = XMLHandler.getTagValue(knode, "value"); //$NON-NLS-1$
}
} catch (KettleException e) {
throw new KettleXMLException("Unable to load job entry of type 'trans' from XML node", e);
}
}
示例12: loadRep
import org.pentaho.di.core.logging.LogLevel; //导入方法依赖的package包/类
/**
 * Loads this "trans" (transformation) job entry from the repository:
 * target-transformation location, result/row clearing flags, logging options,
 * clustering and remote-execution options, the numbered argument list and the
 * named parameter list.
 *
 * @param rep          repository to read the attributes from
 * @param id_jobentry  id of this job entry inside the repository
 * @param databases    available database connections (not used in this method)
 * @param slaveServers available slave servers (not used in this method)
 * @throws KettleException if the repository read fails
 */
public void loadRep(Repository rep, ObjectId id_jobentry, List<DatabaseMeta> databases, List<SlaveServer> slaveServers) throws KettleException {
try {
// How the target transformation is referenced (e.g. by filename vs. repository object id).
String method = rep.getJobEntryAttributeString(id_jobentry, "specification_method");
specificationMethod = ObjectLocationSpecificationMethod.getSpecificationMethodByCode(method);
String transId = rep.getJobEntryAttributeString(id_jobentry, "trans_object_id");
transObjectId = Const.isEmpty(transId) ? null : new StringObjectId(transId);
transname = rep.getJobEntryAttributeString(id_jobentry, "name");
directory = rep.getJobEntryAttributeString(id_jobentry, "dir_path");
filename = rep.getJobEntryAttributeString(id_jobentry, "file_name");
// Backward compatibility check for object specification
//
checkObjectLocationSpecificationMethod();
argFromPrevious = rep.getJobEntryAttributeBoolean(id_jobentry, "arg_from_previous");
paramsFromPrevious = rep.getJobEntryAttributeBoolean(id_jobentry, "params_from_previous");
execPerRow = rep.getJobEntryAttributeBoolean(id_jobentry, "exec_per_row");
// Missing attributes default to true (clear previous result rows/files).
clearResultRows = rep.getJobEntryAttributeBoolean(id_jobentry, "clear_rows", true);
clearResultFiles = rep.getJobEntryAttributeBoolean(id_jobentry, "clear_files", true);
setLogfile = rep.getJobEntryAttributeBoolean(id_jobentry, "set_logfile");
addDate = rep.getJobEntryAttributeBoolean(id_jobentry, "add_date");
addTime = rep.getJobEntryAttributeBoolean(id_jobentry, "add_time");
logfile = rep.getJobEntryAttributeString(id_jobentry, "logfile");
logext = rep.getJobEntryAttributeString(id_jobentry, "logext");
logFileLevel = LogLevel.getLogLevelForCode(rep.getJobEntryAttributeString(id_jobentry, "loglevel"));
clustering = rep.getJobEntryAttributeBoolean(id_jobentry, "cluster");
createParentFolder = rep.getJobEntryAttributeBoolean(id_jobentry, "create_parent_folder");
remoteSlaveServerName = rep.getJobEntryAttributeString(id_jobentry, "slave_server_name");
setAppendLogfile = rep.getJobEntryAttributeBoolean(id_jobentry, "set_append_logfile");
// Missing attribute defaults to true (wait for the sub-transformation to finish).
waitingToFinish = rep.getJobEntryAttributeBoolean(id_jobentry, "wait_until_finished", true);
followingAbortRemotely = rep.getJobEntryAttributeBoolean(id_jobentry, "follow_abort_remote");
// How many arguments?
int argnr = rep.countNrJobEntryAttributes(id_jobentry, "argument");
arguments = new String[argnr];
// Read all arguments...
for (int a = 0; a < argnr; a++) {
arguments[a] = rep.getJobEntryAttributeString(id_jobentry, a, "argument");
}
// How many arguments?
// (actually: how many named parameters)
int parameternr = rep.countNrJobEntryAttributes(id_jobentry, "parameter_name");
parameters = new String[parameternr];
parameterFieldNames = new String[parameternr];
parameterValues = new String[parameternr];
// Read all parameters ...
for (int a = 0; a < parameternr; a++) {
parameters[a] = rep.getJobEntryAttributeString(id_jobentry, a, "parameter_name");
parameterFieldNames[a] = rep.getJobEntryAttributeString(id_jobentry, a, "parameter_stream_name");
parameterValues[a] = rep.getJobEntryAttributeString(id_jobentry, a, "parameter_value");
}
// Missing attribute defaults to true (pass all parameters down).
passingAllParameters = rep.getJobEntryAttributeBoolean(id_jobentry, "pass_all_parameters", true);
} catch (KettleDatabaseException dbe) {
throw new KettleException("Unable to load job entry of type 'trans' from the repository for id_jobentry=" + id_jobentry, dbe);
}
}
示例13: loadXML
import org.pentaho.di.core.logging.LogLevel; //导入方法依赖的package包/类
/**
 * Reads this "job" job entry's settings from an XML node (variant that also
 * supports the "force_separate_logging" option): target-job location, logging
 * options, remote-execution options, the numbered argument list and the named
 * parameter list.
 *
 * @param entrynode    XML node holding this entry's serialized settings
 * @param databases    available database connections (consumed by the superclass)
 * @param slaveServers available slave servers (consumed by the superclass)
 * @param rep          repository handle (not used in this method)
 * @throws KettleXMLException if the node cannot be parsed
 */
public void loadXML(Node entrynode, List<DatabaseMeta> databases, List<SlaveServer> slaveServers, Repository rep) throws KettleXMLException {
try {
super.loadXML(entrynode, databases, slaveServers);
// How the target job is referenced (e.g. by filename vs. repository object id).
String method = XMLHandler.getTagValue(entrynode, "specification_method");
specificationMethod = ObjectLocationSpecificationMethod.getSpecificationMethodByCode(method);
String jobId = XMLHandler.getTagValue(entrynode, "job_object_id");
jobObjectId = Const.isEmpty(jobId) ? null : new StringObjectId(jobId);
filename = XMLHandler.getTagValue(entrynode, "filename");
jobname = XMLHandler.getTagValue(entrynode, "jobname");
// Backward compatibility check for object specification
//
checkObjectLocationSpecificationMethod();
// Boolean options are serialized as "Y"/"N" strings.
argFromPrevious = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "arg_from_previous"));
paramsFromPrevious = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "params_from_previous"));
execPerRow = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "exec_per_row"));
setLogfile = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "set_logfile"));
addDate = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "add_date"));
addTime = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "add_time"));
logfile = XMLHandler.getTagValue(entrynode, "logfile");
logext = XMLHandler.getTagValue(entrynode, "logext");
logFileLevel = LogLevel.getLogLevelForCode(XMLHandler.getTagValue(entrynode, "loglevel"));
setAppendLogfile = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "set_append_logfile"));
remoteSlaveServerName = XMLHandler.getTagValue(entrynode, "slave_server_name");
passingExport = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "pass_export"));
directory = XMLHandler.getTagValue(entrynode, "directory");
createParentFolder = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "create_parent_folder"));
forcingSeparateLogging = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "force_separate_logging"));
// Absent "wait_until_finished" tag defaults to waiting (true).
String wait = XMLHandler.getTagValue(entrynode, "wait_until_finished");
if (Const.isEmpty(wait))
waitingToFinish = true;
else
waitingToFinish = "Y".equalsIgnoreCase(wait);
followingAbortRemotely = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "follow_abort_remote"));
// How many arguments? Probe the numbered tags (argument0, argument1, ...) until one is missing.
int argnr = 0;
while (XMLHandler.getTagValue(entrynode, "argument" + argnr) != null)
argnr++;
arguments = new String[argnr];
// Read them all... This is a very BAD way to do it by the way. Sven
// Boden.
for (int a = 0; a < argnr; a++) {
arguments[a] = XMLHandler.getTagValue(entrynode, "argument" + a);
}
// Named parameters live in a nested <parameters> element; a missing
// "pass_all_parameters" tag defaults to true.
Node parametersNode = XMLHandler.getSubNode(entrynode, "parameters"); //$NON-NLS-1$
String passAll = XMLHandler.getTagValue(parametersNode, "pass_all_parameters");
passingAllParameters = Const.isEmpty(passAll) || "Y".equalsIgnoreCase(passAll);
int nrParameters = XMLHandler.countNodes(parametersNode, "parameter"); //$NON-NLS-1$
parameters = new String[nrParameters];
parameterFieldNames = new String[nrParameters];
parameterValues = new String[nrParameters];
for (int i = 0; i < nrParameters; i++) {
Node knode = XMLHandler.getSubNodeByNr(parametersNode, "parameter", i); //$NON-NLS-1$
parameters[i] = XMLHandler.getTagValue(knode, "name"); //$NON-NLS-1$
parameterFieldNames[i] = XMLHandler.getTagValue(knode, "stream_name"); //$NON-NLS-1$
parameterValues[i] = XMLHandler.getTagValue(knode, "value"); //$NON-NLS-1$
}
} catch (KettleXMLException xe) {
throw new KettleXMLException("Unable to load 'job' job entry from XML node", xe);
}
}
示例14: loadRep
import org.pentaho.di.core.logging.LogLevel; //导入方法依赖的package包/类
/**
 * Loads this "job" job entry from the repository (variant that also supports
 * the "force_separate_logging" option): target-job location, logging options,
 * remote-execution options, the numbered argument list and the named parameter
 * list.
 *
 * @param rep          repository to read the attributes from
 * @param id_jobentry  id of this job entry inside the repository
 * @param databases    available database connections (not used in this method)
 * @param slaveServers available slave servers (not used in this method)
 * @throws KettleException if the repository read fails
 */
public void loadRep(Repository rep, ObjectId id_jobentry, List<DatabaseMeta> databases, List<SlaveServer> slaveServers) throws KettleException {
try {
// How the target job is referenced (e.g. by filename vs. repository object id).
String method = rep.getJobEntryAttributeString(id_jobentry, "specification_method");
specificationMethod = ObjectLocationSpecificationMethod.getSpecificationMethodByCode(method);
String jobId = rep.getJobEntryAttributeString(id_jobentry, "job_object_id");
jobObjectId = Const.isEmpty(jobId) ? null : new StringObjectId(jobId);
jobname = rep.getJobEntryAttributeString(id_jobentry, "name");
directory = rep.getJobEntryAttributeString(id_jobentry, "dir_path");
filename = rep.getJobEntryAttributeString(id_jobentry, "file_name");
// Backward compatibility check for object specification
//
checkObjectLocationSpecificationMethod();
argFromPrevious = rep.getJobEntryAttributeBoolean(id_jobentry, "arg_from_previous");
paramsFromPrevious = rep.getJobEntryAttributeBoolean(id_jobentry, "params_from_previous");
execPerRow = rep.getJobEntryAttributeBoolean(id_jobentry, "exec_per_row");
setLogfile = rep.getJobEntryAttributeBoolean(id_jobentry, "set_logfile");
addDate = rep.getJobEntryAttributeBoolean(id_jobentry, "add_date");
addTime = rep.getJobEntryAttributeBoolean(id_jobentry, "add_time");
logfile = rep.getJobEntryAttributeString(id_jobentry, "logfile");
logext = rep.getJobEntryAttributeString(id_jobentry, "logext");
logFileLevel = LogLevel.getLogLevelForCode(rep.getJobEntryAttributeString(id_jobentry, "loglevel"));
setAppendLogfile = rep.getJobEntryAttributeBoolean(id_jobentry, "set_append_logfile");
remoteSlaveServerName = rep.getJobEntryAttributeString(id_jobentry, "slave_server_name");
passingExport = rep.getJobEntryAttributeBoolean(id_jobentry, "pass_export");
// Missing attribute defaults to true (wait for the sub-job to finish).
waitingToFinish = rep.getJobEntryAttributeBoolean(id_jobentry, "wait_until_finished", true);
followingAbortRemotely = rep.getJobEntryAttributeBoolean(id_jobentry, "follow_abort_remote");
createParentFolder = rep.getJobEntryAttributeBoolean(id_jobentry, "create_parent_folder");
forcingSeparateLogging = rep.getJobEntryAttributeBoolean(id_jobentry, "force_separate_logging");
// How many arguments?
int argnr = rep.countNrJobEntryAttributes(id_jobentry, "argument");
arguments = new String[argnr];
// Read all arguments ...
for (int a = 0; a < argnr; a++) {
arguments[a] = rep.getJobEntryAttributeString(id_jobentry, a, "argument");
}
// How many arguments?
// (actually: how many named parameters)
int parameternr = rep.countNrJobEntryAttributes(id_jobentry, "parameter_name");
parameters = new String[parameternr];
parameterFieldNames = new String[parameternr];
parameterValues = new String[parameternr];
// Read all parameters ...
for (int a = 0; a < parameternr; a++) {
parameters[a] = rep.getJobEntryAttributeString(id_jobentry, a, "parameter_name");
parameterFieldNames[a] = rep.getJobEntryAttributeString(id_jobentry, a, "parameter_stream_name");
parameterValues[a] = rep.getJobEntryAttributeString(id_jobentry, a, "parameter_value");
}
// Missing attribute defaults to true (pass all parameters down).
passingAllParameters = rep.getJobEntryAttributeBoolean(id_jobentry, "pass_all_parameters", true);
} catch (KettleDatabaseException dbe) {
throw new KettleException("Unable to load job entry of type 'job' from the repository with id_jobentry=" + id_jobentry, dbe);
}
}
示例15: loadRep
import org.pentaho.di.core.logging.LogLevel; //导入方法依赖的package包/类
/**
 * Loads this "trans" (transformation) job entry from the repository
 * (metastore-aware variant with run-configuration and remote-logging options).
 *
 * @param rep          repository to read the attributes from
 * @param metaStore    metastore handle (not used in this method)
 * @param id_jobentry  id of this job entry inside the repository
 * @param databases    available database connections (not used in this method)
 * @param slaveServers available slave servers (not used in this method)
 * @throws KettleException if the repository read fails
 */
@Override
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
List<SlaveServer> slaveServers ) throws KettleException {
try {
// How the target transformation is referenced (e.g. by filename vs. repository object id).
String method = rep.getJobEntryAttributeString( id_jobentry, "specification_method" );
specificationMethod = ObjectLocationSpecificationMethod.getSpecificationMethodByCode( method );
String transId = rep.getJobEntryAttributeString( id_jobentry, "trans_object_id" );
transObjectId = Utils.isEmpty( transId ) ? null : new StringObjectId( transId );
transname = rep.getJobEntryAttributeString( id_jobentry, "name" );
directory = rep.getJobEntryAttributeString( id_jobentry, "dir_path" );
filename = rep.getJobEntryAttributeString( id_jobentry, "file_name" );
// Backward compatibility check for object specification
//
checkObjectLocationSpecificationMethod();
argFromPrevious = rep.getJobEntryAttributeBoolean( id_jobentry, "arg_from_previous" );
paramsFromPrevious = rep.getJobEntryAttributeBoolean( id_jobentry, "params_from_previous" );
execPerRow = rep.getJobEntryAttributeBoolean( id_jobentry, "exec_per_row" );
// Missing attributes default to true (clear previous result rows/files).
clearResultRows = rep.getJobEntryAttributeBoolean( id_jobentry, "clear_rows", true );
clearResultFiles = rep.getJobEntryAttributeBoolean( id_jobentry, "clear_files", true );
setLogfile = rep.getJobEntryAttributeBoolean( id_jobentry, "set_logfile" );
addDate = rep.getJobEntryAttributeBoolean( id_jobentry, "add_date" );
addTime = rep.getJobEntryAttributeBoolean( id_jobentry, "add_time" );
logfile = rep.getJobEntryAttributeString( id_jobentry, "logfile" );
logext = rep.getJobEntryAttributeString( id_jobentry, "logext" );
logFileLevel = LogLevel.getLogLevelForCode( rep.getJobEntryAttributeString( id_jobentry, "loglevel" ) );
clustering = rep.getJobEntryAttributeBoolean( id_jobentry, "cluster" );
createParentFolder = rep.getJobEntryAttributeBoolean( id_jobentry, "create_parent_folder" );
remoteSlaveServerName = rep.getJobEntryAttributeString( id_jobentry, "slave_server_name" );
setAppendLogfile = rep.getJobEntryAttributeBoolean( id_jobentry, "set_append_logfile" );
// Missing attribute defaults to true (wait for the sub-transformation to finish).
waitingToFinish = rep.getJobEntryAttributeBoolean( id_jobentry, "wait_until_finished", true );
followingAbortRemotely = rep.getJobEntryAttributeBoolean( id_jobentry, "follow_abort_remote" );
loggingRemoteWork = rep.getJobEntryAttributeBoolean( id_jobentry, "logging_remote_work" );
runConfiguration = rep.getJobEntryAttributeString( id_jobentry, "run_configuration" );
// How many arguments?
int argnr = rep.countNrJobEntryAttributes( id_jobentry, "argument" );
allocateArgs( argnr );
// Read all arguments...
for ( int a = 0; a < argnr; a++ ) {
arguments[ a ] = rep.getJobEntryAttributeString( id_jobentry, a, "argument" );
}
// How many arguments?
// (actually: how many named parameters)
int parameternr = rep.countNrJobEntryAttributes( id_jobentry, "parameter_name" );
allocateParams( parameternr );
// Read all parameters ...
for ( int a = 0; a < parameternr; a++ ) {
parameters[ a ] = rep.getJobEntryAttributeString( id_jobentry, a, "parameter_name" );
parameterFieldNames[ a ] = rep.getJobEntryAttributeString( id_jobentry, a, "parameter_stream_name" );
parameterValues[ a ] = rep.getJobEntryAttributeString( id_jobentry, a, "parameter_value" );
}
// Missing attribute defaults to true (pass all parameters down).
passingAllParameters = rep.getJobEntryAttributeBoolean( id_jobentry, "pass_all_parameters", true );
} catch ( KettleDatabaseException dbe ) {
throw new KettleException( "Unable to load job entry of type 'trans' from the repository for id_jobentry="
+ id_jobentry, dbe );
}
}