本文整理匯總了Java中org.pentaho.di.repository.Repository.loadDatabaseMetaFromJobEntryAttribute方法的典型用法代碼示例。如果您正苦於以下問題:Java Repository.loadDatabaseMetaFromJobEntryAttribute方法的具體用法?Java Repository.loadDatabaseMetaFromJobEntryAttribute怎麽用?Java Repository.loadDatabaseMetaFromJobEntryAttribute使用的例子?那麽, 這裏精選的方法代碼示例或許可以為您提供幫助。您也可以進一步了解該方法所在類org.pentaho.di.repository.Repository
的用法示例。
在下文中一共展示了Repository.loadDatabaseMetaFromJobEntryAttribute方法的11個代碼示例,這些例子默認根據受歡迎程度排序。您可以為喜歡或者感覺有用的代碼點讚,您的評價將有助於係統推薦出更棒的Java代碼示例。
示例1: loadRep
import org.pentaho.di.repository.Repository; //導入方法依賴的package包/類
public void loadRep( Repository rep, IMetaStore metaStore, ObjectId id_jobentry, List<DatabaseMeta> databases,
  List<SlaveServer> slaveServers ) throws KettleException {
  // Restore every persisted attribute of this warehouse-manager entry from the repository
  // row identified by id_jobentry.
  try {
    // Action and replace/fail flags.
    setManagementAction( rep.getJobEntryAttributeString( id_jobentry, MANAGEMENT_ACTION ) );
    setReplace( rep.getJobEntryAttributeBoolean( id_jobentry, REPLACE ) );
    setFailIfExists( rep.getJobEntryAttributeBoolean( id_jobentry, FAIL_IF_EXISTS ) );
    // Warehouse identity and sizing.
    setWarehouseName( rep.getJobEntryAttributeString( id_jobentry, WAREHOUSE_NAME ) );
    setWarehouseSize( rep.getJobEntryAttributeString( id_jobentry, WAREHOUSE_SIZE ) );
    setWarehouseType( rep.getJobEntryAttributeString( id_jobentry, WAREHOUSE_TYPE ) );
    setMaxClusterCount( rep.getJobEntryAttributeString( id_jobentry, MAX_CLUSTER_COUNT ) );
    setMinClusterCount( rep.getJobEntryAttributeString( id_jobentry, MIN_CLUSTER_COUNT ) );
    // Suspend/resume behaviour.
    setAutoSuspend( rep.getJobEntryAttributeString( id_jobentry, AUTO_SUSPEND ) );
    setAutoResume( rep.getJobEntryAttributeBoolean( id_jobentry, AUTO_RESUME ) );
    setInitiallySuspended( rep.getJobEntryAttributeBoolean( id_jobentry, INITIALLY_SUSPENDED ) );
    // Misc settings and the database connection reference.
    setResourceMonitor( rep.getJobEntryAttributeString( id_jobentry, RESOURCE_MONITOR ) );
    setComment( rep.getJobEntryAttributeString( id_jobentry, COMMENT ) );
    databaseMeta = rep.loadDatabaseMetaFromJobEntryAttribute( id_jobentry, CONNECTION, "id_database", databases );
    setFailIfNotExists( rep.getJobEntryAttributeBoolean( id_jobentry, FAIL_IF_NOT_EXISTS ) );
  } catch ( KettleException e ) {
    // Wrap with a localized message that includes the failing entry id; keep e as the cause.
    throw new KettleException( BaseMessages.getString( PKG, "SnowflakeWarehouseManager.Error.Exception.UnableLoadRep" )
      + id_jobentry, e );
  }
}
示例2: loadRep
import org.pentaho.di.repository.Repository; //導入方法依賴的package包/類
public void loadRep(Repository rep, ObjectId id_jobentry, List<DatabaseMeta> databases, List<SlaveServer> slaveServers) throws KettleException {
  // Re-load all settings of this "wait for SQL" entry from the repository row id_jobentry.
  try {
    // Connection and target table.
    connection = rep.loadDatabaseMetaFromJobEntryAttribute(id_jobentry, "connection", "id_database", databases);
    schemaname = rep.getJobEntryAttributeString(id_jobentry, "schemaname");
    tablename = rep.getJobEntryAttributeString(id_jobentry, "tablename");
    // Success condition code is mapped through its lookup; missing value becomes "".
    successCondition = getSuccessConditionByCode(Const.NVL(rep.getJobEntryAttributeString(id_jobentry, "success_condition"), ""));
    rowsCountValue = rep.getJobEntryAttributeString(id_jobentry, "rows_count_value");
    // Custom-SQL options.
    iscustomSQL = rep.getJobEntryAttributeBoolean(id_jobentry, "is_custom_sql");
    isUseVars = rep.getJobEntryAttributeBoolean(id_jobentry, "is_usevars");
    isAddRowsResult = rep.getJobEntryAttributeBoolean(id_jobentry, "add_rows_result");
    customSQL = rep.getJobEntryAttributeString(id_jobentry, "custom_sql");
    // Polling / timeout behaviour.
    maximumTimeout = rep.getJobEntryAttributeString(id_jobentry, "maximum_timeout");
    checkCycleTime = rep.getJobEntryAttributeString(id_jobentry, "check_cycle_time");
    successOnTimeout = rep.getJobEntryAttributeBoolean(id_jobentry, "success_on_timeout");
    isClearResultList = rep.getJobEntryAttributeBoolean(id_jobentry, "clear_result_rows");
  } catch (KettleDatabaseException e) {
    throw new KettleException(BaseMessages.getString(PKG, "JobEntryWaitForSQL.UnableLoadRep", "" + id_jobentry), e);
  }
}
示例3: loadRep
import org.pentaho.di.repository.Repository; //導入方法依賴的package包/類
public void loadRep(Repository rep, ObjectId id_jobentry, List<DatabaseMeta> databases, List<SlaveServer> slaveServers) throws KettleException {
  // Restore this "evaluate table content" entry's settings from the repository.
  try {
    // Connection and target table.
    connection = rep.loadDatabaseMetaFromJobEntryAttribute(id_jobentry, "connection", "id_database", databases);
    schemaname = rep.getJobEntryAttributeString(id_jobentry, "schemaname");
    tablename = rep.getJobEntryAttributeString(id_jobentry, "tablename");
    // Success condition code is translated via its lookup; null becomes "".
    successCondition = getSuccessConditionByCode(Const.NVL(rep.getJobEntryAttributeString(id_jobentry, "success_condition"), ""));
    limit = rep.getJobEntryAttributeString(id_jobentry, "limit");
    // Custom-SQL options.
    iscustomSQL = rep.getJobEntryAttributeBoolean(id_jobentry, "is_custom_sql");
    isUseVars = rep.getJobEntryAttributeBoolean(id_jobentry, "is_usevars");
    isAddRowsResult = rep.getJobEntryAttributeBoolean(id_jobentry, "add_rows_result");
    isClearResultList = rep.getJobEntryAttributeBoolean(id_jobentry, "clear_result_rows");
    customSQL = rep.getJobEntryAttributeString(id_jobentry, "custom_sql");
  } catch (KettleDatabaseException e) {
    throw new KettleException(BaseMessages.getString(PKG, "JobEntryEvalTableContent.UnableLoadRep", "" + id_jobentry), e);
  }
}
示例4: loadRep
import org.pentaho.di.repository.Repository; //導入方法依賴的package包/類
public void loadRep(Repository rep, ObjectId id_jobentry, List<DatabaseMeta> databases, List<SlaveServer> slaveServers) throws KettleException {
  // Restore the "truncate tables" entry: connection, previous-arg flag, and the
  // indexed list of table/schema name pairs.
  try {
    connection = rep.loadDatabaseMetaFromJobEntryAttribute(id_jobentry, "connection", "id_database", databases);
    this.argFromPrevious = rep.getJobEntryAttributeBoolean(id_jobentry, "arg_from_previous");
    // Number of saved table/schema pairs.
    int count = rep.countNrJobEntryAttributes(id_jobentry, "name");
    this.arguments = new String[count];
    this.schemaname = new String[count];
    // Read each indexed pair back.
    for (int i = 0; i < count; i++) {
      this.arguments[i] = rep.getJobEntryAttributeString(id_jobentry, i, "name");
      this.schemaname[i] = rep.getJobEntryAttributeString(id_jobentry, i, "schemaname");
    }
  } catch (KettleDatabaseException e) {
    throw new KettleException(BaseMessages.getString(PKG, "JobEntryTruncateTables.UnableLoadRep", "" + id_jobentry), e);
  }
}
示例5: loadRep
import org.pentaho.di.repository.Repository; //導入方法依賴的package包/類
public void loadRep(Repository rep, ObjectId id_jobentry, List<DatabaseMeta> databases, List<SlaveServer> slaveServers) throws KettleException {
  // Restore the SQL job entry's settings from the repository.
  try {
    sql = rep.getJobEntryAttributeString(id_jobentry, "sql");
    // Boolean options are persisted as "T"/"F" strings; only an explicit "T"
    // (case-insensitive) switches the flag on. "T".equalsIgnoreCase(null) is false,
    // so the null check is implicit.
    if ("T".equalsIgnoreCase(rep.getJobEntryAttributeString(id_jobentry, "useVariableSubstitution"))) {
      useVariableSubstitution = true;
    }
    if ("T".equalsIgnoreCase(rep.getJobEntryAttributeString(id_jobentry, "sqlfromfile"))) {
      sqlfromfile = true;
    }
    if ("T".equalsIgnoreCase(rep.getJobEntryAttributeString(id_jobentry, "sendOneStatement"))) {
      sendOneStatement = true;
    }
    sqlfilename = rep.getJobEntryAttributeString(id_jobentry, "sqlfilename");
    connection = rep.loadDatabaseMetaFromJobEntryAttribute(id_jobentry, "connection", "id_database", databases);
  } catch (KettleDatabaseException e) {
    throw new KettleException("Unable to load job entry of type 'sql' from the repository with id_jobentry=" + id_jobentry, e);
  }
}
示例6: loadRep
import org.pentaho.di.repository.Repository; //導入方法依賴的package包/類
public void loadRep(Repository rep, ObjectId id_jobentry, List<DatabaseMeta> databases, List<SlaveServer> slaveServers) throws KettleException {
  // Restore the "check DB connections" entry: an indexed list of connections,
  // each with its wait-for text and wait-time code.
  try {
    // How many connections were saved for this entry?
    int argnr = rep.countNrJobEntryAttributes(id_jobentry, "id_database"); //$NON-NLS-1$
    connections = new DatabaseMeta[argnr];
    waitfors = new String[argnr];
    waittimes = new int[argnr];
    // Read each connection plus its wait settings back by index.
    for (int a = 0; a < argnr; a++) {
      connections[a] = rep.loadDatabaseMetaFromJobEntryAttribute(id_jobentry, "connection", a, "id_database", databases);
      waitfors[a] = rep.getJobEntryAttributeString(id_jobentry, a, "waitfor");
      waittimes[a] = getWaitByCode(Const.NVL(rep.getJobEntryAttributeString(id_jobentry, a, "waittime"), ""));
    }
  } catch (KettleException dbe) {
    // BUGFIX: the original only embedded dbe.getMessage() in the text and never
    // chained dbe, losing the underlying stack trace. Pass dbe as the cause so
    // the full failure chain is preserved for diagnostics.
    throw new KettleException(BaseMessages.getString(PKG, "JobEntryCheckDbConnections.ERROR_0002_Cannot_Load_Job_From_Repository", "" + id_jobentry, dbe.getMessage()), dbe);
  }
}
示例7: loadRep
import org.pentaho.di.repository.Repository; //導入方法依賴的package包/類
public void loadRep(Repository rep, ObjectId id_jobentry, List<DatabaseMeta> databases, List<SlaveServer> slaveServers) throws KettleException {
  // Restore the "columns exist" entry: target table, connection, and the indexed
  // list of column names to check.
  try {
    tablename = rep.getJobEntryAttributeString(id_jobentry, "tablename");
    schemaname = rep.getJobEntryAttributeString(id_jobentry, "schemaname");
    connection = rep.loadDatabaseMetaFromJobEntryAttribute(id_jobentry, "connection", "id_database", databases);
    // Number of saved column names.
    int count = rep.countNrJobEntryAttributes(id_jobentry, "name");
    arguments = new String[count];
    // Read each column name back by index.
    for (int i = 0; i < count; i++) {
      arguments[i] = rep.getJobEntryAttributeString(id_jobentry, i, "name");
    }
  } catch (KettleDatabaseException e) {
    throw new KettleException(BaseMessages.getString(PKG, "JobEntryColumnsExist.Meta.UnableLoadRep", "" + id_jobentry), e);
  }
}
示例8: loadRep
import org.pentaho.di.repository.Repository; //導入方法依賴的package包/類
public void loadRep(Repository rep, ObjectId id_jobentry, List<DatabaseMeta> databases, List<SlaveServer> slaveServers)
  throws KettleException {
  // Restore the MySQL bulk-load entry's settings from the repository.
  try {
    // Target table and source file.
    schemaname = rep.getJobEntryAttributeString(id_jobentry, "schemaname");
    tablename = rep.getJobEntryAttributeString(id_jobentry, "tablename");
    filename = rep.getJobEntryAttributeString(id_jobentry, "filename");
    // File format: delimiters, quoting, line framing.
    separator = rep.getJobEntryAttributeString(id_jobentry, "separator");
    enclosed = rep.getJobEntryAttributeString(id_jobentry, "enclosed");
    escaped = rep.getJobEntryAttributeString(id_jobentry, "escaped");
    linestarted = rep.getJobEntryAttributeString(id_jobentry, "linestarted");
    lineterminated = rep.getJobEntryAttributeString(id_jobentry, "lineterminated");
    // Load behaviour options.
    replacedata = rep.getJobEntryAttributeBoolean(id_jobentry, "replacedata");
    ignorelines = rep.getJobEntryAttributeString(id_jobentry, "ignorelines");
    listattribut = rep.getJobEntryAttributeString(id_jobentry, "listattribut");
    localinfile = rep.getJobEntryAttributeBoolean(id_jobentry, "localinfile");
    prorityvalue = (int) rep.getJobEntryAttributeInteger(id_jobentry, "prorityvalue");
    addfiletoresult = rep.getJobEntryAttributeBoolean(id_jobentry, "addfiletoresult");
    // The database connection reference comes last.
    connection = rep.loadDatabaseMetaFromJobEntryAttribute(id_jobentry, "connection", "id_database", databases);
  } catch (KettleDatabaseException e) {
    throw new KettleException("Unable to load job entry of type 'Mysql bulk load' from the repository for id_jobentry=" + id_jobentry, e);
  }
}
示例9: loadRep
import org.pentaho.di.repository.Repository; //導入方法依賴的package包/類
public void loadRep(Repository rep, ObjectId id_jobentry, List<DatabaseMeta> databases, List<SlaveServer> slaveServers)
  throws KettleException {
  // Restore the MySQL bulk-file (dump/export) entry's settings from the repository.
  try {
    // Target table and output file.
    schemaname = rep.getJobEntryAttributeString(id_jobentry, "schemaname");
    tablename = rep.getJobEntryAttributeString(id_jobentry, "tablename");
    filename = rep.getJobEntryAttributeString(id_jobentry, "filename");
    // File format options.
    separator = rep.getJobEntryAttributeString(id_jobentry, "separator");
    enclosed = rep.getJobEntryAttributeString(id_jobentry, "enclosed");
    lineterminated = rep.getJobEntryAttributeString(id_jobentry, "lineterminated");
    limitlines = rep.getJobEntryAttributeString(id_jobentry, "limitlines");
    listcolumn = rep.getJobEntryAttributeString(id_jobentry, "listcolumn");
    // Export behaviour flags.
    highpriority = rep.getJobEntryAttributeBoolean(id_jobentry, "highpriority");
    optionenclosed = rep.getJobEntryAttributeBoolean(id_jobentry, "optionenclosed");
    outdumpvalue = (int) rep.getJobEntryAttributeInteger(id_jobentry, "outdumpvalue");
    iffileexists = (int) rep.getJobEntryAttributeInteger(id_jobentry, "iffileexists");
    addfiletoresult = rep.getJobEntryAttributeBoolean(id_jobentry, "addfiletoresult");
    connection = rep.loadDatabaseMetaFromJobEntryAttribute(id_jobentry, "connection", "id_database", databases);
  } catch (KettleDatabaseException dbe) {
    // BUGFIX: the original message said "job entry of type 'table exists'" — a
    // copy-paste error from another entry. The attributes loaded here (outdumpvalue,
    // iffileexists, limitlines, ...) belong to the MySQL bulk-file entry, so report that.
    throw new KettleException("Unable to load job entry of type 'Mysql bulk file' from the repository for id_jobentry=" + id_jobentry, dbe);
  }
}
示例10: loadRep
import org.pentaho.di.repository.Repository; //導入方法依賴的package包/類
public void loadRep(Repository rep, ObjectId id_jobentry, List<DatabaseMeta> databases, List<SlaveServer> slaveServers) throws KettleException {
  // Restore the "table exists" entry: table, schema, and connection reference.
  try {
    tablename = rep.getJobEntryAttributeString(id_jobentry, "tablename");
    schemaname = rep.getJobEntryAttributeString(id_jobentry, "schemaname");
    connection = rep.loadDatabaseMetaFromJobEntryAttribute(id_jobentry, "connection", "id_database", databases);
  } catch (KettleDatabaseException e) {
    throw new KettleException(BaseMessages.getString(PKG, "TableExists.Meta.UnableLoadRep", "" + id_jobentry), e);
  }
}
示例11: loadRep
import org.pentaho.di.repository.Repository; //導入方法依賴的package包/類
public void loadRep(Repository rep, ObjectId id_jobentry, List<DatabaseMeta> databases, List<SlaveServer> slaveServers)
  throws KettleException {
  // Restore the MSSQL bulk-load entry's settings from the repository.
  try {
    // Target table and source file.
    schemaname = rep.getJobEntryAttributeString(id_jobentry, "schemaname");
    tablename = rep.getJobEntryAttributeString(id_jobentry, "tablename");
    filename = rep.getJobEntryAttributeString(id_jobentry, "filename");
    // File format and encoding.
    datafiletype = rep.getJobEntryAttributeString(id_jobentry, "datafiletype");
    fieldterminator = rep.getJobEntryAttributeString(id_jobentry, "fieldterminator");
    lineterminated = rep.getJobEntryAttributeString(id_jobentry, "lineterminated");
    codepage = rep.getJobEntryAttributeString(id_jobentry, "codepage");
    specificcodepage = rep.getJobEntryAttributeString(id_jobentry, "specificcodepage");
    formatfilename = rep.getJobEntryAttributeString(id_jobentry, "formatfilename");
    // BULK INSERT behaviour flags.
    firetriggers = rep.getJobEntryAttributeBoolean(id_jobentry, "firetriggers");
    checkconstraints = rep.getJobEntryAttributeBoolean(id_jobentry, "checkconstraints");
    keepnulls = rep.getJobEntryAttributeBoolean(id_jobentry, "keepnulls");
    keepidentity = rep.getJobEntryAttributeBoolean(id_jobentry, "keepidentity");
    tablock = rep.getJobEntryAttributeBoolean(id_jobentry, "tablock");
    // Row range, ordering, and error handling.
    startfile = (int) rep.getJobEntryAttributeInteger(id_jobentry, "startfile");
    endfile = (int) rep.getJobEntryAttributeInteger(id_jobentry, "endfile");
    orderby = rep.getJobEntryAttributeString(id_jobentry, "orderby");
    orderdirection = rep.getJobEntryAttributeString(id_jobentry, "orderdirection");
    errorfilename = rep.getJobEntryAttributeString(id_jobentry, "errorfilename");
    maxerrors = (int) rep.getJobEntryAttributeInteger(id_jobentry, "maxerrors");
    batchsize = (int) rep.getJobEntryAttributeInteger(id_jobentry, "batchsize");
    rowsperbatch = (int) rep.getJobEntryAttributeInteger(id_jobentry, "rowsperbatch");
    // Result-handling options.
    adddatetime = rep.getJobEntryAttributeBoolean(id_jobentry, "adddatetime");
    addfiletoresult = rep.getJobEntryAttributeBoolean(id_jobentry, "addfiletoresult");
    truncate = rep.getJobEntryAttributeBoolean(id_jobentry, "truncate");
    // The database connection reference comes last.
    connection = rep.loadDatabaseMetaFromJobEntryAttribute(id_jobentry, "connection", "id_database", databases);
  } catch (KettleDatabaseException e) {
    throw new KettleException("Unable to load job entry of type 'MSsql bulk load' from the repository for id_jobentry=" + id_jobentry, e);
  }
}