本文整理匯總了Java中org.pentaho.di.repository.Repository.saveDatabaseMetaJobEntryAttribute方法的典型用法代碼示例。如果您正苦於以下問題:Java Repository.saveDatabaseMetaJobEntryAttribute方法的具體用法?Java Repository.saveDatabaseMetaJobEntryAttribute怎麼用?Java Repository.saveDatabaseMetaJobEntryAttribute使用的例子?那麼, 這裏精選的方法代碼示例或許可以為您提供幫助。您也可以進一步了解該方法所在類org.pentaho.di.repository.Repository
的用法示例。
在下文中一共展示了Repository.saveDatabaseMetaJobEntryAttribute方法的11個代碼示例,這些例子默認根據受歡迎程度排序。您可以為喜歡或者感覺有用的代碼點讚,您的評價將有助於系統推薦出更棒的Java代碼示例。
示例1: saveRep
import org.pentaho.di.repository.Repository; //導入方法依賴的package包/類
/**
 * Persists this job entry's settings to the repository.
 *
 * @param rep       repository to write the attributes into
 * @param metaStore metastore handle (required by the interface; not used directly here)
 * @param id_job    id of the job that owns this entry
 * @throws KettleException if any attribute cannot be saved
 */
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_job ) throws KettleException {
  ObjectId entryId = getObjectId();
  try {
    // The database connection is stored as a reference (id_database), not inline.
    rep.saveDatabaseMetaJobEntryAttribute( id_job, entryId, CONNECTION, "id_database", databaseMeta );
    rep.saveJobEntryAttribute( id_job, entryId, MANAGEMENT_ACTION, getManagementAction() );
    rep.saveJobEntryAttribute( id_job, entryId, REPLACE, isReplace() );
    rep.saveJobEntryAttribute( id_job, entryId, FAIL_IF_EXISTS, isFailIfExists() );
    rep.saveJobEntryAttribute( id_job, entryId, WAREHOUSE_NAME, getWarehouseName() );
    rep.saveJobEntryAttribute( id_job, entryId, WAREHOUSE_SIZE, getWarehouseSize() );
    rep.saveJobEntryAttribute( id_job, entryId, WAREHOUSE_TYPE, getWarehouseType() );
    rep.saveJobEntryAttribute( id_job, entryId, MAX_CLUSTER_COUNT, getMaxClusterCount() );
    rep.saveJobEntryAttribute( id_job, entryId, MIN_CLUSTER_COUNT, getMinClusterCount() );
    rep.saveJobEntryAttribute( id_job, entryId, AUTO_SUSPEND, getAutoSuspend() );
    rep.saveJobEntryAttribute( id_job, entryId, AUTO_RESUME, isAutoResume() );
    rep.saveJobEntryAttribute( id_job, entryId, INITIALLY_SUSPENDED, isInitiallySuspended() );
    rep.saveJobEntryAttribute( id_job, entryId, RESOURCE_MONITOR, getResourceMonitor() );
    rep.saveJobEntryAttribute( id_job, entryId, COMMENT, getComment() );
    rep.saveJobEntryAttribute( id_job, entryId, FAIL_IF_NOT_EXISTS, isFailIfNotExists() );
  } catch ( KettleDatabaseException dbe ) {
    throw new KettleDatabaseException(
        BaseMessages.getString( PKG, "SnowflakeWarehouseManager.Error.Exception.UnableSaveRep" ) + entryId, dbe );
  }
}
示例2: saveRep
import org.pentaho.di.repository.Repository; //導入方法依賴的package包/類
/**
 * Persists this MySQL bulk-load job entry's settings to the repository.
 *
 * @param rep    repository to write the attributes into
 * @param id_job id of the job that owns this entry
 * @throws KettleException if any attribute cannot be saved
 */
public void saveRep(Repository rep, ObjectId id_job) throws KettleException
{
    try
    {
        rep.saveJobEntryAttribute(id_job, getObjectId(), "schemaname", schemaname);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "tablename", tablename);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "filename", filename);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "separator", separator);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "enclosed", enclosed);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "escaped", escaped);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "linestarted", linestarted);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "lineterminated", lineterminated);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "replacedata", replacedata);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "ignorelines", ignorelines);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "listattribut", listattribut);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "localinfile", localinfile);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "prorityvalue", prorityvalue);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "addfiletoresult", addfiletoresult);
        // The connection is stored as a reference (id_database), not inline.
        rep.saveDatabaseMetaJobEntryAttribute(id_job, getObjectId(), "connection", "id_database", connection);
    }
    catch(KettleDatabaseException dbe)
    {
        // FIX: the original message said "Unable to load" although this method saves.
        throw new KettleException("Unable to save job entry of type 'Mysql Bulk Load' to the repository for id_job="+id_job, dbe);
    }
}
示例3: saveRep
import org.pentaho.di.repository.Repository; //導入方法依賴的package包/類
/**
 * Persists this "wait for SQL" job entry's settings to the repository.
 *
 * @param rep    repository to write the attributes into
 * @param id_job id of the job that owns this entry
 * @throws KettleException if any attribute cannot be saved
 */
public void saveRep(Repository rep, ObjectId id_job) throws KettleException
{
    ObjectId entryId = getObjectId();
    try
    {
        // The connection is stored as a reference (id_database), not inline.
        rep.saveDatabaseMetaJobEntryAttribute(id_job, entryId, "connection", "id_database", connection);
        rep.saveJobEntryAttribute(id_job, entryId, "schemaname", schemaname);
        rep.saveJobEntryAttribute(id_job, entryId, "tablename", tablename);
        // Persist the success condition as its symbolic code, not the raw ordinal.
        rep.saveJobEntryAttribute(id_job, entryId, "success_condition", getSuccessConditionCode(successCondition));
        rep.saveJobEntryAttribute(id_job, entryId, "rows_count_value", rowsCountValue);
        rep.saveJobEntryAttribute(id_job, entryId, "custom_sql", customSQL);
        rep.saveJobEntryAttribute(id_job, entryId, "is_custom_sql", iscustomSQL);
        rep.saveJobEntryAttribute(id_job, entryId, "is_usevars", isUseVars);
        rep.saveJobEntryAttribute(id_job, entryId, "add_rows_result", isAddRowsResult);
        rep.saveJobEntryAttribute(id_job, entryId, "maximum_timeout", maximumTimeout);
        rep.saveJobEntryAttribute(id_job, entryId, "check_cycle_time", checkCycleTime);
        rep.saveJobEntryAttribute(id_job, entryId, "success_on_timeout", successOnTimeout);
        rep.saveJobEntryAttribute(id_job, entryId, "clear_result_rows", isClearResultList);
    }
    catch(KettleDatabaseException dbe)
    {
        throw new KettleException(BaseMessages.getString(PKG, "JobEntryWaitForSQL.UnableSaveRep",""+id_job), dbe);
    }
}
示例4: saveRep
import org.pentaho.di.repository.Repository; //導入方法依賴的package包/類
/**
 * Persists this "evaluate table content" job entry's settings to the repository.
 *
 * @param rep    repository to write the attributes into
 * @param id_job id of the job that owns this entry
 * @throws KettleException if any attribute cannot be saved
 */
public void saveRep(Repository rep, ObjectId id_job) throws KettleException
{
    ObjectId entryId = getObjectId();
    try
    {
        // The connection is stored as a reference (id_database), not inline.
        rep.saveDatabaseMetaJobEntryAttribute(id_job, entryId, "connection", "id_database", connection);
        rep.saveJobEntryAttribute(id_job, entryId, "schemaname", schemaname);
        rep.saveJobEntryAttribute(id_job, entryId, "tablename", tablename);
        // Persist the success condition as its symbolic code, not the raw ordinal.
        rep.saveJobEntryAttribute(id_job, entryId, "success_condition", getSuccessConditionCode(successCondition));
        rep.saveJobEntryAttribute(id_job, entryId, "limit", limit);
        rep.saveJobEntryAttribute(id_job, entryId, "custom_sql", customSQL);
        rep.saveJobEntryAttribute(id_job, entryId, "is_custom_sql", iscustomSQL);
        rep.saveJobEntryAttribute(id_job, entryId, "is_usevars", isUseVars);
        rep.saveJobEntryAttribute(id_job, entryId, "add_rows_result", isAddRowsResult);
        rep.saveJobEntryAttribute(id_job, entryId, "clear_result_rows", isClearResultList);
    }
    catch(KettleDatabaseException dbe)
    {
        throw new KettleException(BaseMessages.getString(PKG, "JobEntryEvalTableContent.UnableSaveRep",""+id_job), dbe);
    }
}
示例5: saveRep
import org.pentaho.di.repository.Repository; //導入方法依賴的package包/類
/**
 * Persists this "truncate tables" job entry's settings to the repository,
 * including the indexed table/schema argument pairs.
 *
 * @param rep    repository to write the attributes into
 * @param id_job id of the job that owns this entry
 * @throws KettleException if any attribute cannot be saved
 */
public void saveRep(Repository rep, ObjectId id_job) throws KettleException
{
    ObjectId entryId = getObjectId();
    try
    {
        // The connection is stored as a reference (id_database), not inline.
        rep.saveDatabaseMetaJobEntryAttribute(id_job, entryId, "connection", "id_database", connection);
        rep.saveJobEntryAttribute(id_job, entryId, "arg_from_previous", this.argFromPrevious);
        // Each argument row is stored under its positional index.
        if (this.arguments != null) {
            for (int idx = 0; idx < this.arguments.length; idx++) {
                rep.saveJobEntryAttribute(id_job, entryId, idx, "name", this.arguments[idx]); //$NON-NLS-1$
                rep.saveJobEntryAttribute(id_job, entryId, idx, "schemaname", this.schemaname[idx]); //$NON-NLS-1$
            }
        }
    }
    catch(KettleDatabaseException dbe)
    {
        throw new KettleException(BaseMessages.getString(PKG, "JobEntryTruncateTables.UnableSaveRep",""+id_job), dbe);
    }
}
示例6: saveRep
import org.pentaho.di.repository.Repository; //導入方法依賴的package包/類
/**
 * Persists this SQL job entry's settings to the repository.
 *
 * @param rep    repository to write the attributes into
 * @param id_job id of the job that owns this entry
 * @throws KettleException if any attribute cannot be saved
 */
public void saveRep(Repository rep, ObjectId id_job) throws KettleException
{
    ObjectId entryId = getObjectId();
    try
    {
        // The connection is stored as a reference (id_database), not inline.
        rep.saveDatabaseMetaJobEntryAttribute(id_job, entryId, "connection", "id_database", connection);
        rep.saveJobEntryAttribute(id_job, entryId, "sql", sql);
        // Booleans are persisted using the repository's "T"/"F" convention.
        rep.saveJobEntryAttribute(id_job, entryId, "useVariableSubstitution", useVariableSubstitution ? "T" : "F" );
        rep.saveJobEntryAttribute(id_job, entryId, "sqlfromfile", sqlfromfile ? "T" : "F" );
        rep.saveJobEntryAttribute(id_job, entryId, "sqlfilename", sqlfilename);
        rep.saveJobEntryAttribute(id_job, entryId, "sendOneStatement", sendOneStatement ? "T" : "F" );
    }
    catch(KettleDatabaseException dbe)
    {
        throw new KettleException("Unable to save job entry of type 'sql' to the repository for id_job="+id_job, dbe);
    }
}
示例7: saveRep
import org.pentaho.di.repository.Repository; //導入方法依賴的package包/類
/**
 * Persists this "check DB connections" job entry's settings to the repository:
 * one connection reference plus wait-time settings per indexed slot.
 *
 * @param rep    repository to write the attributes into
 * @param id_job id of the job that owns this entry
 * @throws KettleException if any attribute cannot be saved
 */
public void saveRep(Repository rep, ObjectId id_job) throws KettleException
{
    try
    {
        // Each configured connection (with its wait settings) is stored under its positional index.
        if (connections != null) {
            for (int i = 0; i < connections.length; i++) {
                rep.saveDatabaseMetaJobEntryAttribute(id_job, getObjectId(), i, "connection", "id_database", connections[i]);
                // The wait time is persisted as its symbolic code, not the raw value.
                rep.saveJobEntryAttribute(id_job, getObjectId(), i, "waittime", getWaitTimeCode(waittimes[i]));
                rep.saveJobEntryAttribute(id_job, getObjectId(), i, "waitfor", waitfors[i]);
            }
        }
    }
    catch(KettleDatabaseException dbe)
    {
        // FIX: pass dbe as the cause instead of dropping it — the original only embedded
        // dbe.getMessage() in the text, losing the underlying stack trace.
        throw new KettleException(BaseMessages.getString(PKG, "JobEntryCheckDbConnections.ERROR_0003_Cannot_Save_Job_Entry",""+id_job, dbe.getMessage()), dbe);
    }
}
示例8: saveRep
import org.pentaho.di.repository.Repository; //導入方法依賴的package包/類
/**
 * Persists this "columns exist" job entry's settings to the repository,
 * including the indexed list of column names to check.
 *
 * @param rep    repository to write the attributes into
 * @param id_job id of the job that owns this entry
 * @throws KettleException if any attribute cannot be saved
 */
public void saveRep(Repository rep, ObjectId id_job) throws KettleException
{
    ObjectId entryId = getObjectId();
    try
    {
        rep.saveJobEntryAttribute(id_job, entryId, "tablename", tablename);
        rep.saveJobEntryAttribute(id_job, entryId, "schemaname", schemaname);
        // The connection is stored as a reference (id_database), not inline.
        rep.saveDatabaseMetaJobEntryAttribute(id_job, entryId, "connection", "id_database", connection);
        // Each column name is stored under its positional index.
        if (arguments != null) {
            for (int idx = 0; idx < arguments.length; idx++) {
                rep.saveJobEntryAttribute(id_job, entryId, idx, "name", arguments[idx]);
            }
        }
    }
    catch(KettleDatabaseException dbe)
    {
        throw new KettleException(BaseMessages.getString(PKG, "JobEntryColumnsExist.Meta.UnableSaveRep",""+id_job), dbe);
    }
}
示例9: saveRep
import org.pentaho.di.repository.Repository; //導入方法依賴的package包/類
/**
 * Persists this MySQL bulk-file job entry's settings to the repository.
 *
 * @param rep    repository to write the attributes into
 * @param id_job id of the job that owns this entry
 * @throws KettleException if any attribute cannot be saved
 */
public void saveRep(Repository rep, ObjectId id_job) throws KettleException
{
    try
    {
        rep.saveJobEntryAttribute(id_job, getObjectId(), "schemaname", schemaname);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "tablename", tablename);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "filename", filename);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "separator", separator);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "enclosed", enclosed);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "lineterminated", lineterminated);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "limitlines", limitlines);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "listcolumn", listcolumn);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "highpriority", highpriority);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "optionenclosed", optionenclosed);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "outdumpvalue", outdumpvalue);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "iffileexists", iffileexists);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "addfiletoresult", addfiletoresult);
        // The connection is stored as a reference (id_database), not inline.
        rep.saveDatabaseMetaJobEntryAttribute(id_job, getObjectId(), "connection", "id_database", connection);
    }
    catch(KettleDatabaseException dbe)
    {
        // FIX: the original message said "Unable to load" although this method saves.
        throw new KettleException("Unable to save job entry of type 'Mysql Bulk Load' to the repository for id_job="+id_job, dbe);
    }
}
示例10: saveRep
import org.pentaho.di.repository.Repository; //導入方法依賴的package包/類
/**
 * Persists this MSSQL bulk-load job entry's settings to the repository.
 *
 * @param rep    repository to write the attributes into
 * @param id_job id of the job that owns this entry
 * @throws KettleException if any attribute cannot be saved
 */
public void saveRep(Repository rep, ObjectId id_job) throws KettleException
{
    try
    {
        rep.saveJobEntryAttribute(id_job, getObjectId(), "schemaname", schemaname);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "tablename", tablename);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "filename", filename);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "datafiletype", datafiletype);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "fieldterminator", fieldterminator);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "lineterminated", lineterminated);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "codepage", codepage);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "specificcodepage", specificcodepage);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "formatfilename", formatfilename);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "firetriggers", firetriggers);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "checkconstraints", checkconstraints);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "keepnulls", keepnulls);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "keepidentity", keepidentity);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "tablock", tablock);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "startfile", startfile);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "endfile", endfile);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "orderby", orderby);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "orderdirection", orderdirection);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "errorfilename", errorfilename);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "maxerrors", maxerrors);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "batchsize", batchsize);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "rowsperbatch", rowsperbatch);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "adddatetime", adddatetime);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "addfiletoresult", addfiletoresult);
        rep.saveJobEntryAttribute(id_job, getObjectId(), "truncate", truncate);
        // The connection is stored as a reference (id_database), not inline.
        rep.saveDatabaseMetaJobEntryAttribute(id_job, getObjectId(), "connection", "id_database", connection);
    }
    catch(KettleDatabaseException dbe)
    {
        // FIX: the original message said "Unable to load" although this method saves.
        throw new KettleException("Unable to save job entry of type 'MSsql Bulk Load' to the repository for id_job="+id_job, dbe);
    }
}
示例11: saveRep
import org.pentaho.di.repository.Repository; //導入方法依賴的package包/類
/**
 * Persists this "table exists" job entry's settings to the repository.
 *
 * @param rep    repository to write the attributes into
 * @param id_job id of the job that owns this entry
 * @throws KettleException if any attribute cannot be saved
 */
public void saveRep(Repository rep, ObjectId id_job) throws KettleException
{
    ObjectId entryId = getObjectId();
    try
    {
        rep.saveJobEntryAttribute(id_job, entryId, "tablename", tablename);
        rep.saveJobEntryAttribute(id_job, entryId, "schemaname", schemaname);
        // The connection is stored as a reference (id_database), not inline.
        rep.saveDatabaseMetaJobEntryAttribute(id_job, entryId, "connection", "id_database", connection);
    }
    catch(KettleDatabaseException dbe)
    {
        throw new KettleException(BaseMessages.getString(PKG, "TableExists.Meta.UnableSaveRep",""+id_job), dbe);
    }
}