This article collects typical usage examples of the Java method org.pentaho.di.repository.Repository.getImportBaseDirectory. If you are unsure what Repository.getImportBaseDirectory does, how to call it, or where it is used, the selected code examples below should help. You can also look at the enclosing class, org.pentaho.di.repository.Repository, for further usage examples.
The following shows 3 code examples of Repository.getImportBaseDirectory, sorted by popularity by default.
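Before the individual examples, here is a minimal sketch of the recurring pattern they all follow, using only the calls that appear in the examples themselves (getImportBaseDirectory(), isRoot(), getPath()). The ImportPathHelper class and resolveDirectoryPath method are hypothetical names introduced for illustration; they are not part of the Kettle API.

import org.pentaho.di.repository.Repository;
import org.pentaho.di.repository.RepositoryDirectory;

public class ImportPathHelper {

    // Hypothetical helper: rebuild a directory path when metadata is saved
    // during an import into a non-root target directory.
    public static String resolveDirectoryPath(Repository rep, String directoryPath) {
        if (directoryPath == null) {
            // No path at all: fall back to the import base directory if one
            // is set, otherwise to the repository root directory.
            return rep.getImportBaseDirectory() != null
                    ? rep.getImportBaseDirectory().getPath()
                    : new RepositoryDirectory().getPath();
        }
        if (rep.getImportBaseDirectory() != null && !rep.getImportBaseDirectory().isRoot()) {
            // An import base directory is set and is not the repository root:
            // prefix the stored path with it so the saved reference points
            // into the directory the objects are being imported into.
            return rep.getImportBaseDirectory().getPath() + directoryPath;
        }
        return directoryPath;
    }
}

Each of the three examples below applies one or both branches of this pattern inside a saveRep(...) method before writing the directory reference to the repository.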
Example 1: saveRep
import org.pentaho.di.repository.Repository; // import the package/class this method depends on
public void saveRep(Repository rep, long id_transformation, long id_step) throws KettleException
{
    rep.saveStepAttribute(id_transformation, id_step, "filename", fileName); //$NON-NLS-1$
    rep.saveStepAttribute(id_transformation, id_step, "trans_name", transName); //$NON-NLS-1$

    // Verify import from repository explorer into different directory...
    //
    if (rep.getImportBaseDirectory() != null && !rep.getImportBaseDirectory().isRoot()) {
        directoryPath = rep.getImportBaseDirectory().getPath() + directoryPath;
    }

    // Now we can save it with the correct reference...
    //
    rep.saveStepAttribute(id_transformation, id_step, "directory_path", directoryPath); //$NON-NLS-1$

    for (int i = 0; i < inputMappings.size(); i++)
    {
        inputMappings.get(i).saveRep(rep, id_transformation, id_step, "input_", i);
    }
    for (int i = 0; i < outputMappings.size(); i++)
    {
        outputMappings.get(i).saveRep(rep, id_transformation, id_step, "output_", i);
    }

    // save the mapping parameters too
    //
    mappingParameters.saveRep(rep, id_transformation, id_step);
}
Example 2: saveRep
import org.pentaho.di.repository.Repository; // import the package/class this method depends on
public void saveRep(Repository rep, long id_job) throws KettleException
{
    try
    {
        super.saveRep(rep, id_job);

        if (rep.getImportBaseDirectory() != null && !rep.getImportBaseDirectory().isRoot()) {
            directory = rep.getImportBaseDirectory().getPath() + directory;
        }
        if (directory == null) {
            if (rep.getImportBaseDirectory() != null) {
                directory = rep.getImportBaseDirectory().getPath();
            } else {
                directory = new RepositoryDirectory().getPath(); // just pick the root directory
            }
        }

        // Removed id_job as we do not know what it is if we are using variables in the path
        // long id_job_attr = rep.getJobID(jobname, directory.getID());
        // rep.saveJobEntryAttribute(id_job, getID(), "id_job", id_job_attr);
        rep.saveJobEntryAttribute(id_job, getID(), "name", getJobName());
        rep.saveJobEntryAttribute(id_job, getID(), "dir_path", getDirectory() != null ? getDirectory() : "");
        rep.saveJobEntryAttribute(id_job, getID(), "file_name", filename);
        rep.saveJobEntryAttribute(id_job, getID(), "arg_from_previous", argFromPrevious);
        rep.saveJobEntryAttribute(id_job, getID(), "params_from_previous", paramsFromPrevious);
        rep.saveJobEntryAttribute(id_job, getID(), "exec_per_row", execPerRow);
        rep.saveJobEntryAttribute(id_job, getID(), "set_logfile", setLogfile);
        rep.saveJobEntryAttribute(id_job, getID(), "add_date", addDate);
        rep.saveJobEntryAttribute(id_job, getID(), "add_time", addTime);
        rep.saveJobEntryAttribute(id_job, getID(), "logfile", logfile);
        rep.saveJobEntryAttribute(id_job, getID(), "logext", logext);
        rep.saveJobEntryAttribute(id_job, getID(), "set_append_logfile", setAppendLogfile);
        rep.saveJobEntryAttribute(id_job, getID(), "loglevel", LogWriter.getLogLevelDesc(loglevel));
        rep.saveJobEntryAttribute(id_job, getID(), "slave_server_name", remoteSlaveServerName);
        rep.saveJobEntryAttribute(id_job, getID(), "wait_until_finished", waitingToFinish);
        rep.saveJobEntryAttribute(id_job, getID(), "follow_abort_remote", followingAbortRemotely);

        // save the arguments...
        if (arguments != null)
        {
            for (int i = 0; i < arguments.length; i++)
            {
                rep.saveJobEntryAttribute(id_job, getID(), i, "argument", arguments[i]);
            }
        }

        // save the parameters...
        if (parameters != null)
        {
            for (int i = 0; i < parameters.length; i++)
            {
                rep.saveJobEntryAttribute(id_job, getID(), i, "parameter_name", parameters[i]);
                rep.saveJobEntryAttribute(id_job, getID(), i, "parameter_stream_name", Const.NVL(parameterFieldNames[i], ""));
                rep.saveJobEntryAttribute(id_job, getID(), i, "parameter_value", Const.NVL(parameterValues[i], ""));
            }
        }

        rep.saveJobEntryAttribute(id_job, getID(), "pass_all_parameters", passingAllParameters);
    }
    catch (KettleDatabaseException dbe)
    {
        throw new KettleException("Unable to save job entry of type job to the repository with id_job=" + id_job, dbe);
    }
}
Example 3: saveRep
import org.pentaho.di.repository.Repository; // import the package/class this method depends on
public void saveRep(Repository rep, long id_job) throws KettleException {
    try {
        super.saveRep(rep, id_job);

        if (directory == null) {
            if (rep.getImportBaseDirectory() != null) {
                directory = rep.getImportBaseDirectory().getPath();
            } else {
                directory = new RepositoryDirectory().getPath(); // just pick the root directory
            }
        }

        // Removed id_transformation as we do not know what it is if we are using variables in the path
        // long id_transformation = rep.getTransformationID(transname, directory.getID());
        // rep.saveJobEntryAttribute(id_job, getID(), "id_transformation", id_transformation);
        rep.saveJobEntryAttribute(id_job, getID(), "name", getTransname());
        rep.saveJobEntryAttribute(id_job, getID(), "dir_path", getDirectory() != null ? getDirectory() : "");
        rep.saveJobEntryAttribute(id_job, getID(), "file_name", filename);
        rep.saveJobEntryAttribute(id_job, getID(), "arg_from_previous", argFromPrevious);
        rep.saveJobEntryAttribute(id_job, getID(), "exec_per_row", execPerRow);
        rep.saveJobEntryAttribute(id_job, getID(), "clear_rows", clearResultRows);
        rep.saveJobEntryAttribute(id_job, getID(), "clear_files", clearResultFiles);
        rep.saveJobEntryAttribute(id_job, getID(), "set_logfile", setLogfile);
        rep.saveJobEntryAttribute(id_job, getID(), "add_date", addDate);
        rep.saveJobEntryAttribute(id_job, getID(), "add_time", addTime);
        rep.saveJobEntryAttribute(id_job, getID(), "logfile", logfile);
        rep.saveJobEntryAttribute(id_job, getID(), "logext", logext);
        rep.saveJobEntryAttribute(id_job, getID(), "loglevel", LogWriter.getLogLevelDesc(loglevel));
        rep.saveJobEntryAttribute(id_job, getID(), "cluster", clustering);
        rep.saveJobEntryAttribute(id_job, getID(), "slave_server_name", remoteSlaveServerName);
        rep.saveJobEntryAttribute(id_job, getID(), "set_append_logfile", setAppendLogfile);
        rep.saveJobEntryAttribute(id_job, getID(), "wait_until_finished", waitingToFinish);
        rep.saveJobEntryAttribute(id_job, getID(), "follow_abort_remote", followingAbortRemotely);

        // Save the arguments...
        if (arguments != null) {
            for (int i = 0; i < arguments.length; i++) {
                rep.saveJobEntryAttribute(id_job, getID(), i, "argument", arguments[i]);
            }
        }

        // Save the parameters...
        if (parameters != null) {
            for (int i = 0; i < parameters.length; i++) {
                rep.saveJobEntryAttribute(id_job, getID(), i, "parameter_name", parameters[i]);
                rep.saveJobEntryAttribute(id_job, getID(), i, "parameter_stream_name", Const.NVL(parameterFieldNames[i], ""));
                rep.saveJobEntryAttribute(id_job, getID(), i, "parameter_value", Const.NVL(parameterValues[i], ""));
            }
        }

        rep.saveJobEntryAttribute(id_job, getID(), "pass_all_parameters", passingAllParameters);
    } catch (KettleDatabaseException dbe) {
        throw new KettleException("Unable to save job entry of type 'trans' to the repository for id_job=" + id_job, dbe);
    }
}