本文整理汇总了Java中org.pentaho.di.core.KettleEnvironment.init方法的典型用法代码示例。如果您正苦于以下问题:Java KettleEnvironment.init方法的具体用法?Java KettleEnvironment.init怎么用?Java KettleEnvironment.init使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类org.pentaho.di.core.KettleEnvironment
的用法示例。
在下文中一共展示了KettleEnvironment.init方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: initEnv
import org.pentaho.di.core.KettleEnvironment; //导入方法依赖的package包/类
/**
 * Sets up the Kettle runtime environment from environment variables and
 * performs the one-time engine initialization. <br/>
 * @author jingma
 * @throws KettleException if the Kettle engine fails to initialize
 */
public static void initEnv() throws KettleException {
    String kettleHome = System.getenv("KETTLE_HOME");
    if (kettleHome != null) {
        // Mirror KETTLE_HOME into the system properties Kettle reads;
        // plugins under KETTLE_HOME/plugins are picked up automatically.
        System.setProperty("DI_HOME", kettleHome);
        System.setProperty("KETTLE_HOME", kettleHome);
        System.setProperty("org.osjava.sj.root", kettleHome + "/simple-jndi");
        log.debug("KETTLE_HOME配置[能自动加载该目录下plugins中的插件]:" + kettleHome);
    }
    String jndiRoot = System.getenv("KETTLE_JNDI_ROOT");
    if (jndiRoot != null) {
        // An explicit JNDI root overrides the default derived from KETTLE_HOME.
        System.setProperty("org.osjava.sj.root", jndiRoot);
        log.debug("Simple-jndi配置根路径:" + jndiRoot);
    }
    // Initialize the Kettle engine only once per JVM, identifying as Spoon.
    if (!KettleEnvironment.isInitialized()) {
        KettleEnvironment.init();
        KettleClientEnvironment.getInstance().setClient(KettleClientEnvironment.ClientType.SPOON);
    }
}
示例2: init
import org.pentaho.di.core.KettleEnvironment; //导入方法依赖的package包/类
@Override
public void init(AppView app) throws BeanFactoryException {
    this.app = app;
    this.app.waitCursorBegin();
    try {
        // Boot the Kettle engine; the boolean flag controls extra setup —
        // see KettleEnvironment.init(boolean).
        KettleEnvironment.init(false);
        EnvUtil.environmentInit();
    } catch (KettleException ex) {
        // Surface initialization failures to the user as a warning dialog.
        MessageInf warning =
            new MessageInf(MessageInf.SGN_WARNING, AppLocal.getIntString("message.syncerror"), ex);
        warning.show(this);
    }
    this.app.waitCursorEnd();
}
示例3: main
import org.pentaho.di.core.KettleEnvironment; //导入方法依赖的package包/类
public static void main(String[] args) throws Exception {
    Display display = new Display();
    KettleEnvironment.init();
    PropsUI.init(display, PropsUI.TYPE_PROPERTIES_SPOON);
    Shell shell = new Shell(display);

    // Load the import rules from the XML file and open the editor dialog.
    ImportRules rules = new ImportRules();
    rules.loadXML(XMLHandler.getSubNode(XMLHandler.loadXMLFile("bin/import-rules.xml"), ImportRules.XML_TAG));
    ImportRulesDialog rulesDialog = new ImportRulesDialog(shell, rules);

    // When the dialog is confirmed, dump the (possibly edited) rules to stdout.
    if (rulesDialog.open()) {
        for (ImportRuleInterface rule : rules.getRules()) {
            System.out.println(" - " + rule.toString());
        }
    }
}
示例4: test04_SelectFromWhereGroupBy
import org.pentaho.di.core.KettleEnvironment; //导入方法依赖的package包/类
public void test04_SelectFromWhereGroupBy() throws Exception {
    KettleEnvironment.init();
    String sqlQuery = "SELECT Country, SUM(products_sold) as count, SUM(sales_amount) as sales FROM Service WHERE Category = 'A' GROUP BY Country";
    SqlTransExecutor executor = new SqlTransExecutor(sqlQuery, getServices());

    // Collect every result row written by the generated transformation.
    final List<RowMetaAndData> collected = new ArrayList<RowMetaAndData>();
    executor.executeQuery(new RowAdapter() {
        @Override
        public void rowWrittenEvent(RowMetaInterface rowMeta, Object[] row) throws KettleStepException {
            collected.add(new RowMetaAndData(rowMeta, row));
        }
    });

    // executeQuery() leaves the generated transformation waiting for input;
    // wait for the service transformation to finish before asserting.
    executor.waitUntilFinished();
    assertEquals(4, collected.size());
}
示例5: setUpBeforeClass
import org.pentaho.di.core.KettleEnvironment; //导入方法依赖的package包/类
/**
 * Prepares the in-memory HSQLDB logging database used by the tests:
 * initializes Kettle, connects, and runs the CREATE script from the classpath.
 * The connection and the script stream are now released even on failure
 * (the original leaked both when execStatements/commit threw).
 *
 * @throws Exception if Kettle initialization or any database operation fails
 */
@Before
public void setUpBeforeClass() throws Exception {
    KettleEnvironment.init();
    LoggingObjectInterface log = new SimpleLoggingObject( "junit", LoggingObjectType.GENERAL, null );
    File file = File.createTempFile( JobTrackerExecution.class.getSimpleName(), "" );
    file.deleteOnExit();
    DatabaseMeta databaseMeta =
        new DatabaseMeta( NAME, "Hypersonic", "JDBC", null, "mem:HSQLDB-JUNIT-LOGJOB", null, null, null );
    Database logDataBase = new Database( log, databaseMeta );
    logDataBase.connect();
    try {
        // run sql create for database
        InputStream input = JobTrackerExecution.class.getClassLoader().getResourceAsStream( PKG + CREATE );
        try {
            String sql = getStringFromInput( input );
            logDataBase.execStatements( sql );
            logDataBase.commit( true );
        } finally {
            if ( input != null ) {
                input.close();
            }
        }
    } finally {
        // Always release the connection, even when the script fails.
        logDataBase.disconnect();
    }
}
示例6: setUpLoadSave
import org.pentaho.di.core.KettleEnvironment; //导入方法依赖的package包/类
@Before
public void setUpLoadSave() throws Exception {
    // Kettle engine and plugin registry must be up before meta round-trips.
    KettleEnvironment.init();
    PluginRegistry.init( true );

    // Step-meta attributes that must survive a save/load round trip.
    List<String> fields = Arrays.asList(
        "databaseMeta", "sQL", "rowLimit", "executeEachInputRow",
        "variableReplacementActive", "lazyConversionActive" );

    // No custom accessors or validators are needed for these attributes.
    Map<String, String> getters = new HashMap<String, String>();
    Map<String, String> setters = new HashMap<String, String>();
    Map<String, FieldLoadSaveValidator<?>> fieldValidators = new HashMap<String, FieldLoadSaveValidator<?>>();
    Map<String, FieldLoadSaveValidator<?>> typeValidators = new HashMap<String, FieldLoadSaveValidator<?>>();

    loadSaveTester =
        new LoadSaveTester( testMetaClass, fields, getters, setters, fieldValidators, typeValidators );
}
示例7: setUpLoadSave
import org.pentaho.di.core.KettleEnvironment; //导入方法依赖的package包/类
@Before
// Prepares a LoadSaveTester that verifies the filter-rows step meta survives
// a save/load round trip for its condition and true/false target hops.
public void setUpLoadSave() throws Exception {
// Kettle engine and plugin registry must be up before meta round-trips.
KettleEnvironment.init();
PluginRegistry.init( true );
// XML attribute names of the step meta under test.
List<String> attributes =
Arrays.asList( "condition", "send_true_to", "send_false_to" );
Map<String, String> getterMap = new HashMap<String, String>();
Map<String, String> setterMap = new HashMap<String, String>();
Map<String, FieldLoadSaveValidator<?>> attrValidatorMap = new HashMap<String, FieldLoadSaveValidator<?>>();
attrValidatorMap.put( "condition", new ConditionLoadSaveValidator() );
// NOTE(review): the two keys below ("trueStepName"/"falseStepname" — note the
// inconsistent casing) do not match any entry in the attributes list above
// ("send_true_to"/"send_false_to"), so these validators may never be consulted.
// Verify against LoadSaveTester's validator lookup rules before relying on them.
attrValidatorMap.put( "trueStepName", new StringLoadSaveValidator() );
attrValidatorMap.put( "falseStepname", new StringLoadSaveValidator() );
// Map the XML attribute names to their non-default Java accessors.
getterMap.put( "send_true_to", "getTrueStepname" );
setterMap.put( "send_true_to", "setTrueStepname" );
getterMap.put( "send_false_to", "getFalseStepname" );
setterMap.put( "send_false_to", "setFalseStepname" );
Map<String, FieldLoadSaveValidator<?>> typeValidatorMap = new HashMap<String, FieldLoadSaveValidator<?>>();
loadSaveTester =
new LoadSaveTester( testMetaClass, attributes, getterMap, setterMap, attrValidatorMap, typeValidatorMap );
}
示例8: initEnv
import org.pentaho.di.core.KettleEnvironment; //导入方法依赖的package包/类
/**
 * Logs the effective Kettle configuration and performs the one-time
 * engine initialization. <br/>
 * @author jingma
 * @throws KettleException if the Kettle engine fails to initialize
 */
public static void initEnv() throws KettleException {
    // Report the configuration this JVM will actually run with:
    // KETTLE_HOME (plugins are auto-loaded from its plugins dir),
    // simple-jndi root, and the two logging limits.
    log.debug("KETTLE_HOME配置[能自动加载该目录下plugins中的插件]:" + System.getProperty("KETTLE_HOME"));
    log.debug("Simple-jndi配置根路径:" + System.getProperty("org.osjava.sj.root"));
    log.debug("日志最大行数:" + System.getProperty("KETTLE_MAX_LOG_SIZE_IN_LINES"));
    log.debug("最大日志管道数:" + System.getProperty("KETTLE_MAX_LOGGING_REGISTRY_SIZE"));

    // Initialize the engine only once per JVM, identifying as Spoon.
    if (!KettleEnvironment.isInitialized()) {
        KettleEnvironment.init();
        KettleClientEnvironment.getInstance().setClient(KettleClientEnvironment.ClientType.SPOON);
    }
}
示例9: validateAndInitialiseKettelEngine
import org.pentaho.di.core.KettleEnvironment; //导入方法依赖的package包/类
/**
 * Validates (and if needed creates) the task's output directory, then performs
 * the one-time Kettle engine bootstrap for this JVM.
 * <p>
 * Directory layout: outputLocation/databaseName/tableName/segmentId/taskNo/.
 *
 * @throws GraphGeneratorException if the directory cannot be created or the
 *         Kettle engine fails to initialize
 */
private void validateAndInitialiseKettelEngine() throws GraphGeneratorException {
File file = new File(
outputLocation + File.separator + schemaInfo.getDatabaseName() + File.separator
+ this.tableName + File.separator + this.segmentId + File.separator + this.taskNo
+ File.separator);
boolean isDirCreated = false;
if (!file.exists()) {
isDirCreated = file.mkdirs();
if (!isDirCreated) {
// NOTE(review): this branch is only reached when exists() was false, so
// "directory already exist" is misleading (barring a create race); the
// message also lacks a separator before the path — consider fixing.
LOGGER.error(
"Unable to create directory or directory already exist" + file.getAbsolutePath());
throw new GraphGeneratorException("INTERNAL_SYSTEM_ERROR");
}
}
// Kettle must be initialized exactly once per process; DRIVERS serves as the
// lock guarding the shared kettleInitialized flag.
synchronized (DRIVERS) {
try {
if (!kettleInitialized) {
EnvUtil.environmentInit();
KettleEnvironment.init();
kettleInitialized = true;
}
} catch (KettleException kettlExp) {
LOGGER.error(kettlExp);
throw new GraphGeneratorException("Error While Initializing the Kettel Engine ", kettlExp);
}
}
}
示例10: initKettleEnv
import org.pentaho.di.core.KettleEnvironment; //导入方法依赖的package包/类
/**
 * Boots the Kettle engine for this process. Failures are logged rather than
 * propagated, so callers proceed on a best-effort basis.
 */
private void initKettleEnv() {
    try {
        KettleEnvironment.init(false);
        LOGGER.info("Kettle environment initialized");
    } catch (KettleException kettleException) {
        // Best-effort: report the failure and carry on.
        LOGGER.error("Unable to initialize Kettle Environment " + kettleException.getMessage());
    }
}
示例11: testTextFileOutput2
import org.pentaho.di.core.KettleEnvironment; //导入方法依赖的package包/类
/**
 * Verifies that TextFileOutputMeta defaults createparentfolder to true, both
 * on construction and after an explicit reset via setDefault().
 * @throws Exception
 */
@Test
public void testTextFileOutput2() throws Exception {
    KettleEnvironment.init();
    TextFileOutputMeta meta = new TextFileOutputMeta();
    // The flag must default to true straight after construction...
    assertTrue(meta.isCreateParentFolder());
    // ...and remain true after resetting the meta to its defaults.
    meta.setDefault();
    assertTrue(meta.isCreateParentFolder());
}
示例12: test_MAPPING_MULTI_OUTPUT
import org.pentaho.di.core.KettleEnvironment; //导入方法依赖的package包/类
public void test_MAPPING_MULTI_OUTPUT() throws Exception {
    KettleEnvironment.init();
    // Time-run the multi-output mapping transformation against the target DB.
    TimedTransRunner runner = new TimedTransRunner(
        "test/org/pentaho/di/trans/steps/mapping/multi_output/use filereader.ktr",
        LogLevel.ERROR, getTargetDatabase(), 1000 );
    assertTrue( runner.runEngine( true ) );
    // The run must have completed without a single step error.
    Result outcome = runner.getNewResult();
    assertTrue( outcome.getNrErrors() == 0 );
}
示例13: test03_SelectFromWhere
import org.pentaho.di.core.KettleEnvironment; //导入方法依赖的package包/类
/**
 * Verifies SELECT ... WHERE filtering against the data service: the query for
 * Category 'A' must return exactly 4 rows. As a debugging aid, the generated
 * transformation is also dumped to /tmp/gen.ktr.
 *
 * @throws Exception if query execution or the debug dump fails
 */
public void test03_SelectFromWhere() throws Exception {
    KettleEnvironment.init();
    String sqlQuery = "SELECT Category, Country, products_sold as nr, sales_amount as sales FROM Service WHERE Category = 'A'";
    SqlTransExecutor executor = new SqlTransExecutor(sqlQuery, getServices());
    final List<RowMetaAndData> rows = new ArrayList<RowMetaAndData>();
    // print the eventual result rows...
    //
    executor.executeQuery(new RowAdapter() { @Override
    public void rowWrittenEvent(RowMetaInterface rowMeta, Object[] row) throws KettleStepException {
    rows.add(new RowMetaAndData(rowMeta, row));
    } });
    // Now the generated transformation is waiting for input so we
    // can start the service transformation
    //
    executor.waitUntilFinished();
    // Save to temp file for checking. The stream is now closed in a finally
    // block (the original leaked it when a write threw).
    File file = new File("/tmp/gen.ktr");
    FileOutputStream fos = new FileOutputStream(file);
    try {
        fos.write(org.pentaho.di.core.xml.XMLHandler.getXMLHeader().getBytes("UTF-8"));
        fos.write(executor.getGenTransMeta().getXML().getBytes("UTF-8"));
    } finally {
        fos.close();
    }
    assertEquals(4, rows.size());
}
示例14: setUp
import org.pentaho.di.core.KettleEnvironment; //导入方法依赖的package包/类
@BeforeClass
public static void setUp() throws KettleException {
    // One-time Kettle bootstrap for the whole test class.
    KettleEnvironment.init();
    // Mock harness around TextFileInput's meta/data classes.
    smh = new StepMockHelper<TextFileInputMeta, TextFileInputData>(
        "CsvInputTest", TextFileInputMeta.class, TextFileInputData.class );
    when( smh.logChannelInterfaceFactory.create( any(), any( LoggingObjectInterface.class ) ) )
        .thenReturn( smh.logChannelInterface );
    // The step under test requires the owning transformation to report running.
    when( smh.trans.isRunning() ).thenReturn( true );
}
示例15: executeTransformation
import org.pentaho.di.core.KettleEnvironment; //导入方法依赖的package包/类
/**
 * Runs the transformation stored at {@code ktrPath}, overriding the database
 * connection whose name matches {@code connectionName} with this runner's
 * host/user/password/port/database settings, and applying any configured
 * parameters before execution.
 * <p>
 * Side effect: per-step metrics (line counts and output-hop targets) are
 * appended to {@link #getStepDTOList()}.
 *
 * @param ktrPath path to the .ktr transformation file
 * @return a map whose "transformationExecuted" key is {@code true} when the
 *         run finished without errors, {@code false} otherwise
 */
public Map<String, Object> executeTransformation(String ktrPath) {
    Map<String, Object> executionResult = new HashMap<String, Object>();
    try {
        KettleEnvironment.init();
        EnvUtil.environmentInit();
        TransMeta transMeta = new TransMeta(ktrPath);
        // Redirect the named connection to the configured database.
        for (DatabaseMeta dbMeta : transMeta.getDatabases()) {
            if (dbMeta.getName().equals(this.connectionName)) {
                dbMeta.setHostname(this.dbHostName);
                dbMeta.setUsername(this.dbUerName);
                dbMeta.setPassword(this.dbPassword);
                dbMeta.setDBPort(this.dbPort);
                dbMeta.setDBName(this.dbName);
            }
        }
        Trans transformation = new Trans(transMeta);
        if (this.parameters != null) {
            // Keys/values are already Strings; the original's casts were redundant.
            for (Map.Entry<String, String> entry : this.parameters.entrySet()) {
                transformation.setParameterValue(entry.getKey(), entry.getValue());
            }
        }
        transformation.execute(null);
        transformation.waitUntilFinished();
        // Capture per-step throughput metrics and downstream wiring.
        for (StepMetaDataCombi combi : transformation.getSteps()) {
            StepDTO stepDTO = new StepDTO();
            stepDTO.setStepName(combi.step.getStepname());
            stepDTO.setLinesInput(Long.valueOf(combi.step.getLinesInput()));
            stepDTO.setLinesOutput(Long.valueOf(combi.step.getLinesOutput()));
            stepDTO.setLinesRead(Long.valueOf(combi.step.getLinesRead()));
            stepDTO.setLinesRejected(Long.valueOf(combi.step.getLinesRejected()));
            stepDTO.setLinesUpdated(Long.valueOf(combi.step.getLinesUpdated()));
            stepDTO.setStepDestinationNameList(new ArrayList<String>());
            for (RowSet rowSet : combi.step.getOutputRowSets()) {
                stepDTO.getStepDestinationNameList().add(rowSet.getDestinationStepName());
            }
            this.getStepDTOList().add(stepDTO);
        }
        boolean succeeded = transformation.getErrors() == 0;
        if (!succeeded) {
            // Fixed garbled message (was "Erroruting Transformation").
            System.out.println("Error executing Transformation");
        }
        executionResult.put("transformationExecuted", succeeded);
        return executionResult;
    } catch (Exception e) {
        // NOTE(review): prefer a logger over printStackTrace() if one exists here.
        e.printStackTrace();
        executionResult.put("transformationExecuted", false);
        return executionResult;
    }
}