This article collects typical usage examples of the Java method org.pentaho.di.core.logging.LogWriter.getInstance. If you are wondering what LogWriter.getInstance does, how to call it, or what real-world uses look like, the curated examples below should help. You can also explore further usage examples of the enclosing class, org.pentaho.di.core.logging.LogWriter.
The following shows 15 code examples of the LogWriter.getInstance method, sorted by popularity by default.
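All of the examples share the same basic pattern: LogWriter is obtained as a shared instance via LogWriter.getInstance() (or getInstance(LogWriter.LOG_LEVEL_BASIC) to also set the log level, as in Example 8), and every log call takes a subject string, typically toString() of the caller, followed by the message. The sketch below is a minimal, hedged illustration of that pattern, assuming the Kettle-era LogWriter API used in these examples; it only relies on calls that appear below (getInstance, logBasic, isDetailed, logDetailed, logError), while the class name and message text are invented for illustration.

import org.pentaho.di.core.logging.LogWriter;

public class LogWriterSketch
{
    public void doWork()
    {
        // Obtain the shared logger used throughout these examples.
        LogWriter log = LogWriter.getInstance();

        // First argument is the log "subject" (usually toString() of the caller), second is the message.
        log.logBasic(toString(), "Starting work");

        // Guard more expensive detail messages behind the current log level.
        if (log.isDetailed())
        {
            log.logDetailed(toString(), "Only written at Detailed level or higher");
        }

        try
        {
            // ... the actual work goes here ...
        }
        catch (Exception e)
        {
            log.logError(toString(), "Work failed: " + e.getMessage());
        }
    }
}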
Example 1: getTableFields
import org.pentaho.di.core.logging.LogWriter; // import the package/class the method depends on
public RowMetaInterface getTableFields()
{
    LogWriter log = LogWriter.getInstance();
    RowMetaInterface fields = null;
    if (databaseMeta != null)
    {
        Database db = new Database(databaseMeta);
        databases = new Database[] { db }; // Keep track of this one for cancelQuery
        try
        {
            db.connect();
            String schemaTable = databaseMeta.getQuotedSchemaTableCombination(schemaName, tablename);
            fields = db.getTableFields(schemaTable);
        }
        catch (KettleDatabaseException dbe)
        {
            log.logError(toString(), Messages.getString("DatabaseLookupMeta.ERROR0004.ErrorGettingTableFields") + dbe.getMessage()); //$NON-NLS-1$
        }
        finally
        {
            db.disconnect();
        }
    }
    return fields;
}
Example 2: getTableFields
import org.pentaho.di.core.logging.LogWriter; // import the package/class the method depends on
public RowMetaInterface getTableFields()
{
    LogWriter log = LogWriter.getInstance();
    RowMetaInterface fields = null;
    if (databaseMeta != null)
    {
        Database db = new Database(databaseMeta);
        try
        {
            db.connect();
            fields = db.getTableFields(databaseMeta.getQuotedSchemaTableCombination(schemaName, tableName));
        }
        catch (KettleDatabaseException dbe)
        {
            log.logError(toString(), Messages.getString("DimensionLookupMeta.Log.DatabaseErrorOccurred") + dbe.getMessage()); //$NON-NLS-1$
        }
        finally
        {
            db.disconnect();
        }
    }
    return fields;
}
Example 3: run
import org.pentaho.di.core.logging.LogWriter; // import the package/class the method depends on
public void run()
{
    LogWriter log = LogWriter.getInstance();
    try
    {
        InputStreamReader isr = new InputStreamReader(is);
        BufferedReader br = new BufferedReader(isr);
        String line = null;
        while ((line = br.readLine()) != null)
        {
            log.logBasic(type, line);
        }
    }
    catch (IOException ioe)
    {
        log.logError(type, Const.getStackTracker(ioe));
    }
}
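This run() method is typically the body of a small Runnable that wraps an InputStream (the field is) and a log subject (the field type), so that the output of an external process ends up in the Kettle log line by line. Below is a self-contained sketch of such a wrapper; the class name StreamLogger and the wiring shown in the trailing comments are assumptions for illustration, not part of the original example.

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.logging.LogWriter;

public class StreamLogger implements Runnable
{
    private final InputStream is;
    private final String type;

    public StreamLogger(InputStream is, String type)
    {
        this.is = is;
        this.type = type;
    }

    public void run()
    {
        LogWriter log = LogWriter.getInstance();
        try
        {
            BufferedReader br = new BufferedReader(new InputStreamReader(is));
            String line;
            while ((line = br.readLine()) != null)
            {
                log.logBasic(type, line); // each line of stream output becomes a Basic log entry
            }
        }
        catch (IOException ioe)
        {
            log.logError(type, Const.getStackTracker(ioe));
        }
    }
}

// Typical (assumed) use: capture the output of an external process.
// Process proc = Runtime.getRuntime().exec("mycommand");
// new Thread(new StreamLogger(proc.getInputStream(), "STDOUT")).start();
// new Thread(new StreamLogger(proc.getErrorStream(), "STDERR")).start();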
Example 4: getJobMeta
import org.pentaho.di.core.logging.LogWriter; // import the package/class the method depends on
private JobMeta getJobMeta(Repository rep, VariableSpace space) throws KettleException
{
    try
    {
        if (rep != null && getDirectory() != null)
        {
            return new JobMeta(LogWriter.getInstance(),
                               rep,
                               (space != null ? space.environmentSubstitute(getJobName()) : getJobName()),
                               rep.getDirectoryTree().findDirectory(environmentSubstitute(getDirectory())));
        }
        else
        {
            return new JobMeta(LogWriter.getInstance(),
                               (space != null ? space.environmentSubstitute(getFilename()) : getFilename()),
                               rep, null);
        }
    }
    catch (Exception e)
    {
        throw new KettleException("Unexpected error during job metadata load", e);
    }
}
Example 5: CreateRemoteFolder
import org.pentaho.di.core.logging.LogWriter; // import the package/class the method depends on
/**
 * Create a remote folder if it does not exist yet.
 *
 * @param sftpClient the SFTP client to use
 * @param foldername the folder to create
 * @return true if the folder was created
 */
private boolean CreateRemoteFolder(SFTPv3Client sftpClient, String foldername)
{
    LogWriter log = LogWriter.getInstance();
    boolean retval = false;
    if (!sshDirectoryExists(sftpClient, foldername))
    {
        try
        {
            sftpClient.mkdir(foldername, 0700);
            retval = true;
            if (log.isDetailed()) log.logDetailed(toString(), Messages.getString("JobSSH2GET.Log.RemoteFolderCreated", foldername));
        }
        catch (Exception e)
        {
            log.logError(toString(), Messages.getString("JobSSH2GET.Log.Error.CreatingRemoteFolder", foldername));
        }
    }
    return retval;
}
Example 6: OGRWriter
import org.pentaho.di.core.logging.LogWriter; // import the package/class the method depends on
public OGRWriter(String dataDestinationPath, boolean isFileDataSource, String format, String options, int geomType,
        String layerName, int writeMode, String fidField, boolean isFIDFieldPreserved) {
    this.log = LogWriter.getInstance();
    ogrDataDestinationPath = dataDestinationPath;
    this.isFileDataSource = isFileDataSource;
    error = false;
    ogrLayer = null;
    ogrLayerName = layerName;
    ogrDataFormat = format;
    ogrOptions = options;
    ogrGeomType = geomType;
    ogrWriteMode = writeMode;
    ogrFIDField = fidField;
    preserveFIDField = isFIDFieldPreserved;
    ogrDriver = null;
    //this.encoding = encoding;
    ogrGeometry = null;
    ogrSpatialReference = new SpatialReference();
    ogrDataDestinationOptions = new Vector<String>();
    if (ogrOptions != null) {
        String[] ogr_options = ogrOptions.trim().split(" ");
        for (int i = 0; i < ogr_options.length; i++)
            ogrDataDestinationOptions.addElement(ogr_options[i]);
    }
}
Example 7: GeotoolsWriter
import org.pentaho.di.core.logging.LogWriter; // import the package/class the method depends on
public GeotoolsWriter(URL fileURL, String charset) {
    log = LogWriter.getInstance();
    gisURL = fileURL;
    this.charset = charset;
    error = false;
    sf = null;
    featWriter = null;
    featureType = null;
    factory = null;
    rowMeta = null;
}
Example 8: getCommandLineArgs
import org.pentaho.di.core.logging.LogWriter; // import the package/class the method depends on
public static CommandLineOption[] getCommandLineArgs(List<String> args) {
    CommandLineOption[] clOptions = new CommandLineOption[] {
            new CommandLineOption("rep", "Repository name", new StringBuffer()),
            new CommandLineOption("user", "Repository username", new StringBuffer()),
            new CommandLineOption("pass", "Repository password", new StringBuffer()),
            new CommandLineOption("job", "The name of the job to launch", new StringBuffer()),
            new CommandLineOption("trans", "The name of the transformation to launch", new StringBuffer()),
            new CommandLineOption("dir", "The directory (don't forget the leading /)", new StringBuffer()),
            new CommandLineOption("file", "The filename (Transformation in XML) to launch", new StringBuffer()),
            new CommandLineOption("level", "The logging level (Basic, Detailed, Debug, Rowlevel, Error, Nothing)", new StringBuffer()),
            new CommandLineOption("logfile", "The logging file to write to", new StringBuffer()),
            new CommandLineOption("log", "The logging file to write to (deprecated)", new StringBuffer(), false, true), };

    LogWriter log;
    LogWriter.setConsoleAppenderDebug();
    // start with the default logger until we find out otherwise
    log = LogWriter.getInstance(LogWriter.LOG_LEVEL_BASIC);

    // Parse the options...
    if (!CommandLineOption.parseArguments(args, clOptions, log)) {
        log.logError("Spoon", "Command line option not understood");
        System.exit(8);
    }

    String kettleRepname = Const.getEnvironmentVariable("KETTLE_REPOSITORY", null);
    String kettleUsername = Const.getEnvironmentVariable("KETTLE_USER", null);
    String kettlePassword = Const.getEnvironmentVariable("KETTLE_PASSWORD", null);

    if (!Const.isEmpty(kettleRepname))
        clOptions[0].setArgument(new StringBuffer(kettleRepname));
    if (!Const.isEmpty(kettleUsername))
        clOptions[1].setArgument(new StringBuffer(kettleUsername));
    if (!Const.isEmpty(kettlePassword))
        clOptions[2].setArgument(new StringBuffer(kettlePassword));

    return clOptions;
}
Example 9: JobEntryCopy
import org.pentaho.di.core.logging.LogWriter; // import the package/class the method depends on
public JobEntryCopy(Node entrynode, List<DatabaseMeta> databases, List<SlaveServer> slaveServers, Repository rep) throws KettleXMLException
{
    try
    {
        String stype = XMLHandler.getTagValue(entrynode, "type");
        JobPlugin jobPlugin = JobEntryLoader.getInstance().findJobEntriesWithID(stype);
        if (jobPlugin == null)
            throw new KettleStepLoaderException("No valid step/plugin specified (jobPlugin=null) for " + stype);

        // Get an empty JobEntry of the appropriate class...
        entry = JobEntryLoader.getInstance().getJobEntryClass(jobPlugin);
        if (entry != null)
        {
            // System.out.println("New JobEntryInterface built of type: "+entry.getTypeDesc());
            entry.loadXML(entrynode, databases, slaveServers, rep);

            // Handle GUI information: nr & location?
            setNr(Const.toInt(XMLHandler.getTagValue(entrynode, "nr"), 0));
            setLaunchingInParallel("Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "parallel")));
            setDrawn("Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "draw")));
            int x = Const.toInt(XMLHandler.getTagValue(entrynode, "xloc"), 0);
            int y = Const.toInt(XMLHandler.getTagValue(entrynode, "yloc"), 0);
            setLocation(x, y);
        }
    }
    catch (Throwable e)
    {
        String message = "Unable to read Job Entry copy info from XML node : " + e.toString();
        LogWriter log = LogWriter.getInstance();
        log.logError(toString(), message);
        log.logError(toString(), Const.getStackTracker(e));
        throw new KettleXMLException(message, e);
    }
}
Example 10: RepositoryImportProgressDialog
import org.pentaho.di.core.logging.LogWriter; // import the package/class the method depends on
public RepositoryImportProgressDialog(Shell parent, int style, Repository rep, String fileDirectory,
        String[] filenames, RepositoryDirectory baseDirectory)
{
    super(parent, style);
    this.log = LogWriter.getInstance();
    this.props = PropsUI.getInstance();
    this.parent = parent;
    this.rep = rep;
    this.fileDirectory = fileDirectory;
    this.filenames = filenames;
    this.baseDirectory = baseDirectory;
    rep.setImportBaseDirectory(baseDirectory);
}
Example 11: execute
import org.pentaho.di.core.logging.LogWriter; // import the package/class the method depends on
public Result execute(Result previousResult, int nr, Repository rep, Job parentJob)
{
    LogWriter log = LogWriter.getInstance();
    Result result = previousResult;
    result.setResult(false);

    if (filename != null)
    {
        String realFilename = getRealFilename();
        try
        {
            FileObject file = KettleVFS.getFileObject(realFilename);
            if (file.exists() && file.isReadable())
            {
                log.logDetailed(toString(), Messages.getString("JobEntryFileExists.File_Exists", realFilename)); //$NON-NLS-1$
                result.setResult(true);
            }
            else
            {
                log.logDetailed(toString(), Messages.getString("JobEntryFileExists.File_Does_Not_Exist", realFilename)); //$NON-NLS-1$
            }
        }
        catch (IOException e)
        {
            result.setNrErrors(1);
            log.logError(toString(), Messages.getString("JobEntryFileExists.ERROR_0004_IO_Exception", e.toString())); //$NON-NLS-1$
        }
    }
    else
    {
        result.setNrErrors(1);
        log.logError(toString(), Messages.getString("JobEntryFileExists.ERROR_0005_No_Filename_Defined")); //$NON-NLS-1$
    }
    return result;
}
Example 12: EnterListDialog
import org.pentaho.di.core.logging.LogWriter; // import the package/class the method depends on
public EnterListDialog(Shell parent, int style, String input[])
{
    super(parent, style);
    this.log = LogWriter.getInstance();
    this.props = PropsUI.getInstance();
    this.input = input;
    this.retval = null;
    selection = new Hashtable<Integer, String>();
    opened = false;
}
Example 13: saveRep
import org.pentaho.di.core.logging.LogWriter; // import the package/class the method depends on
public void saveRep(Repository rep, long id_transformation) throws KettleException
{
    LogWriter log = LogWriter.getInstance();
    try
    {
        log.logDebug(toString(), Messages.getString("StepMeta.Log.SaveNewStep")); //$NON-NLS-1$

        // Insert new Step in repository
        setID(rep.insertStep(id_transformation,
                             getName(),
                             getDescription(),
                             getStepID(),
                             distributes,
                             copies,
                             getLocation() == null ? -1 : getLocation().x,
                             getLocation() == null ? -1 : getLocation().y,
                             isDrawn()));

        // Save partitioning selection for the step
        stepPartitioningMeta.saveRep(rep, id_transformation, getID());

        // The id_step is known, as well as the id_transformation
        // This means we can now save the attributes of the step...
        log.logDebug(toString(), Messages.getString("StepMeta.Log.SaveStepDetails")); //$NON-NLS-1$
        stepMetaInterface.saveRep(rep, id_transformation, getID());

        // Save the clustering schema that was chosen.
        rep.saveStepAttribute(id_transformation, getID(), "cluster_schema", clusterSchema == null ? "" : clusterSchema.getName());
    }
    catch (KettleException e)
    {
        throw new KettleException(Messages.getString("StepMeta.Exception.UnableToSaveStepInfo", String.valueOf(id_transformation)), e); //$NON-NLS-1$
    }
}
Example 14: ProcessFile
import org.pentaho.di.core.logging.LogWriter; // import the package/class the method depends on
private boolean ProcessFile(String filename, String wildcard, Job parentJob) {
    LogWriter log = LogWriter.getInstance();
    boolean rcode = false;
    FileObject filefolder = null;
    String realFilefoldername = environmentSubstitute(filename);
    String realwildcard = environmentSubstitute(wildcard);

    try {
        filefolder = KettleVFS.getFileObject(realFilefoldername);

        // Here gc() is explicitly called if e.g. createfile is used in the same
        // job for the same file. The problem is that after creating the file the
        // file object is not properly garbage collected and thus the file cannot
        // be deleted anymore. This is a known problem in the JVM.
        System.gc();

        if (filefolder.exists()) {
            // the file or folder exists
            if (filefolder.getType() == FileType.FOLDER) {
                // It's a folder
                if (log.isDetailed())
                    log.logDetailed(toString(), Messages.getString("JobEntryDeleteFiles.ProcessingFolder", realFilefoldername)); //$NON-NLS-1$
                // Delete Files
                int Nr = filefolder.delete(new TextFileSelector(filefolder.toString(), realwildcard, parentJob));
                if (log.isDetailed())
                    log.logDetailed(toString(), Messages.getString("JobEntryDeleteFiles.TotalDeleted", String.valueOf(Nr))); //$NON-NLS-1$
                rcode = true;
            } else {
                // It's a file
                if (log.isDetailed())
                    log.logDetailed(toString(), Messages.getString("JobEntryDeleteFiles.ProcessingFile", realFilefoldername)); //$NON-NLS-1$
                boolean deleted = filefolder.delete();
                if (!deleted) {
                    log.logError(toString(), Messages.getString("JobEntryDeleteFiles.CouldNotDeleteFile", realFilefoldername)); //$NON-NLS-1$
                } else {
                    if (log.isBasic())
                        log.logBasic(toString(), Messages.getString("JobEntryDeleteFiles.FileDeleted", filename)); //$NON-NLS-1$
                    rcode = true;
                }
            }
        } else {
            // File already deleted, no reason to try to delete it
            if (log.isBasic())
                log.logBasic(toString(), Messages.getString("JobEntryDeleteFiles.FileAlreadyDeleted", realFilefoldername)); //$NON-NLS-1$
            rcode = true;
        }
    } catch (IOException e) {
        log.logError(toString(), Messages.getString("JobEntryDeleteFiles.CouldNotProcess", realFilefoldername, e.getMessage())); //$NON-NLS-1$
    } finally {
        if (filefolder != null) {
            try {
                filefolder.close();
            } catch (IOException ex) {
                // ignore
            }
        }
    }
    return rcode;
}
Example 15: KMLReader
import org.pentaho.di.core.logging.LogWriter; // import the package/class the method depends on
public KMLReader(java.net.URL fileURL) {
    log = LogWriter.getInstance();
    kmlURL = fileURL;
    kml = KmlFactory.createKml();
    k = 0;
}
}