本文整理汇总了Java中org.apache.sqoop.util.PerfCounters.startClock方法的典型用法代码示例。如果您正苦于以下问题:Java PerfCounters.startClock方法的具体用法?Java PerfCounters.startClock怎么用?Java PerfCounters.startClock使用的例子?那么, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类org.apache.sqoop.util.PerfCounters
的用法示例。
在下文中一共展示了PerfCounters.startClock方法的8个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: runJob
import org.apache.sqoop.util.PerfCounters; //导入方法依赖的package包/类
/**
 * Submits the export job and logs transfer statistics when it completes.
 *
 * @param job the fully configured MapReduce job to submit
 * @return true if the job finished successfully
 */
@Override
protected boolean runJob(Job job) throws ClassNotFoundException, IOException,
    InterruptedException {
  PerfCounters timer = new PerfCounters();
  timer.startClock();
  boolean succeeded = doSubmitJob(job);
  timer.stopClock();

  Counters jobCounters = job.getCounters();
  if (jobCounters == null) {
    // Counters become unavailable once the framework retires the job.
    displayRetiredJobNotice(LOG);
  } else {
    long hdfsBytesRead = jobCounters.getGroup("FileSystemCounters")
        .findCounter("HDFS_BYTES_READ").getValue();
    timer.addBytes(hdfsBytesRead);
    LOG.info("Transferred " + timer.toString());
    long numRecords = ConfigurationHelper.getNumMapInputRecords(job);
    LOG.info("Exported " + numRecords + " records.");
  }
  return succeeded;
}
示例2: runJob
import org.apache.sqoop.util.PerfCounters; //导入方法依赖的package包/类
/**
 * Runs the export job under a PerfCounters stopwatch and reports the
 * byte and record totals once it returns.
 *
 * @param job the MapReduce job to run
 * @return whether the job succeeded
 */
@Override
protected boolean runJob(Job job) throws ClassNotFoundException, IOException,
    InterruptedException {
  final PerfCounters perf = new PerfCounters();
  perf.startClock();
  final boolean ok = doSubmitJob(job);
  perf.stopClock();

  final Counters counters = job.getCounters();
  if (counters != null) {
    perf.addBytes(counters.getGroup("FileSystemCounters")
        .findCounter("HDFS_BYTES_READ").getValue());
    LOG.info("Transferred " + perf.toString());
    LOG.info("Exported "
        + ConfigurationHelper.getNumMapInputRecords(job) + " records.");
  } else {
    // A retired job no longer exposes its counters.
    displayRetiredJobNotice(LOG);
  }
  return ok;
}
示例3: runJob
import org.apache.sqoop.util.PerfCounters; //导入方法依赖的package包/类
/**
 * Actually run the MapReduce import job, timing it and logging the
 * bytes written and records retrieved.
 *
 * @param job the MapReduce job to execute
 * @return true on successful completion
 */
@Override
protected boolean runJob(Job job) throws ClassNotFoundException, IOException,
    InterruptedException {
  PerfCounters stopwatch = new PerfCounters();
  stopwatch.startClock();
  boolean completed = job.waitForCompletion(true);
  stopwatch.stopClock();

  Counters jobCounters = job.getCounters();
  if (jobCounters == null) {
    // Retired jobs no longer report counters.
    displayRetiredJobNotice(LOG);
  } else {
    long bytesWritten = jobCounters.getGroup("FileSystemCounters")
        .findCounter("HDFS_BYTES_WRITTEN").getValue();
    stopwatch.addBytes(bytesWritten);
    LOG.info("Transferred " + stopwatch.toString());
    long numRecords = ConfigurationHelper.getNumMapOutputRecords(job);
    LOG.info("Retrieved " + numRecords + " records.");
  }
  return completed;
}
示例4: runJob
import org.apache.sqoop.util.PerfCounters; //导入方法依赖的package包/类
/**
 * Waits for the export job to complete and logs timing plus the
 * number of bytes read and records exported.
 *
 * @param job the MapReduce job to run to completion
 * @return true if the job succeeded
 */
@Override
protected boolean runJob(Job job) throws ClassNotFoundException, IOException,
    InterruptedException {
  final PerfCounters perf = new PerfCounters();
  perf.startClock();
  final boolean succeeded = job.waitForCompletion(true);
  perf.stopClock();

  final Counters counters = job.getCounters();
  if (counters != null) {
    perf.addBytes(counters.getGroup("FileSystemCounters")
        .findCounter("HDFS_BYTES_READ").getValue());
    LOG.info("Transferred " + perf.toString());
    LOG.info("Exported "
        + ConfigurationHelper.getNumMapInputRecords(job) + " records.");
  } else {
    // Counters may be gone if the job has already been retired.
    displayRetiredJobNotice(LOG);
  }
  return succeeded;
}
示例5: runJob
import org.apache.sqoop.util.PerfCounters; //导入方法依赖的package包/类
/**
 * Actually run the MapReduce job, invoking the HCatalog output
 * committer for local-mode HCat jobs, and log transfer statistics.
 *
 * @param job the MapReduce job to submit
 * @return true if the job completed successfully
 */
@Override
protected boolean runJob(Job job) throws ClassNotFoundException, IOException,
    InterruptedException {
  PerfCounters timer = new PerfCounters();
  timer.startClock();
  boolean succeeded = doSubmitJob(job);
  if (isHCatJob) {
    // Local-mode HCat jobs need the output committer driven explicitly.
    SqoopHCatUtilities.instance().invokeOutputCommitterForLocalMode(job);
  }
  timer.stopClock();

  Counters jobCounters = job.getCounters();
  if (jobCounters == null) {
    // Retired jobs no longer expose their counters.
    displayRetiredJobNotice(LOG);
  } else {
    long bytesWritten = jobCounters.getGroup("FileSystemCounters")
        .findCounter("HDFS_BYTES_WRITTEN").getValue();
    timer.addBytes(bytesWritten);
    LOG.info("Transferred " + timer.toString());
    long numRecords = ConfigurationHelper.getNumMapOutputRecords(job);
    LOG.info("Retrieved " + numRecords + " records.");
  }
  return succeeded;
}
示例6: map
import org.apache.sqoop.util.PerfCounters; //导入方法依赖的package包/类
/**
 * Pumps records read from the Netezza external-table FIFO into the
 * MapReduce context, one line per record, while a companion JDBC thread
 * drives the external-table statement.
 *
 * Fix: the original called {@code extTableThread.getExcepton()} — a
 * misspelled accessor; the sibling mapper in this file uses
 * {@code getException()}, which is the correct name.
 *
 * @param dataSliceId the Netezza data slice this mapper handles
 * @param val unused placeholder value
 * @param context MapReduce context receiving the output records
 */
public void map(Integer dataSliceId, NullWritable val, Context context)
    throws IOException, InterruptedException {
  conf = context.getConfiguration();
  dbc = new DBConfiguration(conf);
  numMappers = ConfigurationHelper.getConfNumMaps(conf);
  // Record delimiter appended to every forwarded line.
  char rd = (char) conf.getInt(DelimiterSet.OUTPUT_RECORD_DELIM_KEY, '\n');
  initNetezzaExternalTableImport(dataSliceId);
  counter = new PerfCounters();
  counter.startClock();
  Text outputRecord = new Text();
  if (extTableThread.isAlive()) {
    try {
      String inputRecord = recordReader.readLine();
      while (inputRecord != null) {
        if (Thread.interrupted()) {
          // Interrupt signals a problem in the JDBC thread; only stop
          // if that thread has actually died.
          if (!extTableThread.isAlive()) {
            break;
          }
        }
        outputRecord.set(inputRecord + rd);
        // May be we should set the output to be String for faster performance
        // There is no real benefit in changing it to Text and then
        // converting it back in our case
        context.write(outputRecord, NullWritable.get());
        counter.addBytes(1 + inputRecord.length());
        inputRecord = recordReader.readLine();
      }
    } finally {
      recordReader.close();
      extTableThread.join();
      counter.stopClock();
      LOG.info("Transferred " + counter.toString());
      if (extTableThread.hasExceptions()) {
        extTableThread.printException();
        // Corrected accessor name (was getExcepton()).
        throw new IOException(extTableThread.getException());
      }
    }
  }
}
示例7: map
import org.apache.sqoop.util.PerfCounters; //导入方法依赖的package包/类
/**
 * Streams lines produced by the Netezza external-table FIFO to the
 * output via {@code writeRecord}, while the companion JDBC thread runs
 * the external-table statement; tracks bytes moved with PerfCounters.
 *
 * @param dataSliceId the Netezza data slice assigned to this mapper
 * @param val unused placeholder value
 * @param context MapReduce context used when emitting records
 */
public void map(Integer dataSliceId, NullWritable val, Context context)
    throws IOException, InterruptedException {
  conf = context.getConfiguration();
  dbc = new DBConfiguration(conf);
  numMappers = ConfigurationHelper.getConfNumMaps(conf);
  // Delimiter appended to each record before it is written out.
  char recordDelim =
      (char) conf.getInt(DelimiterSet.OUTPUT_RECORD_DELIM_KEY, '\n');
  initNetezzaExternalTableImport(dataSliceId);
  counter = new PerfCounters();
  counter.startClock();
  Text line = new Text();
  if (extTableThread.isAlive()) {
    try {
      for (String rec = recordReader.readLine(); rec != null;
          rec = recordReader.readLine()) {
        if (Thread.interrupted()) {
          // An interrupt means the JDBC thread hit trouble; bail out
          // only once that thread has actually terminated.
          if (!extTableThread.isAlive()) {
            break;
          }
        }
        line.set(rec + recordDelim);
        // May be we should set the output to be String for faster performance
        // There is no real benefit in changing it to Text and then
        // converting it back in our case
        writeRecord(line, context);
        counter.addBytes(1 + rec.length());
      }
    } finally {
      recordReader.close();
      extTableThread.join();
      counter.stopClock();
      LOG.info("Transferred " + counter.toString());
      if (extTableThread.hasExceptions()) {
        extTableThread.printException();
        throw new IOException(extTableThread.getException());
      }
    }
  }
}
开发者ID:aliyun,项目名称:aliyun-maxcompute-data-collectors,代码行数:42,代码来源:NetezzaExternalTableImportMapper.java
示例8: initNetezzaExternalTableExport
import org.apache.sqoop.util.PerfCounters; //导入方法依赖的package包/类
/**
 * Prepares the Netezza external-table export for this task: creates a
 * local named FIFO, opens a JDBC connection, and starts the statement
 * runner thread that will read from the FIFO, then opens the FIFO for
 * writing and starts the transfer clock.
 *
 * Fix: the FIFO-creation failure message said "import" even though this
 * is the export initializer; corrected to "export".
 *
 * @param context MapReduce context supplying the task configuration
 * @throws IOException if the FIFO cannot be created or the JDBC
 *         connection cannot be established
 */
private void initNetezzaExternalTableExport(Context context)
    throws IOException {
  this.conf = context.getConfiguration();
  dbc = new DBConfiguration(conf);
  File taskAttemptDir = TaskId.getLocalWorkPath(conf);
  this.outputDelimiters = new DelimiterSet(',', '\n', '\000', '\\', false);
  // Local FIFO through which this task streams records to Netezza.
  this.fifoFile = new File(taskAttemptDir, ("nzexttable-export.txt"));
  String filename = fifoFile.toString();
  NamedFifo nf;
  // Create the FIFO itself.
  try {
    nf = new NamedFifo(this.fifoFile);
    nf.create();
  } catch (IOException ioe) {
    // Command failed.
    LOG.error("Could not create FIFO file " + filename);
    this.fifoFile = null;
    throw new IOException(
        "Could not create FIFO for netezza external table export", ioe);
  }
  String sqlStmt = getSqlStatement();
  boolean cleanup = false;
  try {
    con = dbc.getConnection();
    extTableThread = new NetezzaJDBCStatementRunner(Thread.currentThread(),
        con, sqlStmt);
  } catch (SQLException sqle) {
    cleanup = true;
    throw new IOException(sqle);
  } catch (ClassNotFoundException cnfe) {
    throw new IOException(cnfe);
  } finally {
    // The statement-runner thread now owns the connection; drop our
    // reference (closing it first only if acquisition failed).
    if (con != null && cleanup) {
      try {
        con.close();
      } catch (Exception e) {
        LOG.debug("Exception closing connection " + e.getMessage());
      }
    }
    con = null;
  }
  counter = new PerfCounters();
  extTableThread.start();
  // We start the JDBC thread first in this case as we want the FIFO reader to
  // be running.
  recordWriter = new BufferedOutputStream(new FileOutputStream(nf.getFile()));
  counter.startClock();
}