本文整理匯總了Java中org.apache.hadoop.mapreduce.MapContext.getInputSplit方法的典型用法代碼示例。如果您正苦於以下問題:Java MapContext.getInputSplit方法的具體用法?Java MapContext.getInputSplit怎麼用?Java MapContext.getInputSplit使用的例子?那麼, 這裏精選的方法代碼示例或許可以為您提供幫助。您也可以進一步了解該方法所在類org.apache.hadoop.mapreduce.MapContext
的用法示例。
在下文中一共展示了MapContext.getInputSplit方法的5個代碼示例,這些例子默認根據受歡迎程度排序。您可以為喜歡或者感覺有用的代碼點讚,您的評價將有助於系統推薦出更棒的Java代碼示例。
示例1: writeDebugHeader
import org.apache.hadoop.mapreduce.MapContext; //導入方法依賴的package包/類
/**
 * Emits a diagnostic header for the current task via {@code processError}:
 * the streaming command, the task start time, and — when this task is a map
 * task — details of the {@link FileSplit} wrapped by the task's PigSplit
 * (file path, start offset, and length).
 */
private void writeDebugHeader() {
    processError("===== Task Information Header =====");
    processError("\nCommand: " + command);
    processError("\nStart time: " + new Date(System.currentTimeMillis()));
    if (job.getBoolean("mapred.task.is.map", false)) {
        MapContext mapContext = (MapContext) PigMapReduce.sJobContext;
        PigSplit split = (PigSplit) mapContext.getInputSplit();
        InputSplit underlying = split.getWrappedSplit();
        // Only file-backed splits carry path/offset/length information.
        if (underlying instanceof FileSplit) {
            FileSplit fileSplit = (FileSplit) underlying;
            processError("\nInput-split file: " + fileSplit.getPath().toString());
            processError("\nInput-split start-offset: " + Long.toString(fileSplit.getStart()));
            processError("\nInput-split length: " + Long.toString(fileSplit.getLength()));
        }
    }
    processError("\n===== * * * =====\n");
}
示例2: getInputSplit
import org.apache.hadoop.mapreduce.MapContext; //導入方法依賴的package包/類
/**
 * Returns the input split of the wrapped context, or {@code null} when the
 * wrapped context is not a {@link MapContext} (e.g. during the reduce phase).
 */
@Override
public InputSplit getInputSplit() {
    // Guard clause: only a map-side context carries an input split.
    if (!(base instanceof MapContext)) {
        return null;
    }
    @SuppressWarnings("unchecked")
    MapContext<KEYIN, VALUEIN, KEYOUT, VALUEOUT> mapContext =
        (MapContext<KEYIN, VALUEIN, KEYOUT, VALUEOUT>) base;
    return mapContext.getInputSplit();
}
示例3: map
import org.apache.hadoop.mapreduce.MapContext; //導入方法依賴的package包/類
/**
 * Maps one Avro record: derives a timestamp from the input file's dated
 * directory path (cached per input split), tags the collector with that
 * time, and delegates the record's datum to the wrapped mapper.
 *
 * @param inputObj the incoming key; expected to be an {@code AvroKey<GenericRecord>}
 * @param context  the map context, used to identify the current input split
 * @throws IOException          on I/O failure, or wrapping an interrupt from
 *                              the delegate mapper
 * @throws InterruptedException if the framework interrupts the task
 */
@SuppressWarnings("unchecked")
@Override
public void map(Object inputObj, MapContext<Object,Object,Object,Object> context) throws IOException, InterruptedException
{
    long time;
    // Reference comparison is deliberate: the framework passes the same split
    // instance for every record in a split, so the path-to-date parse below
    // runs only once per split.
    if (_lastSplit == context.getInputSplit())
    {
        time = _lastTime;
    }
    else
    {
        // NOTE(review): assumes the split is always a FileSplit under a
        // nested dated path — confirm against the job's input format.
        _lastSplit = (FileSplit)context.getInputSplit();
        time = PathUtils.getDateForNestedDatedPath((_lastSplit).getPath().getParent()).getTime();
        _lastTime = time;
    }
    getMapCollector().setContext(context);
    // Set the time, representing the time range this data was derived from.
    // The key is tagged with this time.
    getMapCollector().setTime(time);
    try
    {
        AvroKey<GenericRecord> input = (AvroKey<GenericRecord>)inputObj;
        getMapper().map(input.datum(),getMapCollector());
    }
    catch (InterruptedException e)
    {
        // Restore the interrupt status before translating the exception so
        // callers up the stack can still observe that the thread was
        // interrupted (the original code silently swallowed the flag).
        Thread.currentThread().interrupt();
        throw new IOException(e);
    }
}
示例4: writeDebugHeader
import org.apache.hadoop.mapreduce.MapContext; //導入方法依賴的package包/類
/**
 * Emits a diagnostic header for the current task via {@code processError}:
 * the streaming command, the task start time, and — when this task is a map
 * task — one line per file-backed split wrapped by the task's PigSplit,
 * giving path, start offset, and length.
 */
private void writeDebugHeader() {
    processError("===== Task Information Header =====");
    processError("\nCommand: " + command);
    processError("\nStart time: " + new Date(System.currentTimeMillis()));
    if (job.getBoolean("mapred.task.is.map", false)) {
        MapContext mapContext = (MapContext) PigMapReduce.sJobContext;
        PigSplit split = (PigSplit) mapContext.getInputSplit();
        int splitCount = split.getNumPaths();
        processError("\nPigSplit contains " + splitCount + " wrappedSplits.");
        for (int idx = 0; idx < splitCount; idx++) {
            InputSplit underlying = split.getWrappedSplit(idx);
            // Only file-backed splits carry path/offset/length information.
            if (!(underlying instanceof FileSplit)) {
                continue;
            }
            FileSplit fileSplit = (FileSplit) underlying;
            StringBuilder line = new StringBuilder();
            line.append("\nInput-split: file=")
                .append(fileSplit.getPath())
                .append(" start-offset=")
                .append(Long.toString(fileSplit.getStart()))
                .append(" length=")
                .append(Long.toString(fileSplit.getLength()));
            processError(line.toString());
        }
    }
    processError("\n===== * * * =====\n");
}
示例5: writeDebugHeader
import org.apache.hadoop.mapreduce.MapContext; //導入方法依賴的package包/類
/**
 * Emits a diagnostic header for the current task via {@code processError}:
 * the streaming command, the task start time, and — when this task is a map
 * task ({@code MRConfiguration.TASK_IS_MAP}) — one line per file-backed
 * split wrapped by the task's PigSplit, giving path, start offset, and length.
 */
private void writeDebugHeader() {
    processError("===== Task Information Header =====");
    processError("\nCommand: " + command);
    processError("\nStart time: " + new Date(System.currentTimeMillis()));
    if (job.getBoolean(MRConfiguration.TASK_IS_MAP, false)) {
        MapContext ctx = (MapContext) PigMapReduce.sJobContext;
        PigSplit pigSplit = (PigSplit) ctx.getInputSplit();
        int total = pigSplit.getNumPaths();
        processError("\nPigSplit contains " + total + " wrappedSplits.");
        for (int i = 0; i < total; i++) {
            InputSplit inner = pigSplit.getWrappedSplit(i);
            // Only file-backed splits carry path/offset/length information.
            if (inner instanceof FileSplit) {
                FileSplit fs = (FileSplit) inner;
                // String concatenation of a long is equivalent to Long.toString.
                String detail = "\nInput-split: file=" + fs.getPath()
                        + " start-offset=" + fs.getStart()
                        + " length=" + fs.getLength();
                processError(detail);
            }
        }
    }
    processError("\n===== * * * =====\n");
}