本文整理匯總了Java中org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigSplit.getNumPaths方法的典型用法代碼示例。如果您正苦於以下問題:Java PigSplit.getNumPaths方法的具體用法?Java PigSplit.getNumPaths怎麽用?Java PigSplit.getNumPaths使用的例子?那麽,這裏精選的方法代碼示例或許可以為您提供幫助。您也可以進一步了解該方法所在類org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigSplit的用法示例。
在下文中一共展示了PigSplit.getNumPaths方法的11個代碼示例,這些例子默認根據受歡迎程度排序。您可以為喜歡或者感覺有用的代碼點讚,您的評價將有助於係統推薦出更棒的Java代碼示例。
示例1: writeDebugHeader
import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigSplit; //導入方法依賴的package包/類
/**
 * Emits a diagnostic "Task Information Header" to the error stream: the
 * command being executed, the start time and, for map tasks, one line per
 * file-backed split wrapped by the current PigSplit (path, offset, length).
 */
private void writeDebugHeader() {
    processError("===== Task Information Header =====");
    processError("\nCommand: " + command);
    processError("\nStart time: " + new Date(System.currentTimeMillis()));
    // Wrapped-split details are only available on the map side.
    if (job.getBoolean("mapred.task.is.map", false)) {
        MapContext mapContext = (MapContext) PigMapReduce.sJobContext;
        PigSplit split = (PigSplit) mapContext.getInputSplit();
        int wrappedCount = split.getNumPaths();
        processError("\nPigSplit contains " + wrappedCount + " wrappedSplits.");
        StringBuilder line = new StringBuilder();
        for (int idx = 0; idx < wrappedCount; idx++) {
            InputSplit inner = split.getWrappedSplit(idx);
            // Only file-backed splits expose path/offset/length worth logging.
            if (!(inner instanceof FileSplit)) {
                continue;
            }
            FileSplit fileSplit = (FileSplit) inner;
            line.append("\nInput-split: file=")
                .append(fileSplit.getPath())
                .append(" start-offset=")
                .append(Long.toString(fileSplit.getStart()))
                .append(" length=")
                .append(Long.toString(fileSplit.getLength()));
            processError(line.toString());
            // Reuse the builder for the next wrapped split.
            line.setLength(0);
        }
    }
    processError("\n===== * * * =====\n");
}
示例2: test1
import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigSplit; //導入方法依賴的package包/類
/**
 * Three raw splits (500 + 400 bytes on l1/l2/l3, 400 bytes on l1/l4/l5)
 * should be combined into two PigSplits, grouped by shared locations with
 * members ordered largest-first.
 */
@Test
public void test1() throws IOException, InterruptedException {
    ArrayList<InputSplit> rawSplits = new ArrayList<InputSplit>();
    rawSplits.add(new DummyInputSplit(500, new String[] { "l1", "l2", "l3" }));
    rawSplits.add(new DummyInputSplit(400, new String[] { "l1", "l2", "l3" }));
    rawSplits.add(new DummyInputSplit(400, new String[] { "l1", "l4", "l5" }));
    List<InputSplit> combined = pigInputFormat.getPigSplits(rawSplits, 0, ok,
            null, true, conf);
    Assert.assertEquals(combined.size(), 2);
    for (int pos = 0; pos < combined.size(); pos++) {
        PigSplit pigSplit = (PigSplit) combined.get(pos);
        int wrapped = pigSplit.getNumPaths();
        if (pos == 0) {
            // First combined split: the two co-located 500/400-byte splits.
            Assert.assertEquals(2, wrapped);
            checkLocations(pigSplit.getLocations(),
                    new String[] { "l1", "l2", "l3" });
            Assert.assertEquals(500, pigSplit.getLength(0));
            Assert.assertEquals(400, pigSplit.getLength(1));
        } else {
            // Second combined split: the lone 400-byte split on l1/l4/l5.
            Assert.assertEquals(1, wrapped);
            checkLocations(pigSplit.getLocations(),
                    new String[] { "l1", "l4", "l5" });
            Assert.assertEquals(400, pigSplit.getLength(0));
        }
    }
}
示例3: test3
import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigSplit; //導入方法依賴的package包/類
/**
 * Splits of 500/200/100 bytes across overlapping racks all fit into one
 * combined PigSplit; members are ordered largest-first and the merged split
 * reports the union of all source locations.
 */
@Test
public void test3() throws IOException, InterruptedException {
    ArrayList<InputSplit> rawSplits = new ArrayList<InputSplit>();
    rawSplits.add(new DummyInputSplit(500, new String[] { "l1", "l2", "l3" }));
    rawSplits.add(new DummyInputSplit(200, new String[] { "l1", "l2", "l3" }));
    rawSplits.add(new DummyInputSplit(100, new String[] { "l1", "l4", "l5" }));
    List<InputSplit> combined = pigInputFormat.getPigSplits(rawSplits, 0, ok,
            null, true, conf);
    Assert.assertEquals(1, combined.size());
    // Exactly one split was asserted above, so inspect it directly.
    PigSplit pigSplit = (PigSplit) combined.get(0);
    Assert.assertEquals(3, pigSplit.getNumPaths());
    checkLocations(pigSplit.getLocations(),
            new String[] { "l1", "l2", "l3", "l4", "l5" });
    Assert.assertEquals(500, pigSplit.getLength(0));
    Assert.assertEquals(200, pigSplit.getLength(1));
    Assert.assertEquals(100, pigSplit.getLength(2));
}
示例4: test8
import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigSplit; //導入方法依賴的package包/類
/**
 * Two 100-byte splits on l1/l2/l3 plus a 200-byte split on l1/l4/l5 are
 * small enough to be merged into a single PigSplit, ordered largest-first.
 */
@Test
public void test8() throws IOException, InterruptedException {
    ArrayList<InputSplit> rawSplits = new ArrayList<InputSplit>();
    rawSplits.add(new DummyInputSplit(100, new String[] {
            "l1", "l2", "l3"
    }));
    rawSplits.add(new DummyInputSplit(100, new String[] {
            "l1", "l2", "l3"
    }));
    rawSplits.add(new DummyInputSplit(200, new String[] {
            "l1", "l4", "l5"
    }));
    List<InputSplit> result = pigInputFormat.getPigSplits(rawSplits, 0, ok,
            null, true, conf);
    // JUnit convention: expected value first, actual second (was swapped).
    Assert.assertEquals(1, result.size());
    for (InputSplit split : result) {
        PigSplit pigSplit = (PigSplit) split;
        Assert.assertEquals(3, pigSplit.getNumPaths());
        // A merged split's locations are the union of its members' racks.
        checkLocations(pigSplit.getLocations(), new String[] {
                "l1", "l2", "l3", "l4", "l5"
        });
        // Wrapped splits are ordered by descending size.
        Assert.assertEquals(200, pigSplit.getLength(0));
        Assert.assertEquals(100, pigSplit.getLength(1));
        Assert.assertEquals(100, pigSplit.getLength(2));
    }
    // NOTE: removed the dead `index` counter that was incremented but never read.
}
示例5: test9
import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigSplit; //導入方法依賴的package包/類
/**
 * Verifies that a combined split reports its locations ordered by the amount
 * of data each host serves, and that only distinct hosts appear (PIG-1648).
 */
@Test
public void test9() throws IOException, InterruptedException {
    ArrayList<InputSplit> rawSplits = new ArrayList<InputSplit>();
    rawSplits.add(new DummyInputSplit(100, new String[] {
            "l1", "l2", "l3"
    }));
    rawSplits.add(new DummyInputSplit(200, new String[] {
            "l3", "l4", "l5"
    }));
    rawSplits.add(new DummyInputSplit(400, new String[] {
            "l5", "l6", "l1"
    }));
    List<InputSplit> result = pigInputFormat.getPigSplits(rawSplits, 0, ok,
            null, true, conf);
    // JUnit convention: expected value first, actual second (was swapped).
    Assert.assertEquals(1, result.size());
    for (InputSplit split : result) {
        PigSplit pigSplit = (PigSplit) split;
        Assert.assertEquals(3, pigSplit.getNumPaths());
        // only 5 locations are in list: refer to PIG-1648 for more details
        checkLocationOrdering(pigSplit.getLocations(), new String[] {
                "l5", "l1", "l6", "l3", "l4"
        });
        // Wrapped splits are ordered by descending size.
        Assert.assertEquals(400, pigSplit.getLength(0));
        Assert.assertEquals(200, pigSplit.getLength(1));
        Assert.assertEquals(100, pigSplit.getLength(2));
    }
    // NOTE: removed the dead `index` counter that was incremented but never read.
}
示例6: writeDebugHeader
import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigSplit; //導入方法依賴的package包/類
/**
 * Writes a diagnostic "Task Information Header" to the error stream: the
 * command being executed, the start time and, for map tasks, one line per
 * file-backed split wrapped by the current PigSplit (path, offset, length).
 */
private void writeDebugHeader() {
processError("===== Task Information Header =====" );
processError("\nCommand: " + command);
processError("\nStart time: " + new Date(System.currentTimeMillis()));
// Wrapped-split details are only available on the map side of the job.
if (job.getBoolean(MRConfiguration.TASK_IS_MAP, false)) {
MapContext context = (MapContext)PigMapReduce.sJobContext;
PigSplit pigSplit = (PigSplit)context.getInputSplit();
int numPaths = pigSplit.getNumPaths();
processError("\nPigSplit contains " + numPaths + " wrappedSplits.");
StringBuilder sb = new StringBuilder();
for(int i = 0; i < numPaths; i++) {
InputSplit wrappedSplit = pigSplit.getWrappedSplit(i);
// Only file-backed splits expose path/offset/length worth reporting.
if (wrappedSplit instanceof FileSplit) {
FileSplit mapInputFileSplit = (FileSplit)wrappedSplit;
sb.append("\nInput-split: file=");
sb.append(mapInputFileSplit.getPath());
sb.append(" start-offset=");
sb.append(Long.toString(mapInputFileSplit.getStart()));
sb.append(" length=");
sb.append(Long.toString(mapInputFileSplit.getLength()));
processError(sb.toString());
// Reuse the builder across iterations instead of reallocating.
sb.setLength(0);
}
}
}
processError("\n===== * * * =====\n");
}
示例7: test2
import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigSplit; //導入方法依賴的package包/類
/**
 * Splits of 600/700/800 bytes each exceed the combination budget on their
 * own, so three single-member PigSplits are expected, returned in
 * descending size order (800, 700, 600).
 */
@Test
public void test2() throws IOException, InterruptedException {
    ArrayList<InputSplit> rawSplits = new ArrayList<InputSplit>();
    rawSplits.add(new DummyInputSplit(600, new String[] {
            "l1", "l2", "l3"
    }));
    rawSplits.add(new DummyInputSplit(700, new String[] {
            "l1", "l2", "l3"
    }));
    rawSplits.add(new DummyInputSplit(800, new String[] {
            "l1", "l4", "l5"
    }));
    List<InputSplit> result = pigInputFormat.getPigSplits(rawSplits, 0, ok,
            null, true, conf);
    // JUnit convention: expected value first, actual second (was swapped).
    Assert.assertEquals(3, result.size());
    int index = 0;
    for (InputSplit split : result) {
        PigSplit pigSplit = (PigSplit) split;
        // Every returned split wraps exactly one raw split (hoisted out of
        // the branches below, where it was asserted identically three times).
        Assert.assertEquals(1, pigSplit.getNumPaths());
        if (index == 0) {
            checkLocations(pigSplit.getLocations(), new String[] {
                    "l1", "l4", "l5"
            });
            Assert.assertEquals(800, pigSplit.getLength(0));
        }
        else if (index == 1) {
            checkLocations(pigSplit.getLocations(), new String[] {
                    "l1", "l2", "l3"
            });
            Assert.assertEquals(700, pigSplit.getLength(0));
        }
        else {
            checkLocations(pigSplit.getLocations(), new String[] {
                    "l1", "l2", "l3"
            });
            Assert.assertEquals(600, pigSplit.getLength(0));
        }
        index++;
    }
}
示例8: test4
import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigSplit; //導入方法依賴的package包/類
/**
 * Six raw splits spread over two rack groups should be combined into two
 * PigSplits: a pair on l1/l4/l5 (500 + 100) and a quadruple on l1/l2/l3
 * (500 + 200 + 200 + 100), each ordered largest-first.
 */
@Test
public void test4() throws IOException, InterruptedException {
    ArrayList<InputSplit> rawSplits = new ArrayList<InputSplit>();
    rawSplits.add(new DummyInputSplit(500, new String[] { "l1", "l2", "l3" }));
    rawSplits.add(new DummyInputSplit(200, new String[] { "l1", "l2", "l3" }));
    rawSplits.add(new DummyInputSplit(100, new String[] { "l1", "l4", "l5" }));
    rawSplits.add(new DummyInputSplit(100, new String[] { "l1", "l2", "l3" }));
    rawSplits.add(new DummyInputSplit(200, new String[] { "l1", "l2", "l3" }));
    rawSplits.add(new DummyInputSplit(500, new String[] { "l1", "l4", "l5" }));
    List<InputSplit> combined = pigInputFormat.getPigSplits(rawSplits, 0, ok,
            null, true, conf);
    Assert.assertEquals(2, combined.size());
    for (int pos = 0; pos < combined.size(); pos++) {
        PigSplit pigSplit = (PigSplit) combined.get(pos);
        int wrapped = pigSplit.getNumPaths();
        if (pos == 0) {
            // l1/l4/l5 group: the 500- and 100-byte splits.
            Assert.assertEquals(2, wrapped);
            checkLocations(pigSplit.getLocations(),
                    new String[] { "l1", "l4", "l5" });
            Assert.assertEquals(500, pigSplit.getLength(0));
            Assert.assertEquals(100, pigSplit.getLength(1));
        } else {
            // l1/l2/l3 group: the remaining four splits, largest first.
            Assert.assertEquals(4, wrapped);
            Assert.assertEquals(500, pigSplit.getLength(0));
            checkLocations(pigSplit.getLocations(),
                    new String[] { "l1", "l2", "l3" });
            Assert.assertEquals(200, pigSplit.getLength(1));
            Assert.assertEquals(200, pigSplit.getLength(2));
            Assert.assertEquals(100, pigSplit.getLength(3));
        }
    }
}
示例9: test5
import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigSplit; //導入方法依賴的package包/類
/**
 * With combination disabled (combinable flag == false) every raw split maps
 * to its own single-member PigSplit, preserving the 500/400/400 sizes and
 * their original locations.
 */
@Test
public void test5() throws IOException, InterruptedException {
    ArrayList<InputSplit> rawSplits = new ArrayList<InputSplit>();
    rawSplits.add(new DummyInputSplit(500, new String[] { "l1", "l2", "l3" }));
    rawSplits.add(new DummyInputSplit(400, new String[] { "l1", "l2", "l3" }));
    rawSplits.add(new DummyInputSplit(400, new String[] { "l1", "l4", "l5" }));
    List<InputSplit> result = pigInputFormat.getPigSplits(rawSplits, 0, ok,
            null, false, conf);
    Assert.assertEquals(3, result.size());
    // Expected size and locations of each returned split, in order.
    int[] expectedLengths = { 500, 400, 400 };
    String[][] expectedRacks = {
            { "l1", "l2", "l3" },
            { "l1", "l2", "l3" },
            { "l1", "l4", "l5" }
    };
    for (int pos = 0; pos < result.size(); pos++) {
        PigSplit pigSplit = (PigSplit) result.get(pos);
        Assert.assertEquals(1, pigSplit.getNumPaths());
        checkLocations(pigSplit.getLocations(), expectedRacks[pos]);
        Assert.assertEquals(expectedLengths[pos], pigSplit.getLength(0));
    }
}
示例10: test6
import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigSplit; //導入方法依賴的package包/類
/**
 * Six co-located splits (600 down to 100 bytes) are packed greedily against
 * the size budget into three PigSplits: {600,400}, {500,300,200} and {100}.
 */
@Test
public void test6() throws IOException, InterruptedException {
    ArrayList<InputSplit> rawSplits = new ArrayList<InputSplit>();
    // All splits share the same three locations; only the sizes differ.
    for (int size : new int[] { 600, 500, 400, 300, 200, 100 }) {
        rawSplits.add(new DummyInputSplit(size,
                new String[] { "l1", "l2", "l3" }));
    }
    List<InputSplit> result = pigInputFormat.getPigSplits(rawSplits, 0, ok,
            null, true, conf);
    Assert.assertEquals(3, result.size());
    int pos = 0;
    for (InputSplit split : result) {
        PigSplit pigSplit = (PigSplit) split;
        checkLocations(pigSplit.getLocations(),
                new String[] { "l1", "l2", "l3" });
        switch (pos) {
        case 0:
            Assert.assertEquals(2, pigSplit.getNumPaths());
            Assert.assertEquals(600, pigSplit.getLength(0));
            Assert.assertEquals(400, pigSplit.getLength(1));
            break;
        case 1:
            Assert.assertEquals(3, pigSplit.getNumPaths());
            Assert.assertEquals(500, pigSplit.getLength(0));
            Assert.assertEquals(300, pigSplit.getLength(1));
            Assert.assertEquals(200, pigSplit.getLength(2));
            break;
        default:
            Assert.assertEquals(1, pigSplit.getNumPaths());
            Assert.assertEquals(100, pigSplit.getLength(0));
            break;
        }
        pos++;
    }
}
示例11: test7
import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.PigSplit; //導入方法依賴的package包/類
/**
 * Same packing expectation as test6, but with the raw splits supplied in
 * ascending size order; the result must still be {600,400}, {500,300,200}
 * and {100}.
 */
@Test
public void test7() throws IOException, InterruptedException {
    ArrayList<InputSplit> rawSplits = new ArrayList<InputSplit>();
    // All splits share the same three locations; only the sizes differ.
    for (int size : new int[] { 100, 200, 300, 400, 500, 600 }) {
        rawSplits.add(new DummyInputSplit(size,
                new String[] { "l1", "l2", "l3" }));
    }
    List<InputSplit> result = pigInputFormat.getPigSplits(rawSplits, 0, ok,
            null, true, conf);
    Assert.assertEquals(3, result.size());
    // Expected wrapped-split lengths per returned PigSplit, in order.
    int[][] expected = {
            { 600, 400 },
            { 500, 300, 200 },
            { 100 }
    };
    for (int pos = 0; pos < result.size(); pos++) {
        PigSplit pigSplit = (PigSplit) result.get(pos);
        Assert.assertEquals(expected[pos].length, pigSplit.getNumPaths());
        checkLocations(pigSplit.getLocations(),
                new String[] { "l1", "l2", "l3" });
        for (int j = 0; j < expected[pos].length; j++) {
            Assert.assertEquals(expected[pos][j], pigSplit.getLength(j));
        }
    }
}