本文整理汇总了Java中org.apache.drill.exec.store.schedule.CompleteFileWork类的典型用法代码示例。如果您正苦于以下问题:Java CompleteFileWork类的具体用法?Java CompleteFileWork怎么用?Java CompleteFileWork使用的例子?那么, 这里精选的类代码示例或许可以为您提供帮助。
CompleteFileWork类属于org.apache.drill.exec.store.schedule包,在下文中一共展示了CompleteFileWork类的8个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: manyFiles
import org.apache.drill.exec.store.schedule.CompleteFileWork; //导入依赖的package包/类
@Test
public void manyFiles() throws Exception {
  // Build 1000 work units and a round-robin stream of endpoints.
  List<CompleteFileWork> chunks = generateChunks(1000);
  Iterator<DrillbitEndpoint> endpointCycle = Iterators.cycle(endpoints);

  // Fan out to 28 * 30 minor fragments, cycling through the endpoints.
  final int width = 28 * 30;
  List<DrillbitEndpoint> incomingEndpoints = Lists.newArrayList();
  int slot = 0;
  while (slot < width) {
    incomingEndpoints.add(endpointCycle.next());
    slot++;
  }

  ListMultimap<Integer, CompleteFileWork> mappings =
      AssignmentCreator.getMappings(incomingEndpoints, chunks, null);
  System.out.println(mappings.keySet().size());

  // Every fragment must receive at least one unit of work.
  for (int i = 0; i < width; i++) {
    List<CompleteFileWork> assigned = mappings.get(i);
    Assert.assertTrue("no mapping for entry " + i, assigned != null && assigned.size() > 0);
  }
}
示例2: manyFiles
import org.apache.drill.exec.store.schedule.CompleteFileWork; //导入依赖的package包/类
@Test
public void manyFiles() throws Exception {
  // Build 1000 work units and a round-robin stream of endpoints.
  List<CompleteFileWork> chunks = generateChunks(1000);
  Iterator<DrillbitEndpoint> endpointCycle = Iterators.cycle(endpoints);

  // One minor fragment per (node, slot) pair, filled round-robin.
  final int width = widthPerNode * numEndPoints;
  List<DrillbitEndpoint> incomingEndpoints = Lists.newArrayList();
  int slot = 0;
  while (slot < width) {
    incomingEndpoints.add(endpointCycle.next());
    slot++;
  }

  ListMultimap<Integer, CompleteFileWork> mappings =
      AssignmentCreator.getMappings(incomingEndpoints, chunks);
  System.out.println(mappings.keySet().size());

  // Every fragment must receive at least one unit of work.
  for (int i = 0; i < width; i++) {
    List<CompleteFileWork> assigned = mappings.get(i);
    Assert.assertTrue("no mapping for entry " + i, assigned != null && assigned.size() > 0);
  }
}
示例3: getScanStats
import org.apache.drill.exec.store.schedule.CompleteFileWork; //导入依赖的package包/类
/**
 * Estimates scan statistics for the group scan by totalling the bytes of
 * every file work unit and assuming roughly 1 KiB per row.
 */
protected ScanStats getScanStats(final PlannerSettings settings, final EasyGroupScan scan) {
  // Sum the byte count across every completed file work unit in the scan.
  long totalBytes = 0;
  for (final CompleteFileWork work : scan.getWorkIterable()) {
    totalBytes += work.getTotalBytes();
  }
  // Rough row-count estimate: ~1024 bytes per row (integer division).
  final long estimatedRows = totalBytes / 1024;
  return new ScanStats(GroupScanProperty.NO_EXACT_ROW_COUNT, estimatedRows, 1, totalBytes);
}
示例4: getWorkIterable
import org.apache.drill.exec.store.schedule.CompleteFileWork; //导入依赖的package包/类
@JsonIgnore
public Iterable<CompleteFileWork> getWorkIterable() {
  // Expose the internal chunk list as a read-only Iterable so callers can
  // traverse the work units without being able to mutate them.
  return new Iterable<CompleteFileWork>() {
    @Override
    public Iterator<CompleteFileWork> iterator() {
      // Wrap a fresh iterator on each call; remove() is disabled.
      final Iterator<CompleteFileWork> delegate = chunks.iterator();
      return Iterators.unmodifiableIterator(delegate);
    }
  };
}
示例5: convert
import org.apache.drill.exec.store.schedule.CompleteFileWork; //导入依赖的package包/类
/**
 * Converts each {@code CompleteFileWork} in the input list into its
 * {@code FileWorkImpl} representation, preserving order.
 */
private List<FileWorkImpl> convert(List<CompleteFileWork> list) {
  List<FileWorkImpl> converted = Lists.newArrayList();
  for (Iterator<CompleteFileWork> it = list.iterator(); it.hasNext(); ) {
    converted.add(it.next().getAsFileWork());
  }
  return converted;
}
示例6: getScanStats
import org.apache.drill.exec.store.schedule.CompleteFileWork; //导入依赖的package包/类
@Override
protected ScanStats getScanStats(final PlannerSettings settings, final EasyGroupScan scan) {
  // Total the bytes across all file work units in the scan.
  long totalBytes = 0;
  for (final CompleteFileWork work : scan.getWorkIterable()) {
    totalBytes += work.getTotalBytes();
  }
  // Derive the row count from the configurable estimated row size option.
  final double rowSizeEstimate = settings.getOptions().getOption(ExecConstants.TEXT_ESTIMATED_ROW_SIZE);
  final double rowCountEstimate = totalBytes / rowSizeEstimate;
  return new ScanStats(GroupScanProperty.NO_EXACT_ROW_COUNT, (long) rowCountEstimate, 1, totalBytes);
}
示例7: generateChunks
import org.apache.drill.exec.store.schedule.CompleteFileWork; //导入依赖的package包/类
/**
 * Builds the requested number of synthetic work units, each spanning bytes
 * [0, FILE_SIZE) of a distinct file named "file0", "file1", ...
 */
private List<CompleteFileWork> generateChunks(int chunks) {
  List<CompleteFileWork> workUnits = Lists.newArrayList();
  int index = 0;
  while (index < chunks) {
    workUnits.add(new CompleteFileWork(createByteMap(), 0, FILE_SIZE, "file" + index));
    index++;
  }
  return workUnits;
}
示例8: testBalanceAcrossNodes
import org.apache.drill.exec.store.schedule.CompleteFileWork; //导入依赖的package包/类
@Test
public void testBalanceAcrossNodes() throws Exception {
  // More chunks than fragments, so some nodes must absorb extra work.
  int numChunks = widthPerNode * numEndPoints + 100;
  List<CompleteFileWork> chunks = generateChunks(numChunks);
  Iterator<DrillbitEndpoint> incomingEndpointsIterator = Iterators.cycle(endpoints);
  List<DrillbitEndpoint> incomingEndpoints = Lists.newArrayList();
  List<Integer> expectedAssignments = Lists.newArrayList();
  List<Integer> actualAssignments = Lists.newArrayList();
  final int width = widthPerNode * numEndPoints;
  for (int i = 0; i < width; i++) {
    incomingEndpoints.add(incomingEndpointsIterator.next());
  }
  // Calculate expected assignments for each node: an even share plus one
  // extra chunk per node until the remainder is exhausted.
  final int numAssignmentsPerNode = numChunks / numEndPoints;
  int leftOver = numChunks - numAssignmentsPerNode * numEndPoints;
  for (int i = 0; i < numEndPoints; i++) {
    int additional = leftOver > 0 ? 1 : 0;
    expectedAssignments.add(numAssignmentsPerNode + additional);
    if (leftOver > 0) {
      leftOver--;
    }
  }
  ListMultimap<Integer, CompleteFileWork> mappings = AssignmentCreator.getMappings(incomingEndpoints, chunks);
  System.out.println(mappings.keySet().size());
  // Verify that all fragments have chunks assigned.
  for (int i = 0; i < width; i++) {
    Assert.assertTrue("no mapping for entry " + i, mappings.get(i) != null && mappings.get(i).size() > 0);
  }
  // Compute actual assignments for each node. Fragments were filled
  // round-robin, so node i owns fragment indices i, i + numEndPoints, ...
  for (int i = 0; i < numEndPoints; i++) {
    int numAssignments = 0;
    int index = i;
    while (index < numEndPoints * widthPerNode) {
      numAssignments += mappings.get(index).size();
      index += numEndPoints;
    }
    actualAssignments.add(numAssignments);
  }
  // Fixed: the original compared boxed Integers with '==', which checks
  // reference identity and only "works" for values in the Integer cache
  // (-128..127). assertEquals compares by value and reports a useful
  // message on mismatch.
  for (int i = 0; i < numEndPoints; i++) {
    Assert.assertEquals("assignment count mismatch for node " + i,
        expectedAssignments.get(i), actualAssignments.get(i));
  }
}