This article collects typical usage examples of the Java class org.apache.flink.runtime.operators.util.TaskConfig. If you are wondering what TaskConfig is for and how it is used, the examples gathered here may help.
The TaskConfig class belongs to the org.apache.flink.runtime.operators.util package. 15 code examples are shown below, ordered by popularity.
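All fifteen examples share one pattern worth stating up front: TaskConfig is a typed view over a plain Flink Configuration, so values written through one TaskConfig instance can be read back through any other instance wrapping the same Configuration. A minimal sketch of that round trip, using only the constructor and the driver-strategy accessors that appear in the examples below:

import org.apache.flink.configuration.Configuration;
import org.apache.flink.runtime.operators.DriverStrategy;
import org.apache.flink.runtime.operators.util.TaskConfig;

public class TaskConfigRoundTrip {
    public static void main(String[] args) {
        // TaskConfig stores its values in the wrapped Configuration.
        Configuration raw = new Configuration();
        new TaskConfig(raw).setDriverStrategy(DriverStrategy.SORTED_REDUCE);

        // A second wrapper over the same Configuration reads the value back.
        System.out.println(new TaskConfig(raw).getDriverStrategy()); // SORTED_REDUCE
    }
}

This is also why the job-graph builders in Examples 11-15 can configure a vertex simply by wrapping vertex.getConfiguration().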
Example 1: initOutputs
import org.apache.flink.runtime.operators.util.TaskConfig; // import the required package/class

@Override
protected void initOutputs() throws Exception {
    // initialize the regular outputs first (the ones into the step function).
    super.initOutputs();

    // at this time, the outputs to the step function are created
    // add the outputs for the final solution
    List<RecordWriter<?>> finalOutputWriters = new ArrayList<RecordWriter<?>>();
    final TaskConfig finalOutConfig = this.config.getIterationHeadFinalOutputConfig();
    final ClassLoader userCodeClassLoader = getUserCodeClassLoader();
    this.finalOutputCollector = BatchTask.getOutputCollector(this, finalOutConfig,
            userCodeClassLoader, finalOutputWriters, config.getNumOutputs(), finalOutConfig.getNumOutputs());

    // sanity check the setup
    final int writersIntoStepFunction = this.eventualOutputs.size();
    final int writersIntoFinalResult = finalOutputWriters.size();
    final int syncGateIndex = this.config.getIterationHeadIndexOfSyncOutput();
    if (writersIntoStepFunction + writersIntoFinalResult != syncGateIndex) {
        throw new Exception("Error: Inconsistent head task setup - wrong mapping of output gates.");
    }

    // now, we can instantiate the sync gate
    this.toSync = getEnvironment().getWriter(syncGateIndex);
}
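The sanity check encodes the head task's expected output-gate layout: the writers into the step function come first, then the writers into the final result, and the sync gate sits directly behind them. For example, with two step-function writers (gates 0 and 1) and one final-result writer (gate 2), the sync output must be registered at gate index 3, which is exactly writersIntoStepFunction + writersIntoFinalResult.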
Example 2: prepare
import org.apache.flink.runtime.operators.util.TaskConfig; // import the required package/class

@Override
public void prepare() throws Exception {
    final TaskConfig config = this.taskContext.getTaskConfig();
    if (config.getDriverStrategy() != DriverStrategy.ALL_REDUCE) {
        throw new Exception("Unrecognized driver strategy for AllReduce driver: " + config.getDriverStrategy().name());
    }

    TypeSerializerFactory<T> serializerFactory = this.taskContext.getInputSerializer(0);
    this.serializer = serializerFactory.getSerializer();
    this.input = this.taskContext.getInput(0);

    ExecutionConfig executionConfig = taskContext.getExecutionConfig();
    this.objectReuseEnabled = executionConfig.isObjectReuseEnabled();

    if (LOG.isDebugEnabled()) {
        LOG.debug("AllReduceDriver object reuse: " + (this.objectReuseEnabled ? "ENABLED" : "DISABLED") + ".");
    }
}
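The objectReuseEnabled flag read here is driven entirely by the user-facing ExecutionConfig. A minimal sketch of the switch, using the standard enableObjectReuse() and isObjectReuseEnabled() accessors:

import org.apache.flink.api.common.ExecutionConfig;

public class ObjectReuseSwitch {
    public static void main(String[] args) {
        ExecutionConfig executionConfig = new ExecutionConfig();
        // Object reuse is off by default; enabling it lets drivers recycle
        // record instances instead of allocating one per record.
        executionConfig.enableObjectReuse();
        System.out.println(executionConfig.isObjectReuseEnabled()); // true
    }
}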
Example 3: prepare
import org.apache.flink.runtime.operators.util.TaskConfig; // import the required package/class

@Override
public void prepare() throws Exception {
    final TaskConfig config = this.taskContext.getTaskConfig();
    if (config.getDriverStrategy() != DriverStrategy.CO_GROUP_RAW) {
        throw new Exception("Unrecognized driver strategy for CoGroup Python driver: " + config.getDriverStrategy().name());
    }

    final MutableObjectIterator<IT1> in1 = this.taskContext.getInput(0);
    final MutableObjectIterator<IT2> in2 = this.taskContext.getInput(1);

    // create reusable record instances for the two inputs
    IT1 reuse1 = this.taskContext.<IT1>getInputSerializer(0).getSerializer().createInstance();
    IT2 reuse2 = this.taskContext.<IT2>getInputSerializer(1).getSerializer().createInstance();

    this.coGroupIterator1 = new SimpleIterable<IT1>(reuse1, in1);
    this.coGroupIterator2 = new SimpleIterable<IT2>(reuse2, in2);

    if (LOG.isDebugEnabled()) {
        LOG.debug(this.taskContext.formatLogString("CoGroup task iterator ready."));
    }
}
Example 4: prepare
import org.apache.flink.runtime.operators.util.TaskConfig; // import the required package/class

@Override
public void prepare() throws Exception {
    TaskConfig config = this.taskContext.getTaskConfig();
    if (config.getDriverStrategy() != DriverStrategy.SORTED_REDUCE) {
        throw new Exception("Unrecognized driver strategy for Reduce driver: " + config.getDriverStrategy().name());
    }

    this.serializer = this.taskContext.<T>getInputSerializer(0).getSerializer();
    this.comparator = this.taskContext.getDriverComparator(0);
    this.input = this.taskContext.getInput(0);

    ExecutionConfig executionConfig = taskContext.getExecutionConfig();
    this.objectReuseEnabled = executionConfig.isObjectReuseEnabled();

    if (LOG.isDebugEnabled()) {
        LOG.debug("ReduceDriver object reuse: " + (this.objectReuseEnabled ? "ENABLED" : "DISABLED") + ".");
    }
}
Example 5: prepare
import org.apache.flink.runtime.operators.util.TaskConfig; // import the required package/class

@Override
public void prepare() throws Exception {
    TaskConfig config = this.taskContext.getTaskConfig();
    if (config.getDriverStrategy() != DriverStrategy.SORTED_GROUP_REDUCE) {
        throw new Exception("Unrecognized driver strategy for GroupReduce driver: " + config.getDriverStrategy().name());
    }

    final Counter numRecordsIn = this.taskContext.getMetricGroup().getIOMetricGroup().getNumRecordsInCounter();
    this.serializer = this.taskContext.<IT>getInputSerializer(0).getSerializer();
    this.comparator = this.taskContext.getDriverComparator(0);
    // wrap the input so that every record read also updates the numRecordsIn metric
    this.input = new CountingMutableObjectIterator<>(this.taskContext.<IT>getInput(0), numRecordsIn);

    ExecutionConfig executionConfig = taskContext.getExecutionConfig();
    this.objectReuseEnabled = executionConfig.isObjectReuseEnabled();

    if (LOG.isDebugEnabled()) {
        LOG.debug("GroupReduceDriver object reuse: " + (this.objectReuseEnabled ? "ENABLED" : "DISABLED") + ".");
    }
}
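The interesting detail here is the wrapping of the input: the driver never sees the raw iterator, so every record it pulls also updates the numRecordsIn metric. A minimal sketch of such a counting decorator, assuming it simply increments the counter per returned record (the real CountingMutableObjectIterator may differ in details):

import java.io.IOException;

import org.apache.flink.metrics.Counter;
import org.apache.flink.util.MutableObjectIterator;

// Sketch of the decorator pattern used above: delegate, then count.
class CountingIteratorSketch<T> implements MutableObjectIterator<T> {

    private final MutableObjectIterator<T> delegate;
    private final Counter counter;

    CountingIteratorSketch(MutableObjectIterator<T> delegate, Counter counter) {
        this.delegate = delegate;
        this.counter = counter;
    }

    @Override
    public T next(T reuse) throws IOException {
        T record = delegate.next(reuse);
        if (record != null) {
            counter.inc();
        }
        return record;
    }

    @Override
    public T next() throws IOException {
        T record = delegate.next();
        if (record != null) {
            counter.inc();
        }
        return record;
    }
}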
Example 6: testInputFormatVertex
import org.apache.flink.runtime.operators.util.TaskConfig; // import the required package/class

@Test
public void testInputFormatVertex() {
    try {
        final TestInputFormat inputFormat = new TestInputFormat();
        final InputFormatVertex vertex = new InputFormatVertex("Name");
        new TaskConfig(vertex.getConfiguration()).setStubWrapper(new UserCodeObjectWrapper<InputFormat<?, ?>>(inputFormat));

        final ClassLoader cl = getClass().getClassLoader();
        vertex.initializeOnMaster(cl);

        InputSplit[] splits = vertex.getInputSplitSource().createInputSplits(77);
        assertNotNull(splits);
        assertEquals(1, splits.length);
        assertEquals(TestSplit.class, splits[0].getClass());
    }
    catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
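The test exercises the master-side wiring: setStubWrapper(...) serializes the input format into the vertex's TaskConfig, initializeOnMaster(cl) deserializes it again, and the vertex then serves as its own input-split source. The argument 77 is only a hint for the desired number of splits; judging by the assertions, the TestInputFormat used here returns a single TestSplit regardless.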
Example 7: DriverTestBase
import org.apache.flink.runtime.operators.util.TaskConfig; // import the required package/class

protected DriverTestBase(ExecutionConfig executionConfig, long memory, int maxNumSorters, long perSortMemory) {
    if (memory < 0 || maxNumSorters < 0 || perSortMemory < 0) {
        throw new IllegalArgumentException();
    }

    final long totalMem = Math.max(memory, 0) + (Math.max(maxNumSorters, 0) * perSortMemory);

    this.perSortMem = perSortMemory;
    this.perSortFractionMem = (double) perSortMemory / totalMem;
    this.ioManager = new IOManagerAsync();
    this.memManager = totalMem > 0 ? new MemoryManager(totalMem, 1) : null;

    this.inputs = new ArrayList<MutableObjectIterator<Record>>();
    this.comparators = new ArrayList<TypeComparator<Record>>();
    this.sorters = new ArrayList<UnilateralSortMerger<Record>>();

    this.owner = new DummyInvokable();
    this.taskConfig = new TaskConfig(new Configuration());
    this.executionConfig = executionConfig;
    this.taskManageInfo = new TestingTaskManagerRuntimeInfo();
}
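The sort-memory fraction is simply perSortMemory / totalMem. With, say, memory = 10 MB, maxNumSorters = 2 and perSortMemory = 5 MB, totalMem comes to 20 MB and perSortFractionMem to 0.25, i.e. each sorter may claim a quarter of the MemoryManager's budget.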
Example 8: registerFileInputTask
import org.apache.flink.runtime.operators.util.TaskConfig; // import the required package/class

public void registerFileInputTask(AbstractInvokable inTask,
        Class<? extends DelimitedInputFormat<Record>> stubClass, String inPath, String delimiter)
{
    DelimitedInputFormat<Record> format;
    try {
        format = stubClass.newInstance();
    }
    catch (Throwable t) {
        throw new RuntimeException("Could not instantiate test input format.", t);
    }

    format.setFilePath(inPath);
    format.setDelimiter(delimiter);

    TaskConfig dsConfig = new TaskConfig(this.mockEnv.getTaskConfiguration());
    dsConfig.setStubWrapper(new UserCodeObjectWrapper<>(format));

    this.inputSplitProvider.addInputSplits(inPath, 5);
}
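A hypothetical invocation of this test helper (MyLineFormat is an assumed DelimitedInputFormat<Record> subclass with a public no-argument constructor, and task an already created invokable; both are illustration-only fixtures):

// hypothetical fixtures, for illustration only
registerFileInputTask(task, MyLineFormat.class, "file:///tmp/test-input.txt", "\n");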
Example 9: UnaryOperatorTestBase
import org.apache.flink.runtime.operators.util.TaskConfig; // import the required package/class

protected UnaryOperatorTestBase(ExecutionConfig executionConfig, long memory, int maxNumSorters, long perSortMemory) {
    if (memory < 0 || maxNumSorters < 0 || perSortMemory < 0) {
        throw new IllegalArgumentException();
    }

    final long totalMem = Math.max(memory, 0) + (Math.max(maxNumSorters, 0) * perSortMemory);

    this.perSortMem = perSortMemory;
    this.perSortFractionMem = (double) perSortMemory / totalMem;
    this.ioManager = new IOManagerAsync();
    this.memManager = totalMem > 0 ? new MemoryManager(totalMem, 1) : null;

    this.owner = new DummyInvokable();

    Configuration config = new Configuration();
    this.taskConfig = new TaskConfig(config);
    this.executionConfig = executionConfig;

    this.comparators = new ArrayList<TypeComparator<IN>>(2);

    this.taskManageInfo = new TestingTaskManagerRuntimeInfo();
}
Example 10: BinaryOperatorTestBase
import org.apache.flink.runtime.operators.util.TaskConfig; // import the required package/class

protected BinaryOperatorTestBase(ExecutionConfig executionConfig, long memory, int maxNumSorters, long perSortMemory) {
    if (memory < 0 || maxNumSorters < 0 || perSortMemory < 0) {
        throw new IllegalArgumentException();
    }

    final long totalMem = Math.max(memory, 0) + (Math.max(maxNumSorters, 0) * perSortMemory);

    this.perSortMem = perSortMemory;
    this.perSortFractionMem = (double) perSortMemory / totalMem;
    this.ioManager = new IOManagerAsync();
    this.memManager = totalMem > 0 ? new MemoryManager(totalMem, 1) : null;

    this.inputs = new ArrayList<>();
    this.comparators = new ArrayList<>();
    this.sorters = new ArrayList<>();

    this.owner = new DummyInvokable();
    this.taskConfig = new TaskConfig(new Configuration());
    this.executionConfig = executionConfig;
    this.taskManageInfo = new TestingTaskManagerRuntimeInfo();
}
Example 11: createEdgesInput
import org.apache.flink.runtime.operators.util.TaskConfig; // import the required package/class

private static InputFormatVertex createEdgesInput(JobGraph jobGraph, String edgesPath, int numSubTasks,
        TypeSerializerFactory<?> serializer, TypeComparatorFactory<?> comparator)
{
    // edges
    @SuppressWarnings("unchecked")
    CsvInputFormat edgesInFormat = new CsvInputFormat(' ', LongValue.class, LongValue.class);
    InputFormatVertex edgesInput = JobGraphUtils.createInput(edgesInFormat, edgesPath, "EdgesInput", jobGraph,
            numSubTasks);
    TaskConfig edgesInputConfig = new TaskConfig(edgesInput.getConfiguration());
    {
        edgesInputConfig.setOutputSerializer(serializer);
        edgesInputConfig.addOutputShipStrategy(ShipStrategyType.PARTITION_HASH);
        edgesInputConfig.setOutputComparator(comparator, 0);
    }
    return edgesInput;
}
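Worth noting in this setup: PARTITION_HASH ships each record to a receiver chosen by hashing its key, and setOutputComparator(comparator, 0) attaches the comparator to output 0; the comparator is what defines which fields form that key, so the hash partitioning operates on exactly those fields.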
Example 12: createOutput
import org.apache.flink.runtime.operators.util.TaskConfig; // import the required package/class

private static OutputFormatVertex createOutput(JobGraph jobGraph, String resultPath, int numSubTasks,
        TypeSerializerFactory<?> serializer) {
    OutputFormatVertex output = JobGraphUtils.createFileOutput(jobGraph, "Final Output", numSubTasks);
    TaskConfig outputConfig = new TaskConfig(output.getConfiguration());
    {
        outputConfig.addInputToGroup(0);
        outputConfig.setInputSerializer(serializer, 0);
        outputConfig.setStubWrapper(new UserCodeClassWrapper<CsvOutputFormat>(CsvOutputFormat.class));
        outputConfig.setStubParameter(FileOutputFormat.FILE_PARAMETER_KEY, resultPath);

        Configuration outputUserConfig = outputConfig.getStubParameters();
        outputUserConfig.setString(CsvOutputFormat.RECORD_DELIMITER_PARAMETER, "\n");
        outputUserConfig.setString(CsvOutputFormat.FIELD_DELIMITER_PARAMETER, " ");
        outputUserConfig.setClass(CsvOutputFormat.FIELD_TYPE_PARAMETER_PREFIX + 0, LongValue.class);
        outputUserConfig.setInteger(CsvOutputFormat.RECORD_POSITION_PARAMETER_PREFIX + 0, 0);
        outputUserConfig.setClass(CsvOutputFormat.FIELD_TYPE_PARAMETER_PREFIX + 1, LongValue.class);
        outputUserConfig.setInteger(CsvOutputFormat.RECORD_POSITION_PARAMETER_PREFIX + 1, 1);
        outputUserConfig.setInteger(CsvOutputFormat.NUM_FIELDS_PARAMETER, 2);
    }
    return output;
}
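Note how the two configuration channels meet: setStubParameter(...) on the TaskConfig and the direct writes to outputUserConfig both end up in the same stub-parameter Configuration (the one returned by getStubParameters()), which is what the CsvOutputFormat reads when it is later instantiated from the UserCodeClassWrapper.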
Example 13: createMapper
import org.apache.flink.runtime.operators.util.TaskConfig; // import the required package/class

private static AbstractJobVertex createMapper(JobGraph jobGraph, int numSubTasks, TypeSerializerFactory<?> serializer) {
    AbstractJobVertex pointsInput = JobGraphUtils.createTask(RegularPactTask.class, "Map[DotProducts]", jobGraph, numSubTasks);
    {
        TaskConfig taskConfig = new TaskConfig(pointsInput.getConfiguration());

        taskConfig.setStubWrapper(new UserCodeClassWrapper<DotProducts>(DotProducts.class));
        taskConfig.addOutputShipStrategy(ShipStrategyType.FORWARD);
        taskConfig.setOutputSerializer(serializer);
        taskConfig.setDriver(CollectorMapDriver.class);
        taskConfig.setDriverStrategy(DriverStrategy.COLLECTOR_MAP);

        taskConfig.addInputToGroup(0);
        taskConfig.setInputLocalStrategy(0, LocalStrategy.NONE);
        taskConfig.setInputSerializer(serializer, 0);

        taskConfig.setBroadcastInputName("models", 0);
        taskConfig.addBroadcastInputToGroup(0);
        taskConfig.setBroadcastInputSerializer(serializer, 0);
    }
    return pointsInput;
}
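The mapper is wired with two kinds of inputs: its regular input 0 and a broadcast input registered via addBroadcastInputToGroup(0), which setBroadcastInputName("models", 0) makes addressable from the user function under the name "models". Each input carries its own serializer setting.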
Example 14: createOutput
import org.apache.flink.runtime.operators.util.TaskConfig; // import the required package/class

private static OutputFormatVertex createOutput(JobGraph jobGraph, String resultPath, int numSubTasks, TypeSerializerFactory<?> serializer) {
    OutputFormatVertex output = JobGraphUtils.createFileOutput(jobGraph, "Output", numSubTasks);
    {
        TaskConfig taskConfig = new TaskConfig(output.getConfiguration());
        taskConfig.addInputToGroup(0);
        taskConfig.setInputSerializer(serializer, 0);

        @SuppressWarnings("unchecked")
        CsvOutputFormat outFormat = new CsvOutputFormat("\n", " ", LongValue.class, LongValue.class, LongValue.class);
        outFormat.setOutputFilePath(new Path(resultPath));

        taskConfig.setStubWrapper(new UserCodeObjectWrapper<CsvOutputFormat>(outFormat));
    }
    return output;
}
Example 15: createPointsInput
import org.apache.flink.runtime.operators.util.TaskConfig; // import the required package/class

private static InputFormatVertex createPointsInput(JobGraph jobGraph, String pointsPath, int numSubTasks, TypeSerializerFactory<?> serializer) {
    @SuppressWarnings("unchecked")
    CsvInputFormat pointsInFormat = new CsvInputFormat('|', IntValue.class, DoubleValue.class, DoubleValue.class, DoubleValue.class);
    InputFormatVertex pointsInput = JobGraphUtils.createInput(pointsInFormat, pointsPath, "[Points]", jobGraph, numSubTasks);
    {
        TaskConfig taskConfig = new TaskConfig(pointsInput.getConfiguration());
        taskConfig.addOutputShipStrategy(ShipStrategyType.FORWARD);
        taskConfig.setOutputSerializer(serializer);

        // build a chained mapper that turns the CSV records into points
        TaskConfig chainedMapper = new TaskConfig(new Configuration());
        chainedMapper.setDriverStrategy(DriverStrategy.COLLECTOR_MAP);
        chainedMapper.setStubWrapper(new UserCodeObjectWrapper<PointBuilder>(new PointBuilder()));
        chainedMapper.addOutputShipStrategy(ShipStrategyType.FORWARD);
        chainedMapper.setOutputSerializer(serializer);

        taskConfig.addChainedTask(ChainedCollectorMapDriver.class, chainedMapper, "Build points");
    }
    return pointsInput;
}
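The PointBuilder mapper is not deployed as a separate vertex: addChainedTask(...) appends it to the input vertex, so it runs in the same thread and receives records directly from the input format. Note that the chained task still gets its own TaskConfig, built over a fresh Configuration, with its own driver strategy, stub, ship strategy and serializer.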