

Java ExecutionConfig Class Code Examples

This article collects typical usage examples of the Java class org.apache.flink.api.common.ExecutionConfig. If you are wondering how the ExecutionConfig class is used in practice, or are looking for concrete examples of it, the curated snippets below should help.


The ExecutionConfig class belongs to the org.apache.flink.api.common package. Fifteen code examples of the class are shown below, sorted by popularity by default.
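Before the individual excerpts, here is a minimal, self-contained sketch of the pattern they all share: create an ExecutionConfig, adjust a few settings, and hand it to Flink's type system to obtain a TypeSerializer. The MyPOJO class and the specific settings (object reuse, parallelism of 4) are illustrative assumptions for this sketch only, not taken from any of the examples below.

import org.apache.flink.api.common.ExecutionConfig;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.api.common.typeutils.TypeSerializer;
import org.apache.flink.api.java.typeutils.TypeExtractor;

public class ExecutionConfigSketch {

	// Hypothetical POJO used only for this sketch.
	public static class MyPOJO {
		public int id;

		public MyPOJO() {}

		public MyPOJO(int id) {
			this.id = id;
		}
	}

	public static void main(String[] args) {
		// Create a fresh config and tweak a few common settings.
		ExecutionConfig config = new ExecutionConfig();
		config.enableObjectReuse();   // reuse record objects instead of copying (see Example 15)
		config.setParallelism(4);     // illustrative default parallelism

		// Extract type information and derive a serializer from it,
		// the same pattern used throughout the examples below.
		TypeInformation<MyPOJO> info = TypeExtractor.getForClass(MyPOJO.class);
		TypeSerializer<MyPOJO> serializer = info.createSerializer(config);
		System.out.println("Created serializer: " + serializer);
	}
}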

Example 1: testDeSerialization

import org.apache.flink.api.common.ExecutionConfig; // import required package/class
@Test
public void testDeSerialization() {
	try {
		TypeInformation<MyPOJO> info = TypeExtractor.getForClass(MyPOJO.class);

		TypeInformationSerializationSchema<MyPOJO> schema =
				new TypeInformationSerializationSchema<MyPOJO>(info, new ExecutionConfig());

		MyPOJO[] types = {
				new MyPOJO(72, new Date(763784523L), new Date(88234L)),
				new MyPOJO(-1, new Date(11111111111111L)),
				new MyPOJO(42),
				new MyPOJO(17, new Date(222763784523L))
		};

		for (MyPOJO val : types) {
			byte[] serialized = schema.serialize(val);
			MyPOJO deser = schema.deserialize(serialized);
			assertEquals(val, deser);
		}
	}
	catch (Exception e) {
		e.printStackTrace();
		fail(e.getMessage());
	}
}
 
Developer: axbaretto, Project: flink, Lines: 27, Source: TypeInformationSerializationSchemaTest.java

Example 2: setOutputType

import org.apache.flink.api.common.ExecutionConfig; // import required package/class
@Override
public void setOutputType(TypeInformation<OUT> outTypeInfo, ExecutionConfig executionConfig) {
	outTypeSerializer = outTypeInfo.createSerializer(executionConfig);

	ByteArrayOutputStream baos = new ByteArrayOutputStream();
	DataOutputViewStreamWrapper out = new DataOutputViewStreamWrapper(baos);

	try {
		outTypeSerializer.serialize(initialValue, out);
	} catch (IOException ioe) {
		throw new RuntimeException("Unable to serialize initial value of type " +
				initialValue.getClass().getSimpleName() + " of fold operator.", ioe);
	}

	serializedInitialValue = baos.toByteArray();
}
 
Developer: axbaretto, Project: flink, Lines: 17, Source: StreamGroupedFold.java

Example 3: setUp

import org.apache.flink.api.common.ExecutionConfig; // import required package/class
@Before
public void setUp() throws Exception {
	if (!aggrStateDesc.isSerializerInitialized()) {
		aggrStateDesc.initializeSerializerUnlessSet(new ExecutionConfig());
	}

	final String initValue = "42";

	ByteArrayOutputStream out = new ByteArrayOutputStream();
	aggrStateDesc.getSerializer().serialize(initValue, new DataOutputViewStreamWrapper(out));

	aggrState = ImmutableAggregatingState.createState(
			aggrStateDesc,
			out.toByteArray()
	);
}
 
Developer: axbaretto, Project: flink, Lines: 17, Source: ImmutableAggregatingStateTest.java

Example 4: createComparator

import org.apache.flink.api.common.ExecutionConfig; // import required package/class
private static <T> TypeComparatorFactory<?> createComparator(TypeInformation<T> typeInfo, FieldList keys, boolean[] sortOrder, ExecutionConfig executionConfig) {

	TypeComparator<T> comparator;
	if (typeInfo instanceof CompositeType) {
		comparator = ((CompositeType<T>) typeInfo).createComparator(keys.toArray(), sortOrder, 0, executionConfig);
	}
	else if (typeInfo instanceof AtomicType) {
		// handle grouping of atomic types
		comparator = ((AtomicType<T>) typeInfo).createComparator(sortOrder[0], executionConfig);
	}
	else {
		throw new RuntimeException("Unrecognized type: " + typeInfo);
	}

	return new RuntimeComparatorFactory<>(comparator);
}
 
Developer: axbaretto, Project: flink, Lines: 17, Source: Utils.java

Example 5: AbstractHTMInferenceOperator

import org.apache.flink.api.common.ExecutionConfig; // import required package/class
public AbstractHTMInferenceOperator(
        final ExecutionConfig executionConfig,
        final TypeInformation<IN> inputType,
        final boolean isProcessingTime,
        final NetworkFactory<IN> networkFactory,
        final ResetFunction<IN> resetFunction
) {
    super(resetFunction != null ? resetFunction : NEVER_RESET_FUNCTION);

    this.executionConfig = executionConfig;
    this.inputType = inputType;
    this.isProcessingTime = isProcessingTime;
    this.networkFactory = networkFactory;

    this.inputSerializer = inputType.createSerializer(executionConfig);
}
 
Developer: htm-community, Project: flink-htm, Lines: 17, Source: AbstractHTMInferenceOperator.java

Example 6: initializeState

import org.apache.flink.api.common.ExecutionConfig; // import required package/class
@Override
public void initializeState(FunctionInitializationContext context) throws Exception {
  if (checkpointCoder == null) {
    // no checkpoint coder available in this source
    return;
  }

  OperatorStateStore stateStore = context.getOperatorStateStore();
  CoderTypeInformation<
      KV<? extends UnboundedSource<OutputT, CheckpointMarkT>, CheckpointMarkT>>
      typeInformation = (CoderTypeInformation) new CoderTypeInformation<>(checkpointCoder);
  stateForCheckpoint = stateStore.getOperatorState(
      new ListStateDescriptor<>(DefaultOperatorStateBackend.DEFAULT_OPERATOR_STATE_NAME,
          typeInformation.createSerializer(new ExecutionConfig())));

  if (context.isRestored()) {
    isRestored = true;
    LOG.info("Having restore state in the UnbounedSourceWrapper.");
  } else {
    LOG.info("No restore state for UnbounedSourceWrapper.");
  }
}
 
Developer: apache, Project: beam, Lines: 23, Source: UnboundedSourceWrapper.java

Example 7: testEitherWithTupleValues

import org.apache.flink.api.common.ExecutionConfig; // import required package/class
@Test
public void testEitherWithTupleValues() {
	@SuppressWarnings("unchecked")
	Either<Tuple2<LongValue, LongValue>, DoubleValue>[] testData = new Either[] {
		Left(new Tuple2<>(new LongValue(2L), new LongValue(9L))),
		new Left<>(new Tuple2<>(new LongValue(Long.MIN_VALUE), new LongValue(Long.MAX_VALUE))),
		new Right<>(new DoubleValue(32.0)),
		Right(new DoubleValue(Double.MIN_VALUE)),
		Right(new DoubleValue(Double.MAX_VALUE))};

	EitherTypeInfo<Tuple2<LongValue, LongValue>, DoubleValue> eitherTypeInfo = new EitherTypeInfo<>(
		new TupleTypeInfo<Tuple2<LongValue, LongValue>>(ValueTypeInfo.LONG_VALUE_TYPE_INFO, ValueTypeInfo.LONG_VALUE_TYPE_INFO),
		ValueTypeInfo.DOUBLE_VALUE_TYPE_INFO);
	EitherSerializer<Tuple2<LongValue, LongValue>, DoubleValue> eitherSerializer =
		(EitherSerializer<Tuple2<LongValue, LongValue>, DoubleValue>) eitherTypeInfo.createSerializer(new ExecutionConfig());
	SerializerTestInstance<Either<Tuple2<LongValue, LongValue>, DoubleValue>> testInstance =
		new EitherSerializerTestInstance<>(eitherSerializer, eitherTypeInfo.getTypeClass(), -1, testData);
	testInstance.testAll();
}
 
Developer: axbaretto, Project: flink, Lines: 20, Source: EitherSerializerTest.java

Example 8: getKeyedStateBackend

import org.apache.flink.api.common.ExecutionConfig; // import required package/class
private static KeyedStateBackend<ByteBuffer> getKeyedStateBackend(int numberOfKeyGroups,
                                                 KeyGroupRange keyGroupRange) {
  MemoryStateBackend backend = new MemoryStateBackend();
  try {
    AbstractKeyedStateBackend<ByteBuffer> keyedStateBackend = backend.createKeyedStateBackend(
        new DummyEnvironment("test", 1, 0),
        new JobID(),
        "test_op",
        new GenericTypeInfo<>(ByteBuffer.class).createSerializer(new ExecutionConfig()),
        numberOfKeyGroups,
        keyGroupRange,
        new KvStateRegistry().createTaskRegistry(new JobID(), new JobVertexID()));
    keyedStateBackend.setCurrentKey(ByteBuffer.wrap(
        CoderUtils.encodeToByteArray(StringUtf8Coder.of(), "1")));
    return keyedStateBackend;
  } catch (Exception e) {
    throw new RuntimeException(e);
  }
}
 
Developer: apache, Project: beam, Lines: 20, Source: FlinkKeyGroupStateInternalsTest.java

Example 9: testCassandraScalaTupleAtLeastOnceSinkBuilderDetection

import org.apache.flink.api.common.ExecutionConfig; // import required package/class
@Test
public void testCassandraScalaTupleAtLeastOnceSinkBuilderDetection() throws Exception {
	Class<scala.Tuple1<String>> c = (Class<scala.Tuple1<String>>) new scala.Tuple1<>("hello").getClass();
	Seq<TypeInformation<?>> typeInfos = JavaConverters.asScalaBufferConverter(
		Collections.<TypeInformation<?>>singletonList(BasicTypeInfo.STRING_TYPE_INFO)).asScala();
	Seq<String> fieldNames = JavaConverters.asScalaBufferConverter(
		Collections.singletonList("_1")).asScala();

	CaseClassTypeInfo<scala.Tuple1<String>> typeInfo = new CaseClassTypeInfo<scala.Tuple1<String>>(c, null, typeInfos, fieldNames) {
		@Override
		public TypeSerializer<scala.Tuple1<String>> createSerializer(ExecutionConfig config) {
			return null;
		}
	};

	StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
	DataStream<scala.Tuple1<String>> input = env.fromElements(new scala.Tuple1<>("hello")).returns(typeInfo);

	CassandraSink.CassandraSinkBuilder<scala.Tuple1<String>> sinkBuilder = CassandraSink.addSink(input);
	assertTrue(sinkBuilder instanceof CassandraSink.CassandraScalaProductSinkBuilder);
}
 
Developer: axbaretto, Project: flink, Lines: 22, Source: CassandraConnectorITCase.java

Example 10: executeOnCollections

import org.apache.flink.api.common.ExecutionConfig; // import required package/class
@Override
protected List<OUT> executeOnCollections(List<IN> input, RuntimeContext ctx, ExecutionConfig executionConfig) throws Exception {
	FlatMapFunction<IN, OUT> function = userFunction.getUserCodeObject();
	
	FunctionUtils.setFunctionRuntimeContext(function, ctx);
	FunctionUtils.openFunction(function, parameters);

	ArrayList<OUT> result = new ArrayList<OUT>(input.size());

	TypeSerializer<IN> inSerializer = getOperatorInfo().getInputType().createSerializer(executionConfig);
	TypeSerializer<OUT> outSerializer = getOperatorInfo().getOutputType().createSerializer(executionConfig);

	CopyingListCollector<OUT> resultCollector = new CopyingListCollector<OUT>(result, outSerializer);

	for (IN element : input) {
		IN inCopy = inSerializer.copy(element);
		function.flatMap(inCopy, resultCollector);
	}

	FunctionUtils.closeFunction(function);

	return result;
}
 
Developer: axbaretto, Project: flink, Lines: 24, Source: FlatMapOperatorBase.java

Example 11: getExecutionVertex

import org.apache.flink.api.common.ExecutionConfig; // import required package/class
public static ExecutionJobVertex getExecutionVertex(
		JobVertexID id, ScheduledExecutorService executor) 
	throws Exception {

	JobVertex ajv = new JobVertex("TestVertex", id);
	ajv.setInvokableClass(mock(AbstractInvokable.class).getClass());

	ExecutionGraph graph = new ExecutionGraph(
		executor,
		executor,
		new JobID(), 
		"test job", 
		new Configuration(),
		new SerializedValue<>(new ExecutionConfig()),
		AkkaUtils.getDefaultTimeout(),
		new NoRestartStrategy(),
		new Scheduler(ExecutionContext$.MODULE$.fromExecutor(executor)));

	return spy(new ExecutionJobVertex(graph, ajv, 1, AkkaUtils.getDefaultTimeout()));
}
 
Developer: axbaretto, Project: flink, Lines: 21, Source: ExecutionGraphTestUtils.java

Example 12: testSlidingEventTimeWindowsApply

import org.apache.flink.api.common.ExecutionConfig; // import required package/class
@Test
@SuppressWarnings("unchecked")
public void testSlidingEventTimeWindowsApply() throws Exception {
	closeCalled.set(0);

	final int windowSize = 3;
	final int windowSlide = 1;

	TypeInformation<Tuple2<String, Integer>> inputType = TypeInfoParser.parse("Tuple2<String, Integer>");

	ListStateDescriptor<Tuple2<String, Integer>> stateDesc = new ListStateDescriptor<>("window-contents",
			inputType.createSerializer(new ExecutionConfig()));

	WindowOperator<String, Tuple2<String, Integer>, Iterable<Tuple2<String, Integer>>, Tuple2<String, Integer>, TimeWindow> operator = new WindowOperator<>(
			SlidingEventTimeWindows.of(Time.of(windowSize, TimeUnit.SECONDS), Time.of(windowSlide, TimeUnit.SECONDS)),
			new TimeWindow.Serializer(),
			new TupleKeySelector(),
			BasicTypeInfo.STRING_TYPE_INFO.createSerializer(new ExecutionConfig()),
			stateDesc,
			new InternalIterableWindowFunction<>(new RichSumReducer<TimeWindow>()),
			EventTimeTrigger.create(),
			0,
			null /* late data output tag */);

	testSlidingEventTimeWindows(operator);

	// we close once in the rest...
	Assert.assertEquals("Close was not called.", 2, closeCalled.get());
}
 
Developer: axbaretto, Project: flink, Lines: 30, Source: WindowOperatorTest.java

Example 13: testValueStateDescriptorEagerSerializer

import org.apache.flink.api.common.ExecutionConfig; // import required package/class
@Test
public void testValueStateDescriptorEagerSerializer() throws Exception {

	TypeSerializer<String> serializer = new KryoSerializer<>(String.class, new ExecutionConfig());
	
	ListStateDescriptor<String> descr = 
			new ListStateDescriptor<String>("testName", serializer);
	
	assertEquals("testName", descr.getName());
	assertNotNull(descr.getSerializer());
	assertTrue(descr.getSerializer() instanceof ListSerializer);
	assertNotNull(descr.getElementSerializer());
	assertEquals(serializer, descr.getElementSerializer());

	ListStateDescriptor<String> copy = CommonTestUtils.createCopySerializable(descr);

	assertEquals("testName", copy.getName());
	assertNotNull(copy.getSerializer());
	assertTrue(copy.getSerializer() instanceof ListSerializer);

	assertNotNull(copy.getElementSerializer());
	assertEquals(serializer, copy.getElementSerializer());
}
 
Developer: axbaretto, Project: flink, Lines: 24, Source: ListStateDescriptorTest.java

Example 14: executeOnCollections

import org.apache.flink.api.common.ExecutionConfig; // import required package/class
@Override
protected List<OUT> executeOnCollections(List<IN> inputData, RuntimeContext ctx, ExecutionConfig executionConfig) throws Exception {
	MapPartitionFunction<IN, OUT> function = this.userFunction.getUserCodeObject();
	
	FunctionUtils.setFunctionRuntimeContext(function, ctx);
	FunctionUtils.openFunction(function, this.parameters);
	
	ArrayList<OUT> result = new ArrayList<OUT>(inputData.size() / 4);

	TypeSerializer<IN> inSerializer = getOperatorInfo().getInputType().createSerializer(executionConfig);
	TypeSerializer<OUT> outSerializer = getOperatorInfo().getOutputType().createSerializer(executionConfig);

	CopyingIterator<IN> source = new CopyingIterator<IN>(inputData.iterator(), inSerializer);
	CopyingListCollector<OUT> resultCollector = new CopyingListCollector<OUT>(result, outSerializer);

	function.mapPartition(source, resultCollector);

	result.trimToSize();
	FunctionUtils.closeFunction(function);
	return result;
}
 
Developer: axbaretto, Project: flink, Lines: 22, Source: MapPartitionOperatorBase.java

Example 15: getConfigurations

import org.apache.flink.api.common.ExecutionConfig; // import required package/class
@Parameterized.Parameters
public static Collection<Object[]> getConfigurations() throws IOException {
	LinkedList<Object[]> configs = new LinkedList<>();
	
	ExecutionConfig withReuse = new ExecutionConfig();
	withReuse.enableObjectReuse();
	
	ExecutionConfig withoutReuse = new ExecutionConfig();
	withoutReuse.disableObjectReuse();
	
	Object[] a = {withoutReuse};
	configs.add(a);
	Object[] b = {withReuse};
	configs.add(b);
	
	return configs;
}
 
Developer: axbaretto, Project: flink, Lines: 18, Source: BinaryOperatorTestBase.java
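Example 15 parameterizes a test with two configurations that differ only in object reuse. As a brief follow-up (a minimal sketch under the same assumptions, not part of the excerpt above), runtime code typically queries that choice through ExecutionConfig.isObjectReuseEnabled() and decides whether records may be mutated in place or must be defensively copied:

import org.apache.flink.api.common.ExecutionConfig;

public class ObjectReuseCheck {

	public static void main(String[] args) {
		ExecutionConfig withReuse = new ExecutionConfig();
		withReuse.enableObjectReuse();

		ExecutionConfig withoutReuse = new ExecutionConfig();
		withoutReuse.disableObjectReuse();

		// The flag that downstream code inspects to choose between
		// in-place reuse and defensive copying of records.
		System.out.println(withReuse.isObjectReuseEnabled());    // true
		System.out.println(withoutReuse.isObjectReuseEnabled()); // false
	}
}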


Note: The org.apache.flink.api.common.ExecutionConfig class examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets are drawn from open-source projects contributed by their respective developers, and copyright of the source code remains with the original authors. Please refer to the corresponding project's license before distributing or using the code; do not reproduce this article without permission.