

Java TupleTypeInfo.getBasicTupleTypeInfo Method Code Examples

This article collects typical usage examples of the Java method org.apache.flink.api.java.typeutils.TupleTypeInfo.getBasicTupleTypeInfo. If you are unsure what exactly TupleTypeInfo.getBasicTupleTypeInfo does, how to call it, or what real-world usages look like, the curated code samples below should help. You can also explore further usage examples of the enclosing class, org.apache.flink.api.java.typeutils.TupleTypeInfo.


Below are 15 code examples of TupleTypeInfo.getBasicTupleTypeInfo, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Java code examples.
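
Before the examples, here is a minimal, self-contained usage sketch. It is illustrative only: the class name, field types, and printed values are not taken from any of the examples below. getBasicTupleTypeInfo builds a TupleTypeInfo from the basic field classes of a tuple; the resulting type information can then be passed to input formats or used to create serializers and comparators, as the examples below do.

import org.apache.flink.api.common.ExecutionConfig;
import org.apache.flink.api.common.typeutils.TypeSerializer;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.typeutils.TupleTypeInfo;

public class GetBasicTupleTypeInfoSketch {

	public static void main(String[] args) {
		// Build type information for Tuple2<String, Integer> from its basic field classes.
		TupleTypeInfo<Tuple2<String, Integer>> typeInfo =
				TupleTypeInfo.getBasicTupleTypeInfo(String.class, Integer.class);

		// The type info describes the tuple's arity and field types ...
		System.out.println(typeInfo.getArity());   // 2
		System.out.println(typeInfo.getTypeAt(0)); // String

		// ... and can create a serializer, e.g. for CSV input formats or sort-merge joins.
		TypeSerializer<Tuple2<String, Integer>> serializer =
				typeInfo.createSerializer(new ExecutionConfig());
		System.out.println(serializer.getClass().getSimpleName());
	}
}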

Example 1: checkJoinWithReplicatedSourceInputBehindReduce

import org.apache.flink.api.java.typeutils.TupleTypeInfo; // import the package/class this method depends on
/**
 * Tests compiler fail for join program with replicated data source behind reduce.
 */
@Test(expected = CompilerException.class)
public void checkJoinWithReplicatedSourceInputBehindReduce() {
	ExecutionEnvironment env = ExecutionEnvironment.createLocalEnvironment();
	env.setParallelism(DEFAULT_PARALLELISM);

	TupleTypeInfo<Tuple1<String>> typeInfo = TupleTypeInfo.getBasicTupleTypeInfo(String.class);
	ReplicatingInputFormat<Tuple1<String>, FileInputSplit> rif =
			new ReplicatingInputFormat<Tuple1<String>, FileInputSplit>(new TupleCsvInputFormat<Tuple1<String>>(new Path("/some/path"), typeInfo));

	DataSet<Tuple1<String>> source1 = env.createInput(rif, new TupleTypeInfo<Tuple1<String>>(BasicTypeInfo.STRING_TYPE_INFO));
	DataSet<Tuple1<String>> source2 = env.readCsvFile("/some/otherpath").types(String.class);

	DataSink<Tuple2<Tuple1<String>, Tuple1<String>>> out = source1
			.reduce(new LastReduce())
			.join(source2).where("*").equalTo("*")
			.writeAsText("/some/newpath");

	Plan plan = env.createProgramPlan();

	// submit the plan to the compiler
	OptimizedPlan oPlan = compileNoStats(plan);
}
 
Contributor: axbaretto, Project: flink, Lines: 26, Source: ReplicatingDataSourceTest.java

Example 2: checkJoinWithReplicatedSourceInputBehindRebalance

import org.apache.flink.api.java.typeutils.TupleTypeInfo; // import the package/class this method depends on
/**
 * Tests compiler fail for join program with replicated data source behind rebalance.
 */
@Test(expected = CompilerException.class)
public void checkJoinWithReplicatedSourceInputBehindRebalance() {
	ExecutionEnvironment env = ExecutionEnvironment.createLocalEnvironment();
	env.setParallelism(DEFAULT_PARALLELISM);

	TupleTypeInfo<Tuple1<String>> typeInfo = TupleTypeInfo.getBasicTupleTypeInfo(String.class);
	ReplicatingInputFormat<Tuple1<String>, FileInputSplit> rif =
			new ReplicatingInputFormat<Tuple1<String>, FileInputSplit>(new TupleCsvInputFormat<Tuple1<String>>(new Path("/some/path"), typeInfo));

	DataSet<Tuple1<String>> source1 = env.createInput(rif, new TupleTypeInfo<Tuple1<String>>(BasicTypeInfo.STRING_TYPE_INFO));
	DataSet<Tuple1<String>> source2 = env.readCsvFile("/some/otherpath").types(String.class);

	DataSink<Tuple2<Tuple1<String>, Tuple1<String>>> out = source1
			.rebalance()
			.join(source2).where("*").equalTo("*")
			.writeAsText("/some/newpath");

	Plan plan = env.createProgramPlan();

	// submit the plan to the compiler
	OptimizedPlan oPlan = compileNoStats(plan);
}
 
Contributor: axbaretto, Project: flink, Lines: 26, Source: ReplicatingDataSourceTest.java

Example 3: beforeTest

import org.apache.flink.api.java.typeutils.TupleTypeInfo; // import the package/class this method depends on
@Before
public void beforeTest() {
	ExecutionConfig config = new ExecutionConfig();
	config.disableObjectReuse();
	
	TupleTypeInfo<Tuple2<String, String>> typeInfo1 = TupleTypeInfo.getBasicTupleTypeInfo(String.class, String.class);
	TupleTypeInfo<Tuple2<String, Integer>> typeInfo2 = TupleTypeInfo.getBasicTupleTypeInfo(String.class, Integer.class);
	serializer1 = typeInfo1.createSerializer(config);
	serializer2 = typeInfo2.createSerializer(config);
	comparator1 = typeInfo1.createComparator(new int[]{0}, new boolean[]{true}, 0, config);
	comparator2 = typeInfo2.createComparator(new int[]{0}, new boolean[]{true}, 0, config);
	pairComp = new GenericPairComparator<>(comparator1, comparator2);

	this.memoryManager = new MemoryManager(MEMORY_SIZE, 1);
	this.ioManager = new IOManagerAsync();
}
 
Contributor: axbaretto, Project: flink, Lines: 17, Source: AbstractSortMergeOuterJoinIteratorITCase.java

Example 4: checkJoinWithReplicatedSourceInputBehindFilter

import org.apache.flink.api.java.typeutils.TupleTypeInfo; // import the package/class this method depends on
/**
 * Tests join program with replicated data source behind filter.
 */
@Test
public void checkJoinWithReplicatedSourceInputBehindFilter() {

	ExecutionEnvironment env = ExecutionEnvironment.createLocalEnvironment();
	env.setParallelism(DEFAULT_PARALLELISM);

	TupleTypeInfo<Tuple1<String>> typeInfo = TupleTypeInfo.getBasicTupleTypeInfo(String.class);
	ReplicatingInputFormat<Tuple1<String>, FileInputSplit> rif =
			new ReplicatingInputFormat<Tuple1<String>, FileInputSplit>(new TupleCsvInputFormat<Tuple1<String>>(new Path("/some/path"), typeInfo));

	DataSet<Tuple1<String>> source1 = env.createInput(rif, new TupleTypeInfo<Tuple1<String>>(BasicTypeInfo.STRING_TYPE_INFO));
	DataSet<Tuple1<String>> source2 = env.readCsvFile("/some/otherpath").types(String.class);

	DataSink<Tuple2<Tuple1<String>, Tuple1<String>>> out = source1
			.filter(new NoFilter())
			.join(source2).where("*").equalTo("*")
			.writeAsText("/some/newpath");

	Plan plan = env.createProgramPlan();

	// submit the plan to the compiler
	OptimizedPlan oPlan = compileNoStats(plan);

	// check the optimized Plan
	// when join should have forward strategy on both sides
	SinkPlanNode sinkNode = oPlan.getDataSinks().iterator().next();
	DualInputPlanNode joinNode = (DualInputPlanNode) sinkNode.getPredecessor();

	ShipStrategyType joinIn1 = joinNode.getInput1().getShipStrategy();
	ShipStrategyType joinIn2 = joinNode.getInput2().getShipStrategy();

	Assert.assertEquals("Invalid ship strategy for an operator.", ShipStrategyType.FORWARD, joinIn1);
	Assert.assertEquals("Invalid ship strategy for an operator.", ShipStrategyType.FORWARD, joinIn2);
}
 
Contributor: axbaretto, Project: flink, Lines: 38, Source: ReplicatingDataSourceTest.java

Example 5: checkCrossWithReplicatedSourceInputBehindMap

import org.apache.flink.api.java.typeutils.TupleTypeInfo; // import the package/class this method depends on
/**
 * Tests cross program with replicated data source behind map and filter.
 */
@Test
public void checkCrossWithReplicatedSourceInputBehindMap() {

	ExecutionEnvironment env = ExecutionEnvironment.createLocalEnvironment();
	env.setParallelism(DEFAULT_PARALLELISM);

	TupleTypeInfo<Tuple1<String>> typeInfo = TupleTypeInfo.getBasicTupleTypeInfo(String.class);
	ReplicatingInputFormat<Tuple1<String>, FileInputSplit> rif =
			new ReplicatingInputFormat<Tuple1<String>, FileInputSplit>(new TupleCsvInputFormat<Tuple1<String>>(new Path("/some/path"), typeInfo));

	DataSet<Tuple1<String>> source1 = env.createInput(rif, new TupleTypeInfo<Tuple1<String>>(BasicTypeInfo.STRING_TYPE_INFO));
	DataSet<Tuple1<String>> source2 = env.readCsvFile("/some/otherpath").types(String.class);

	DataSink<Tuple2<Tuple1<String>, Tuple1<String>>> out = source1
			.map(new IdMap())
			.filter(new NoFilter())
			.cross(source2)
			.writeAsText("/some/newpath");

	Plan plan = env.createProgramPlan();

	// submit the plan to the compiler
	OptimizedPlan oPlan = compileNoStats(plan);

	// check the optimized Plan
	// when cross should have forward strategy on both sides
	SinkPlanNode sinkNode = oPlan.getDataSinks().iterator().next();
	DualInputPlanNode crossNode = (DualInputPlanNode) sinkNode.getPredecessor();

	ShipStrategyType crossIn1 = crossNode.getInput1().getShipStrategy();
	ShipStrategyType crossIn2 = crossNode.getInput2().getShipStrategy();

	Assert.assertEquals("Invalid ship strategy for an operator.", ShipStrategyType.FORWARD, crossIn1);
	Assert.assertEquals("Invalid ship strategy for an operator.", ShipStrategyType.FORWARD, crossIn2);
}
 
Contributor: axbaretto, Project: flink, Lines: 39, Source: ReplicatingDataSourceTest.java

Example 6: testReadFirstN

import org.apache.flink.api.java.typeutils.TupleTypeInfo; // import the package/class this method depends on
@Test
public void testReadFirstN() throws IOException {
	try {
		final String fileContent = "111|222|333|444|555|\n666|777|888|999|000|\n";
		final FileInputSplit split = createTempFile(fileContent);

		final TupleTypeInfo<Tuple2<Integer, Integer>> typeInfo = TupleTypeInfo.getBasicTupleTypeInfo(Integer.class, Integer.class);
		final CsvInputFormat<Tuple2<Integer, Integer>> format = new TupleCsvInputFormat<Tuple2<Integer, Integer>>(PATH, typeInfo);

		format.setFieldDelimiter("|");

		format.configure(new Configuration());
		format.open(split);

		Tuple2<Integer, Integer> result = new Tuple2<Integer, Integer>();

		result = format.nextRecord(result);
		assertNotNull(result);
		assertEquals(Integer.valueOf(111), result.f0);
		assertEquals(Integer.valueOf(222), result.f1);

		result = format.nextRecord(result);
		assertNotNull(result);
		assertEquals(Integer.valueOf(666), result.f0);
		assertEquals(Integer.valueOf(777), result.f1);

		result = format.nextRecord(result);
		assertNull(result);
		assertTrue(format.reachedEnd());
	}
	catch (Exception ex) {
		fail("Test failed due to a " + ex.getClass().getName() + ": " + ex.getMessage());
	}

}
 
Contributor: axbaretto, Project: flink, Lines: 36, Source: CsvInputFormatTest.java

Example 7: checkCrossWithReplicatedSourceInput

import org.apache.flink.api.java.typeutils.TupleTypeInfo; // import the package/class this method depends on
/**
 * Tests cross program with replicated data source.
 */
@Test
public void checkCrossWithReplicatedSourceInput() {

	ExecutionEnvironment env = ExecutionEnvironment.createLocalEnvironment();
	env.setParallelism(DEFAULT_PARALLELISM);

	TupleTypeInfo<Tuple1<String>> typeInfo = TupleTypeInfo.getBasicTupleTypeInfo(String.class);
	ReplicatingInputFormat<Tuple1<String>, FileInputSplit> rif =
			new ReplicatingInputFormat<Tuple1<String>, FileInputSplit>(new TupleCsvInputFormat<Tuple1<String>>(new Path("/some/path"), typeInfo));

	DataSet<Tuple1<String>> source1 = env.createInput(rif, new TupleTypeInfo<Tuple1<String>>(BasicTypeInfo.STRING_TYPE_INFO));
	DataSet<Tuple1<String>> source2 = env.readCsvFile("/some/otherpath").types(String.class);

	DataSink<Tuple2<Tuple1<String>, Tuple1<String>>> out = source1
			.cross(source2)
			.writeAsText("/some/newpath");

	Plan plan = env.createProgramPlan();

	// submit the plan to the compiler
	OptimizedPlan oPlan = compileNoStats(plan);

	// check the optimized Plan
	// when cross should have forward strategy on both sides
	SinkPlanNode sinkNode = oPlan.getDataSinks().iterator().next();
	DualInputPlanNode crossNode = (DualInputPlanNode) sinkNode.getPredecessor();

	ShipStrategyType crossIn1 = crossNode.getInput1().getShipStrategy();
	ShipStrategyType crossIn2 = crossNode.getInput2().getShipStrategy();

	Assert.assertEquals("Invalid ship strategy for an operator.", ShipStrategyType.FORWARD, crossIn1);
	Assert.assertEquals("Invalid ship strategy for an operator.", ShipStrategyType.FORWARD, crossIn2);
}
 
Contributor: axbaretto, Project: flink, Lines: 37, Source: ReplicatingDataSourceTest.java

Example 8: checkJoinWithReplicatedSourceInputBehindMultiMaps

import org.apache.flink.api.java.typeutils.TupleTypeInfo; // import the package/class this method depends on
/**
 * Tests join program with replicated data source behind multiple map ops.
 */
@Test
public void checkJoinWithReplicatedSourceInputBehindMultiMaps() {

	ExecutionEnvironment env = ExecutionEnvironment.createLocalEnvironment();
	env.setParallelism(DEFAULT_PARALLELISM);

	TupleTypeInfo<Tuple1<String>> typeInfo = TupleTypeInfo.getBasicTupleTypeInfo(String.class);
	ReplicatingInputFormat<Tuple1<String>, FileInputSplit> rif =
			new ReplicatingInputFormat<Tuple1<String>, FileInputSplit>(new TupleCsvInputFormat<Tuple1<String>>(new Path("/some/path"), typeInfo));

	DataSet<Tuple1<String>> source1 = env.createInput(rif, new TupleTypeInfo<Tuple1<String>>(BasicTypeInfo.STRING_TYPE_INFO));
	DataSet<Tuple1<String>> source2 = env.readCsvFile("/some/otherpath").types(String.class);

	DataSink<Tuple2<Tuple1<String>, Tuple1<String>>> out = source1
			.filter(new NoFilter())
			.mapPartition(new IdPMap())
			.flatMap(new IdFlatMap())
			.map(new IdMap())
			.join(source2).where("*").equalTo("*")
			.writeAsText("/some/newpath");

	Plan plan = env.createProgramPlan();

	// submit the plan to the compiler
	OptimizedPlan oPlan = compileNoStats(plan);

	// check the optimized Plan
	// when join should have forward strategy on both sides
	SinkPlanNode sinkNode = oPlan.getDataSinks().iterator().next();
	DualInputPlanNode joinNode = (DualInputPlanNode) sinkNode.getPredecessor();

	ShipStrategyType joinIn1 = joinNode.getInput1().getShipStrategy();
	ShipStrategyType joinIn2 = joinNode.getInput2().getShipStrategy();

	Assert.assertEquals("Invalid ship strategy for an operator.", ShipStrategyType.FORWARD, joinIn1);
	Assert.assertEquals("Invalid ship strategy for an operator.", ShipStrategyType.FORWARD, joinIn2);
}
 
Contributor: axbaretto, Project: flink, Lines: 41, Source: ReplicatingDataSourceTest.java

Example 9: testQuotedStringParsingWithIncludeFields

import org.apache.flink.api.java.typeutils.TupleTypeInfo; // import the package/class this method depends on
@Test
public void testQuotedStringParsingWithIncludeFields() throws Exception {
	final String fileContent = "\"20:41:52-1-3-2015\"|\"Re: Taskmanager memory error in Eclipse\"|" +
			"\"Blahblah <[email protected]>\"|\"blaaa|\"blubb\"";

	final File tempFile = File.createTempFile("CsvReaderQuotedString", "tmp");
	tempFile.deleteOnExit();
	tempFile.setWritable(true);

	OutputStreamWriter writer = new OutputStreamWriter(new FileOutputStream(tempFile));
	writer.write(fileContent);
	writer.close();

	TupleTypeInfo<Tuple2<String, String>> typeInfo = TupleTypeInfo.getBasicTupleTypeInfo(String.class, String.class);
	CsvInputFormat<Tuple2<String, String>> inputFormat = new TupleCsvInputFormat<Tuple2<String, String>>(new Path(tempFile.toURI().toString()), typeInfo, new boolean[]{true, false, true});

	inputFormat.enableQuotedStringParsing('"');
	inputFormat.setFieldDelimiter("|");
	inputFormat.setDelimiter('\n');

	inputFormat.configure(new Configuration());
	FileInputSplit[] splits = inputFormat.createInputSplits(1);

	inputFormat.open(splits[0]);

	Tuple2<String, String> record = inputFormat.nextRecord(new Tuple2<String, String>());

	assertEquals("20:41:52-1-3-2015", record.f0);
	assertEquals("Blahblah <[email protected]>", record.f1);
}
 
Contributor: axbaretto, Project: flink, Lines: 31, Source: CsvInputFormatTest.java

Example 10: testQuotedStringParsingWithEscapedQuotes

import org.apache.flink.api.java.typeutils.TupleTypeInfo; // import the package/class this method depends on
@Test
public void testQuotedStringParsingWithEscapedQuotes() throws Exception {
	final String fileContent = "\"\\\"Hello\\\" World\"|\"We are\\\" young\"";

	final File tempFile = File.createTempFile("CsvReaderQuotedString", "tmp");
	tempFile.deleteOnExit();
	tempFile.setWritable(true);

	OutputStreamWriter writer = new OutputStreamWriter(new FileOutputStream(tempFile));
	writer.write(fileContent);
	writer.close();

	TupleTypeInfo<Tuple2<String, String>> typeInfo = TupleTypeInfo.getBasicTupleTypeInfo(String.class, String.class);
	CsvInputFormat<Tuple2<String, String>> inputFormat = new TupleCsvInputFormat<>(new Path(tempFile.toURI().toString()), typeInfo);

	inputFormat.enableQuotedStringParsing('"');
	inputFormat.setFieldDelimiter("|");
	inputFormat.setDelimiter('\n');

	inputFormat.configure(new Configuration());
	FileInputSplit[] splits = inputFormat.createInputSplits(1);

	inputFormat.open(splits[0]);

	Tuple2<String, String> record = inputFormat.nextRecord(new Tuple2<String, String>());

	assertEquals("\\\"Hello\\\" World", record.f0);
	assertEquals("We are\\\" young", record.f1);
}
 
Contributor: axbaretto, Project: flink, Lines: 30, Source: CsvInputFormatTest.java

Example 11: createTypeInfo

import org.apache.flink.api.java.typeutils.TupleTypeInfo; // import the package/class this method depends on
@Override
protected TupleTypeInfo<Tuple3<String, Integer, Integer>> createTypeInfo() {
	return TupleTypeInfo.getBasicTupleTypeInfo(String.class, Integer.class, Integer.class);
}
 
Contributor: axbaretto, Project: flink, Lines: 5, Source: CassandraConnectorITCase.java

Example 12: ignoreSingleCharPrefixComments

import org.apache.flink.api.java.typeutils.TupleTypeInfo; // import the package/class this method depends on
@Test
public void ignoreSingleCharPrefixComments() {
	try {
		final String fileContent = "#description of the data\n" +
			"#successive commented line\n" +
			"this is|1|2.0|\n" +
			"a test|3|4.0|\n" +
			"#next|5|6.0|\n";

		final FileInputSplit split = createTempFile(fileContent);

		final TupleTypeInfo<Tuple3<String, Integer, Double>> typeInfo = TupleTypeInfo.getBasicTupleTypeInfo(String.class, Integer.class, Double.class);
		final CsvInputFormat<Tuple3<String, Integer, Double>> format = new TupleCsvInputFormat<Tuple3<String, Integer, Double>>(PATH, "\n", "|", typeInfo);
		format.setCommentPrefix("#");

		final Configuration parameters = new Configuration();
		format.configure(parameters);
		format.open(split);

		Tuple3<String, Integer, Double> result = new Tuple3<String, Integer, Double>();

		result = format.nextRecord(result);
		assertNotNull(result);
		assertEquals("this is", result.f0);
		assertEquals(Integer.valueOf(1), result.f1);
		assertEquals(new Double(2.0), result.f2);

		result = format.nextRecord(result);
		assertNotNull(result);
		assertEquals("a test", result.f0);
		assertEquals(Integer.valueOf(3), result.f1);
		assertEquals(new Double(4.0), result.f2);

		result = format.nextRecord(result);
		assertNull(result);
	}
	catch (Exception ex) {
		ex.printStackTrace();
		fail("Test failed due to a " + ex.getClass().getName() + ": " + ex.getMessage());
	}
}
 
Contributor: axbaretto, Project: flink, Lines: 42, Source: CsvInputFormatTest.java

Example 13: ignoreMultiCharPrefixComments

import org.apache.flink.api.java.typeutils.TupleTypeInfo; // import the package/class this method depends on
@Test
public void ignoreMultiCharPrefixComments() {
	try {

		final String fileContent = "//description of the data\n" +
			"//successive commented line\n" +
			"this is|1|2.0|\n" +
			"a test|3|4.0|\n" +
			"//next|5|6.0|\n";

		final FileInputSplit split = createTempFile(fileContent);

		final TupleTypeInfo<Tuple3<String, Integer, Double>> typeInfo = TupleTypeInfo.getBasicTupleTypeInfo(String.class, Integer.class, Double.class);
		final CsvInputFormat<Tuple3<String, Integer, Double>> format = new TupleCsvInputFormat<Tuple3<String, Integer, Double>>(PATH, "\n", "|", typeInfo);
		format.setCommentPrefix("//");

		final Configuration parameters = new Configuration();
		format.configure(parameters);
		format.open(split);

		Tuple3<String, Integer, Double> result = new Tuple3<String, Integer, Double>();

		result = format.nextRecord(result);
		assertNotNull(result);
		assertEquals("this is", result.f0);
		assertEquals(Integer.valueOf(1), result.f1);
		assertEquals(new Double(2.0), result.f2);

		result = format.nextRecord(result);
		assertNotNull(result);
		assertEquals("a test", result.f0);
		assertEquals(Integer.valueOf(3), result.f1);
		assertEquals(new Double(4.0), result.f2);

		result = format.nextRecord(result);
		assertNull(result);
	}
	catch (Exception ex) {
		ex.printStackTrace();
		fail("Test failed due to a " + ex.getClass().getName() + ": " + ex.getMessage());
	}
}
 
Contributor: axbaretto, Project: flink, Lines: 43, Source: CsvInputFormatTest.java

Example 14: checkJoinWithReplicatedSourceInput

import org.apache.flink.api.java.typeutils.TupleTypeInfo; // import the package/class this method depends on
/**
 * Tests join program with replicated data source.
 */
@Test
public void checkJoinWithReplicatedSourceInput() {

	ExecutionEnvironment env = ExecutionEnvironment.createLocalEnvironment();
	env.setParallelism(DEFAULT_PARALLELISM);

	TupleTypeInfo<Tuple1<String>> typeInfo = TupleTypeInfo.getBasicTupleTypeInfo(String.class);
	ReplicatingInputFormat<Tuple1<String>, FileInputSplit> rif =
			new ReplicatingInputFormat<Tuple1<String>, FileInputSplit>(new TupleCsvInputFormat<Tuple1<String>>(new Path("/some/path"), typeInfo));

	DataSet<Tuple1<String>> source1 = env.createInput(rif, new TupleTypeInfo<Tuple1<String>>(BasicTypeInfo.STRING_TYPE_INFO));
	DataSet<Tuple1<String>> source2 = env.readCsvFile("/some/otherpath").types(String.class);

	DataSink<Tuple2<Tuple1<String>, Tuple1<String>>> out = source1
			.join(source2).where("*").equalTo("*")
			.writeAsText("/some/newpath");

	Plan plan = env.createProgramPlan();

	// submit the plan to the compiler
	OptimizedPlan oPlan = compileNoStats(plan);

	// check the optimized Plan
	// when join should have forward strategy on both sides
	SinkPlanNode sinkNode = oPlan.getDataSinks().iterator().next();
	DualInputPlanNode joinNode = (DualInputPlanNode) sinkNode.getPredecessor();

	ShipStrategyType joinIn1 = joinNode.getInput1().getShipStrategy();
	ShipStrategyType joinIn2 = joinNode.getInput2().getShipStrategy();

	Assert.assertEquals("Invalid ship strategy for an operator.", ShipStrategyType.FORWARD, joinIn1);
	Assert.assertEquals("Invalid ship strategy for an operator.", ShipStrategyType.FORWARD, joinIn2);
}
 
Contributor: axbaretto, Project: flink, Lines: 37, Source: ReplicatingDataSourceTest.java

Example 15: readStringFieldsWithTrailingDelimiters

import org.apache.flink.api.java.typeutils.TupleTypeInfo; // import the package/class this method depends on
@Test
public void readStringFieldsWithTrailingDelimiters() {
	try {
		final String fileContent = "abc|-def|-ghijk\nabc|-|-hhg\n|-|-|-\n";
		final FileInputSplit split = createTempFile(fileContent);

		final TupleTypeInfo<Tuple3<String, String, String>> typeInfo = TupleTypeInfo.getBasicTupleTypeInfo(String.class, String.class, String.class);
		final CsvInputFormat<Tuple3<String, String, String>> format = new TupleCsvInputFormat<Tuple3<String, String, String>>(PATH, typeInfo);

		format.setFieldDelimiter("|-");

		format.configure(new Configuration());
		format.open(split);

		Tuple3<String, String, String> result = new Tuple3<String, String, String>();

		result = format.nextRecord(result);
		assertNotNull(result);
		assertEquals("abc", result.f0);
		assertEquals("def", result.f1);
		assertEquals("ghijk", result.f2);

		result = format.nextRecord(result);
		assertNotNull(result);
		assertEquals("abc", result.f0);
		assertEquals("", result.f1);
		assertEquals("hhg", result.f2);

		result = format.nextRecord(result);
		assertNotNull(result);
		assertEquals("", result.f0);
		assertEquals("", result.f1);
		assertEquals("", result.f2);

		result = format.nextRecord(result);
		assertNull(result);
		assertTrue(format.reachedEnd());
	}
	catch (Exception ex) {
		fail("Test failed due to a " + ex.getClass().getName() + ": " + ex.getMessage());
	}
}
 
Contributor: axbaretto, Project: flink, Lines: 43, Source: CsvInputFormatTest.java


Note: The org.apache.flink.api.java.typeutils.TupleTypeInfo.getBasicTupleTypeInfo examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets were selected from open-source projects contributed by various developers; copyright of the source code remains with the original authors. For distribution and use, please refer to the corresponding project's license. Do not republish without permission.