本文整理汇总了Java中org.apache.spark.sql.AnalysisException类的典型用法代码示例。如果您正苦于以下问题:Java AnalysisException类的具体用法?Java AnalysisException怎么用?Java AnalysisException使用的例子?那么, 这里精选的类代码示例或许可以为您提供帮助。
AnalysisException类属于org.apache.spark.sql包,在下文中一共展示了AnalysisException类的3个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: checkApplyBulkMutations_Exception_TableExist
import org.apache.spark.sql.AnalysisException; //导入依赖的package包/类
// Expects an AnalysisException from applyBulkMutations when the JDBC output is
// configured against a table that already exists (config loaded from
// JDBC_PROPERTIES_TABLE_USER_PATH).
@Test(expected = AnalysisException.class)
public void checkApplyBulkMutations_Exception_TableExist() throws Exception {
  JdbcOutput output = new JdbcOutput();
  output.configure(ConfigUtils.configFromPath(
      JdbcInput.class.getResource(JDBC_PROPERTIES_TABLE_USER_PATH).getPath()));

  // Sample rows to insert, read as JSON through the shared Spark session.
  Dataset<Row> sampleData = Contexts.getSparkSession().read()
      .json(JdbcInput.class.getResource(SAMPLE_DATA_PATH).getPath());

  ArrayList<Tuple2<MutationType, Dataset<Row>>> mutations = new ArrayList<>();
  mutations.add(new Tuple2<>(MutationType.INSERT, sampleData));

  output.applyBulkMutations(mutations);
}
示例2: testNoUDFs
import org.apache.spark.sql.AnalysisException; //导入依赖的package包/类
// Runs the pipeline with a config that declares no UDFs ("/udf/udf_none.conf")
// and expects an AnalysisException to surface.
@Test(expected = AnalysisException.class)
public void testNoUDFs() throws Throwable {
  Config noUdfConfig = ConfigUtils.configFromResource("/udf/udf_none.conf");
  Contexts.closeSparkSession(true);
  try {
    Runner.run(noUdfConfig);
  } catch (ExecutionException e) {
    // Data steps run off the main thread, so the real failure arrives wrapped
    // in a concurrency exception; unwrap it so JUnit sees the cause.
    throw e.getCause();
  }
}
示例3: testInputRepartitionInvalidColumn
import org.apache.spark.sql.AnalysisException; //导入依赖的package包/类
// Expects an AnalysisException when the repartition-columns property holds an
// invalid column expression ("modulo == 0") rather than a plain column name.
@Test (expected = AnalysisException.class)
public void testInputRepartitionInvalidColumn() throws Exception {
  Map<String, Object> params = Maps.newHashMap();
  params.put("input.type", DummyInput.class.getName());
  params.put("input.starting.partitions", 10);
  params.put("input." + BatchStep.REPARTITION_COLUMNS_PROPERTY, Lists.newArrayList("modulo == 0"));
  params.put("input." + BatchStep.REPARTITION_NUM_PARTITIONS_PROPERTY, 5);

  BatchStep step = new BatchStep("test", ConfigFactory.parseMap(params));
  step.submit(Sets.<Step>newHashSet());
  step.getData();
}