This article collects typical usage examples of the Java method org.apache.hadoop.hive.ql.session.SessionState.setCurrentSessionState. If you are wondering what SessionState.setCurrentSessionState does, how to call it, or are looking for concrete examples of its use, the curated code samples below may help. You can also explore further usage examples of its containing class, org.apache.hadoop.hive.ql.session.SessionState.
The following presents 12 code examples of the SessionState.setCurrentSessionState method, ordered by popularity by default.
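Before the examples, here is a minimal standalone sketch (not taken from the projects below; the class and variable names are illustrative) of what the method does: SessionState is held in a thread-local, so a session created on one thread must be explicitly attached with setCurrentSessionState on any other thread that runs Hive code on its behalf.

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.session.SessionState;

public class SessionStateSketch {
  public static void main(String[] args) throws Exception {
    HiveConf conf = new HiveConf();
    // start() creates the session and attaches it to the current thread.
    SessionState ss = SessionState.start(new SessionState(conf));

    Thread worker = new Thread(() -> {
      // Without this call, SessionState.get() returns null on this thread,
      // because the session is stored per thread.
      SessionState.setCurrentSessionState(ss);
      System.out.println("attached: " + (SessionState.get() == ss));
    });
    worker.start();
    worker.join();
  }
}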
Example 1: testEstimateOlapQuery
import org.apache.hadoop.hive.ql.session.SessionState; // import the package/class the method depends on
@Test(expectedExceptions = {UnsupportedOperationException.class})
public void testEstimateOlapQuery() throws Exception {
  SessionState.setCurrentSessionState(ss);
  ExplainQueryContext ctx = createExplainContext("cube SELECT ID FROM test_cube", queryConf);
  ctx.setOlapQuery(true);
  ctx.getDriverContext().setDriverRewriterPlan(driver, new DriverQueryPlan() {
    @Override
    public String getPlan() {
      return null;
    }
    @Override
    public QueryCost getCost() {
      return null;
    }
    @Override
    public Map<String, Set<?>> getPartitions() {
      return Maps.newHashMap();
    }
  });
  QueryCost cost = driver.estimate(ctx);
  assertEquals(cost.getEstimatedResourceUsage(), 0.0);
  cost.getEstimatedExecTimeMillis();
}
Example 2: testExplainPartitionedTable
import org.apache.hadoop.hive.ql.session.SessionState; // import the package/class the method depends on
/**
 * Test explain partitioned table.
 *
 * @throws Exception the exception
 */
@Test
public void testExplainPartitionedTable() throws Exception {
  int handleSize = getHandleSize();
  createPartitionedTable("test_part_table");
  // acquire
  SessionState.setCurrentSessionState(ss);
  DriverQueryPlan plan = driver.explain(createExplainContext("SELECT ID FROM test_part_table", queryConf));
  assertHandleSize(handleSize);
  assertTrue(plan instanceof HiveQueryPlan);
  assertNotNull(plan.getTablesQueried());
  assertEquals(plan.getTablesQueried().size(), 1);
  System.out.println("Tables:" + plan.getTablesQueried());
  assertEquals(plan.getTableWeight(dataBase + ".test_part_table"), 500.0);
  System.out.println("Parts:" + plan.getPartitions());
  assertFalse(plan.getPartitions().isEmpty());
  assertEquals(plan.getPartitions().size(), 1);
  assertTrue(((String) plan.getPartitions().get(dataBase + ".test_part_table").iterator().next()).contains("today"));
  assertTrue(((String) plan.getPartitions().get(dataBase + ".test_part_table").iterator().next()).contains("dt"));
}
Example 3: testExplainOutput
import org.apache.hadoop.hive.ql.session.SessionState; // import the package/class the method depends on
/**
* Test explain output.
*
* @throws Exception the exception
*/
@Test
public void testExplainOutput() throws Exception {
  int handleSize = getHandleSize();
  createTestTable("explain_test_1");
  createTestTable("explain_test_2");
  SessionState.setCurrentSessionState(ss);
  DriverQueryPlan plan = driver.explain(createExplainContext("SELECT explain_test_1.ID, count(1) FROM "
    + " explain_test_1 join explain_test_2 on explain_test_1.ID = explain_test_2.ID"
    + " WHERE explain_test_1.ID = 'foo' or explain_test_2.ID = 'bar'" + " GROUP BY explain_test_1.ID",
    queryConf));
  assertHandleSize(handleSize);
  assertTrue(plan instanceof HiveQueryPlan);
  assertNotNull(plan.getTablesQueried());
  assertEquals(plan.getTablesQueried().size(), 2);
  assertNotNull(plan.getTableWeights());
  assertTrue(plan.getTableWeights().containsKey(dataBase + ".explain_test_1"));
  assertTrue(plan.getTableWeights().containsKey(dataBase + ".explain_test_2"));
  assertTrue(plan.getPlan() != null && !plan.getPlan().isEmpty());
  driver.closeQuery(plan.getHandle());
}
Example 4: testExplainOutputPersistent
import org.apache.hadoop.hive.ql.session.SessionState; // import the package/class the method depends on
/**
* Test explain output persistent.
*
* @throws Exception the exception
*/
@Test
public void testExplainOutputPersistent() throws Exception {
  int handleSize = getHandleSize();
  createTestTable("explain_test_1");
  queryConf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, true);
  SessionState.setCurrentSessionState(ss);
  String query2 = "SELECT DISTINCT ID FROM explain_test_1";
  PreparedQueryContext pctx = createPreparedQueryContext(query2);
  pctx.setSelectedDriver(driver);
  pctx.setLensSessionIdentifier(sessionid);
  DriverQueryPlan plan2 = driver.explainAndPrepare(pctx);
  // assertNotNull(plan2.getResultDestination());
  assertHandleSize(handleSize);
  assertNotNull(plan2.getTablesQueried());
  assertEquals(plan2.getTablesQueried().size(), 1);
  assertTrue(plan2.getTableWeights().containsKey(dataBase + ".explain_test_1"));
  QueryContext ctx = createContext(pctx, queryConf);
  LensResultSet resultSet = driver.execute(ctx);
  assertHandleSize(handleSize);
  HivePersistentResultSet persistentResultSet = (HivePersistentResultSet) resultSet;
  String path = persistentResultSet.getOutputPath();
  assertEquals(ctx.getDriverResultPath(), path);
  driver.closeQuery(plan2.getHandle());
}
Example 5: testPartitionInQueryPlan
import org.apache.hadoop.hive.ql.session.SessionState; // import the package/class the method depends on
/**
* Test partition in query plan.
*
* @throws Exception the exception
*/
@Test
public void testPartitionInQueryPlan() throws Exception {
  // Create tables with 10 & 1 partitions respectively
  createPartitionedTable("table_1", 10);
  createPartitionedTable("table_2", 1);
  // Query should select 5 partitions of table 1 and 1 partition of table 2
  String explainQuery = "SELECT table_1.ID "
    + "FROM table_1 LEFT OUTER JOIN table_2 ON table_1.ID = table_2.ID AND table_2.DT='p0' "
    + "WHERE table_1.DT='p0' OR table_1.DT='p1' OR table_1.DT='p2' OR table_1.DT='p3' OR table_1.DT='p4' "
    + "AND table_1.ET='1'";
  SessionState.setCurrentSessionState(ss);
  DriverQueryPlan plan = driver.explain(createExplainContext(explainQuery, queryConf));
  Assert.assertEquals(0, driver.getHiveHandleSize());
  System.out.println("@@ partitions" + plan.getPartitions());
  Assert.assertEquals(plan.getPartitions().size(), 2);
  String dbName = TestRemoteHiveDriver.class.getSimpleName().toLowerCase();
  Assert.assertTrue(plan.getPartitions().containsKey(dbName + ".table_1"));
  Assert.assertEquals(plan.getPartitions().get(dbName + ".table_1").size(), 5);
  Assert.assertTrue(plan.getPartitions().containsKey(dbName + ".table_2"));
  Assert.assertEquals(plan.getPartitions().get(dbName + ".table_2").size(), 1);
  FileUtils.deleteDirectory(new File("target/partdata"));
}
Example 6: testEstimateNativeQuery
import org.apache.hadoop.hive.ql.session.SessionState; // import the package/class the method depends on
@Test(expectedExceptions = {UnsupportedOperationException.class})
public void testEstimateNativeQuery() throws Exception {
  createTestTable("test_estimate");
  SessionState.setCurrentSessionState(ss);
  QueryCost cost = driver.estimate(createExplainContext("SELECT ID FROM test_estimate", queryConf));
  assertEquals(cost.getEstimatedResourceUsage(), Double.MAX_VALUE);
  cost.getEstimatedExecTimeMillis();
}
Example 7: testExplainNativeFailingQuery
import org.apache.hadoop.hive.ql.session.SessionState; // import the package/class the method depends on
@Test
public void testExplainNativeFailingQuery() throws Exception {
  SessionState.setCurrentSessionState(ss);
  try {
    driver.estimate(createExplainContext("SELECT ID FROM nonexist", queryConf));
    fail("Should not reach here");
  } catch (LensException e) {
    assertTrue(LensUtil.getCauseMessage(e).contains("Line 1:32 Table not found 'nonexist'"));
  }
}
Example 8: call
import org.apache.hadoop.hive.ql.session.SessionState; // import the package/class the method depends on
@Override
public LensResultSet call() throws Exception {
  SessionState.setCurrentSessionState(sessionState);
  return execute(queryContext);
}
Example 9: close
import org.apache.hadoop.hive.ql.session.SessionState; // import the package/class the method depends on
@Override
public void close() {
  if (state != null && !state.equals(SessionState.get())) {
    SessionState.setCurrentSessionState(state);
  }
}
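A hedged sketch of the save-and-restore pattern this close() implies (the class name is illustrative, and it assumes the original captures the caller's session when the guard object is created):

import org.apache.hadoop.hive.ql.session.SessionState;

public class SessionRestoreSketch implements AutoCloseable {
  // Caller's session, captured when the guard is created.
  private final SessionState state = SessionState.get();

  @Override
  public void close() {
    // Only switch back if a different session has been attached in the meantime.
    if (state != null && !state.equals(SessionState.get())) {
      SessionState.setCurrentSessionState(state);
    }
  }
}

Used with try-with-resources, any session switched to inside the block is replaced by the caller's original session on exit.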
Example 10: setDB
import org.apache.hadoop.hive.ql.session.SessionState; // import the package/class the method depends on
@BeforeMethod
public void setDB() {
  SessionState.setCurrentSessionState(ss);
}
Example 11: testExplain
import org.apache.hadoop.hive.ql.session.SessionState; // import the package/class the method depends on
/**
* Test explain.
*
* @throws Exception the exception
*/
@Test
public void testExplain() throws Exception {
  int handleSize = getHandleSize();
  SessionState.setCurrentSessionState(ss);
  SessionState.get().setCurrentDatabase(dataBase);
  createTestTable("test_explain");
  DriverQueryPlan plan = driver.explain(createExplainContext("SELECT ID FROM test_explain", queryConf));
  assertTrue(plan instanceof HiveQueryPlan);
  assertEquals(plan.getTableWeight(dataBase + ".test_explain"), 500.0);
  assertHandleSize(handleSize);
  // test execute prepare
  PreparedQueryContext pctx = new PreparedQueryContext("SELECT ID FROM test_explain", null, queryConf, drivers);
  pctx.setSelectedDriver(driver);
  pctx.setLensSessionIdentifier(sessionid);
  SessionState.setCurrentSessionState(ss);
  Configuration inConf = new Configuration(queryConf);
  inConf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, false);
  plan = driver.explainAndPrepare(pctx);
  QueryContext qctx = createContext(pctx, inConf);
  LensResultSet result = driver.execute(qctx);
  assertHandleSize(handleSize);
  validateInMemoryResult(result);
  // test execute prepare async
  queryConf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, true);
  qctx = createContext(pctx, queryConf);
  driver.executeAsync(qctx);
  assertNotNull(qctx.getDriverOpHandle());
  validateExecuteAsync(qctx, DriverQueryState.SUCCESSFUL, true, false);
  assertHandleSize(handleSize + 1);
  driver.closeQuery(qctx.getQueryHandle());
  assertHandleSize(handleSize);
  // for backward compatibility
  qctx = createContext(pctx, inConf);
  qctx.setQueryHandle(new QueryHandle(pctx.getPrepareHandle().getPrepareHandleId()));
  result = driver.execute(qctx);
  assertNotNull(qctx.getDriverOpHandle());
  assertHandleSize(handleSize);
  validateInMemoryResult(result);
  // test execute prepare async
  qctx = createContext(pctx, queryConf);
  qctx.setQueryHandle(new QueryHandle(pctx.getPrepareHandle().getPrepareHandleId()));
  driver.executeAsync(qctx);
  assertHandleSize(handleSize + 1);
  validateExecuteAsync(qctx, DriverQueryState.SUCCESSFUL, true, false);
  driver.closeQuery(qctx.getQueryHandle());
  driver.closePreparedQuery(pctx.getPrepareHandle());
  assertHandleSize(handleSize);
}
Example 12: getVariableSubstitution
import org.apache.hadoop.hive.ql.session.SessionState; // import the package/class the method depends on
public VariableSubstitution getVariableSubstitution() {
  // Make sure to set the session state for this thread before returning the VariableSubstitution. If not set,
  // hivevar:s will not be evaluated.
  SessionState.setCurrentSessionState(currentSessionState);
  return new VariableSubstitution();
}
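A hedged follow-up (the variable name tbl and the table name are illustrative, and this assumes the pre-Hive-2 org.apache.hadoop.hive.ql.parse.VariableSubstitution, which is what the no-argument constructor above implies): that class resolves ${hivevar:...} references from SessionState.get().getHiveVariables(), which is why the thread-local session must be attached before substitution runs.

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.parse.VariableSubstitution;
import org.apache.hadoop.hive.ql.session.SessionState;

public class HivevarSketch {
  public static void main(String[] args) {
    HiveConf conf = new HiveConf();
    SessionState.start(new SessionState(conf));               // attach a session to this thread
    SessionState.get().getHiveVariables().put("tbl", "explain_test_1");
    String resolved = new VariableSubstitution()
        .substitute(conf, "SELECT ID FROM ${hivevar:tbl}");
    System.out.println(resolved);                             // SELECT ID FROM explain_test_1
  }
}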