This article collects typical usage examples of the Java method org.apache.hadoop.hive.ql.session.SessionState.start. If you are wondering how exactly SessionState.start works, how to call it, or where to find examples of it in practice, the curated code examples here may help. You can also browse further usage examples of its enclosing class, org.apache.hadoop.hive.ql.session.SessionState.
The following shows 15 code examples of the SessionState.start method, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Java code examples.
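A common thread runs through the examples below: build a HiveConf, bind a session to the current thread with SessionState.start, and only then construct a Driver (which relies on an active session) to run statements. Here is a minimal sketch of that pattern, assuming a hive-site.xml on the classpath; the SHOW DATABASES statement is purely illustrative:
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.Driver;
import org.apache.hadoop.hive.ql.session.SessionState;

public class SessionStateStartSketch {
  public static void main(String[] args) throws Exception {
    HiveConf conf = new HiveConf();            // reads hive-site.xml from the classpath
    SessionState ss = new SessionState(conf);
    SessionState.start(ss);                    // attaches the session to the current thread
    Driver driver = new Driver(conf);          // Driver uses the thread-local SessionState
    try {
      driver.run("SHOW DATABASES");            // illustrative statement
    } finally {
      driver.close();
      SessionState.detachSession();            // release the thread-local session
    }
  }
}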
Example 1: TestHiveSink
import org.apache.hadoop.hive.ql.session.SessionState; // import the package/class this method depends on
public TestHiveSink() throws Exception {
  partitionVals = new ArrayList<String>(2);
  partitionVals.add(PART1_VALUE);
  partitionVals.add(PART2_VALUE);
  metaStoreURI = "null";
  conf = new HiveConf(this.getClass());
  TestUtil.setConfValues(conf);
  // 1) prepare hive
  TxnDbUtil.cleanDb();
  TxnDbUtil.prepDb();
  // 2) Setup Hive client
  SessionState.start(new CliSessionState(conf));
  driver = new Driver(conf);
}
Example 2: TestHiveWriter
import org.apache.hadoop.hive.ql.session.SessionState; // import the package/class this method depends on
public TestHiveWriter() throws Exception {
  partVals = new ArrayList<String>(2);
  partVals.add(PART1_VALUE);
  partVals.add(PART2_VALUE);
  metaStoreURI = null;
  int callTimeoutPoolSize = 1;
  callTimeoutPool = Executors.newFixedThreadPool(callTimeoutPoolSize,
      new ThreadFactoryBuilder().setNameFormat("hiveWriterTest").build());
  // 1) Start metastore
  conf = new HiveConf(this.getClass());
  TestUtil.setConfValues(conf);
  if (metaStoreURI != null) {
    conf.setVar(HiveConf.ConfVars.METASTOREURIS, metaStoreURI);
  }
  // 2) Setup Hive client
  SessionState.start(new CliSessionState(conf));
  driver = new Driver(conf);
}
Example 3: generateHiveTestData
import org.apache.hadoop.hive.ql.session.SessionState; // import the package/class this method depends on
private static void generateHiveTestData() throws Exception {
  final SessionState ss = new SessionState(hiveConf);
  SessionState.start(ss);
  final Driver driver = new Driver(hiveConf);
  executeQuery(driver, "CREATE DATABASE " + db_general);
  createTbl(driver, db_general, g_student_user0, studentDef, studentData);
  createTbl(driver, db_general, g_voter_role0, voterDef, voterData);
  createTbl(driver, db_general, g_student_user2, studentDef, studentData);
  executeQuery(driver, "SET ROLE admin");
  executeQuery(driver, "CREATE ROLE " + test_role0);
  executeQuery(driver, "GRANT ROLE " + test_role0 + " TO USER " + org1Users[1]);
  executeQuery(driver, "GRANT ROLE " + test_role0 + " TO USER " + org1Users[2]);
  executeQuery(driver, String.format("GRANT SELECT ON %s.%s TO USER %s", db_general, g_student_user0, org1Users[0]));
  executeQuery(driver, String.format("GRANT SELECT ON %s.%s TO ROLE %s", db_general, g_voter_role0, test_role0));
  executeQuery(driver, String.format("GRANT SELECT ON %s.%s TO USER %s", db_general, g_student_user2, org1Users[2]));
}
Example 4: EmbeddedHive
import org.apache.hadoop.hive.ql.session.SessionState; // import the package/class this method depends on
/**
 * Create embedded Hive
 *
 * @param properties - Properties
 */
public EmbeddedHive(Properties properties) {
  HiveConf conf = new HiveConf();
  if (properties.get(PropertyNames.HIVE_JAR.toString()) != null) {
    // this line may be required so that the embedded derby works well
    // refers to dependencies containing ExecDriver class
    conf.setVar(HiveConf.ConfVars.HIVEJAR, properties.get(PropertyNames.HIVE_JAR.toString()).toString());
  }
  // this property is required so that every test runs on a different warehouse location.
  // This way we avoid conflicting scripts or dirty reexecutions
  File tmpDir = new File(System.getProperty(JAVA_IO_TMPDIR));
  warehouseDir = new File(tmpDir, UUID.randomUUID().toString());
  warehouseDir.mkdir();
  conf.setVar(HiveConf.ConfVars.METASTOREWAREHOUSE, warehouseDir.getAbsolutePath());
  ss = new SessionState(new HiveConf(conf, EmbeddedHive.class));
  SessionState.start(ss);
  c = ss.getConf();
}
Example 5: setup
import org.apache.hadoop.hive.ql.session.SessionState; // import the package/class this method depends on
/**
 * Setup.
 *
 * @throws Exception the exception
 */
@BeforeTest
public void setup() throws Exception {
  conf.addResource("jdbcdriver-default.xml");
  conf.addResource("drivers/jdbc/jdbc1/jdbcdriver-site.xml");
  conf.addResource("drivers/jdbc/druid/jdbcdriver-site.xml");
  qtest.init(conf);
  hconf.addResource(conf);
  SessionState.start(hconf);
  List<FieldSchema> factColumns = new ArrayList<>();
  factColumns.add(new FieldSchema("time_key", "string", ""));
  factColumns.add(new FieldSchema("item_key", "int", ""));
  factColumns.add(new FieldSchema("dollars_sold", "double", ""));
  factColumns.add(new FieldSchema("units_sold", "int", ""));
  try {
    createHiveTable("default", "sales_fact", factColumns);
  } catch (HiveException e) {
    log.error("Encountered hive exception.", e);
  }
}
Example 6: testNoRewrite
import org.apache.hadoop.hive.ql.session.SessionState; // import the package/class this method depends on
@Test
// Testing multiple queries in one instance
public void testNoRewrite() throws LensException {
  SessionState.start(hconf);
  String query = "select count(distinct time_key) from sales_fact";
  String actual = qtest.rewrite(query, conf, hconf);
  String expected = "select count( distinct time_key ) from sales_fact ";
  compareQueries(actual, expected);
  String query2 = "select count(distinct time_key) from sales_fact sales_fact";
  String actual2 = qtest.rewrite(query2, conf, hconf);
  String expected2 = "select count( distinct time_key ) from sales_fact sales_fact___sales_fact";
  compareQueries(expected2, actual2);
  String query3 = "select count(distinct sales_fact.time_key) from db.sales_fact sales_fact";
  String actual3 = qtest.rewrite(query3, conf, hconf);
  String expected3 = "select count( distinct ( sales_fact__db_sales_fact_sales_fact . time_key )) "
      + "from db.sales_fact sales_fact__db_sales_fact_sales_fact";
  compareQueries(expected3, actual3);
}
Example 7: testWhereSubQueryFail
import org.apache.hadoop.hive.ql.session.SessionState; // import the package/class this method depends on
@Test
public void testWhereSubQueryFail() {
  String query =
      "select time_dim.day_of_week, sum(fact.dollars_sold) as dollars_sold from sales_fact fact "
          + "where fact.item_key in (select item_key from test.item_dim idim where idim.item_name = 'item_1') "
          + "and fact.location_key in (select location_key from test.location_dim ldim where "
          + "ldim.location_name = 'loc_1') "
          + "group by time_dim.day_of_week ";
  SessionState.start(hconf);
  try {
    qtest.rewrite(query, conf, hconf);
    Assert.fail("The Where Sub query did NOT suffer any exception");
  } catch (LensException e) {
    System.out.println("Exception as expected in where sub query..");
  }
}
Example 8: testNativeTableCommands
import org.apache.hadoop.hive.ql.session.SessionState; // import the package/class this method depends on
/**
 * Test native table commands.
 *
 * @throws Exception the exception
 */
@Test
public void testNativeTableCommands() throws Exception {
  try (LensClient client = new LensClient()) {
    SessionState.start(new HiveConf());
    LensNativeTableCommands command = new LensNativeTableCommands();
    command.setClient(client);
    LOG.debug("Starting to test nativetable commands");
    String tblList = command.showNativeTables();
    Assert.assertFalse(tblList.contains("test_native_table_command"));
    LensServerTestUtil.createHiveTable("test_native_table_command", new HashMap<String, String>());
    tblList = command.showNativeTables();
    Assert.assertTrue(tblList.contains("test_native_table_command"));
    String desc = command.describeNativeTable("test_native_table_command");
    LOG.info(desc);
    Assert.assertTrue(desc.contains("col1"));
    Assert.assertTrue(desc.contains("pcol1"));
    Assert.assertTrue(desc.contains("MANAGED_TABLE"));
    Assert.assertTrue(desc.contains("test.hive.table.prop"));
  } finally {
    LensServerTestUtil.dropHiveTable("test_native_table_command");
    SessionState.detachSession();
  }
}
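Note the finally block above: because SessionState.start attaches the session to the current thread, tests that start a session typically detach it afterwards with SessionState.detachSession(), so the thread-local state does not leak into later tests.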
Example 9: beforeTest
import org.apache.hadoop.hive.ql.session.SessionState; // import the package/class this method depends on
/**
 * Before test.
 *
 * @throws Exception the exception
 */
@BeforeTest
public void beforeTest() throws Exception {
  // Check if hadoop property set
  System.out.println("###HADOOP_PATH " + System.getProperty("hadoop.bin.path"));
  assertNotNull(System.getProperty("hadoop.bin.path"));
  createDriver();
  ss = new SessionState(hiveConf, "testuser");
  SessionState.start(ss);
  Hive client = Hive.get(hiveConf);
  Database database = new Database();
  database.setName(dataBase);
  client.createDatabase(database, true);
  SessionState.get().setCurrentDatabase(dataBase);
  sessionid = SessionState.get().getSessionId();
  driverConf.setBoolean(LensConfConstants.QUERY_ADD_INSERT_OVEWRITE, false);
  QueryContext context = createContext("USE " + dataBase, this.queryConf);
  driver.execute(context);
  driverConf.setBoolean(LensConfConstants.QUERY_ADD_INSERT_OVEWRITE, true);
  driverConf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, true);
}
Example 10: verifyLocalQuery
import org.apache.hadoop.hive.ql.session.SessionState; // import the package/class this method depends on
public void verifyLocalQuery(String queryStr) throws Exception {
  // setup Hive driver
  SessionState session = new SessionState(getHiveConf());
  SessionState.start(session);
  Driver driver = new Driver(session.getConf(), getUser());
  // compile the query
  CommandProcessorResponse compilerStatus = driver
      .compileAndRespond(queryStr);
  if (compilerStatus.getResponseCode() != 0) {
    String errMsg = compilerStatus.getErrorMessage();
    if (errMsg.contains(HiveAuthzConf.HIVE_SENTRY_PRIVILEGE_ERROR_MESSAGE)) {
      printMissingPerms(getHiveConf().get(
          HiveAuthzConf.HIVE_SENTRY_AUTH_ERRORS));
    }
    throw new SemanticException("Compilation error: "
        + compilerStatus.getErrorMessage());
  }
  driver.close();
  System.out
      .println("User " + getUser() + " has privileges to run the query");
}
Example 11: setup
import org.apache.hadoop.hive.ql.session.SessionState; // import the package/class this method depends on
@Before
public void setup() throws Exception {
  conf = new HiveConf();
  baseDir = Files.createTempDir();
  baseDir.setWritable(true, false);
  conf.setVar(HiveConf.ConfVars.SCRATCHDIR, baseDir.getAbsolutePath());
  SessionState.start(conf);
  conf.setVar(ConfVars.HIVE_AUTHORIZATION_TASK_FACTORY,
      SentryHiveAuthorizationTaskFactoryImpl.class.getName());
  db = Mockito.mock(Hive.class);
  table = new Table(DB, TABLE);
  partition = new Partition(table);
  context = new Context(conf);
  parseDriver = new ParseDriver();
  analyzer = new DDLSemanticAnalyzer(conf, db);
  SessionState.start(conf);
  Mockito.when(db.getTable(TABLE, false)).thenReturn(table);
  Mockito.when(db.getPartition(table, new HashMap<String, String>(), false))
      .thenReturn(partition);
  HadoopDefaultAuthenticator auth = new HadoopDefaultAuthenticator();
  auth.setConf(conf);
  currentUser = auth.getUserName();
}
Example 12: HiveExec
import org.apache.hadoop.hive.ql.session.SessionState; // import the package/class this method depends on
/**
 * HiveExec constructor
 * @param config HDFS Connector configuration
 */
public HiveExec(HdfsSinkConnectorConfig config) {
  hiveConf = new HiveConf();
  String hiveConfDir = config.getString(HdfsSinkConnectorConfig.HIVE_CONF_DIR_CONFIG);
  hiveConf.addResource(new Path(hiveConfDir, "hive-site.xml"));
  SessionState.start(new CliSessionState(hiveConf));
  cliDriver = new CliDriver();
}
Example 13: HiveAuthorizationHelper
import org.apache.hadoop.hive.ql.session.SessionState; // import the package/class this method depends on
public HiveAuthorizationHelper(final IMetaStoreClient mClient, final HiveConf hiveConf, final String user) {
  authzEnabled = hiveConf.getBoolVar(ConfVars.HIVE_AUTHORIZATION_ENABLED);
  if (!authzEnabled) {
    authorizerV2 = null;
    return;
  }
  try {
    final HiveConf hiveConfCopy = new HiveConf(hiveConf);
    hiveConfCopy.set("user.name", user);
    final HiveAuthenticationProvider authenticator = HiveUtils.getAuthenticator(hiveConfCopy,
        HiveConf.ConfVars.HIVE_AUTHENTICATOR_MANAGER);
    SessionState ss = new SessionState(hiveConfCopy, user);
    SessionState.start(ss);
    authenticator.setSessionState(ss);
    HiveAuthorizerFactory authorizerFactory =
        HiveUtils.getAuthorizerFactory(hiveConfCopy, HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER);
    HiveAuthzSessionContext.Builder authzContextBuilder = new HiveAuthzSessionContext.Builder();
    authzContextBuilder.setClientType(CLIENT_TYPE.HIVESERVER2); // Drill is emulating HS2 here
    authorizerV2 = authorizerFactory.createHiveAuthorizer(
        new HiveMetastoreClientFactory() {
          @Override
          public IMetaStoreClient getHiveMetastoreClient() throws HiveAuthzPluginException {
            return mClient;
          }
        },
        hiveConf, authenticator, authzContextBuilder.build());
    authorizerV2.applyAuthorizationConfigPolicy(hiveConfCopy);
  } catch (final HiveException e) {
    throw new DrillRuntimeException("Failed to initialize Hive authorization components: " + e.getMessage(), e);
  }
  logger.trace("Hive authorization enabled");
}
Example 14: generateTestData
import org.apache.hadoop.hive.ql.session.SessionState; // import the package/class this method depends on
private static void generateTestData() throws Exception {
  final SessionState ss = new SessionState(hiveConf);
  SessionState.start(ss);
  final Driver driver = new Driver(hiveConf);
  executeQuery(driver, "CREATE DATABASE " + db_general);
  createTbl(driver, db_general, g_student_user0, studentDef, studentData);
  createTbl(driver, db_general, g_voter_role0, voterDef, voterData);
  createTbl(driver, db_general, g_student_user2, studentDef, studentData);
  executeQuery(driver, "SET ROLE admin");
  executeQuery(driver, "CREATE ROLE " + test_role0);
  executeQuery(driver, "GRANT ROLE " + test_role0 + " TO USER " + org1Users[1]);
  executeQuery(driver, "GRANT ROLE " + test_role0 + " TO USER " + org1Users[2]);
  executeQuery(driver, String.format("GRANT SELECT ON %s.%s TO USER %s", db_general, g_student_user0, org1Users[0]));
  executeQuery(driver, String.format("GRANT SELECT ON %s.%s TO ROLE %s", db_general, g_voter_role0, test_role0));
  executeQuery(driver, String.format("GRANT SELECT ON %s.%s TO USER %s", db_general, g_student_user2, org1Users[2]));
  createView(org1Users[0], org1Groups[0], v_student_u0g0_750,
      String.format("SELECT rownum, name, age, studentnum FROM %s.%s.%s",
          hivePluginName, db_general, g_student_user0));
  createView(org1Users[1], org1Groups[1], v_student_u1g1_750,
      String.format("SELECT rownum, name, age FROM %s.%s.%s",
          MINIDFS_STORAGE_PLUGIN_NAME, "tmp", v_student_u0g0_750));
}
Example 15: HiveAuthorizationHelper
import org.apache.hadoop.hive.ql.session.SessionState; // import the package/class this method depends on
public HiveAuthorizationHelper(final IMetaStoreClient mClient, final HiveConf hiveConf, final String user) {
  authzEnabled = hiveConf.getBoolVar(ConfVars.HIVE_AUTHORIZATION_ENABLED);
  if (!authzEnabled) {
    authorizerV2 = null;
    return;
  }
  try {
    final HiveConf hiveConfCopy = new HiveConf(hiveConf);
    hiveConfCopy.set("user.name", user);
    final HiveAuthenticationProvider authenticator = HiveUtils.getAuthenticator(hiveConfCopy,
        HiveConf.ConfVars.HIVE_AUTHENTICATOR_MANAGER);
    SessionState ss = new SessionState(hiveConfCopy, user);
    SessionState.start(ss);
    authenticator.setSessionState(ss);
    HiveAuthorizerFactory authorizerFactory =
        HiveUtils.getAuthorizerFactory(hiveConfCopy, HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER);
    HiveAuthzSessionContext.Builder authzContextBuilder = new HiveAuthzSessionContext.Builder();
    authzContextBuilder.setClientType(CLIENT_TYPE.HIVESERVER2); // Dremio is emulating HS2 here
    authorizerV2 = authorizerFactory.createHiveAuthorizer(
        new HiveMetastoreClientFactory() {
          @Override
          public IMetaStoreClient getHiveMetastoreClient() throws HiveAuthzPluginException {
            return mClient;
          }
        },
        hiveConf, authenticator, authzContextBuilder.build());
    authorizerV2.applyAuthorizationConfigPolicy(hiveConfCopy);
  } catch (final HiveException e) {
    throw new RuntimeException("Failed to initialize Hive authorization components: " + e.getMessage(), e);
  }
  logger.trace("Hive authorization enabled");
}