This article collects typical usage examples of the Java class org.apache.hadoop.hive.ql.session.SessionState. If you are wondering what SessionState is for and how to use it, the curated examples below may help.
The SessionState class belongs to the org.apache.hadoop.hive.ql.session package. Fifteen code examples of the class are shown below, sorted by popularity.
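Before diving into the examples, note the pattern they all share: SessionState is Hive's thread-local holder of per-session state, and a session must be bound to the current thread (usually via SessionState.start(...)) before a Driver can execute queries. The following is a minimal, self-contained sketch of that lifecycle; it is an illustrative outline only, and the HiveConf it builds picks up whatever hive-site.xml is on the classpath.

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.Driver;
import org.apache.hadoop.hive.ql.session.SessionState;

public class SessionStateQuickStart {
  public static void main(String[] args) throws Exception {
    HiveConf conf = new HiveConf(SessionStateQuickStart.class);
    // bind a new session to the current thread; SessionState.get() now returns it
    SessionState ss = SessionState.start(conf);
    try {
      Driver driver = new Driver(conf);
      driver.run("SHOW DATABASES");  // any HiveQL; requires a reachable metastore
    } finally {
      ss.close();  // release per-session resources such as temp directories
    }
  }
}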
Example 1: TestHiveSink
import org.apache.hadoop.hive.ql.session.SessionState; // import the required package/class
public TestHiveSink() throws Exception {
  partitionVals = new ArrayList<String>(2);
  partitionVals.add(PART1_VALUE);
  partitionVals.add(PART2_VALUE);

  metaStoreURI = "null";

  conf = new HiveConf(this.getClass());
  TestUtil.setConfValues(conf);

  // 1) prepare hive
  TxnDbUtil.cleanDb();
  TxnDbUtil.prepDb();

  // 2) Setup Hive client
  SessionState.start(new CliSessionState(conf));
  driver = new Driver(conf);
}
Example 2: TestHiveWriter
import org.apache.hadoop.hive.ql.session.SessionState; // import the required package/class
public TestHiveWriter() throws Exception {
  partVals = new ArrayList<String>(2);
  partVals.add(PART1_VALUE);
  partVals.add(PART2_VALUE);

  metaStoreURI = null;

  int callTimeoutPoolSize = 1;
  callTimeoutPool = Executors.newFixedThreadPool(callTimeoutPoolSize,
      new ThreadFactoryBuilder().setNameFormat("hiveWriterTest").build());

  // 1) Start metastore
  conf = new HiveConf(this.getClass());
  TestUtil.setConfValues(conf);
  if (metaStoreURI != null) {
    conf.setVar(HiveConf.ConfVars.METASTOREURIS, metaStoreURI);
  }

  // 2) Setup Hive client
  SessionState.start(new CliSessionState(conf));
  driver = new Driver(conf);
}
Example 3: generateHiveTestData
import org.apache.hadoop.hive.ql.session.SessionState; // import the required package/class
private static void generateHiveTestData() throws Exception {
  final SessionState ss = new SessionState(hiveConf);
  SessionState.start(ss);
  final Driver driver = new Driver(hiveConf);

  executeQuery(driver, "CREATE DATABASE " + db_general);
  createTbl(driver, db_general, g_student_user0, studentDef, studentData);
  createTbl(driver, db_general, g_voter_role0, voterDef, voterData);
  createTbl(driver, db_general, g_student_user2, studentDef, studentData);

  executeQuery(driver, "SET ROLE admin");
  executeQuery(driver, "CREATE ROLE " + test_role0);
  executeQuery(driver, "GRANT ROLE " + test_role0 + " TO USER " + org1Users[1]);
  executeQuery(driver, "GRANT ROLE " + test_role0 + " TO USER " + org1Users[2]);

  executeQuery(driver, String.format("GRANT SELECT ON %s.%s TO USER %s", db_general, g_student_user0, org1Users[0]));
  executeQuery(driver, String.format("GRANT SELECT ON %s.%s TO ROLE %s", db_general, g_voter_role0, test_role0));
  executeQuery(driver, String.format("GRANT SELECT ON %s.%s TO USER %s", db_general, g_student_user2, org1Users[2]));
}
Example 4: getResourceFiles
import org.apache.hadoop.hive.ql.session.SessionState; // import the required package/class
public static String getResourceFiles(Configuration conf, SessionState.ResourceType t) {
  // fill in local files to be added to the task environment
  SessionState ss = SessionState.get();
  Set<String> files = (ss == null) ? null : ss.list_resource(t, null);
  if (files != null) {
    List<String> realFiles = new ArrayList<String>(files.size());
    for (String one : files) {
      try {
        realFiles.add(realFile(one, conf));
      } catch (IOException e) {
        throw new RuntimeException("Cannot validate file " + one + " due to exception: "
            + e.getMessage(), e);
      }
    }
    return StringUtils.join(realFiles, ",");
  } else {
    return "";
  }
}
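In Hive this helper lives in org.apache.hadoop.hive.ql.exec.Utilities. As a hedged usage sketch, a caller that wants the session's ADD FILE resources as a comma-separated list for the task environment might do something like the following fragment, assuming a session has already been started:

// fragment: collect the current session's file resources (assumes a started session)
Configuration conf = new Configuration();
String files = getResourceFiles(conf, SessionState.ResourceType.FILE);
// files is "" when there is no active session or no resources were added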
Example 5: getSessionSpecifiedClassLoader
import org.apache.hadoop.hive.ql.session.SessionState; // import the required package/class
/**
 * Get the session-specified class loader, falling back to the current
 * thread's class loader if no session is available.
 *
 * @return the class loader to use for this session
 */
public static ClassLoader getSessionSpecifiedClassLoader() {
  SessionState state = SessionState.get();
  if (state == null || state.getConf() == null) {
    if (LOG.isDebugEnabled()) {
      LOG.debug("Hive Conf not found or Session not initiated, use thread based class loader instead");
    }
    return JavaUtils.getClassLoader();
  }
  ClassLoader sessionCL = state.getConf().getClassLoader();
  if (sessionCL != null) {
    if (LOG.isTraceEnabled()) {
      LOG.trace("Use session specified class loader");  // the normal case
    }
    return sessionCL;
  }
  if (LOG.isDebugEnabled()) {
    LOG.debug("Session specified class loader not found, use thread based class loader");
  }
  return JavaUtils.getClassLoader();
}
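A typical consumer of this method resolves classes by name against whichever loader it returns, so that JARs added to the session (e.g. via ADD JAR) are visible. A minimal hedged fragment; the class name is a placeholder:

// fragment: load a session-visible class (com.example.MyUdf is a placeholder name)
ClassLoader cl = getSessionSpecifiedClassLoader();
Class<?> udfClass = Class.forName("com.example.MyUdf", true, cl);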
Example 6: removeFromClassPath
import org.apache.hadoop.hive.ql.session.SessionState; // import the required package/class
/**
 * Remove elements from the classpath.
 *
 * @param pathsToRemove
 *          Array of classpath elements
 */
public static void removeFromClassPath(String[] pathsToRemove) throws Exception {
  Thread curThread = Thread.currentThread();
  URLClassLoader loader = (URLClassLoader) curThread.getContextClassLoader();
  Set<URL> newPath = new HashSet<URL>(Arrays.asList(loader.getURLs()));
  for (String onestr : pathsToRemove) {
    URL oneurl = urlFromPathString(onestr);
    if (oneurl != null) {
      newPath.remove(oneurl);
    }
  }
  JavaUtils.closeClassLoader(loader);
  // this loader is closed; remove it from the cached registry loaders so it is not closed again
  Registry reg = SessionState.getRegistry();
  if (reg != null) {
    reg.removeFromUDFLoaders(loader);
  }

  loader = new URLClassLoader(newPath.toArray(new URL[0]));
  curThread.setContextClassLoader(loader);
  SessionState.get().getConf().setClassLoader(loader);
}
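The urlFromPathString(...) helper used above is not shown on this page; in Hive it turns a classpath element string into a URL and returns null for input it cannot parse. A simplified sketch of that behavior (an approximation, not Hive's exact implementation):

// simplified sketch of urlFromPathString; an approximation, not Hive's exact code
private static URL urlFromPathString(String onestr) {
  try {
    if (onestr != null && onestr.startsWith("file:/")) {
      return new URL(onestr);  // already a file: URL
    }
    return new File(onestr).toURI().toURL();  // treat as a local filesystem path
  } catch (Exception e) {
    return null;  // callers skip entries that cannot be converted
  }
}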
Example 7: testInsertIntoTempTable
import org.apache.hadoop.hive.ql.session.SessionState; // import the required package/class
@Test(enabled = false)
public void testInsertIntoTempTable() throws Exception {
  String tableName = createTable();
  String insertTableName = createTable(false, false, true);
  assertTableIsRegistered(DEFAULT_DB, tableName);
  assertTableIsNotRegistered(DEFAULT_DB, insertTableName, true);

  String query = "insert into " + insertTableName + " select id, name from " + tableName;
  runCommand(query);

  Set<ReadEntity> inputs = getInputs(tableName, Entity.Type.TABLE);
  Set<WriteEntity> outputs = getOutputs(insertTableName, Entity.Type.TABLE);
  outputs.iterator().next().setName(getQualifiedTblName(
      insertTableName + HiveMetaStoreBridge.TEMP_TABLE_PREFIX + SessionState.get().getSessionId()));
  outputs.iterator().next().setWriteType(WriteEntity.WriteType.INSERT);

  validateProcess(constructEvent(query, HiveOperation.QUERY, inputs, outputs));

  assertTableIsRegistered(DEFAULT_DB, tableName);
  assertTableIsRegistered(DEFAULT_DB, insertTableName, null, true);
}
Example 8: EmbeddedHive
import org.apache.hadoop.hive.ql.session.SessionState; // import the required package/class
/**
 * Create embedded Hive.
 *
 * @param properties - Properties
 */
public EmbeddedHive(Properties properties) {
  HiveConf conf = new HiveConf();
  if (properties.get(PropertyNames.HIVE_JAR.toString()) != null) {
    // this line may be required so that the embedded derby works well;
    // refers to dependencies containing the ExecDriver class
    conf.setVar(HiveConf.ConfVars.HIVEJAR, properties.get(PropertyNames.HIVE_JAR.toString()).toString());
  }

  // this property is required so that every test runs on a different warehouse location;
  // this way we avoid conflicting scripts or dirty re-executions
  File tmpDir = new File(System.getProperty(JAVA_IO_TMPDIR));
  warehouseDir = new File(tmpDir, UUID.randomUUID().toString());
  warehouseDir.mkdir();
  conf.setVar(HiveConf.ConfVars.METASTOREWAREHOUSE, warehouseDir.getAbsolutePath());

  ss = new SessionState(new HiveConf(conf, EmbeddedHive.class));
  SessionState.start(ss);
  c = ss.getConf();
}
Example 9: readColumnarStruct
import org.apache.hadoop.hive.ql.session.SessionState; // import the required package/class
private ColumnarStruct readColumnarStruct(BytesRefArrayWritable buff, String schema) throws SerDeException {
  Pattern pcols = Pattern.compile("[a-zA-Z_0-9]*[ ]");
  List<String> types = HiveRCSchemaUtil.parseSchemaTypes(schema);
  List<String> cols = HiveRCSchemaUtil.parseSchema(pcols, schema);

  List<FieldSchema> fieldSchemaList = new ArrayList<FieldSchema>(cols.size());
  for (int i = 0; i < cols.size(); i++) {
    fieldSchemaList.add(new FieldSchema(cols.get(i),
        HiveRCSchemaUtil.findPigDataType(types.get(i))));
  }

  Properties props = new Properties();
  props.setProperty(Constants.LIST_COLUMNS, HiveRCSchemaUtil.listToString(cols));
  props.setProperty(Constants.LIST_COLUMN_TYPES, HiveRCSchemaUtil.listToString(types));

  Configuration hiveConf = new HiveConf(conf, SessionState.class);
  ColumnarSerDe serde = new ColumnarSerDe();
  serde.initialize(hiveConf, props);
  return (ColumnarStruct) serde.deserialize(buff);
}
Example 10: testCreateJdbcDriver
import org.apache.hadoop.hive.ql.session.SessionState; // import the required package/class
/**
 * Test create jdbc driver.
 *
 * @throws Exception the exception
 */
@BeforeTest
public void testCreateJdbcDriver() throws Exception {
  baseConf = new Configuration();
  baseConf.set(JDBCDriverConfConstants.JDBC_DRIVER_CLASS, "org.hsqldb.jdbc.JDBCDriver");
  baseConf.set(JDBCDriverConfConstants.JDBC_DB_URI, "jdbc:hsqldb:mem:jdbcTestDB;MODE=MYSQL");
  baseConf.set(JDBCDriverConfConstants.JDBC_USER, "sa");
  baseConf.set(JDBCDriverConfConstants.JDBC_PASSWORD, "");
  baseConf.set(JDBCDriverConfConstants.JDBC_QUERY_REWRITER_CLASS, ColumnarSQLRewriter.class.getName());
  baseConf.set(JDBCDriverConfConstants.JDBC_EXPLAIN_KEYWORD_PARAM, "explain plan for ");

  driver = new JDBCDriver();
  driver.configure(baseConf, "jdbc", "jdbc1");

  assertNotNull(driver);
  assertTrue(driver.configured);
  System.out.println("Driver configured!");

  SessionState.start(new HiveConf(ColumnarSQLRewriter.class));

  drivers = new ArrayList<LensDriver>() {
    {
      add(driver);
    }
  };
}
Example 11: setup
import org.apache.hadoop.hive.ql.session.SessionState; // import the required package/class
/**
 * Setup.
 *
 * @throws Exception the exception
 */
@BeforeTest
public void setup() throws Exception {
  conf.addResource("jdbcdriver-default.xml");
  conf.addResource("drivers/jdbc/jdbc1/jdbcdriver-site.xml");
  conf.addResource("drivers/jdbc/druid/jdbcdriver-site.xml");
  qtest.init(conf);
  hconf.addResource(conf);
  SessionState.start(hconf);

  List<FieldSchema> factColumns = new ArrayList<>();
  factColumns.add(new FieldSchema("time_key", "string", ""));
  factColumns.add(new FieldSchema("item_key", "int", ""));
  factColumns.add(new FieldSchema("dollars_sold", "double", ""));
  factColumns.add(new FieldSchema("units_sold", "int", ""));

  try {
    createHiveTable("default", "sales_fact", factColumns);
  } catch (HiveException e) {
    log.error("Encountered hive exception.", e);
  }
}
Example 12: testNoRewrite
import org.apache.hadoop.hive.ql.session.SessionState; // import the required package/class
@Test
// Testing multiple queries in one instance
public void testNoRewrite() throws LensException {
  SessionState.start(hconf);

  String query = "select count(distinct time_key) from sales_fact";
  String actual = qtest.rewrite(query, conf, hconf);
  String expected = "select count( distinct time_key ) from sales_fact ";
  compareQueries(actual, expected);

  String query2 = "select count(distinct time_key) from sales_fact sales_fact";
  String actual2 = qtest.rewrite(query2, conf, hconf);
  String expected2 = "select count( distinct time_key ) from sales_fact sales_fact___sales_fact";
  compareQueries(expected2, actual2);

  String query3 = "select count(distinct sales_fact.time_key) from db.sales_fact sales_fact";
  String actual3 = qtest.rewrite(query3, conf, hconf);
  String expected3 = "select count( distinct ( sales_fact__db_sales_fact_sales_fact . time_key )) "
      + "from db.sales_fact sales_fact__db_sales_fact_sales_fact";
  compareQueries(expected3, actual3);
}
Example 13: testWhereSubQueryFail
import org.apache.hadoop.hive.ql.session.SessionState; // import the required package/class
@Test
public void testWhereSubQueryFail() {
  String query =
      "select time_dim.day_of_week, sum(fact.dollars_sold) as dollars_sold from sales_fact fact "
          + "where fact.item_key in (select item_key from test.item_dim idim where idim.item_name = 'item_1') "
          + "and fact.location_key in (select location_key from test.location_dim ldim where "
          + "ldim.location_name = 'loc_1') "
          + "group by time_dim.day_of_week ";
  SessionState.start(hconf);
  try {
    qtest.rewrite(query, conf, hconf);
    Assert.fail("The where subquery did NOT raise the expected exception");
  } catch (LensException e) {
    System.out.println("Exception as expected in where subquery.");
  }
}
Example 14: testHavingOrderByQueryTestFail
import org.apache.hadoop.hive.ql.session.SessionState; // import the required package/class
@Test(dataProvider = "getHavingOrderByDataFail")
public void testHavingOrderByQueryTestFail(String isHavingSupported, String isOrderBySupported) {
  conf.set(JDBCDriverConfConstants.JDBC_IS_HAVING_SUPPORTED, isHavingSupported);
  conf.set(JDBCDriverConfConstants.JDBC_IS_ORDERBY_SUPPORTED, isOrderBySupported);
  String query = "select a,sum(b) from tabl1 where a<=10 group by a having sum(b) > 10 order by a desc limit 10";
  SessionState.start(hconf);
  try {
    qtest.rewrite(query, conf, hconf);
    Assert.fail("The invalid query did NOT raise the expected exception");
  } catch (LensException e) {
    System.out.println("Exception as expected in having/order-by query.");
  }
}
Example 15: createSources
import org.apache.hadoop.hive.ql.session.SessionState; // import the required package/class
public void createSources(HiveConf conf, String dbName) throws Exception {
  try {
    Database database = new Database();
    database.setName(dbName);
    Hive.get(conf).dropDatabase(dbName, true, true, true);
    Hive.get(conf).createDatabase(database);
    SessionState.get().setCurrentDatabase(dbName);

    CubeMetastoreClient client = CubeMetastoreClient.getInstance(conf);
    createFromXML(client);
    assertTestFactTimelineClass(client);
    createCubeCheapFactPartitions(client);
    // commented out because the weekly date format throws IllegalPatternException
    // createCubeFactWeekly(client);
    createTestFact2Partitions(client);
    createTestFact2RawPartitions(client);
    createBaseCubeFactPartitions(client);
    createSummaryPartitions(client);
    // dump(client);
  } catch (Exception exc) {
    log.error("Exception while creating sources.", exc);
    throw exc;
  }
}