本文整理汇总了Java中org.apache.hadoop.hive.ql.Driver类的典型用法代码示例。如果您正苦于以下问题:Java Driver类的具体用法?Java Driver怎么用?Java Driver使用的例子?那么, 这里精选的类代码示例或许可以为您提供帮助。
Driver类属于org.apache.hadoop.hive.ql包,在下文中一共展示了Driver类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: TestHiveSink
import org.apache.hadoop.hive.ql.Driver; //导入依赖的package包/类
// Test-class constructor: bootstraps an embedded Hive environment (clean txn DB,
// session, Driver) for the sink tests. Statement order matters: the transaction DB
// is reset before the Hive session and Driver are created.
public TestHiveSink() throws Exception {
// Two-slot list holding the partition values used by the tests.
partitionVals = new ArrayList<String>(2);
partitionVals.add(PART1_VALUE);
partitionVals.add(PART2_VALUE);
// NOTE(review): this is the literal string "null", not a null reference — presumably a
// sentinel meaning "embedded/local metastore"; confirm against TestUtil/HiveConf usage.
metaStoreURI = "null";
conf = new HiveConf(this.getClass());
TestUtil.setConfValues(conf);
// 1) prepare hive
// Wipe and re-create the transaction DB so each test run starts from a clean slate.
TxnDbUtil.cleanDb();
TxnDbUtil.prepDb();
// 2) Setup Hive client
SessionState.start(new CliSessionState(conf));
driver = new Driver(conf);
}
示例2: TestHiveWriter
import org.apache.hadoop.hive.ql.Driver; //导入依赖的package包/类
// Test-class constructor: sets up a single-threaded pool for call-timeout tests and a
// local Hive session/Driver against an embedded metastore.
public TestHiveWriter() throws Exception {
partVals = new ArrayList<String>(2);
partVals.add(PART1_VALUE);
partVals.add(PART2_VALUE);
// Embedded metastore: no remote URI configured.
metaStoreURI = null;
// One worker thread is enough — it only exists to enforce call timeouts in the tests.
int callTimeoutPoolSize = 1;
callTimeoutPool = Executors.newFixedThreadPool(callTimeoutPoolSize,
new ThreadFactoryBuilder().setNameFormat("hiveWriterTest").build());
// 1) Start metastore
conf = new HiveConf(this.getClass());
TestUtil.setConfValues(conf);
// NOTE(review): metaStoreURI is assigned null just above, so this branch is currently
// dead — it appears to be a hook for manually pointing the test at a remote metastore.
if (metaStoreURI != null) {
conf.setVar(HiveConf.ConfVars.METASTOREURIS, metaStoreURI);
}
// 2) Setup Hive client
SessionState.start(new CliSessionState(conf));
driver = new Driver(conf);
}
示例3: executeQuery
import org.apache.hadoop.hive.ql.Driver; //导入依赖的package包/类
/**
 * Execute the given <i>query</i> on the given <i>hiveDriver</i> instance. If a
 * {@link CommandNeedRetryException} is thrown, the query is re-run, up to 3 attempts
 * in total, before failing.
 *
 * <p>The original version never actually retried (it ran the query once outside any
 * loop) and dereferenced a null {@code response} after catching the exception; both
 * defects are fixed here.
 *
 * @param hiveDriver Hive driver used to run the query
 * @param query HiveQL statement to execute
 * @throws RuntimeException if the query keeps signalling retry after all attempts,
 *         or completes with a non-zero response code
 */
public static void executeQuery(Driver hiveDriver, String query) {
  final int maxAttempts = 3;
  CommandProcessorResponse response = null;
  for (int attempt = 1; attempt <= maxAttempts; attempt++) {
    try {
      response = hiveDriver.run(query);
      break; // ran to completion; success/failure is judged by the response code below
    } catch (CommandNeedRetryException ex) {
      // Transient condition — retry unless this was the last allowed attempt.
      if (attempt == maxAttempts) {
        throw new RuntimeException(String.format(
            "Failed to execute command '%s' after %d attempts", query, maxAttempts), ex);
      }
    }
  }
  // response can only be null if every attempt threw (handled above), but guard anyway
  // so the non-zero-code check can never NPE.
  if (response == null || response.getResponseCode() != 0) {
    throw new RuntimeException(String.format("Failed to execute command '%s', errorMsg = '%s'",
        query, (response != null ? response.getErrorMessage() : "")));
  }
}
示例4: generateHiveTestData
import org.apache.hadoop.hive.ql.Driver; //导入依赖的package包/类
// Builds the test database, tables, and role/grant fixtures used by the authorization
// tests. Order matters: the database and tables must exist before grants are issued,
// and "SET ROLE admin" must precede the role/grant statements.
private static void generateHiveTestData() throws Exception {
final SessionState ss = new SessionState(hiveConf);
SessionState.start(ss);
final Driver driver = new Driver(hiveConf);
// Database plus three tables (definitions and data come from class-level constants).
executeQuery(driver, "CREATE DATABASE " + db_general);
createTbl(driver, db_general, g_student_user0, studentDef, studentData);
createTbl(driver, db_general, g_voter_role0, voterDef, voterData);
createTbl(driver, db_general, g_student_user2, studentDef, studentData);
// Role plumbing: create test_role0 and hand it to users 1 and 2.
executeQuery(driver, "SET ROLE admin");
executeQuery(driver, "CREATE ROLE " + test_role0);
executeQuery(driver, "GRANT ROLE " + test_role0 + " TO USER " + org1Users[1]);
executeQuery(driver, "GRANT ROLE " + test_role0 + " TO USER " + org1Users[2]);
// Table-level SELECT grants: user0 -> student table, role0 -> voter table, user2 -> student2.
executeQuery(driver, String.format("GRANT SELECT ON %s.%s TO USER %s", db_general, g_student_user0, org1Users[0]));
executeQuery(driver, String.format("GRANT SELECT ON %s.%s TO ROLE %s", db_general, g_voter_role0, test_role0));
executeQuery(driver, String.format("GRANT SELECT ON %s.%s TO USER %s", db_general, g_student_user2, org1Users[2]));
}
示例5: runExplain
import org.apache.hadoop.hive.ql.Driver; //导入依赖的package包/类
/**
 * Runs "EXPLAIN EXTENDED" on the supplied HiveQL and walks every result row,
 * echoing nothing but asserting each row renders to a non-null string.
 *
 * @param hql  query to explain (the EXPLAIN EXTENDED prefix is added here)
 * @param conf Hive configuration for the throwaway driver
 * @return the driver's response for the EXPLAIN statement
 */
private CommandProcessorResponse runExplain(String hql, HiveConf conf) throws Exception {
  final Driver driver = new Driver(conf, "anonymous");
  final CommandProcessorResponse response = driver.run("EXPLAIN EXTENDED " + hql);
  driver.resetFetch();
  driver.setMaxRows(Integer.MAX_VALUE);
  final List<Object> rows = new ArrayList<Object>();
  driver.getResults(rows);
  for (final Object row : rows) {
    // Rows go to stdout for eyeballing partition output; parsing them would slow the test.
    assertNotNull(row.toString());
  }
  return response;
}
示例6: printHeader
import org.apache.hadoop.hive.ql.Driver; //导入依赖的package包/类
/**
 * If enabled and applicable to this command, print the field headers
 * for the output.
 *
 * @param qp  Driver that executed the command
 * @param out stream the tab-separated header line is written to
 */
private void printHeader(Driver qp, PrintStream out) {
  List<FieldSchema> fieldSchemas = qp.getSchema().getFieldSchemas();
  // Same condition as before (config flag checked first), inverted into a guard clause.
  if (!HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVE_CLI_PRINT_HEADER)
      || fieldSchemas == null) {
    return;
  }
  // Join the column names with tabs, then emit the whole line at once.
  StringBuilder header = new StringBuilder();
  for (FieldSchema fieldSchema : fieldSchemas) {
    if (header.length() > 0) {
      header.append('\t');
    }
    header.append(fieldSchema.getName());
  }
  out.println(header.toString());
}
示例7: execHiveDDL
import org.apache.hadoop.hive.ql.Driver; //导入依赖的package包/类
/**
 * Executes a single Hive DDL statement through a freshly constructed {@link Driver}.
 *
 * @param ddl the DDL statement to run
 * @throws Exception carrying the driver's error message if the statement fails
 */
public void execHiveDDL(String ddl) throws Exception {
  LOG.info("Executing ddl = " + ddl);
  Driver hiveDriver = new Driver();
  CommandProcessorResponse response = hiveDriver.run(ddl);
  // != 0 rather than > 0, for consistency with the other response-code checks in
  // this file; 0 is the only success code.
  if (response.getResponseCode() != 0) {
    throw new Exception(response.getErrorMessage());
  }
}
示例8: execHiveSQLwithOverlay
import org.apache.hadoop.hive.ql.Driver; //导入依赖的package包/类
/**
 * Runs a single HiveQL statement as the given user, with the supplied configuration
 * overlay applied on top of a fresh {@link HiveConf}.
 *
 * @param sqlStmt  statement to execute
 * @param userName user to run as (via {@link UserGroupInformation#createRemoteUser})
 * @param overLay  extra HiveConf key/value overrides applied before execution
 * @throws Exception if execution fails; a non-zero driver response code is surfaced
 *         as an {@link IOException} carrying the code and error message
 */
public void execHiveSQLwithOverlay(final String sqlStmt,
    final String userName, Map<String, String> overLay) throws Exception {
  final HiveConf hiveConf = new HiveConf();
  for (Map.Entry<String, String> entry : overLay.entrySet()) {
    hiveConf.set(entry.getKey(), entry.getValue());
  }
  UserGroupInformation clientUgi = UserGroupInformation
      .createRemoteUser(userName);
  // Typed as <Void> so the action's type parameter matches run()'s declared return
  // type (the original declared <Object> and relied on covariant returns).
  clientUgi.doAs(new PrivilegedExceptionAction<Void>() {
    @Override
    public Void run() throws Exception {
      // NOTE(review): the Driver is constructed before SessionState.start(), the
      // opposite order from the other examples in this file — confirm intentional.
      Driver driver = new Driver(hiveConf, userName);
      SessionState.start(new CliSessionState(hiveConf));
      CommandProcessorResponse cpr = driver.run(sqlStmt);
      if (cpr.getResponseCode() != 0) {
        throw new IOException("Failed to execute \"" + sqlStmt
            + "\". Driver returned " + cpr.getResponseCode() + " Error: "
            + cpr.getErrorMessage());
      }
      driver.close();
      SessionState.get().close();
      return null;
    }
  });
}
示例9: verifyLocalQuery
import org.apache.hadoop.hive.ql.Driver; //导入依赖的package包/类
/**
 * Compiles (does not execute) the given query under the current user's privileges
 * and throws if the compiler rejects it; on a Sentry privilege error the missing
 * permissions are printed first.
 *
 * @param queryStr query to compile
 * @throws SemanticException if compilation fails
 */
public void verifyLocalQuery(String queryStr) throws Exception {
  // setup Hive driver
  SessionState session = new SessionState(getHiveConf());
  SessionState.start(session);
  Driver driver = new Driver(session.getConf(), getUser());
  try {
    // compile the query
    CommandProcessorResponse compilerStatus = driver
        .compileAndRespond(queryStr);
    if (compilerStatus.getResponseCode() != 0) {
      String errMsg = compilerStatus.getErrorMessage();
      // Guard against a null error message before substring-matching (the original
      // dereferenced errMsg unconditionally).
      if (errMsg != null
          && errMsg.contains(HiveAuthzConf.HIVE_SENTRY_PRIVILEGE_ERROR_MESSAGE)) {
        printMissingPerms(getHiveConf().get(
            HiveAuthzConf.HIVE_SENTRY_AUTH_ERRORS));
      }
      throw new SemanticException("Compilation error: "
          + compilerStatus.getErrorMessage());
    }
  } finally {
    // Close on both the success and the error path (the original leaked the driver
    // whenever a compilation error was thrown).
    driver.close();
  }
  System.out
      .println("User " + getUser() + " has privileges to run the query");
}
示例10: createDbAndTable
import org.apache.hadoop.hive.ql.Driver; //导入依赖的package包/类
/**
 * Creates a database at the given location (if absent) and a bucketed,
 * transactional ORC table inside it, optionally adding one partition.
 *
 * @param driver       Hive driver used to run the DDL
 * @param databaseName database to create/use
 * @param tableName    table to create
 * @param partVals     values for the single partition to add (only used when
 *                     partNames is non-empty)
 * @param colNames     column names; the first column is also the bucketing column
 * @param colTypes     column types, parallel to colNames
 * @param partNames    partition column names; null or empty means unpartitioned
 * @param dbLocation   filesystem location backing the database
 */
public static void createDbAndTable(Driver driver, String databaseName,
                                    String tableName, List<String> partVals,
                                    String[] colNames, String[] colTypes,
                                    String[] partNames, String dbLocation)
        throws Exception {
  String dbUri = "raw://" + dbLocation;
  String tableLoc = dbUri + Path.SEPARATOR + tableName;

  runDDL(driver, "create database IF NOT EXISTS " + databaseName + " location '" + dbUri + "'");
  runDDL(driver, "use " + databaseName);

  // Assemble the CREATE TABLE statement piecewise; every fragment (including its
  // surrounding spaces) matches the original concatenation byte-for-byte.
  StringBuilder createTable = new StringBuilder("create table ").append(tableName)
      .append(" ( ").append(getTableColumnsStr(colNames, colTypes)).append(" )")
      .append(getPartitionStmtStr(partNames))
      .append(" clustered by ( ").append(colNames[0]).append(" )")
      .append(" into 10 buckets ")
      .append(" stored as orc ")
      .append(" location '").append(tableLoc).append("'")
      .append(" TBLPROPERTIES ('transactional'='true')");
  String crtTbl = createTable.toString();
  runDDL(driver, crtTbl);
  System.out.println("crtTbl = " + crtTbl);

  if (partNames != null && partNames.length != 0) {
    runDDL(driver, "alter table " + tableName + " add partition ( "
        + getTablePartsStr2(partNames, partVals) + " )");
  }
}
示例11: listRecordsInTable
import org.apache.hadoop.hive.ql.Driver; //导入依赖的package包/类
/**
 * Runs {@code select * from <dbName>.<tblName>} and returns every fetched row.
 *
 * @return the rows produced by the full-table select
 */
public static ArrayList<String> listRecordsInTable(Driver driver, String dbName, String tblName)
    throws CommandNeedRetryException, IOException {
  driver.run(String.format("select * from %s.%s", dbName, tblName));
  ArrayList<String> records = new ArrayList<String>();
  driver.getResults(records);
  return records;
}
示例12: listRecordsInPartition
import org.apache.hadoop.hive.ql.Driver; //导入依赖的package包/类
/**
 * Selects every row of the given table restricted to one (continent, country)
 * partition and returns the fetched rows.
 *
 * @return the rows matching the partition predicate
 */
public static ArrayList<String> listRecordsInPartition(Driver driver, String dbName,
                                                       String tblName, String continent,
                                                       String country)
    throws CommandNeedRetryException, IOException {
  driver.run(String.format("select * from %s.%s where continent='%s' and country='%s'",
      dbName, tblName, continent, country));
  ArrayList<String> records = new ArrayList<String>();
  driver.getResults(records);
  return records;
}
示例13: runDDL
import org.apache.hadoop.hive.ql.Driver; //导入依赖的package包/类
/**
 * Runs a DDL statement, retrying once if Hive signals that the command
 * needs to be re-run.
 *
 * @return true once the statement has been submitted successfully
 * @throws QueryFailedException if the statement still signals retry on the
 *         final attempt
 */
private static boolean runDDL(Driver driver, String sql) throws QueryFailedException {
  final int maxRetries = 1; // extra attempts allowed after the first failure
  int attempt = 0;
  while (true) {
    try {
      driver.run(sql);
      return true;
    } catch (CommandNeedRetryException e) {
      // Give up once the retry budget is spent; otherwise loop and try again.
      if (attempt++ == maxRetries) {
        throw new QueryFailedException(sql, e);
      }
    }
  }
}
示例14: generateTestData
import org.apache.hadoop.hive.ql.Driver; //导入依赖的package包/类
// Builds the shared test fixtures: database, tables, role/grants, and two views layered
// over the Hive tables. Order matters: objects must exist before they are granted or
// referenced by the views.
private static void generateTestData() throws Exception {
final SessionState ss = new SessionState(hiveConf);
SessionState.start(ss);
final Driver driver = new Driver(hiveConf);
// Database plus three tables (definitions and data come from class-level constants).
executeQuery(driver, "CREATE DATABASE " + db_general);
createTbl(driver, db_general, g_student_user0, studentDef, studentData);
createTbl(driver, db_general, g_voter_role0, voterDef, voterData);
createTbl(driver, db_general, g_student_user2, studentDef, studentData);
// Role plumbing: create test_role0 and hand it to users 1 and 2.
executeQuery(driver, "SET ROLE admin");
executeQuery(driver, "CREATE ROLE " + test_role0);
executeQuery(driver, "GRANT ROLE " + test_role0 + " TO USER " + org1Users[1]);
executeQuery(driver, "GRANT ROLE " + test_role0 + " TO USER " + org1Users[2]);
// Table-level SELECT grants for the authorization scenarios.
executeQuery(driver, String.format("GRANT SELECT ON %s.%s TO USER %s", db_general, g_student_user0, org1Users[0]));
executeQuery(driver, String.format("GRANT SELECT ON %s.%s TO ROLE %s", db_general, g_voter_role0, test_role0));
executeQuery(driver, String.format("GRANT SELECT ON %s.%s TO USER %s", db_general, g_student_user2, org1Users[2]));
// Views: the first is defined over the Hive student table, the second over the first
// view (resolved via the MiniDFS storage plugin's "tmp" workspace).
createView(org1Users[0], org1Groups[0], v_student_u0g0_750,
String.format("SELECT rownum, name, age, studentnum FROM %s.%s.%s",
hivePluginName, db_general, g_student_user0));
createView(org1Users[1], org1Groups[1], v_student_u1g1_750,
String.format("SELECT rownum, name, age FROM %s.%s.%s",
MINIDFS_STORAGE_PLUGIN_NAME, "tmp", v_student_u0g0_750));
}
示例15: createTable
import org.apache.hadoop.hive.ql.Driver; //导入依赖的package包/类
/**
 * Creates a Hive table from the given DDL template, loads the local data file into
 * it, then fixes ownership and permissions on the table's warehouse directory.
 *
 * @param hiveDriver  driver used to run the statements
 * @param db          database name substituted into the DDL template
 * @param tbl         table name substituted into the DDL template
 * @param tblDef      CREATE TABLE template with %s placeholders for db and tbl
 * @param tblData     local path of the data file to load
 * @param user        owner to set on the warehouse directory
 * @param group       group to set on the warehouse directory
 * @param permissions POSIX-style permission bits for the warehouse directory
 */
private static void createTable(final Driver hiveDriver, final String db, final String tbl, final String tblDef,
    final String tblData, final String user, final String group, final short permissions) throws Exception {
  final String createStmt = String.format(tblDef, db, tbl);
  executeQuery(hiveDriver, createStmt);
  final String loadStmt = String.format("LOAD DATA LOCAL INPATH '%s' INTO TABLE %s.%s", tblData, db, tbl);
  executeQuery(hiveDriver, loadStmt);
  final Path tablePath = getWhPathForHiveObject(db, tbl);
  fs.setPermission(tablePath, new FsPermission(permissions));
  fs.setOwner(tablePath, user, group);
}