This article collects typical usage examples of the Java method org.apache.hadoop.hive.ql.processors.CommandProcessorResponse.getResponseCode. If you are wondering what CommandProcessorResponse.getResponseCode does, how to call it, or where to find working samples, the curated examples below may help. You can also explore the wider usage of its enclosing class, org.apache.hadoop.hive.ql.processors.CommandProcessorResponse.
The following presents six code examples of the CommandProcessorResponse.getResponseCode method, sorted by popularity by default.
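Before the examples, here is a minimal sketch of the pattern they all share: run a statement through the Hive Driver, read the numeric status with getResponseCode(), and surface getErrorMessage() when the status is non-zero. This is an illustrative sketch, not taken from any of the examples below; the default HiveConf setup, the ResponseCodeSketch class name, and the "SHOW TABLES" statement are assumptions made only for the sketch.

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.Driver;
import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;
import org.apache.hadoop.hive.ql.session.SessionState;

public class ResponseCodeSketch {

  public static void main(String[] args) throws Exception {
    // Illustrative setup: a default HiveConf and a fresh session; real code
    // would point this at an actual metastore/warehouse configuration.
    HiveConf conf = new HiveConf();
    SessionState.start(conf);
    Driver driver = new Driver(conf);
    try {
      // "SHOW TABLES" is only a placeholder statement for this sketch.
      CommandProcessorResponse response = driver.run("SHOW TABLES");
      // getResponseCode() follows the shell convention: 0 means success,
      // any non-zero value is a failure described by getErrorMessage().
      if (response.getResponseCode() != 0) {
        throw new RuntimeException("Hive command failed (code "
            + response.getResponseCode() + "): " + response.getErrorMessage());
      }
    } finally {
      driver.close();
    }
  }
}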
Example 1: executeQuery
import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse; // import the package/class the method depends on
/**
 * Execute the given <i>query</i> on the given <i>hiveDriver</i> instance. If a {@link CommandNeedRetryException}
 * is thrown, the query is retried up to 3 times before failure is reported.
 * @param hiveDriver Hive driver instance to run the query on
 * @param query query string to execute
 */
public static void executeQuery(Driver hiveDriver, String query) {
  CommandProcessorResponse response = null;
  boolean failed = false;
  int retryCount = 3;
  // Retry on CommandNeedRetryException until the query runs or the retries are exhausted.
  while (response == null && !failed) {
    try {
      response = hiveDriver.run(query);
    } catch (CommandNeedRetryException ex) {
      if (--retryCount == 0) {
        failed = true;
      }
    }
  }
  if (failed || response.getResponseCode() != 0) {
    throw new RuntimeException(String.format("Failed to execute command '%s', errorMsg = '%s'",
        query, (response != null ? response.getErrorMessage() : "")));
  }
}
Example 2: executeHQL
import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse; // import the package/class the method depends on
/**
 * Executes the given HQL statement through the Hive driver.
 * @param hql HQL statement to execute
 * @throws CommandNeedRetryException
 * @throws IOException if the statement fails
 */
public void executeHQL(String hql) throws CommandNeedRetryException, IOException {
  CommandProcessorResponse response = getDriver().run(hql);
  int retCode = response.getResponseCode();
  if (retCode != 0) {
    String err = response.getErrorMessage();
    throw new IOException("Failed to execute hql [" + hql + "], error message is: " + err);
  }
}
Example 3: execHiveDDL
import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse; // import the package/class the method depends on
public void execHiveDDL(String ddl) throws Exception {
  LOG.info("Executing ddl = " + ddl);
  Driver hiveDriver = new Driver();
  CommandProcessorResponse response = hiveDriver.run(ddl);
  if (response.getResponseCode() > 0) {
    throw new Exception(response.getErrorMessage());
  }
}
Example 4: verifyLocalQuery
import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse; // import the package/class the method depends on
public void verifyLocalQuery(String queryStr) throws Exception {
  // setup Hive driver
  SessionState session = new SessionState(getHiveConf());
  SessionState.start(session);
  Driver driver = new Driver(session.getConf(), getUser());
  // compile the query
  CommandProcessorResponse compilerStatus = driver.compileAndRespond(queryStr);
  if (compilerStatus.getResponseCode() != 0) {
    String errMsg = compilerStatus.getErrorMessage();
    if (errMsg.contains(HiveAuthzConf.HIVE_SENTRY_PRIVILEGE_ERROR_MESSAGE)) {
      printMissingPerms(getHiveConf().get(HiveAuthzConf.HIVE_SENTRY_AUTH_ERRORS));
    }
    throw new SemanticException("Compilation error: " + compilerStatus.getErrorMessage());
  }
  driver.close();
  System.out.println("User " + getUser() + " has privileges to run the query");
}
Example 5: executeHQL
import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse; // import the package/class the method depends on
/**
* only used by Deploy Util
*/
@Override
public void executeHQL(String hql) throws CommandNeedRetryException, IOException {
  CommandProcessorResponse response = getDriver().run(hql);
  int retCode = response.getResponseCode();
  if (retCode != 0) {
    String err = response.getErrorMessage();
    throw new IOException("Failed to execute hql [" + hql + "], error message is: " + err);
  }
}
Example 6: run
import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse; // import the package/class the method depends on
public CommandProcessorResponse run(String command) throws CommandNeedRetryException {
  CommandProcessorResponse cpr = runInternal(command);
  if (cpr.getResponseCode() == 0) {
    return cpr;
  }
  SessionState ss = SessionState.get();
  if (ss == null) {
    return cpr;
  }
  MetaDataFormatter mdf = MetaDataFormatUtils.getFormatter(ss.getConf());
  if (!(mdf instanceof JsonMetaDataFormatter)) {
    return cpr;
  }
  /* Here we want to encode the error in a machine-readable way (e.g. JSON).
   * Ideally, errorCode would always be set to a canonical error defined in ErrorMsg.
   * In practice that is rarely the case, so the messy logic below tries to tease
   * out the canonical error code if it can. Exclude the stack trace from the output when
   * the error is a specific/expected one.
   * It's written to stdout for backward compatibility (WebHCat consumes it). */
  try {
    if (downstreamError == null) {
      mdf.error(ss.out, errorMessage, cpr.getResponseCode(), SQLState);
      return cpr;
    }
    ErrorMsg canonicalErr = ErrorMsg.getErrorMsg(cpr.getResponseCode());
    if (canonicalErr != null && canonicalErr != ErrorMsg.GENERIC_ERROR) {
      /* Some HiveExceptions (e.g. SemanticException) don't set the
       * canonical ErrorMsg explicitly, but there is logic
       * (e.g. #compile()) to find an appropriate canonical error and
       * return its code as the error code. In this case we want to
       * preserve it for downstream code to interpret. */
      mdf.error(ss.out, errorMessage, cpr.getResponseCode(), SQLState, null);
      return cpr;
    }
    if (downstreamError instanceof HiveException) {
      HiveException rc = (HiveException) downstreamError;
      mdf.error(ss.out, errorMessage,
          rc.getCanonicalErrorMsg().getErrorCode(), SQLState,
          rc.getCanonicalErrorMsg() == ErrorMsg.GENERIC_ERROR ?
              org.apache.hadoop.util.StringUtils.stringifyException(rc)
              : null);
    } else {
      ErrorMsg canonicalMsg =
          ErrorMsg.getErrorMsg(downstreamError.getMessage());
      mdf.error(ss.out, errorMessage, canonicalMsg.getErrorCode(),
          SQLState, org.apache.hadoop.util.StringUtils.stringifyException(downstreamError));
    }
  } catch (HiveException ex) {
    console.printError("Unable to JSON-encode the error",
        org.apache.hadoop.util.StringUtils.stringifyException(ex));
  }
  return cpr;
}