本文整理汇总了Java中org.apache.hive.service.cli.HiveSQLException类的典型用法代码示例。如果您正苦于以下问题:Java HiveSQLException类的具体用法?Java HiveSQLException怎么用?Java HiveSQLException使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。
HiveSQLException类属于org.apache.hive.service.cli包,在下文中一共展示了HiveSQLException类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: waitForStart
import org.apache.hive.service.cli.HiveSQLException; //导入依赖的package包/类
/**
 * Polls the freshly started HiveServer2 until a session can be opened,
 * giving up once the startup timeout elapses.
 *
 * @throws InterruptedException if the polling sleep is interrupted
 * @throws TimeoutException     if the server does not accept a session in time
 * @throws HiveSQLException     if closing the probe session fails
 */
private void waitForStart() throws InterruptedException, TimeoutException, HiveSQLException {
  final long pollPeriodMillis = 100L;
  final long startupTimeoutMillis = 1000L * 1000L;
  // long, not int: the original int accumulator silently narrowed the long
  // poll period through the compound assignment.
  long waitedMillis = 0L;
  CLIServiceClient hiveClient = getClient();
  SessionHandle sessionHandle;
  do {
    Thread.sleep(pollPeriodMillis);
    waitedMillis += pollPeriodMillis;
    if (waitedMillis > startupTimeoutMillis) {
      throw new TimeoutException("Couldn't access new HiveServer2: " + getJdbcURL());
    }
    try {
      Map<String, String> sessionConf = new HashMap<>();
      sessionHandle = hiveClient.openSession("foo", "bar", sessionConf);
    } catch (Exception ignored) {
      // Server not ready yet; any failure here just means "poll again".
      continue;
    }
    // A session opened successfully: the server is up. Clean up the probe.
    hiveClient.closeSession(sessionHandle);
    break;
  } while (true);
}
示例2: close
import org.apache.hive.service.cli.HiveSQLException; //导入依赖的package包/类
@Override
public void close() throws HiveSQLException {
ClassLoader nonDBClassLoader = getSessionState().getConf().getClassLoader();
super.close();
// Release class loader resources
JavaUtils.closeClassLoadersTo(nonDBClassLoader, getClass().getClassLoader());
synchronized (sessionDbClassLoaders) {
for (Map.Entry<String, SessionClassLoader> entry : sessionDbClassLoaders.entrySet()) {
try {
// Closing session level classloaders up untill the db class loader if present, or null.
// When db class loader is null, the class loader in the session is a single class loader
// which stays as it is on database switch -- provided the new db doesn't have db jars.
// The following line will close class loaders made on top of db class loaders and will close
// only one classloader without closing the parents. In case of no db class loader, the session
// classloader will already have been closed by either super.close() or before this for loop.
JavaUtils.closeClassLoadersTo(entry.getValue(), getDbResService().getClassLoader(entry.getKey()));
} catch (Exception e) {
log.error("Error closing session classloader for session: {}", getSessionHandle().getSessionId(), e);
}
}
sessionDbClassLoaders.clear();
}
// reset classloader in close
Thread.currentThread().setContextClassLoader(LensSessionImpl.class.getClassLoader());
}
示例3: setSessionParameters
import org.apache.hive.service.cli.HiveSQLException; //导入依赖的package包/类
/**
* Sets the session parameter.
*
* @param sessionid the sessionid
* @param config map of string-string. each entry represents key and the value to be set for that key
*/
protected void setSessionParameters(LensSessionHandle sessionid, Map<String, String> config) {
log.info("Request to Set params:" + config);
try {
acquire(sessionid);
// set in session conf
for(Map.Entry<String, String> entry: config.entrySet()) {
String var = entry.getKey();
if (var.indexOf(SystemVariables.HIVECONF_PREFIX) == 0) {
var = var.substring(SystemVariables.HIVECONF_PREFIX.length());
}
getSession(sessionid).getSessionConf().set(var, entry.getValue());
String command = "set" + " " + entry.getKey() + "= " + entry.getValue();
closeCliServiceOp(getCliService().executeStatement(getHiveSessionHandle(sessionid), command, null));
}
// add to persist
getSession(sessionid).setConfig(config);
log.info("Set params:" + config);
} catch (HiveSQLException e) {
throw new WebApplicationException(e);
} finally {
release(sessionid);
}
}
示例4: restoreSession
import org.apache.hive.service.cli.HiveSQLException; //导入依赖的package包/类
/**
* Restore session from previous instance of lens server.
*
* @param sessionHandle the session handle
* @param userName the user name
* @param password the password
* @throws LensException the lens exception
*/
/**
 * Restore session from previous instance of lens server: re-creates the Hive
 * session under the same handle identifier and re-registers the user mapping.
 *
 * @param sessionHandle the session handle
 * @param userName      the user name
 * @param password      the password
 * @throws LensException the lens exception
 */
public void restoreSession(LensSessionHandle sessionHandle, String userName, String password) throws LensException {
  HandleIdentifier handleId = new HandleIdentifier(sessionHandle.getPublicId(), sessionHandle.getSecretId());
  SessionHandle hiveHandle = new SessionHandle(new TSessionHandle(handleId.toTHandleIdentifier()));
  try {
    cliService.createSessionWithSessionHandle(hiveHandle, userName, password, new HashMap<String, String>());
    LensSessionHandle restored =
      new LensSessionHandle(hiveHandle.getHandleIdentifier().getPublicId(),
        hiveHandle.getHandleIdentifier().getSecretId());
    SESSION_MAP.put(restored.getPublicId().toString(), restored);
    // Ensure a per-user bookkeeping entry exists before updating the counters.
    SessionUser user = SESSION_USER_INSTANCE_MAP.get(userName);
    if (user == null) {
      user = new SessionUser(userName);
      SESSION_USER_INSTANCE_MAP.put(userName, user);
    }
    updateSessionsPerUser(userName);
  } catch (HiveSQLException e) {
    throw new LensException("Error restoring session " + sessionHandle, e);
  }
}
示例5: closeSession
import org.apache.hive.service.cli.HiveSQLException; //导入依赖的package包/类
/**
* Close session.
*
* @param sessionHandle the session handle
* @throws LensException the lens exception
*/
/**
 * Close session.
 *
 * @param sessionHandle the session handle
 * @throws LensException the lens exception
 */
public void closeSession(LensSessionHandle sessionHandle) throws LensException {
try {
LensSessionImpl session = getSession(sessionHandle);
// Only decrement the per-user count the first time close is requested;
// a session already marked for close was counted down on that earlier request.
boolean shouldDecrementOpenedSessionCount = !session.getLensSessionPersistInfo().isMarkedForClose();
if (session.activeOperationsPresent()) {
// Defer the actual close until in-flight operations finish.
session.markForClose();
} else {
cliService.closeSession(getHiveSessionHandle(sessionHandle));
SESSION_MAP.remove(sessionHandle.getPublicId().toString());
log.info("Closed session {} for {} user", sessionHandle, session.getLoggedInUser());
}
if (shouldDecrementOpenedSessionCount) {
decrementSessionCountForUser(sessionHandle, session.getLoggedInUser());
}
// Session fully gone from the map (not merely marked): tell the query service
// so it can release any driver-side sessions tied to this handle.
if (!SESSION_MAP.containsKey(sessionHandle.getPublicId().toString())) {
// Inform query service
BaseLensService svc = LensServices.get().getService(QueryExecutionService.NAME);
if (svc instanceof QueryExecutionServiceImpl) {
((QueryExecutionServiceImpl) svc).closeDriverSessions(sessionHandle);
}
}
} catch (HiveSQLException e) {
throw new LensException(e);
}
}
示例6: getClient
import org.apache.hive.service.cli.HiveSQLException; //导入依赖的package包/类
@Override
public CLIServiceClient getClient() throws LensException {
if (!connected) {
try {
log.info("HiveDriver connecting to HiveServer @ {}:{}",
conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_BIND_HOST),
conf.getIntVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_PORT));
hs2Client = RetryingThriftCLIServiceClient.newRetryingCLIServiceClient(conf);
log.info("HiveDriver connected to HiveServer @ {}:{}",
conf.getVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_BIND_HOST),
conf.getIntVar(HiveConf.ConfVars.HIVE_SERVER2_THRIFT_PORT));
} catch (HiveSQLException e) {
throw new LensException(e);
}
connected = true;
}
return hs2Client;
}
示例7: testCreateWithDuplicates
import org.apache.hive.service.cli.HiveSQLException; //导入依赖的package包/类
@Test(expected = HiveSQLException.class)
public void testCreateWithDuplicates() throws Exception {
// load the raw data as a native, managed table
// and then insert its content into the external one
String localTable = createTable("createsourceduplicate");
String load = loadData("createsourceduplicate");
// create external table
String ddl =
"CREATE EXTERNAL TABLE createsaveduplicate ("
+ "id BIGINT, "
+ "name STRING, "
+ "links STRUCT<url:STRING, picture:STRING>) "
+ tableProps("hive/createsave",
"'" + ConfigurationOptions.ES_MAPPING_ID + "'='id'",
"'" + ConfigurationOptions.ES_WRITE_OPERATION + "'='create'");
String selectTest = "SELECT s.name, struct(s.url, s.picture) FROM createsourceduplicate s";
// transfer data
String insert =
"INSERT OVERWRITE TABLE createsaveduplicate "
+ "SELECT s.id, s.name, named_struct('url', s.url, 'picture', s.picture) FROM createsourceduplicate s";
System.out.println(ddl);
System.out.println(server.execute(ddl));
System.out.println(server.execute(localTable));
System.out.println(server.execute(load));
System.out.println(server.execute(selectTest));
System.out.println(server.execute(insert));
}
示例8: testUpdateWithoutUpsert
import org.apache.hive.service.cli.HiveSQLException; //导入依赖的package包/类
@Test(expected = HiveSQLException.class)
public void testUpdateWithoutUpsert() throws Exception {
// load the raw data as a native, managed table
// and then insert its content into the external one
String localTable = createTable("updatewoupsertsource");
String load = loadData("updatewoupsertsource");
// create external table
String ddl =
"CREATE EXTERNAL TABLE updatewoupsertsave ("
+ "id BIGINT, "
+ "name STRING, "
+ "links STRUCT<url:STRING, picture:STRING>) "
+ tableProps("hive/updatewoupsertsave",
"'" + ConfigurationOptions.ES_MAPPING_ID + "'='id'",
"'" + ConfigurationOptions.ES_WRITE_OPERATION + "'='update'");
String selectTest = "SELECT s.name, struct(s.url, s.picture) FROM updatewoupsertsource s";
// transfer data
String insert =
"INSERT OVERWRITE TABLE updatewoupsertsave "
+ "SELECT s.id, s.name, named_struct('url', s.url, 'picture', s.picture) FROM updatewoupsertsource s";
System.out.println(ddl);
System.out.println(server.execute(ddl));
System.out.println(server.execute(localTable));
System.out.println(server.execute(load));
System.out.println(server.execute(selectTest));
System.out.println(server.execute(insert));
}
示例9: testCreateWithDuplicates
import org.apache.hive.service.cli.HiveSQLException; //导入依赖的package包/类
@Test(expected = HiveSQLException.class)
public void testCreateWithDuplicates() throws Exception {
  // JSON variant: load raw data into a managed table, then copy it into the
  // ES-backed external table; 'create' rejects duplicate 'number' ids and the
  // final insert raises HiveSQLException.
  String sourceDdl = createTable("jsoncreatesourceduplicate");
  String loadStmt = loadData("jsoncreatesourceduplicate");
  // External table storing raw JSON, backed by "json-hive/createsave".
  String targetDdl =
    "CREATE EXTERNAL TABLE jsoncreatesaveduplicate ("
      + "json STRING) "
      + tableProps("json-hive/createsave",
        "'" + ConfigurationOptions.ES_MAPPING_ID + "'='number'",
        "'" + ConfigurationOptions.ES_WRITE_OPERATION + "'='create'");
  String probeQuery = "SELECT s.json FROM jsoncreatesourceduplicate s";
  String insertStmt =
    "INSERT OVERWRITE TABLE jsoncreatesaveduplicate "
      + "SELECT s.json FROM jsoncreatesourceduplicate s";
  System.out.println(targetDdl);
  // Execute in the original order; the last statement is expected to fail.
  for (String statement : new String[] { targetDdl, sourceDdl, loadStmt, probeQuery, insertStmt }) {
    System.out.println(server.execute(statement));
  }
}
示例10: executeScript
import org.apache.hive.service.cli.HiveSQLException; //导入依赖的package包/类
/**
 * Parses the given script file and runs its statements on the active cluster.
 *
 * @param scriptFile path of the script to run
 * @param params     substitution parameters applied to the script
 * @param excludes   statements to skip
 * @return the output of each executed statement
 * @throws IllegalStateException if no cluster is active
 * @throws RuntimeException      wrapping any {@link HiveSQLException} from execution
 */
public List<String> executeScript(String scriptFile, Map<String, String> params, List<String> excludes) {
  // Fail fast before parsing the script: the original built the HiveScript
  // first and only then discovered there was no cluster to run it on.
  if (cluster == null) {
    throw new IllegalStateException("No active cluster to run script with");
  }
  HiveScript hiveScript = new HiveScript(scriptFile, params, excludes);
  try {
    // Return directly from the try; no null-initialized result variable needed.
    return cluster.executeStatements(hiveScript.getStatements());
  } catch (HiveSQLException e) {
    throw new RuntimeException("Unable to execute script", e);
  }
}
示例11: executeStatements
import org.apache.hive.service.cli.HiveSQLException; //导入依赖的package包/类
/**
 * Runs each statement in order and concatenates all of their results.
 *
 * @param statements the statements to execute, in order
 * @return the combined output of every statement
 * @throws HiveSQLException if any statement fails
 */
public List<String> executeStatements(List<String> statements) throws HiveSQLException {
  List<String> allResults = new ArrayList<>();
  for (int i = 0; i < statements.size(); i++) {
    allResults.addAll(processStatement(statements.get(i)));
  }
  return allResults;
}
示例12: deleteResource
import org.apache.hive.service.cli.HiveSQLException; //导入依赖的package包/类
/**
* {@inheritDoc}
*/
@Override
public void deleteResource(LensSessionHandle sessionid, String type, String path) {
String command = "delete " + type.toLowerCase() + " " + path;
try {
acquire(sessionid);
closeCliServiceOp(getCliService().executeStatement(getHiveSessionHandle(sessionid), command, null));
getSession(sessionid).removeResource(type, path);
} catch (HiveSQLException e) {
throw new WebApplicationException(e);
} finally {
release(sessionid);
}
}
示例13: closeCliServiceOp
import org.apache.hive.service.cli.HiveSQLException; //导入依赖的package包/类
/**
* Close operation created for underlying CLI service
* @param op operation handle
*/
private void closeCliServiceOp(OperationHandle op) {
if (op != null) {
try {
getCliService().closeOperation(op);
} catch (HiveSQLException e) {
log.error("Error closing operation " + op.getHandleIdentifier(), e);
}
}
}
示例14: getSession
import org.apache.hive.service.cli.HiveSQLException; //导入依赖的package包/类
/**
* Gets the session.
*
* @param sessionHandle the session handle
* @return the session
*/
public LensSessionImpl getSession(LensSessionHandle sessionHandle) {
if (sessionHandle == null) {
throw new ClientErrorException("Session is null", 400);
}
try {
return ((LensSessionImpl) getSessionManager().getSession(getHiveSessionHandle(sessionHandle)));
} catch (HiveSQLException exc) {
log.warn("Session {} not found", sessionHandle.getPublicId(), exc);
// throw resource gone exception (410)
throw new ClientErrorException("Session " + sessionHandle.getPublicId() + " is invalid " + sessionHandle,
Response.Status.GONE, exc);
}
}
示例15: testCreateWithDuplicates
import org.apache.hive.service.cli.HiveSQLException; //导入依赖的package包/类
@Test(expected = HiveSQLException.class)
public void testCreateWithDuplicates() throws Exception {
// load the raw data as a native, managed table
// and then insert its content into the external one
String localTable = createTable("createsourceduplicate");
String load = loadData("createsourceduplicate");
// create external table
String ddl =
"CREATE EXTERNAL TABLE createsaveduplicate ("
+ "id BIGINT, "
+ "name STRING, "
+ "links STRUCT<url:STRING, picture:STRING>) "
+ tableProps("hive-createsave/data",
"'" + ConfigurationOptions.ES_MAPPING_ID + "'='id'",
"'" + ConfigurationOptions.ES_WRITE_OPERATION + "'='create'");
String selectTest = "SELECT s.name, struct(s.url, s.picture) FROM createsourceduplicate s";
// transfer data
String insert =
"INSERT OVERWRITE TABLE createsaveduplicate "
+ "SELECT s.id, s.name, named_struct('url', s.url, 'picture', s.picture) FROM createsourceduplicate s";
System.out.println(ddl);
System.out.println(server.execute(ddl));
System.out.println(server.execute(localTable));
System.out.println(server.execute(load));
System.out.println(server.execute(selectTest));
System.out.println(server.execute(insert));
}