This article collects typical usage examples of the Java class org.apache.hadoop.hive.ql.CommandNeedRetryException. If you are unsure what CommandNeedRetryException is for or how to use it, the curated class examples below should help.
CommandNeedRetryException belongs to the org.apache.hadoop.hive.ql package. Fifteen code examples of the class are shown below, sorted by popularity by default.
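Before the individual examples, here is a minimal, self-contained sketch of the retry pattern most of them build on. It assumes Hive 1.x/2.x, where Driver.run(String) declares CommandNeedRetryException to signal that the same statement can simply be re-submitted; the class name RetryingHiveRunner and the MAX_ATTEMPTS bound are illustrative and not taken from any of the projects below.

import org.apache.hadoop.hive.ql.CommandNeedRetryException;
import org.apache.hadoop.hive.ql.Driver;
import org.apache.hadoop.hive.ql.processors.CommandProcessorResponse;

public class RetryingHiveRunner {
  private static final int MAX_ATTEMPTS = 3; // arbitrary retry budget for this sketch

  public static CommandProcessorResponse runWithRetry(Driver driver, String hql)
      throws CommandNeedRetryException {
    CommandNeedRetryException last = null;
    for (int attempt = 1; attempt <= MAX_ATTEMPTS; attempt++) {
      try {
        return driver.run(hql);        // may throw CommandNeedRetryException
      } catch (CommandNeedRetryException e) {
        last = e;                      // re-submit the exact same statement
      }
    }
    throw last;                        // give up after MAX_ATTEMPTS attempts
  }
}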
Example 1: executeQuery
import org.apache.hadoop.hive.ql.CommandNeedRetryException; // import the required package/class
/**
 * Execute the given <i>query</i> on the given <i>hiveDriver</i> instance. If a {@link CommandNeedRetryException}
 * is thrown, it retries up to 3 times before reporting failure.
 * @param hiveDriver Hive driver used to run the query
 * @param query HQL statement to execute
 */
public static void executeQuery(Driver hiveDriver, String query) {
  CommandProcessorResponse response = null;
  boolean failed = false;
  int retryCount = 3;
  while (true) {
    try {
      response = hiveDriver.run(query);
      break;
    } catch (CommandNeedRetryException ex) {
      // retry the same query until the retry budget is exhausted
      if (--retryCount == 0) {
        failed = true;
        break;
      }
    }
  }
  if (failed || response.getResponseCode() != 0) {
    throw new RuntimeException(String.format("Failed to execute command '%s', errorMsg = '%s'",
        query, (response != null ? response.getErrorMessage() : "")));
  }
}
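A hedged usage sketch for the helper above; the HiveConf/SessionState setup and the HiveTestUtilities class name are assumptions for illustration, not part of the original example.

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.Driver;
import org.apache.hadoop.hive.ql.session.SessionState;

public class ExecuteQueryDemo {
  public static void main(String[] args) {
    HiveConf conf = new HiveConf();     // assumes a Hive configuration on the classpath
    SessionState.start(conf);           // a Driver needs an active SessionState
    Driver hiveDriver = new Driver(conf);
    // executeQuery is the helper from Example 1, assumed here to live on a class named HiveTestUtilities.
    HiveTestUtilities.executeQuery(hiveDriver,
        "CREATE TABLE IF NOT EXISTS demo_tbl (id INT, msg STRING)");
  }
}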
Example 2: executeHQL
import org.apache.hadoop.hive.ql.CommandNeedRetryException; // import the required package/class
/**
 * Execute the given HQL statement; fails with an IOException if Hive returns a non-zero response code.
 * @param hql the HQL statement to run
 * @throws CommandNeedRetryException if Hive asks for the command to be re-submitted
 * @throws IOException if the statement fails to execute
 */
public void executeHQL(String hql) throws CommandNeedRetryException, IOException {
  CommandProcessorResponse response = getDriver().run(hql);
  int retCode = response.getResponseCode();
  if (retCode != 0) {
    String err = response.getErrorMessage();
    throw new IOException("Failed to execute hql [" + hql + "], error message is: " + err);
  }
}
Example 3: parseTest
import org.apache.hadoop.hive.ql.CommandNeedRetryException; // import the required package/class
@Test
public void parseTest()
    throws CommandNeedRetryException, SemanticException, ParseException {
  String hiveQl = "select t1.c2 from (select t2.column2 c2, t3.column3 from db1.table2 t2 join db2.table3 t3 on t2.x = t3.y) t1";
  HiveViewDependency hiveViewDependency = new HiveViewDependency();
  String[] result = hiveViewDependency.getViewDependency(hiveQl);
  String[] expectedResult = new String[]{"db1.table2", "db2.table3"};
  Assert.assertEquals(expectedResult, result);
}
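HiveViewDependency is a project-specific helper, but a minimal sketch of the Hive parser API such a dependency extractor presumably wraps (an assumption; the query text here is made up) looks like this:

import org.apache.hadoop.hive.ql.parse.ASTNode;
import org.apache.hadoop.hive.ql.parse.ParseDriver;
import org.apache.hadoop.hive.ql.parse.ParseException;

public class ParseSketch {
  public static void main(String[] args) throws ParseException {
    ParseDriver parseDriver = new ParseDriver();
    ASTNode ast = parseDriver.parse("select t2.column2 from db1.table2 t2");
    // Table references show up as TOK_TABNAME nodes in the dumped tree.
    System.out.println(ast.dump());
  }
}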
Example 4: listRecordsInTable
import org.apache.hadoop.hive.ql.CommandNeedRetryException; // import the required package/class
public static ArrayList<String> listRecordsInTable(Driver driver, String dbName, String tblName)
    throws CommandNeedRetryException, IOException {
  driver.run("select * from " + dbName + "." + tblName);
  ArrayList<String> res = new ArrayList<String>();
  driver.getResults(res);
  return res;
}
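A hedged, self-contained sketch of how this helper pairs Driver.run with Driver.getResults; the configuration setup and the default.demo_tbl table are assumptions for illustration.

import java.io.IOException;
import java.util.ArrayList;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.CommandNeedRetryException;
import org.apache.hadoop.hive.ql.Driver;
import org.apache.hadoop.hive.ql.session.SessionState;

public class FetchResultsSketch {
  public static void main(String[] args) throws CommandNeedRetryException, IOException {
    HiveConf conf = new HiveConf();              // assumes a Hive configuration on the classpath
    SessionState.start(conf);
    Driver driver = new Driver(conf);
    driver.run("select * from default.demo_tbl");
    ArrayList<String> rows = new ArrayList<String>();
    driver.getResults(rows);                     // each entry is one delimited result row
    for (String row : rows) {
      System.out.println(row);
    }
  }
}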
Example 5: listRecordsInPartition
import org.apache.hadoop.hive.ql.CommandNeedRetryException; // import the required package/class
public static ArrayList<String> listRecordsInPartition(Driver driver, String dbName,
                                                       String tblName, String continent,
                                                       String country)
    throws CommandNeedRetryException, IOException {
  driver.run("select * from " + dbName + "." + tblName + " where continent='"
      + continent + "' and country='" + country + "'");
  ArrayList<String> res = new ArrayList<String>();
  driver.getResults(res);
  return res;
}
Example 6: runDDL
import org.apache.hadoop.hive.ql.CommandNeedRetryException; // import the required package/class
private static boolean runDDL(Driver driver, String sql) throws QueryFailedException {
  int retryCount = 1; // # of times to retry if first attempt fails
  for (int attempt = 0; attempt <= retryCount; ++attempt) {
    try {
      driver.run(sql);
      return true;
    } catch (CommandNeedRetryException e) {
      if (attempt == retryCount) {
        throw new QueryFailedException(sql, e);
      }
      continue;
    }
  } // for
  return false;
}
Example 7: executeHQL
import org.apache.hadoop.hive.ql.CommandNeedRetryException; // import the required package/class
/**
* only used by Deploy Util
*/
@Override
public void executeHQL(String hql) throws CommandNeedRetryException, IOException {
  CommandProcessorResponse response = getDriver().run(hql);
  int retCode = response.getResponseCode();
  if (retCode != 0) {
    String err = response.getErrorMessage();
    throw new IOException("Failed to execute hql [" + hql + "], error message is: " + err);
  }
}
Example 8: executeSQL
import org.apache.hadoop.hive.ql.CommandNeedRetryException; // import the required package/class
private void executeSQL(String[] sqls) throws CommandNeedRetryException, IOException, SQLException {
  Connection con = SqlUtil.getConnection(dbconf);
  for (String sql : sqls) {
    logger.info(sql);
    SqlUtil.execUpdateSQL(con, sql);
  }
  DBUtils.closeQuietly(con);
}
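SqlUtil and DBUtils are project-specific helpers; a plain-JDBC sketch of what this method does (the HiveServer2 URL and the statement are assumptions for illustration, and the Hive JDBC driver must be on the classpath) might look like this:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.sql.Statement;

public class JdbcExecuteSketch {
  public static void main(String[] args) throws SQLException {
    // try-with-resources closes the connection even if a statement fails,
    // which the example above only does on the success path.
    try (Connection con = DriverManager.getConnection("jdbc:hive2://localhost:10000/default");
         Statement stmt = con.createStatement()) {
      stmt.execute("CREATE TABLE IF NOT EXISTS demo_tbl (id INT, msg STRING)");
    }
  }
}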
Example 9: testSingleWriterSimplePartitionedTable
import org.apache.hadoop.hive.ql.CommandNeedRetryException; // import the required package/class
@Test
public void testSingleWriterSimplePartitionedTable()
    throws EventDeliveryException, IOException, CommandNeedRetryException {
  int totalRecords = 4;
  int batchSize = 2;
  int batchCount = totalRecords / batchSize;
  Context context = new Context();
  context.put("hive.metastore", metaStoreURI);
  context.put("hive.database", dbName);
  context.put("hive.table", tblName);
  context.put("hive.partition", PART1_VALUE + "," + PART2_VALUE);
  context.put("autoCreatePartitions", "false");
  context.put("batchSize", "" + batchSize);
  context.put("serializer", HiveDelimitedTextSerializer.ALIAS);
  context.put("serializer.fieldnames", COL1 + ",," + COL2 + ",");
  context.put("heartBeatInterval", "0");
  Channel channel = startSink(sink, context);
  List<String> bodies = Lists.newArrayList();
  // push all the events into the channel in a single transaction
  Transaction txn = channel.getTransaction();
  txn.begin();
  for (int j = 1; j <= totalRecords; j++) {
    Event event = new SimpleEvent();
    String body = j + ",blah,This is a log message,other stuff";
    event.setBody(body.getBytes());
    bodies.add(body);
    channel.put(event);
  }
  txn.commit();
  txn.close();
  checkRecordCountInTable(0, dbName, tblName);
  // drain the channel in batchCount sink batches
  for (int i = 0; i < batchCount; i++) {
    sink.process();
  }
  sink.stop();
  checkRecordCountInTable(totalRecords, dbName, tblName);
}
Example 10: testHeartBeat
import org.apache.hadoop.hive.ql.CommandNeedRetryException; // import the required package/class
@Test
public void testHeartBeat()
    throws EventDeliveryException, IOException, CommandNeedRetryException {
  int batchSize = 2;
  int batchCount = 3;
  int totalRecords = batchCount * batchSize;
  Context context = new Context();
  context.put("hive.metastore", metaStoreURI);
  context.put("hive.database", dbName);
  context.put("hive.table", tblName);
  context.put("hive.partition", PART1_VALUE + "," + PART2_VALUE);
  context.put("autoCreatePartitions", "true");
  context.put("batchSize", "" + batchSize);
  context.put("serializer", HiveDelimitedTextSerializer.ALIAS);
  context.put("serializer.fieldnames", COL1 + ",," + COL2 + ",");
  context.put("hive.txnsPerBatchAsk", "20");
  context.put("heartBeatInterval", "3"); // heartbeat interval in seconds
  Channel channel = startSink(sink, context);
  List<String> bodies = Lists.newArrayList();
  // push the events in batchCount batches, processing each batch with the sink
  for (int i = 0; i < batchCount; i++) {
    Transaction txn = channel.getTransaction();
    txn.begin();
    for (int j = 1; j <= batchSize; j++) {
      Event event = new SimpleEvent();
      String body = i * j + ",blah,This is a log message,other stuff";
      event.setBody(body.getBytes());
      bodies.add(body);
      channel.put(event);
    }
    txn.commit();
    txn.close();
    // execute sink to process the events, then allow a heartbeat to happen
    sink.process();
    sleep(3000);
  }
  sink.stop();
  checkRecordCountInTable(totalRecords, dbName, tblName);
}
Example 11: checkRecordCountInTable
import org.apache.hadoop.hive.ql.CommandNeedRetryException; // import the required package/class
private void checkRecordCountInTable(int expectedCount, String db, String tbl)
    throws CommandNeedRetryException, IOException {
  int count = TestUtil.listRecordsInTable(driver, db, tbl).size();
  Assert.assertEquals(expectedCount, count);
}
Example 12: checkRecordCountInTable
import org.apache.hadoop.hive.ql.CommandNeedRetryException; // import the required package/class
private void checkRecordCountInTable(int expectedCount)
    throws CommandNeedRetryException, IOException {
  int count = TestUtil.listRecordsInTable(driver, dbName, tblName).size();
  Assert.assertEquals(expectedCount, count);
}
Example 13: executeQuery
import org.apache.hadoop.hive.ql.CommandNeedRetryException; // import the required package/class
/** Executes the passed query on the embedded metastore service. */
void executeQuery(String query) throws CommandNeedRetryException {
  driver.run(query);
}
Example 14: reCreateTestTable
import org.apache.hadoop.hive.ql.CommandNeedRetryException; // import the required package/class
private void reCreateTestTable() throws CommandNeedRetryException {
  service.executeQuery("drop table " + TEST_TABLE);
  service.executeQuery("create table " + TEST_TABLE + "(mycol1 string, mycol2 int)");
}
Example 15: executeHQL
import org.apache.hadoop.hive.ql.CommandNeedRetryException; // import the required package/class
@Override
public void executeHQL(String hql) throws CommandNeedRetryException, IOException {
  throw new UnsupportedOperationException();
}