本文整理汇总了Java中org.apache.hadoop.hive.ql.metadata.Hive.get方法的典型用法代码示例。如果您正苦于以下问题:Java Hive.get方法的具体用法?Java Hive.get怎么用?Java Hive.get使用的例子?那么恭喜您,这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类org.apache.hadoop.hive.ql.metadata.Hive的用法示例。
在下文中一共展示了Hive.get方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: beforeTest
import org.apache.hadoop.hive.ql.metadata.Hive; //导入方法依赖的package包/类
/**
 * Per-test setup: verifies the Hadoop binary path is configured, creates the
 * query driver, starts a Hive session, creates the test database and makes it
 * current, then primes the driver configuration for the actual tests.
 *
 * @throws Exception the exception
 */
@BeforeTest
public void beforeTest() throws Exception {
  // Fail fast if the hadoop.bin.path system property is not set — the driver
  // needs it to launch Hadoop processes.
  System.out.println("###HADOOP_PATH " + System.getProperty("hadoop.bin.path"));
  assertNotNull(System.getProperty("hadoop.bin.path"));
  createDriver();
  ss = new SessionState(hiveConf, "testuser");
  SessionState.start(ss);
  // Create the test database (second arg true = ignore if it already exists)
  // and switch the session to it.
  Hive client = Hive.get(hiveConf);
  Database database = new Database();
  database.setName(dataBase);
  client.createDatabase(database, true);
  SessionState.get().setCurrentDatabase(dataBase);
  sessionid = SessionState.get().getSessionId();
  // Run "USE <db>" through the driver with INSERT OVERWRITE rewriting turned
  // off, then restore the overwrite/persistence settings for the tests.
  driverConf.setBoolean(LensConfConstants.QUERY_ADD_INSERT_OVEWRITE, false);
  QueryContext context = createContext("USE " + dataBase, this.queryConf);
  driver.execute(context);
  driverConf.setBoolean(LensConfConstants.QUERY_ADD_INSERT_OVEWRITE, true);
  driverConf.setBoolean(LensConfConstants.QUERY_PERSISTENT_RESULT_INDRIVER, true);
}
示例2: initialize
import org.apache.hadoop.hive.ql.metadata.Hive; //导入方法依赖的package包/类
/**
 * Initializes this component from configuration: resolves the statistics
 * warehouse path and target database, keeps the configuration, and opens a
 * Hive metastore client.
 *
 * @param conf the Hive configuration to read settings from
 * @throws IllegalArgumentException if the Hive metastore cannot be reached
 */
public void initialize(HiveConf conf) {
  String warehouseDir =
    conf.get(LensConfConstants.STATISTICS_WAREHOUSE_KEY, LensConfConstants.DEFAULT_STATISTICS_WAREHOUSE);
  this.warehousePath = new Path(warehouseDir);
  this.database = conf.get(LensConfConstants.STATISTICS_DATABASE_KEY, LensConfConstants.DEFAULT_STATISTICS_DATABASE);
  this.conf = conf;
  try {
    this.client = Hive.get(conf);
  } catch (Exception e) {
    // Surface metastore connectivity problems immediately rather than failing
    // later on first use of the client.
    LOG.error("Unable to connect to hive metastore", e);
    throw new IllegalArgumentException("Unable to connect to hive metastore", e);
  }
}
示例3: tearDown
import org.apache.hadoop.hive.ql.metadata.Hive; //导入方法依赖的package包/类
/**
 * Class-level cleanup: drops every database created by the tests.
 *
 * @throws Exception if a drop fails
 */
@AfterClass
public void tearDown() throws Exception {
  Hive metastore = Hive.get(conf);
  for (String database : testDatabases) {
    // Booleans: delete data and ignore databases that no longer exist,
    // so cleanup stays best-effort.
    metastore.dropDatabase(database, true, true);
  }
}
示例4: createTempMetastoreTable
import org.apache.hadoop.hive.ql.metadata.Hive; //导入方法依赖的package包/类
/**
 * Creates a managed Hive metastore table with a unique name whose schema
 * mirrors the given result-set metadata, then registers a single partition
 * pointing at the supplied data location.
 *
 * @param dataLocation the location of the result data the partition should cover
 * @param metadata     the query result-set metadata describing the columns
 * @return the name of the created table
 * @throws HiveException the hive exception
 */
protected String createTempMetastoreTable(String dataLocation, QueryResultSetMetadata metadata) throws HiveException {
  // Unique name so concurrent materializations never collide.
  String tableName = "lens_rdd_" + UUID.randomUUID().toString().replace("-", "_");
  Hive hiveClient = Hive.get(HIVE_CONF);
  Table tbl = hiveClient.newTable("default." + tableName);
  tbl.setTableType(TableType.MANAGED_TABLE);
  tbl.setInputFormatClass(INPUT_FORMAT);
  // Mirror the result-set columns onto the Hive table schema.
  for (ResultColumn rc : metadata.getColumns()) {
    tbl.getCols().add(new FieldSchema(rc.getName(), toHiveType(rc.getType()), "default"));
    log.debug("Adding column {} of type {}", rc.getName(), toHiveType(rc.getType()));
  }
  tbl.getPartCols().add(new FieldSchema(TEMP_TABLE_PART_COL, "string", "default"));
  hiveClient.createTable(tbl);
  log.info("Table {} created", tableName);
  // Add a single partition covering the data location.
  AddPartitionDesc partitionDesc = new AddPartitionDesc("default", tableName, false);
  Map<String, String> partSpec = new HashMap<>();
  partSpec.put(TEMP_TABLE_PART_COL, TEMP_TABLE_PART_VAL);
  partitionDesc.addPartition(partSpec, dataLocation);
  hiveClient.createPartitions(partitionDesc);
  log.info("Created partition in {} for data in {}", tableName, dataLocation);
  return tableName;
}
示例5: deleteTempTable
import org.apache.hadoop.hive.ql.metadata.Hive; //导入方法依赖的package包/类
/**
 * Delete temp table. This should be done to release the underlying temp
 * table's metadata (and, for a managed table, its data).
 *
 * @throws LensException if the underlying Hive drop fails
 */
public void deleteTempTable() throws LensException {
  try {
    // Scope the client to the try block; the previous null initializer was
    // dead and widened the variable's scope for no reason.
    Hive hiveClient = Hive.get(HIVE_CONF);
    hiveClient.dropTable("default." + tempTableName);
    log.info("Dropped temp table {}", tempTableName);
  } catch (HiveException e) {
    throw new LensException(e);
  }
}
示例6: setUp
import org.apache.hadoop.hive.ql.metadata.Hive; //导入方法依赖的package包/类
/**
 * Per-test setup: creates the ML test database and connects an ML client to
 * the in-process Lens server.
 *
 * @throws Exception on setup failure
 */
@BeforeTest
public void setUp() throws Exception {
  super.setUp();
  // Create the test database up front (second arg true = ignore if exists).
  Hive hive = Hive.get(new HiveConf());
  Database db = new Database();
  db.setName(TEST_DB);
  hive.createDatabase(db, true);
  // Point the Lens client at the test DB on the locally started server.
  LensClientConfig lensClientConfig = new LensClientConfig();
  lensClientConfig.setLensDatabase(TEST_DB);
  lensClientConfig.set(LensConfConstants.SERVER_BASE_URL,
    "http://localhost:" + getTestPort() + "/lensapi");
  LensClient client = new LensClient(lensClientConfig);
  mlClient = new LensMLClient(client);
}
示例7: tearDown
import org.apache.hadoop.hive.ql.metadata.Hive; //导入方法依赖的package包/类
/**
 * Per-test cleanup: drops the test database (best-effort) and closes the ML
 * client.
 *
 * @throws Exception on cleanup failure
 */
@AfterTest
public void tearDown() throws Exception {
  super.tearDown();
  Hive hive = Hive.get(new HiveConf());
  try {
    hive.dropDatabase(TEST_DB);
  } catch (Exception exc) {
    // Ignore drop db exception — the DB may already be gone; log and continue
    // so the client below is still closed.
    log.error("Exception while dropping database.", exc);
  }
  mlClient.close();
}
示例8: tearDown
import org.apache.hadoop.hive.ql.metadata.Hive; //导入方法依赖的package包/类
/**
 * Per-test cleanup: drops the test database and closes the ML client.
 *
 * @throws Exception on cleanup failure
 */
@AfterTest
public void tearDown() throws Exception {
  super.tearDown();
  try {
    Hive hive = Hive.get(new HiveConf());
    hive.dropDatabase(TEST_DB);
  } finally {
    // Always release the ML client, even when dropping the test DB fails;
    // previously a drop failure leaked the client.
    mlClient.close();
  }
}
示例9: setup
import org.apache.hadoop.hive.ql.metadata.Hive; //导入方法依赖的package包/类
/**
 * Setup: starts a Hive session, creates a database named after the test
 * class, and makes it the session's current database.
 *
 * @throws AlreadyExistsException the already exists exception
 * @throws HiveException          the hive exception
 * @throws IOException            Signals that an I/O exception has occurred.
 */
@BeforeTest
public void setup() throws AlreadyExistsException, HiveException, IOException {
  // SessionState must be started before Hive.get() and before switching DBs.
  SessionState.start(conf);
  Hive client = Hive.get(conf);
  Database database = new Database();
  database.setName(TestDBStorage.class.getSimpleName());
  client.createDatabase(database);
  SessionState.get().setCurrentDatabase(TestDBStorage.class.getSimpleName());
}
示例10: HiveMetadata
import org.apache.hadoop.hive.ql.metadata.Hive; //导入方法依赖的package包/类
/**
 * Creates a metadata helper backed by the Hive metastore.
 *
 * @param conf the Hive configuration used to obtain the metastore client
 * @throws SqlXlateException if the Hive client cannot be obtained
 */
public HiveMetadata(HiveConf conf) throws SqlXlateException {
  // These assignments cannot throw HiveException, so they live outside the try.
  this.conf = conf;
  this.tblRRMap = new HashMap<String, RowResolver>();
  try {
    this.db = Hive.get(conf);
  } catch (HiveException e) {
    throw new SqlXlateException(null, "HiveException thrown : " + e);
  }
}
示例11: createTestDatabaseResources
import org.apache.hadoop.hive.ql.metadata.Hive; //导入方法依赖的package包/类
/**
 * Creates the given test databases and a per-database resource directory under
 * {@code target/resources}, copying test jars into each and writing a
 * {@code jar_order} file that pins an explicit load order.
 *
 * <p>No-op when {@code target/testjars/} does not exist (jars not built).</p>
 *
 * @param testDatabases names of databases to create
 * @param conf          Hive configuration used to reach the metastore
 * @throws Exception on metastore or file-system failures
 */
public static void createTestDatabaseResources(String[] testDatabases, HiveConf conf) throws Exception {
  File srcJarDir = new File("target/testjars/");
  if (!srcJarDir.exists()) {
    // nothing to setup
    return;
  }
  File resDir = new File("target/resources");
  // Fail fast if the directory cannot be created; the old code ignored the
  // mkdir() result, which surfaced later as confusing copy failures.
  if (!resDir.exists() && !resDir.mkdirs()) {
    throw new IllegalStateException("Could not create resource dir: " + resDir);
  }
  // Create databases and resource dirs
  Hive hive = Hive.get(conf);
  File testJarFile = new File("target/testjars/test.jar");
  File serdeJarFile = new File("target/testjars/serde.jar");
  for (String db : testDatabases) {
    Database database = new Database();
    database.setName(db);
    hive.createDatabase(database, true);
    File dbDir = new File(resDir, db);
    if (!dbDir.exists() && !dbDir.mkdirs()) {
      throw new IllegalStateException("Could not create db resource dir: " + dbDir);
    }
    // Add a jar in the directory
    try {
      String[] jarOrder = {
        "x_" + db + ".jar",
        "y_" + db + ".jar",
        "z_" + db + ".jar",
        "serde.jar",
      };
      // Jar order is -> z, y, x. File listing order is x, y, z.
      // We are explicitly specifying jar order via the jar_order file.
      FileUtils.writeLines(new File(dbDir, "jar_order"), Arrays.asList(jarOrder[2], jarOrder[1],
        jarOrder[0], jarOrder[3]));
      FileUtils.copyFile(testJarFile, new File(dbDir, jarOrder[0]));
      FileUtils.copyFile(testJarFile, new File(dbDir, jarOrder[1]));
      FileUtils.copyFile(testJarFile, new File(dbDir, jarOrder[2]));
      FileUtils.copyFile(serdeJarFile, new File(dbDir, jarOrder[3]));
    } catch (FileNotFoundException fnf) {
      log.error("File not found.", fnf);
    }
  }
}
示例12: tearDown
import org.apache.hadoop.hive.ql.metadata.Hive; //导入方法依赖的package包/类
/**
 * Class-level cleanup: drops the test database and stops the session service.
 *
 * @throws Exception on cleanup failure
 */
@AfterClass
public void tearDown() throws Exception {
  Hive hive = Hive.get(conf);
  // Booleans: delete data and ignore the DB if it no longer exists.
  hive.dropDatabase(DB1, true, true);
  sessionService.stop();
}
示例13: testOpenSessionWithDatabase
import org.apache.hadoop.hive.ql.metadata.Hive; //导入方法依赖的package包/类
/**
 * Verifies that opening a session with a {@code database} form parameter sets
 * the session's current database, and that an unknown database name is
 * rejected with a 404.
 *
 * @param mt the media type to exercise (from the data provider)
 * @throws Exception on test failure
 */
@Test(dataProvider = "mediaTypeData")
public void testOpenSessionWithDatabase(MediaType mt) throws Exception {
  // TEST1 - Check if call with database parameter sets current database
  // Create the test DB
  Hive hive = Hive.get(new HiveConf());
  final String testDbName = TestSessionResource.class.getSimpleName();
  Database testOpenDb = new Database();
  testOpenDb.setName(testDbName);
  hive.createDatabase(testOpenDb, true);
  final WebTarget target = target().path("session");
  final FormDataMultiPart mp = new FormDataMultiPart();
  mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("username").build(), "foo"));
  mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("password").build(), "bar"));
  mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("database").build(), testDbName));
  mp.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("sessionconf").fileName("sessionconf").build(),
    new LensConf(), mt));
  final LensSessionHandle handle = target.request(mt).post(Entity.entity(mp, MediaType.MULTIPART_FORM_DATA_TYPE),
    LensSessionHandle.class);
  Assert.assertNotNull(handle);
  // Check if DB set in session service.
  HiveSessionService service = LensServices.get().getService(SessionService.NAME);
  LensSessionImpl session = service.getSession(handle);
  Assert.assertEquals(session.getCurrentDatabase(), testDbName, "Expected current DB to be set to " + testDbName);
  APIResult result = target.queryParam("sessionid", handle).request().delete(APIResult.class);
  Assert.assertEquals(result.getStatus(), APIResult.Status.SUCCEEDED);
  // TEST 2 - Try set database with invalid db name
  final String invalidDB = testDbName + "_invalid_db";
  final FormDataMultiPart form2 = new FormDataMultiPart();
  form2.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("username").build(), "foo"));
  form2.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("password").build(), "bar"));
  form2.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("database").build(), invalidDB));
  form2.bodyPart(new FormDataBodyPart(FormDataContentDisposition.name("sessionconf").fileName("sessionconf").build(),
    new LensConf(), mt));
  try {
    // Previously the result was captured in an unused local; the call is made
    // only for its expected failure.
    target.request(mt).post(Entity.entity(form2, MediaType.MULTIPART_FORM_DATA_TYPE), LensSessionHandle.class);
    Assert.fail("Expected above call to fail with not found exception");
  } catch (NotFoundException nfe) {
    // PASS
  }
}
示例14: getClient
import org.apache.hadoop.hive.ql.metadata.Hive; //导入方法依赖的package包/类
/**
 * Returns a Hive metastore client obtained from this instance's configuration.
 *
 * @return the Hive client
 * @throws HiveException if the client cannot be obtained
 */
private Hive getClient() throws HiveException {
  return Hive.get(config);
}
示例15: dropSources
import org.apache.hadoop.hive.ql.metadata.Hive; //导入方法依赖的package包/类
/**
 * Drops the given database and everything in it from the Hive metastore.
 *
 * @param conf   Hive configuration used to reach the metastore
 * @param dbName name of the database to drop
 * @throws Exception if the drop fails
 */
public void dropSources(HiveConf conf, String dbName) throws Exception {
  Hive hive = Hive.get(conf);
  // Booleans: delete data, ignore a missing DB, and cascade to contained tables.
  hive.dropDatabase(dbName, true, true, true);
}