This article collects representative usage examples of the Java method org.apache.hadoop.hive.conf.HiveConf.setVar. If you are wondering what HiveConf.setVar does and how to call it, the curated code samples below should help. You can also look further into the enclosing class, org.apache.hadoop.hive.conf.HiveConf, for more context.
The following presents 8 code examples of HiveConf.setVar, listed roughly in order of popularity.
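Before the examples, here is a minimal, self-contained sketch of the pattern they all share: build a HiveConf, overwrite a String-typed ConfVars entry with setVar, and read it back with getVar. The metastore URI used here is a placeholder assumption, not taken from any of the projects below.

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.conf.HiveConf.ConfVars;

public class SetVarSketch {
  public static void main(String[] args) {
    HiveConf conf = new HiveConf();
    // setVar overwrites the String-typed entry identified by the ConfVars enum constant.
    conf.setVar(ConfVars.METASTOREURIS, "thrift://localhost:9083"); // placeholder URI (assumption)
    // getVar returns the current value, or the entry's built-in default if it was never set.
    System.out.println(conf.getVar(ConfVars.METASTOREURIS));
  }
}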
Example 1: get
import org.apache.hadoop.hive.conf.HiveConf; // import of the package/class the method depends on
@Override
public CloseableMetaStoreClient get() {
  LOG.debug("Creating tunnel: {}:? -> {} -> {}:{}", localHost, sshRoute, remoteHost, remotePort);
  try {
    TunnelConnectionManager tunnelConnectionManager = tunnelConnectionManagerFactory.create(sshRoute, localHost,
        FIRST_AVAILABLE_PORT, remoteHost, remotePort);
    int localPort = tunnelConnectionManager.getTunnel(remoteHost, remotePort).getAssignedLocalPort();
    tunnelConnectionManager.open();
    LOG.debug("Tunnel created: {}:{} -> {} -> {}:{}", localHost, localPort, sshRoute, remoteHost, remotePort);
    localPort = tunnelConnectionManager.getTunnel(remoteHost, remotePort).getAssignedLocalPort();
    HiveConf localHiveConf = new HiveConf(hiveConf);
    String proxyMetaStoreUris = "thrift://" + localHost + ":" + localPort;
    localHiveConf.setVar(ConfVars.METASTOREURIS, proxyMetaStoreUris);
    LOG.info("Metastore URI {} is being proxied to {}", hiveConf.getVar(ConfVars.METASTOREURIS), proxyMetaStoreUris);
    InvocationHandler handler = new TunnellingMetaStoreClientInvocationHandler(
        metaStoreClientFactory.newInstance(localHiveConf, name), tunnelConnectionManager);
    return (CloseableMetaStoreClient) Proxy.newProxyInstance(getClass().getClassLoader(), INTERFACES, handler);
  } catch (Exception e) {
    String message = String.format("Unable to establish SSH tunnel: '%s:?' -> '%s' -> '%s:%s'", localHost, sshRoute,
        remoteHost, remotePort);
    throw new MetaStoreClientException(message, e);
  }
}
Example 2: TestHiveWriter
import org.apache.hadoop.hive.conf.HiveConf; // import of the package/class the method depends on
public TestHiveWriter() throws Exception {
  partVals = new ArrayList<String>(2);
  partVals.add(PART1_VALUE);
  partVals.add(PART2_VALUE);
  metaStoreURI = null;
  int callTimeoutPoolSize = 1;
  callTimeoutPool = Executors.newFixedThreadPool(callTimeoutPoolSize,
      new ThreadFactoryBuilder().setNameFormat("hiveWriterTest").build());
  // 1) Start metastore
  conf = new HiveConf(this.getClass());
  TestUtil.setConfValues(conf);
  if (metaStoreURI != null) {
    conf.setVar(HiveConf.ConfVars.METASTOREURIS, metaStoreURI);
  }
  // 2) Setup Hive client
  SessionState.start(new CliSessionState(conf));
  driver = new Driver(conf);
}
Example 3: prepare
import org.apache.hadoop.hive.conf.HiveConf; // import of the package/class the method depends on
@Before
public void prepare() throws Exception {
  when(metaStoreClientSupplier.get()).thenReturn(mockMetaStoreClient);
  when(replicaCatalog.getName()).thenReturn(NAME);
  hiveConf = new HiveConf();
  hiveConf.setVar(ConfVars.METASTOREURIS, REPLICA_META_STORE_URIS);
  replica = newReplica(ReplicationMode.FULL);
  tableLocation = temporaryFolder.newFolder("table_location").toURI().toString();
  sourceTable = newTable();
  existingPartition = newPartition("one", "two");
  ColumnStatisticsObj columnStatisticsObj1 = new ColumnStatisticsObj(COLUMN_A, "string",
      new ColumnStatisticsData(_Fields.LONG_STATS, new LongColumnStatsData(0, 1)));
  ColumnStatisticsObj columnStatisticsObj2 = new ColumnStatisticsObj(COLUMN_B, "string",
      new ColumnStatisticsData(_Fields.LONG_STATS, new LongColumnStatsData(1, 2)));
  columnStatisticsObjs = Arrays.asList(columnStatisticsObj1, columnStatisticsObj2);
  ColumnStatisticsDesc statsDesc = new ColumnStatisticsDesc(true, DB_NAME, TABLE_NAME);
  columnStatistics = new ColumnStatistics(statsDesc, columnStatisticsObjs);
  tableAndStatistics = new TableAndStatistics(sourceTable, columnStatistics);
  existingReplicaTable = new Table(sourceTable);
  when(mockReplicaLocationManager.getTableLocation()).thenReturn(new Path(tableLocation));
  when(mockReplicaLocationManager.getPartitionBaseLocation()).thenReturn(new Path(tableLocation));
  when(mockMetaStoreClient.getTable(DB_NAME, TABLE_NAME)).thenReturn(existingReplicaTable);
}
Example 4: createLocalHiveConf
import org.apache.hadoop.hive.conf.HiveConf; // import of the package/class the method depends on
private HiveConf createLocalHiveConf(
    TunnelConnectionManager tunnelConnectionManager,
    String localHost,
    String remoteHost,
    int remotePort,
    HiveConf hiveConf) {
  int localPort = tunnelConnectionManager.getTunnel(remoteHost, remotePort).getAssignedLocalPort();
  String proxyMetaStoreUris = "thrift://" + localHost + ":" + localPort;
  HiveConf localHiveConf = new HiveConf(hiveConf);
  localHiveConf.setVar(HiveConf.ConfVars.METASTOREURIS, proxyMetaStoreUris);
  return localHiveConf;
}
Example 5: applyAuthorizationConfigPolicy
import org.apache.hadoop.hive.conf.HiveConf; // import of the package/class the method depends on
@Override
public void applyAuthorizationConfigPolicy(HiveConf hiveConf) throws HiveAuthzPluginException {
  super.applyAuthorizationConfigPolicy(hiveConf);
  hiveConf.setVar(ConfVars.HIVE_AUTHORIZATION_TABLE_OWNER_GRANTS, "");
}
Example 6: newInstanceCannotConnectThrowsMetaStoreClientException
import org.apache.hadoop.hive.conf.HiveConf; // import of the package/class the method depends on
@Test(expected = MetaStoreClientException.class)
public void newInstanceCannotConnectThrowsMetaStoreClientException() throws Exception {
  HiveConf conf = new HiveConf();
  conf.setVar(ConfVars.METASTOREURIS, "thrift://ghost:1234");
  factory.newInstance(conf, "name");
}
Example 7: getWaggleDanceClient
import org.apache.hadoop.hive.conf.HiveConf; // import of the package/class the method depends on
private HiveMetaStoreClient getWaggleDanceClient() throws MetaException {
  HiveConf conf = new HiveConf();
  conf.setVar(ConfVars.METASTOREURIS, getWaggleDanceThriftUri());
  conf.setBoolVar(ConfVars.METASTORE_EXECUTE_SET_UGI, true);
  return new HiveMetaStoreClient(conf);
}
Example 8: setConfValues
import org.apache.hadoop.hive.conf.HiveConf; // import of the package/class the method depends on
/**
* Set up the configuration so it will use the DbTxnManager, concurrency will be set to true,
* and the JDBC configs will be set for putting the transaction and lock info in the embedded
* metastore.
* @param conf HiveConf to add these values to.
*/
public static void setConfValues(HiveConf conf) {
  conf.setVar(HiveConf.ConfVars.HIVE_TXN_MANAGER, txnMgr);
  conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, true);
  conf.set("fs.raw.impl", RawFileSystem.class.getName());
}