This article collects typical usage examples of the Java method org.apache.hadoop.hive.conf.HiveConf.set. If you are wondering what HiveConf.set does, how to call it, or what real uses of it look like, the curated examples below should help. You can also explore further usage examples of its enclosing class, org.apache.hadoop.hive.conf.HiveConf.
The following presents 11 code examples of the HiveConf.set method, ordered by popularity by default.
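Before the examples, here is a minimal, self-contained sketch of the method itself. HiveConf.set(String, String) defines or overrides a single configuration property; the property value below is purely illustrative:

import org.apache.hadoop.hive.conf.HiveConf;

public class HiveConfSetDemo {
  public static void main(String[] args) {
    HiveConf conf = new HiveConf();
    // set(name, value) stores the property in the underlying Configuration
    conf.set("hive.metastore.uris", "thrift://localhost:9083"); // illustrative URI
    System.out.println(conf.get("hive.metastore.uris"));
  }
}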
Example 1: HiveMetaStore
import org.apache.hadoop.hive.conf.HiveConf; // import the package/class this method depends on
public HiveMetaStore(Configuration conf, HdfsSinkConnectorConfig connectorConfig) throws HiveMetaStoreException {
  HiveConf hiveConf = new HiveConf(conf, HiveConf.class);
  String hiveConfDir = connectorConfig.getString(HdfsSinkConnectorConfig.HIVE_CONF_DIR_CONFIG);
  String hiveMetaStoreURIs = connectorConfig.getString(HdfsSinkConnectorConfig.HIVE_METASTORE_URIS_CONFIG);
  if (hiveMetaStoreURIs.isEmpty()) {
    log.warn("hive.metastore.uris empty, an embedded Hive metastore will be "
        + "created in the directory where the connector is started. "
        + "You need to start Hive in that specific directory to query the data.");
  }
  if (!hiveConfDir.equals("")) {
    String hiveSitePath = hiveConfDir + "/hive-site.xml";
    File hiveSite = new File(hiveSitePath);
    if (!hiveSite.exists()) {
      // log the directory path, not the HiveConf object
      log.warn("hive-site.xml does not exist in provided Hive configuration directory {}.", hiveConfDir);
    }
    hiveConf.addResource(new Path(hiveSitePath));
  }
  hiveConf.set("hive.metastore.uris", hiveMetaStoreURIs);
  try {
    client = HCatUtil.getHiveMetastoreClient(hiveConf);
  } catch (IOException | MetaException e) {
    throw new HiveMetaStoreException(e);
  }
}
Example 2: prepHiveConfAndData
import org.apache.hadoop.hive.conf.HiveConf; // import the package/class this method depends on
protected static void prepHiveConfAndData() throws Exception {
  hiveConf = new HiveConf();
  // Configure metastore persistence db location on local filesystem
  final String dbUrl = String.format("jdbc:derby:;databaseName=%s;create=true", getTempDir("metastore_db"));
  hiveConf.set(ConfVars.METASTORECONNECTURLKEY.varname, dbUrl);
  hiveConf.set(ConfVars.SCRATCHDIR.varname, "file:///" + getTempDir("scratch_dir"));
  hiveConf.set(ConfVars.LOCALSCRATCHDIR.varname, getTempDir("local_scratch_dir"));
  // Set MiniDFS conf in HiveConf
  hiveConf.set(FS_DEFAULT_NAME_KEY, dfsConf.get(FS_DEFAULT_NAME_KEY));
  whDir = hiveConf.get(ConfVars.METASTOREWAREHOUSE.varname);
  FileSystem.mkdirs(fs, new Path(whDir), new FsPermission((short) 0777));
  studentData = getPhysicalFileFromResource("student.txt");
  voterData = getPhysicalFileFromResource("voter.txt");
}
Example 3: getAndSetDelegationToken
import org.apache.hadoop.hive.conf.HiveConf; // import the package/class this method depends on
/**
 * Helper method that gets the delegation token using <i>processHiveClient</i> for given <i>proxyUserName</i>
 * and sets it in proxy user UserGroupInformation and proxy user HiveConf.
 */
protected static void getAndSetDelegationToken(final HiveConf proxyUserHiveConf, final UserGroupInformation proxyUGI,
    final String proxyUserName, final HiveClient processHiveClient) {
  checkNotNull(processHiveClient, "process user Hive client required");
  checkNotNull(proxyUserHiveConf, "Proxy user HiveConf required");
  checkNotNull(proxyUGI, "Proxy user UserGroupInformation required");
  checkArgument(!Strings.isNullOrEmpty(proxyUserName), "valid proxy username required");
  try {
    final String delegationToken = processHiveClient.getDelegationToken(proxyUserName);
    Utils.setTokenStr(proxyUGI, delegationToken, "DremioDelegationTokenForHiveMetaStoreServer");
    proxyUserHiveConf.set("hive.metastore.token.signature", "DremioDelegationTokenForHiveMetaStoreServer");
  } catch (Exception e) {
    // preserve the underlying failure as the cause
    throw new RuntimeException("Couldn't generate Hive metastore delegation token for user " + proxyUserName, e);
  }
}
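The token set above only takes effect for work executed as the proxy user, so callers are expected to wrap subsequent metastore access in UserGroupInformation.doAs. A hedged sketch of that call pattern ("alice" and openMetastoreConnection are illustrative; the other names come from the example):

import java.security.PrivilegedExceptionAction;
import org.apache.hadoop.security.UserGroupInformation;

UserGroupInformation proxyUGI =
    UserGroupInformation.createProxyUser("alice", UserGroupInformation.getLoginUser());
getAndSetDelegationToken(proxyUserHiveConf, proxyUGI, "alice", processHiveClient);
// metastore calls must run inside doAs so the token held by proxyUGI is presented
proxyUGI.doAs((PrivilegedExceptionAction<Void>) () -> {
  openMetastoreConnection(proxyUserHiveConf); // hypothetical helper
  return null;
});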
Example 4: HiveLanguageParser
import org.apache.hadoop.hive.conf.HiveConf; // import the package/class this method depends on
public HiveLanguageParser(HiveConf hiveConfiguration) {
  hiveConf = new HiveConf(hiveConfiguration);
  if (hiveConf.get(HDFS_SESSION_PATH_KEY) == null) {
    hiveConf.set(HDFS_SESSION_PATH_KEY, hdfsTemporaryDirectory(hiveConf));
  }
  if (hiveConf.get(LOCAL_SESSION_PATH_KEY) == null) {
    hiveConf.set(LOCAL_SESSION_PATH_KEY, localTemporaryDirectory());
  }
}
Example 5: HiveAuthorizationHelper
import org.apache.hadoop.hive.conf.HiveConf; // import the package/class this method depends on
public HiveAuthorizationHelper(final IMetaStoreClient mClient, final HiveConf hiveConf, final String user) {
  authzEnabled = hiveConf.getBoolVar(ConfVars.HIVE_AUTHORIZATION_ENABLED);
  if (!authzEnabled) {
    authorizerV2 = null;
    return;
  }
  try {
    final HiveConf hiveConfCopy = new HiveConf(hiveConf);
    hiveConfCopy.set("user.name", user);
    final HiveAuthenticationProvider authenticator = HiveUtils.getAuthenticator(hiveConfCopy,
        HiveConf.ConfVars.HIVE_AUTHENTICATOR_MANAGER);
    SessionState ss = new SessionState(hiveConfCopy, user);
    SessionState.start(ss);
    authenticator.setSessionState(ss);
    HiveAuthorizerFactory authorizerFactory =
        HiveUtils.getAuthorizerFactory(hiveConfCopy, HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER);
    HiveAuthzSessionContext.Builder authzContextBuilder = new HiveAuthzSessionContext.Builder();
    authzContextBuilder.setClientType(CLIENT_TYPE.HIVESERVER2); // Drill is emulating HS2 here
    authorizerV2 = authorizerFactory.createHiveAuthorizer(
        new HiveMetastoreClientFactory() {
          @Override
          public IMetaStoreClient getHiveMetastoreClient() throws HiveAuthzPluginException {
            return mClient;
          }
        },
        hiveConf, authenticator, authzContextBuilder.build());
    authorizerV2.applyAuthorizationConfigPolicy(hiveConfCopy);
  } catch (final HiveException e) {
    throw new DrillRuntimeException("Failed to initialize Hive authorization components: " + e.getMessage(), e);
  }
  logger.trace("Hive authorization enabled");
}
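Once constructed, authorizerV2 is what vetoes individual operations. A hedged sketch of a read check (the database and table names are illustrative, and passing a null HiveAuthzContext is an assumption here):

List<HivePrivilegeObject> inputs = Arrays.asList(
    new HivePrivilegeObject(HivePrivilegeObjectType.TABLE_OR_VIEW, "db", "employees"));
// throws HiveAccessControlException if the session user may not read db.employees
authorizerV2.checkPrivileges(HiveOperationType.QUERY, inputs,
    Collections.emptyList(), null);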
Example 6: getConf
import org.apache.hadoop.hive.conf.HiveConf; // import the package/class this method depends on
private Configuration getConf(final Map<String, String> hiveConfigOverride) {
  final HiveConf hiveConf = new HiveConf();
  for (Entry<String, String> prop : hiveConfigOverride.entrySet()) {
    hiveConf.set(prop.getKey(), prop.getValue());
  }
  return hiveConf;
}
Example 7: HiveSchemaFactory
import org.apache.hadoop.hive.conf.HiveConf; // import the package/class this method depends on
public HiveSchemaFactory(HiveStoragePlugin plugin, String name, Map<String, String> hiveConfigOverride) throws ExecutionSetupException {
  this.schemaName = name;
  this.plugin = plugin;
  this.hiveConfigOverride = hiveConfigOverride;
  hiveConf = new HiveConf();
  if (hiveConfigOverride != null) {
    for (Map.Entry<String, String> entry : hiveConfigOverride.entrySet()) {
      final String property = entry.getKey();
      final String value = entry.getValue();
      hiveConf.set(property, value);
      logger.trace("HiveConfig Override {}={}", property, value);
    }
  }
  isHS2DoAsSet = hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS);
  isDrillImpersonationEnabled = plugin.getContext().getConfig().getBoolean(ExecConstants.IMPERSONATION_ENABLED);
  try {
    processUserMetastoreClient =
        DrillHiveMetaStoreClient.createNonCloseableClientWithCaching(hiveConf, hiveConfigOverride);
  } catch (MetaException e) {
    throw new ExecutionSetupException("Failure setting up Hive metastore client.", e);
  }
}
Example 8: createHiveConf
import org.apache.hadoop.hive.conf.HiveConf; // import the package/class this method depends on
private static HiveConf createHiveConf(final Map<String, String> hiveConfigOverride) {
  final HiveConf hiveConf = new HiveConf();
  for (Entry<String, String> config : hiveConfigOverride.entrySet()) {
    final String key = config.getKey();
    final String value = config.getValue();
    hiveConf.set(key, value);
    if (logger.isTraceEnabled()) {
      logger.trace("HiveConfig Override {}={}", key, value);
    }
  }
  return hiveConf;
}
Example 9: HiveAuthorizationHelper
import org.apache.hadoop.hive.conf.HiveConf; // import the package/class this method depends on
public HiveAuthorizationHelper(final IMetaStoreClient mClient, final HiveConf hiveConf, final String user) {
  authzEnabled = hiveConf.getBoolVar(ConfVars.HIVE_AUTHORIZATION_ENABLED);
  if (!authzEnabled) {
    authorizerV2 = null;
    return;
  }
  try {
    final HiveConf hiveConfCopy = new HiveConf(hiveConf);
    hiveConfCopy.set("user.name", user);
    final HiveAuthenticationProvider authenticator = HiveUtils.getAuthenticator(hiveConfCopy,
        HiveConf.ConfVars.HIVE_AUTHENTICATOR_MANAGER);
    SessionState ss = new SessionState(hiveConfCopy, user);
    SessionState.start(ss);
    authenticator.setSessionState(ss);
    HiveAuthorizerFactory authorizerFactory =
        HiveUtils.getAuthorizerFactory(hiveConfCopy, HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER);
    HiveAuthzSessionContext.Builder authzContextBuilder = new HiveAuthzSessionContext.Builder();
    authzContextBuilder.setClientType(CLIENT_TYPE.HIVESERVER2); // Dremio is emulating HS2 here
    authorizerV2 = authorizerFactory.createHiveAuthorizer(
        new HiveMetastoreClientFactory() {
          @Override
          public IMetaStoreClient getHiveMetastoreClient() throws HiveAuthzPluginException {
            return mClient;
          }
        },
        hiveConf, authenticator, authzContextBuilder.build());
    authorizerV2.applyAuthorizationConfigPolicy(hiveConfCopy);
  } catch (final HiveException e) {
    throw new RuntimeException("Failed to initialize Hive authorization components: " + e.getMessage(), e);
  }
  logger.trace("Hive authorization enabled");
}
Example 10: newHiveConf
import org.apache.hadoop.hive.conf.HiveConf; // import the package/class this method depends on
private HiveConf newHiveConf() {
  HiveConf conf = new HiveConf(SessionState.class);
  conf.set(ConfVars.METASTORECONNECTURLKEY.varname, String.format("jdbc:derby:;databaseName=%s;create=true", dbDir));
  conf.set(FileSystem.FS_DEFAULT_NAME_KEY, "file:///");
  conf.set(ConfVars.METASTOREWAREHOUSE.varname, whDir);
  conf.set("mapred.job.tracker", "local");
  conf.set(ConfVars.SCRATCHDIR.varname, getTempDir("scratch_dir"));
  conf.set(ConfVars.LOCALSCRATCHDIR.varname, getTempDir("local_scratch_dir"));
  conf.set(ConfVars.DYNAMICPARTITIONINGMODE.varname, "nonstrict");
  return conf;
}
Example 11: setConfValues
import org.apache.hadoop.hive.conf.HiveConf; // import the package/class this method depends on
/**
 * Set up the configuration so it will use the DbTxnManager, concurrency will be set to true,
 * and the JDBC configs will be set for putting the transaction and lock info in the embedded
 * metastore.
 * @param conf HiveConf to add these values to.
 */
public static void setConfValues(HiveConf conf) {
  conf.setVar(HiveConf.ConfVars.HIVE_TXN_MANAGER, txnMgr);
  conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, true);
  conf.set("fs.raw.impl", RawFileSystem.class.getName());
}
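A hedged sketch of how a test might consume this configuration, assuming the txnMgr field names DbTxnManager as the javadoc implies (TxnManagerFactory is Hive's org.apache.hadoop.hive.ql.lockmgr factory):

HiveConf conf = new HiveConf();
setConfValues(conf);
SessionState.start(conf);
// expected to return a DbTxnManager because of HIVE_TXN_MANAGER above
HiveTxnManager txnManager = TxnManagerFactory.getTxnManagerFactory().getTxnManager(conf);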