当前位置: 首页>>代码示例>>Java>>正文


Java HiveConf.set方法代码示例

本文整理汇总了Java中org.apache.hadoop.hive.conf.HiveConf.set方法的典型用法代码示例。如果您正苦于以下问题:Java HiveConf.set方法的具体用法?Java HiveConf.set怎么用?Java HiveConf.set使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在org.apache.hadoop.hive.conf.HiveConf的用法示例。


在下文中一共展示了HiveConf.set方法的11个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。

示例1: HiveMetaStore

import org.apache.hadoop.hive.conf.HiveConf; //导入方法依赖的package包/类
/**
 * Creates a Hive metastore wrapper for the HDFS sink connector.
 *
 * <p>Reads the Hive configuration directory and metastore URIs from the connector
 * config, layers an optional {@code hive-site.xml} onto the Hadoop configuration,
 * and opens a metastore client.
 *
 * @param conf base Hadoop configuration to seed the {@link HiveConf}
 * @param connectorConfig connector settings supplying the Hive conf dir and metastore URIs
 * @throws HiveMetaStoreException if the metastore client cannot be created
 */
public HiveMetaStore(Configuration conf, HdfsSinkConnectorConfig connectorConfig) throws HiveMetaStoreException {
  HiveConf hiveConf = new HiveConf(conf, HiveConf.class);
  String hiveConfDir = connectorConfig.getString(HdfsSinkConnectorConfig.HIVE_CONF_DIR_CONFIG);
  String hiveMetaStoreURIs = connectorConfig.getString(HdfsSinkConnectorConfig.HIVE_METASTORE_URIS_CONFIG);
  if (hiveMetaStoreURIs.isEmpty()) {
    log.warn("hive.metastore.uris empty, an embedded Hive metastore will be "
             + "created in the directory the connector is started. "
             + "You need to start Hive in that specific directory to query the data.");
  }
  if (!hiveConfDir.isEmpty()) {
    String hiveSitePath = hiveConfDir + "/hive-site.xml";
    File hiveSite = new File(hiveSitePath);
    if (!hiveSite.exists()) {
      // FIX: previously logged the HiveConf object here; the message describes the
      // configuration *directory*, so log the directory path instead.
      log.warn("hive-site.xml does not exist in provided Hive configuration directory {}.", hiveConfDir);
    }
    // Added even if the file is missing, matching prior behavior (addResource tolerates it
    // until the resource is actually resolved).
    hiveConf.addResource(new Path(hiveSitePath));
  }
  hiveConf.set("hive.metastore.uris", hiveMetaStoreURIs);
  try {
    client = HCatUtil.getHiveMetastoreClient(hiveConf);
  } catch (IOException | MetaException e) {
    throw new HiveMetaStoreException(e);
  }
}
 
开发者ID:jiangxiluning,项目名称:kafka-connect-hdfs,代码行数:25,代码来源:HiveMetaStore.java

示例2: prepHiveConfAndData

import org.apache.hadoop.hive.conf.HiveConf; //导入方法依赖的package包/类
/**
 * Prepares the shared {@code hiveConf} for tests: an embedded Derby-backed metastore,
 * local scratch directories, the MiniDFS filesystem, a warehouse directory with open
 * permissions, and the physical test data files.
 */
protected static void prepHiveConfAndData() throws Exception {
  hiveConf = new HiveConf();

  // Embedded Derby database on the local filesystem backs the metastore.
  final String metastoreDbUrl =
      "jdbc:derby:;databaseName=" + getTempDir("metastore_db") + ";create=true";
  hiveConf.set(ConfVars.METASTORECONNECTURLKEY.varname, metastoreDbUrl);

  // Scratch space for Hive job intermediates, both "remote" and local.
  hiveConf.set(ConfVars.SCRATCHDIR.varname, "file:///" + getTempDir("scratch_dir"));
  hiveConf.set(ConfVars.LOCALSCRATCHDIR.varname, getTempDir("local_scratch_dir"));

  // Propagate the MiniDFS default filesystem into the Hive configuration.
  hiveConf.set(FS_DEFAULT_NAME_KEY, dfsConf.get(FS_DEFAULT_NAME_KEY));

  // Create the warehouse root world-writable so impersonated users can write to it.
  whDir = hiveConf.get(ConfVars.METASTOREWAREHOUSE.varname);
  FileSystem.mkdirs(fs, new Path(whDir), new FsPermission((short) 0777));

  studentData = getPhysicalFileFromResource("student.txt");
  voterData = getPhysicalFileFromResource("voter.txt");
}
 
开发者ID:skhalifa,项目名称:QDrill,代码行数:20,代码来源:BaseTestHiveImpersonation.java

示例3: getAndSetDelegationToken

import org.apache.hadoop.hive.conf.HiveConf; //导入方法依赖的package包/类
/**
 * Helper method that gets the delegation token using <i>processHiveClient</i> for given <i>proxyUserName</i>
 * and sets it in proxy user UserGroupInformation and proxy user HiveConf.
 *
 * @param proxyUserHiveConf HiveConf of the proxy user; receives the token signature key
 * @param proxyUGI UserGroupInformation of the proxy user; receives the token itself
 * @param proxyUserName user to fetch the delegation token for; must be non-empty
 * @param processHiveClient Hive client running as the process user, used to obtain the token
 * @throws RuntimeException if the token cannot be obtained or installed
 */
protected static void getAndSetDelegationToken(final HiveConf proxyUserHiveConf, final UserGroupInformation proxyUGI,
    final String proxyUserName, final HiveClient processHiveClient) {
  checkNotNull(processHiveClient, "process user Hive client required");
  checkNotNull(proxyUserHiveConf, "Proxy user HiveConf required");
  checkNotNull(proxyUGI, "Proxy user UserGroupInformation required");
  checkArgument(!Strings.isNullOrEmpty(proxyUserName), "valid proxy username required");

  try {
    final String delegationToken = processHiveClient.getDelegationToken(proxyUserName);
    // The same signature string must be set on both the UGI token and the HiveConf so the
    // metastore client selects this token at connect time.
    Utils.setTokenStr(proxyUGI, delegationToken, "DremioDelegationTokenForHiveMetaStoreServer");
    proxyUserHiveConf.set("hive.metastore.token.signature", "DremioDelegationTokenForHiveMetaStoreServer");
  } catch (Exception e) {
    // FIX: propagate the original exception as the cause; it was previously dropped,
    // making token failures impossible to diagnose from the stack trace.
    throw new RuntimeException("Couldn't generate Hive metastore delegation token for user " + proxyUserName, e);
  }
}
 
开发者ID:dremio,项目名称:dremio-oss,代码行数:20,代码来源:HiveClient.java

示例4: HiveLanguageParser

import org.apache.hadoop.hive.conf.HiveConf; //导入方法依赖的package包/类
public HiveLanguageParser(HiveConf hiveConfiguration) {
  hiveConf = new HiveConf(hiveConfiguration);
  if (hiveConf.get(HDFS_SESSION_PATH_KEY) == null) {
    hiveConf.set(HDFS_SESSION_PATH_KEY, hdfsTemporaryDirectory(hiveConf));
  }
  if (hiveConf.get(LOCAL_SESSION_PATH_KEY) == null) {
    hiveConf.set(LOCAL_SESSION_PATH_KEY, localTemporaryDirectory());
  }
}
 
开发者ID:HotelsDotCom,项目名称:circus-train,代码行数:10,代码来源:HiveLanguageParser.java

示例5: HiveAuthorizationHelper

import org.apache.hadoop.hive.conf.HiveConf; //导入方法依赖的package包/类
/**
 * Builds a Hive V2 authorizer for checks performed on behalf of {@code user}.
 *
 * <p>If authorization is disabled in the given configuration, no authorizer is
 * created and the helper is a no-op. Otherwise the authenticator and authorizer
 * factory configured in Hive are instantiated against a per-user copy of the
 * configuration, emulating a HiveServer2 session.
 *
 * @param mClient metastore client handed to the authorizer via a client factory
 * @param hiveConf Hive configuration; read for the enable flag, passed (uncopied) to the authorizer
 * @param user end-user name the session and authorization checks run as
 * @throws DrillRuntimeException if any authorization component fails to initialize
 */
public HiveAuthorizationHelper(final IMetaStoreClient mClient, final HiveConf hiveConf, final String user) {
  authzEnabled = hiveConf.getBoolVar(ConfVars.HIVE_AUTHORIZATION_ENABLED);
  if (!authzEnabled) {
    // Authorization turned off: leave the authorizer unset and skip all setup.
    authorizerV2 = null;
    return;
  }

  try {
    // Copy the conf so the per-user "user.name" override does not leak to other sessions.
    final HiveConf hiveConfCopy = new HiveConf(hiveConf);
    hiveConfCopy.set("user.name", user);

    final HiveAuthenticationProvider authenticator = HiveUtils.getAuthenticator(hiveConfCopy,
        HiveConf.ConfVars.HIVE_AUTHENTICATOR_MANAGER);
    // A SessionState must be started before the authorizer is created; the authenticator
    // is then bound to that session.
    SessionState ss = new SessionState(hiveConfCopy, user);
    SessionState.start(ss);

    authenticator.setSessionState(ss);

    HiveAuthorizerFactory authorizerFactory =
        HiveUtils.getAuthorizerFactory(hiveConfCopy, HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER);

    HiveAuthzSessionContext.Builder authzContextBuilder = new HiveAuthzSessionContext.Builder();
    authzContextBuilder.setClientType(CLIENT_TYPE.HIVESERVER2); // Drill is emulating HS2 here

    // NOTE(review): the factory receives the ORIGINAL hiveConf while the policy below is
    // applied to the copy — presumably intentional; confirm before changing.
    authorizerV2 = authorizerFactory.createHiveAuthorizer(
        new HiveMetastoreClientFactory() {
          @Override
          public IMetaStoreClient getHiveMetastoreClient() throws HiveAuthzPluginException {
            return mClient;
          }
        },
        hiveConf, authenticator, authzContextBuilder.build());

    authorizerV2.applyAuthorizationConfigPolicy(hiveConfCopy);
  } catch (final HiveException e) {
    throw new DrillRuntimeException("Failed to initialize Hive authorization components: " + e.getMessage(), e);
  }

  logger.trace("Hive authorization enabled");
}
 
开发者ID:skhalifa,项目名称:QDrill,代码行数:41,代码来源:HiveAuthorizationHelper.java

示例6: getConf

import org.apache.hadoop.hive.conf.HiveConf; //导入方法依赖的package包/类
/**
 * Builds a fresh {@link HiveConf} with every entry of the given override map applied.
 *
 * @param hiveConfigOverride key/value pairs to set on the new configuration
 * @return the populated configuration
 */
private Configuration getConf(final Map<String, String> hiveConfigOverride) {
  final HiveConf conf = new HiveConf();
  hiveConfigOverride.forEach(conf::set);
  return conf;
}
 
开发者ID:skhalifa,项目名称:QDrill,代码行数:9,代码来源:HiveDrillNativeScanBatchCreator.java

示例7: HiveSchemaFactory

import org.apache.hadoop.hive.conf.HiveConf; //导入方法依赖的package包/类
/**
 * Creates the schema factory for a Hive storage plugin: applies any configuration
 * overrides to a fresh {@link HiveConf}, records the impersonation flags, and opens
 * the process-user metastore client.
 *
 * @param plugin owning storage plugin
 * @param name schema name registered for this factory
 * @param hiveConfigOverride optional key/value overrides applied to the HiveConf; may be null
 * @throws ExecutionSetupException if the metastore client cannot be created
 */
public HiveSchemaFactory(HiveStoragePlugin plugin, String name, Map<String, String> hiveConfigOverride) throws ExecutionSetupException {
  this.schemaName = name;
  this.plugin = plugin;
  this.hiveConfigOverride = hiveConfigOverride;

  hiveConf = new HiveConf();
  if (hiveConfigOverride != null) {
    for (Map.Entry<String, String> override : hiveConfigOverride.entrySet()) {
      hiveConf.set(override.getKey(), override.getValue());
      logger.trace("HiveConfig Override {}={}", override.getKey(), override.getValue());
    }
  }

  // Impersonation is active only when both the HS2 doAs flag and Drill's own
  // impersonation option are considered (callers combine these two flags).
  isHS2DoAsSet = hiveConf.getBoolVar(ConfVars.HIVE_SERVER2_ENABLE_DOAS);
  isDrillImpersonationEnabled = plugin.getContext().getConfig().getBoolean(ExecConstants.IMPERSONATION_ENABLED);

  try {
    processUserMetastoreClient =
        DrillHiveMetaStoreClient.createNonCloseableClientWithCaching(hiveConf, hiveConfigOverride);
  } catch (MetaException e) {
    throw new ExecutionSetupException("Failure setting up Hive metastore client.", e);
  }
}
 
开发者ID:skhalifa,项目名称:QDrill,代码行数:26,代码来源:HiveSchemaFactory.java

示例8: createHiveConf

import org.apache.hadoop.hive.conf.HiveConf; //导入方法依赖的package包/类
/**
 * Builds a {@link HiveConf} seeded with the given overrides, tracing each applied
 * entry when trace logging is enabled.
 *
 * @param hiveConfigOverride key/value pairs to set on the new configuration
 * @return the populated configuration
 */
private static HiveConf createHiveConf(final Map<String, String> hiveConfigOverride) {
  final HiveConf hiveConf = new HiveConf();
  hiveConfigOverride.forEach((name, value) -> {
    hiveConf.set(name, value);
    if (logger.isTraceEnabled()) {
      logger.trace("HiveConfig Override {}={}", name, value);
    }
  });
  return hiveConf;
}
 
开发者ID:dremio,项目名称:dremio-oss,代码行数:13,代码来源:HiveStoragePlugin.java

示例9: HiveAuthorizationHelper

import org.apache.hadoop.hive.conf.HiveConf; //导入方法依赖的package包/类
/**
 * Builds a Hive V2 authorizer for checks performed on behalf of {@code user}.
 *
 * <p>If authorization is disabled in the given configuration, no authorizer is
 * created and the helper is a no-op. Otherwise the authenticator and authorizer
 * factory configured in Hive are instantiated against a per-user copy of the
 * configuration, emulating a HiveServer2 session.
 *
 * @param mClient metastore client handed to the authorizer via a client factory
 * @param hiveConf Hive configuration; read for the enable flag, passed (uncopied) to the authorizer
 * @param user end-user name the session and authorization checks run as
 * @throws RuntimeException if any authorization component fails to initialize
 */
public HiveAuthorizationHelper(final IMetaStoreClient mClient, final HiveConf hiveConf, final String user) {
  authzEnabled = hiveConf.getBoolVar(ConfVars.HIVE_AUTHORIZATION_ENABLED);
  if (!authzEnabled) {
    // Authorization turned off: leave the authorizer unset and skip all setup.
    authorizerV2 = null;
    return;
  }

  try {
    // Copy the conf so the per-user "user.name" override does not leak to other sessions.
    final HiveConf hiveConfCopy = new HiveConf(hiveConf);
    hiveConfCopy.set("user.name", user);

    final HiveAuthenticationProvider authenticator = HiveUtils.getAuthenticator(hiveConfCopy,
        HiveConf.ConfVars.HIVE_AUTHENTICATOR_MANAGER);
    // A SessionState must be started before the authorizer is created; the authenticator
    // is then bound to that session.
    SessionState ss = new SessionState(hiveConfCopy, user);
    SessionState.start(ss);

    authenticator.setSessionState(ss);

    HiveAuthorizerFactory authorizerFactory =
        HiveUtils.getAuthorizerFactory(hiveConfCopy, HiveConf.ConfVars.HIVE_AUTHORIZATION_MANAGER);

    HiveAuthzSessionContext.Builder authzContextBuilder = new HiveAuthzSessionContext.Builder();
    authzContextBuilder.setClientType(CLIENT_TYPE.HIVESERVER2); // Dremio is emulating HS2 here

    // NOTE(review): the factory receives the ORIGINAL hiveConf while the policy below is
    // applied to the copy — presumably intentional; confirm before changing.
    authorizerV2 = authorizerFactory.createHiveAuthorizer(
        new HiveMetastoreClientFactory() {
          @Override
          public IMetaStoreClient getHiveMetastoreClient() throws HiveAuthzPluginException {
            return mClient;
          }
        },
        hiveConf, authenticator, authzContextBuilder.build());

    authorizerV2.applyAuthorizationConfigPolicy(hiveConfCopy);
  } catch (final HiveException e) {
    throw new RuntimeException("Failed to initialize Hive authorization components: " + e.getMessage(), e);
  }

  logger.trace("Hive authorization enabled");
}
 
开发者ID:dremio,项目名称:dremio-oss,代码行数:41,代码来源:HiveAuthorizationHelper.java

示例10: newHiveConf

import org.apache.hadoop.hive.conf.HiveConf; //导入方法依赖的package包/类
/**
 * Builds the Hive configuration used by the test data generator: an embedded
 * Derby metastore, a local-filesystem warehouse, local job tracking, local
 * scratch directories, and non-strict dynamic partitioning.
 *
 * @return a fully configured {@link HiveConf}
 */
private HiveConf newHiveConf() {
  final HiveConf hiveConf = new HiveConf(SessionState.class);

  // Embedded Derby metastore rooted at the test database directory.
  hiveConf.set(ConfVars.METASTORECONNECTURLKEY.varname,
      "jdbc:derby:;databaseName=" + dbDir + ";create=true");

  // Everything runs on the local filesystem with a local job tracker.
  hiveConf.set(FileSystem.FS_DEFAULT_NAME_KEY, "file:///");
  hiveConf.set(ConfVars.METASTOREWAREHOUSE.varname, whDir);
  hiveConf.set("mapred.job.tracker", "local");

  hiveConf.set(ConfVars.SCRATCHDIR.varname, getTempDir("scratch_dir"));
  hiveConf.set(ConfVars.LOCALSCRATCHDIR.varname, getTempDir("local_scratch_dir"));

  // Allow dynamic partition inserts without a static partition column.
  hiveConf.set(ConfVars.DYNAMICPARTITIONINGMODE.varname, "nonstrict");

  return hiveConf;
}
 
开发者ID:dremio,项目名称:dremio-oss,代码行数:15,代码来源:HiveTestDataGenerator.java

示例11: setConfValues

import org.apache.hadoop.hive.conf.HiveConf; //导入方法依赖的package包/类
/**
 * Set up the configuration so it will use the DbTxnManager, concurrency will be set to true,
 * and the JDBC configs will be set for putting the transaction and lock info in the embedded
 * metastore.
 * @param conf HiveConf to add these values to.
 */
public static void setConfValues(HiveConf conf) {
  // Route "raw" filesystem URIs to the test RawFileSystem implementation.
  conf.set("fs.raw.impl", RawFileSystem.class.getName());
  // Transactions require the configured transaction manager plus concurrency support.
  conf.setVar(HiveConf.ConfVars.HIVE_TXN_MANAGER, txnMgr);
  conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, true);
}
 
开发者ID:moueimei,项目名称:flume-release-1.7.0,代码行数:12,代码来源:TestUtil.java


注:本文中的org.apache.hadoop.hive.conf.HiveConf.set方法示例由纯净天空整理自Github/MSDocs等开源代码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。