本文整理汇总了Java中org.apache.hadoop.hive.cli.CliDriver类的典型用法代码示例。如果您正苦于以下问题:Java CliDriver类的具体用法?Java CliDriver怎么用?Java CliDriver使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。
CliDriver类属于org.apache.hadoop.hive.cli包,在下文中一共展示了CliDriver类的4个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: HiveExec
import org.apache.hadoop.hive.cli.CliDriver; // import the required package/class
/**
 * HiveExec constructor: builds a Hive configuration from the connector's
 * configured conf directory, starts a CLI session, and creates the driver.
 *
 * @param config HDFS Connector configuration
 */
public HiveExec(HdfsSinkConnectorConfig config) {
  hiveConf = new HiveConf();
  final String confDir = config.getString(HdfsSinkConnectorConfig.HIVE_CONF_DIR_CONFIG);
  // Layer the connector-supplied hive-site.xml on top of the defaults.
  hiveConf.addResource(new Path(confDir, "hive-site.xml"));
  SessionState.start(new CliSessionState(hiveConf));
  cliDriver = new CliDriver();
}
示例2: getHiveQueryExecutor
import org.apache.hadoop.hive.cli.CliDriver; // import the required package/class
/**
 * Builds a {@link HiveQueryExecutor} wired with the HiveConf and CLI session
 * state supplied by a fresh {@code HiveQueryExecutorModule}.
 * Any checked exception raised during setup is rethrown as a
 * {@link RuntimeException} with the original cause preserved.
 */
public static HiveQueryExecutor getHiveQueryExecutor() {
  final HiveQueryExecutorModule module = new HiveQueryExecutorModule();
  try {
    HiveConf conf = module.provideHiveConf();
    CliSessionState sessionState = module.provideCliSessionState();
    return new RealHiveQueryExecutor(conf, sessionState, new CliDriver());
  } catch (Exception e) {
    throw new RuntimeException(e);
  }
}
示例3: main
import org.apache.hadoop.hive.cli.CliDriver; // import the required package/class
/**
 * Entry point: runs the bundled Hive script through the Hive CLI
 * via {@code CliDriver.main}'s {@code -f <file>} mode.
 */
public static void main(String[] args) throws Exception {
  final String scriptPath = "./src/test/java/test.hive";
  CliDriver.main(new String[] {"-f", scriptPath});
}
示例4: RealHiveQueryExecutor
import org.apache.hadoop.hive.cli.CliDriver; // import the required package/class
/**
 * Wires up a query executor around an existing Hive configuration, CLI session
 * state, and CLI driver.
 *
 * Statement order matters: aux-jar expansion and classpath augmentation run
 * before the session's std streams are bound and stage-2 option processing
 * executes.
 *
 * @param hiveConf Hive configuration to execute queries against
 * @param ss CLI session state; its in/out/err are bound to this process
 * @param cli the CliDriver instance this executor will use
 * @throws Exception if option processing or any setup step fails
 */
public RealHiveQueryExecutor(HiveConf hiveConf, CliSessionState ss,
CliDriver cli) throws Exception {
LOG.info("HiveConf = " + hiveConf);
LOG.info("According to the conf, we're talking to the Hive hosted at: "
+ HiveConf.getVar(hiveConf, METASTORECONNECTURLKEY));
// Expand out the hive aux jars since there was no shell script to do it
// for us
String orig = HiveConf.getVar(hiveConf, HIVEAUXJARS);
String expanded = HiveUtils.expandHiveAuxJarsPath(orig);
if (orig == null || orig.equals(expanded)) {
// Unset, or expansion was a no-op — leave the conf value untouched.
LOG.info("Hive aux jars variable not expanded");
} else {
LOG.info("Expanded aux jars variable from [" + orig + "] to [" + expanded
+ "]");
HiveConf.setVar(hiveConf, HIVEAUXJARS, expanded);
}
// Stage-1 option processing runs with empty args here; stage-2 (below)
// is only invoked after the session state has been prepared.
OptionsProcessor op = new OptionsProcessor();
if (!op.process_stage1(new String[] {})) {
throw new IllegalArgumentException("Can't process empty args?!?");
}
if (!ShimLoader.getHadoopShims().usesJobShell()) {
// hadoop-20 and above - we need to augment classpath using hiveconf
// components
// see also: code in ExecDriver.java
ClassLoader loader = hiveConf.getClassLoader();
// NOTE(review): re-reads HIVEAUXJARS from the conf, so this picks up the
// expanded value set above when expansion occurred.
String auxJars = HiveConf.getVar(hiveConf, HiveConf.ConfVars.HIVEAUXJARS);
LOG.info("Got auxJars = " + auxJars);
if (StringUtils.isNotBlank(auxJars)) {
loader =
Utilities.addToClassPath(loader, StringUtils.split(auxJars, ","));
}
// Publish the augmented loader both to Hive and to the current thread.
hiveConf.setClassLoader(loader);
Thread.currentThread().setContextClassLoader(loader);
}
this.ss = ss;
LOG.info("SessionState = " + ss);
// Bind the session's streams to this process's std streams so CLI output
// and prompts are visible to the caller.
ss.out = System.out;
ss.err = System.err;
ss.in = System.in;
if (!op.process_stage2(ss)) {
throw new IllegalArgumentException(
"Can't process arguments from session state");
}
this.cli = cli;
LOG.info("Cli = " + cli);
}