This article collects typical usage examples of the Scala org.apache.hadoop.hive.conf.HiveConf class. If you are wondering what the HiveConf class is for in Scala, or how to use it, the curated examples below may help.
Seven code examples of the HiveConf class are shown below, sorted by popularity by default.
Example 1: HiveConfig
// package declaration and imports of dependent classes
package io.eels.component.hive
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.{FileSystem, Path}
import org.apache.hadoop.hive.conf.HiveConf
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient
object HiveConfig {

  // Hadoop client configuration, loaded from a local Hadoop installation
  implicit val conf = new Configuration
  conf.addResource(new Path("/home/sam/development/hadoop-2.7.2/etc/hadoop/core-site.xml"))
  conf.addResource(new Path("/home/sam/development/hadoop-2.7.2/etc/hadoop/hdfs-site.xml"))
  conf.reloadConfiguration()

  implicit val fs = FileSystem.get(conf)

  // Hive configuration, plus a metastore client built from it
  implicit val hiveConf = new HiveConf()
  hiveConf.addResource(new Path("/home/sam/development/hive-2.1.0-bin/conf/hive-site.xml"))
  hiveConf.reloadConfiguration()

  implicit val client = new HiveMetaStoreClient(hiveConf)
}
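The implicits above wire up everything needed to talk to HDFS and the Hive metastore. A minimal usage sketch, assuming a database named "default" exists (the database name is a placeholder, not part of the original):

import scala.collection.JavaConverters._
import io.eels.component.hive.HiveConfig._

object ListTables extends App {
  // list the tables registered in the (assumed) "default" database
  client.getAllTables("default").asScala.foreach(println)
}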
Example 2: FetchSpecMain
// package declaration and imports of dependent classes
package io.eels.cli
import java.io.PrintStream
import io.eels.{Constants, SourceParser}
import io.eels.component.hive.{HiveSource, HiveSpec}
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.FileSystem
import org.apache.hadoop.hive.conf.HiveConf
object FetchSpecMain {

  implicit val fs = FileSystem.get(new Configuration)
  implicit val hiveConf = new HiveConf

  def apply(args: Seq[String], out: PrintStream = System.out): Unit = {

    val parser = new scopt.OptionParser[Options]("eel") {
      head("eel fetch-spec", Constants.EelVersion)

      opt[String]("dataset") required() action { (source, o) =>
        o.copy(source = source)
      } text "specify dataset, eg hive:database:table"
    }

    parser.parse(args, Options()) match {
      case Some(options) =>
        val builder = SourceParser(options.source).getOrElse(sys.error(s"Unsupported source ${options.source}"))
        val source = builder()
        source match {
          case hive: HiveSource =>
            // keep only the spec entry for the requested table and print it as JSON
            val spec = hive.spec
            val json = HiveSpec.writeAsJson(spec.copy(tables = spec.tables.filter(_.tableName == hive.tableName)))
            out.println(json)
          case _ =>
            sys.error(s"Unsupported source $source")
        }
      case _ => // invalid arguments; scopt has already printed usage
    }
  }

  case class Options(source: String = null)
}
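A hypothetical direct invocation (the database and table names are placeholders), equivalent to running `eel fetch-spec --dataset hive:mydb:mytable` from the shell:

FetchSpecMain(Seq("--dataset", "hive:mydb:mytable"))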
Example 3: Main
// package declaration and imports of dependent classes
package io.eels.cli
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.FileSystem
import org.apache.hadoop.hive.conf.HiveConf
object Main extends App {

  implicit val fs = FileSystem.get(new Configuration)
  implicit val hiveConf = new HiveConf

  // the first parameter determines the command to run, just like in git, eg git pull, or in hadoop, eg hadoop fs
  val command = args.head
  val params = args.tail

  command match {
    case "schema" => ShowSchemaMain(params)
    case "stream" => StreamMain(params)
    case "apply-spec" => ApplySpecMain(params)
    case "fetch-spec" => FetchSpecMain(params)
    case "analyze" => AnalyzeMain(params)
    case other => System.err.println(s"Unknown command $other")
  }
}

case class Options(from: String = "", to: String = "", workerThreads: Int = 1, sourceIOThreads: Int = 1)
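One caveat with this dispatch style: args.head throws NoSuchElementException when no arguments are given. A hedged sketch of a guarded variant (SafeMain is a hypothetical name; the original code does not include this check):

object SafeMain extends App {
  // exit with a usage message instead of throwing when no command is given
  if (args.isEmpty) {
    System.err.println("usage: eel <command> [options]")
    sys.exit(1)
  }
  val command = args.head
  val params = args.tail
  // ... dispatch on `command` as above ...
  println(s"dispatching $command with ${params.length} params")
}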
Example 4: ShowSchemaMain
// package declaration and imports of dependent classes
package io.eels.cli
import java.io.PrintStream
import io.eels.{Constants, SourceParser}
import io.eels.component.avro.AvroSchemaFn
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.FileSystem
import org.apache.hadoop.hive.conf.HiveConf
object ShowSchemaMain {

  implicit val fs = FileSystem.get(new Configuration)
  implicit val hiveConf = new HiveConf

  def apply(args: Seq[String], out: PrintStream = System.out): Unit = {

    val parser = new scopt.OptionParser[Options]("eel") {
      head("eel schema", Constants.EelVersion)

      opt[String]("source") required() action { (source, o) =>
        o.copy(source = source)
      } text "specify source, eg hive:database:table or parquet:/path/to/file"
    }

    parser.parse(args, Options()) match {
      case Some(options) =>
        val builder = SourceParser(options.source).getOrElse(sys.error(s"Unsupported source ${options.source}"))
        val source = builder()
        // convert the eel schema to an Avro schema and print it
        val schema = source.schema
        val avroSchema = AvroSchemaFn.toAvro(schema)
        out.println(avroSchema)
      case _ => // invalid arguments; scopt has already printed usage
    }
  }

  case class Options(source: String = "")
}
Example 5: ApplySpecMain
// package declaration and imports of dependent classes
package io.eels.cli
import java.io.PrintStream
import java.nio.file.{Path, Paths}
import io.eels.{Constants, SourceParser}
import io.eels.component.hive.{HiveOps, HiveSource, HiveSpec}
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.FileSystem
import org.apache.hadoop.hive.conf.HiveConf
import org.apache.hadoop.hive.metastore.HiveMetaStoreClient
object ApplySpecMain {

  implicit val fs = FileSystem.get(new Configuration)
  implicit val hiveConf = new HiveConf
  implicit val client = new HiveMetaStoreClient(hiveConf)

  def apply(args: Seq[String], out: PrintStream = System.out): Unit = {

    val parser = new scopt.OptionParser[Options]("eel") {
      head("eel apply-spec", Constants.EelVersion)

      opt[String]("dataset") required() action { (source, o) =>
        o.copy(source = source)
      } text "specify dataset, eg hive:database:table"

      opt[String]("spec") required() action { (schema, o) =>
        o.copy(specPath = Paths.get(schema))
      } text "specify path to eel spec"
    }

    parser.parse(args, Options()) match {
      case Some(options) =>
        val builder = SourceParser(options.source).getOrElse(sys.error(s"Unsupported source ${options.source}"))
        val source = builder()
        source match {
          case _: HiveSource =>
            // load the spec from disk and apply it to the metastore
            HiveOps.applySpec(HiveSpec(options.specPath), false)
          case _ =>
            sys.error(s"Unsupported source $source")
        }
      case _ => // invalid arguments; scopt has already printed usage
    }
  }

  case class Options(source: String = null, specPath: Path = null)
}
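A hypothetical invocation, assuming a spec file at /tmp/table-spec.json written earlier by fetch-spec (both the path and the table name are placeholders):

ApplySpecMain(Seq("--dataset", "hive:mydb:mytable", "--spec", "/tmp/table-spec.json"))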
Example 6: SharkSessionManager
// package declaration and imports of dependent classes
package shark.server
import org.apache.hadoop.hive.conf.HiveConf
import org.apache.hive.service.cli.session.SessionManager
import shark.Utils
class SharkSessionManager extends SessionManager {
  override def init(hiveConf: HiveConf) {
    this.synchronized {
      // swap the private operationManager field of the parent SessionManager
      // for a Shark-specific implementation, via reflection
      val sharkOpManager = new SharkOperationManager
      Utils.setSuperField("operationManager", sharkOpManager, this)
      addService(sharkOpManager)
      sharkInit(hiveConf)
    }
  }
}
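Utils.setSuperField is Shark's reflection helper for overwriting private fields declared on a superclass. A minimal sketch of how such a helper can be written (the actual shark.Utils implementation may differ):

object ReflectionSketch {
  // write `value` into the private field `fieldName` declared on the
  // superclass of `target`, bypassing Java access checks
  def setSuperField(fieldName: String, value: AnyRef, target: AnyRef): Unit = {
    val field = target.getClass.getSuperclass.getDeclaredField(fieldName)
    field.setAccessible(true)
    field.set(target, value)
  }
}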
Example 7: SharkCLIService
// package declaration and imports of dependent classes
package shark.server
import org.apache.hive.service.cli.CLIService
import org.apache.hadoop.hive.conf.HiveConf
import org.apache.hadoop.hive.shims.ShimLoader
import org.apache.hive.service.auth.HiveAuthFactory
import java.io.IOException
import org.apache.hive.service.ServiceException
import javax.security.auth.login.LoginException
import org.apache.spark.SparkEnv
import shark.{SharkServer, Utils}
class SharkCLIService extends CLIService {
  override def init(hiveConf: HiveConf) {
    this.synchronized {
      // inject the HiveConf and a Shark-specific session manager into the
      // private fields of the parent CLIService, via reflection
      Utils.setSuperField("hiveConf", hiveConf, this)
      val sharkSM = new SharkSessionManager
      Utils.setSuperField("sessionManager", sharkSM, this)
      addService(sharkSM)
      try {
        HiveAuthFactory.loginFromKeytab(hiveConf)
        val serverUserName = ShimLoader.getHadoopShims
          .getShortUserName(ShimLoader.getHadoopShims.getUGIForConf(hiveConf))
        Utils.setSuperField("serverUserName", serverUserName, this)
      } catch {
        case e: IOException =>
          throw new ServiceException("Unable to login to kerberos with given principal/keytab", e)
        case e: LoginException =>
          throw new ServiceException("Unable to login to kerberos with given principal/keytab", e)
      }
      // Make sure the ThreadLocal SparkEnv reference is the same for all threads.
      SparkEnv.set(SharkServer.sparkEnv)
      sharkInit(hiveConf)
    }
  }
}
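The two catch cases above are identical; Scala allows collapsing them with a type-alternative pattern. A self-contained sketch of the equivalent handler (the login function here is a stand-in for illustration, not the real HiveAuthFactory call):

import java.io.IOException
import javax.security.auth.login.LoginException

object CatchAlternativeSketch extends App {
  def login(): Unit = throw new IOException("keytab not found") // stand-in for the real login
  try login()
  catch {
    // one case handles both exception types
    case e @ (_: IOException | _: LoginException) =>
      println(s"login failed: ${e.getMessage}")
  }
}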