

Scala SparkEnv Class Code Examples

This article collects typical usage examples of the Scala class org.apache.spark.SparkEnv, gathered from open-source projects. If you are wondering what the SparkEnv class is for, how to use it, or what real code that uses it looks like, the curated examples below should help.


Three code examples of the SparkEnv class are shown below, ordered by popularity by default.
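
Before the examples, here is a minimal self-contained sketch of the usual entry point: SparkEnv.get returns the per-JVM environment that Spark wires up on the driver and on each executor, exposing services such as the block manager and serializer. The SparkContext setup below is illustrative only and is not taken from the examples that follow.

import org.apache.spark.{SparkConf, SparkContext, SparkEnv}

object SparkEnvSketch {
  def main(args: Array[String]): Unit = {
    val sc = new SparkContext(
      new SparkConf().setAppName("sparkenv-sketch").setMaster("local[2]"))
    val env = SparkEnv.get                         // environment for this JVM
    println(s"executor id: ${env.executorId}")     // "driver" on the driver
    println(s"serializer: ${env.serializer}")      // the configured Serializer
    println(s"block manager: ${env.blockManager}") // local block storage service
    sc.stop()
  }
}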

Example 1: Main

// Package declaration and imported dependencies
package onextent.demo

import org.apache.spark.SparkEnv
import org.rogach.scallop.ScallopConf

object Main {
  def main(args: Array[String]): Unit = {
    // Command-line parsing via Scallop: --message overrides the default "world".
    object Args extends ScallopConf(args) {
      val msg = opt[String]("message", descr = "say something", default = Some("world"))
    }
    // Touch the per-JVM SparkEnv (null when run outside a Spark runtime);
    // the demo only references it and does not otherwise use it.
    SparkEnv.get
    Args.verify()
    println(s"Hello ${Args.msg.getOrElse("what?")}")
  }
}
Author: navicore, Project: docker-entrypoint-cmd-demo, Lines: 16, Source: Main.scala
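
A quick usage sketch (the argument values are made up; run in a Scala REPL or a test): Scallop exposes the "message" option as a --message flag, falling back to the declared default when the flag is absent.

// Hypothetical invocations of the Main object above.
Main.main(Array("--message", "Spark"))  // prints: Hello Spark
Main.main(Array.empty[String])          // prints: Hello world (the default)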

Example 2: RedisShuffleWriter

// Package declaration and imported dependencies
package org.apache.spark.shuffle.redis

import org.apache.spark.internal.Logging
import org.apache.spark.scheduler.MapStatus
import org.apache.spark.shuffle.{ShuffleHandle, ShuffleWriter}
import org.apache.spark.{SparkEnv, TaskContext}
import redis.clients.jedis.{Jedis, JedisPool}

class RedisShuffleWriter[K, V](
  handle: ShuffleHandle,
  mapId: Int,
  context: TaskContext)
  extends ShuffleWriter[K, V] with Logging {

  private val dep = handle.asInstanceOf[RedisShuffleHandle[Any, Any, Any]].dependency

  // The executor-local block manager, obtained from the per-JVM SparkEnv.
  private val blockManager = SparkEnv.get.blockManager

  // Connection pool for the Redis instance that backs shuffle output.
  private val jedisPool = new JedisPool()

  private var sorter: RedisSorter[Any, Any, Any] = null

  // Are we in the process of stopping? Because map tasks can call stop() with success = true
  // and then call stop() with success = false if they get an exception, we want to make sure
  // we don't try deleting files, etc twice.
  private var stopping = false

  private var mapStatus: MapStatus = null

  
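  // NOTE: the write(records) method required by ShuffleWriter is elided in
  // this excerpt; the full project implements it (presumably via RedisSorter).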
  override def stop(success: Boolean): Option[MapStatus] = {
    try {
      if (stopping) {
        None
      } else {
        stopping = true
        if (success) {
          Option(mapStatus)
        } else {
          // Failed task: discard any partially written shuffle data.
          if (sorter != null) {
            sorter.clean()
            sorter = null
          }
          None
        }
      }
    } finally {
      jedisPool.close()
    }
  }
} 
Author: ambling, Project: RedisShuffleManager, Lines: 51, Source: RedisShuffleWriter.scala
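
For context, a hedged sketch of how such a writer would be wired into a job: Spark selects the shuffle implementation through the standard spark.shuffle.manager setting, so an application would point that at the project's ShuffleManager class. The exact class name below is assumed from the project name and is not confirmed by this excerpt.

import org.apache.spark.{SparkConf, SparkContext}

object RedisShuffleDemo {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
      .setAppName("redis-shuffle-demo")
      .setMaster("local[2]")
      // Assumed class name; spark.shuffle.manager is the standard knob for
      // swapping in a custom ShuffleManager implementation.
      .set("spark.shuffle.manager", "org.apache.spark.shuffle.redis.RedisShuffleManager")
    val sc = new SparkContext(conf)
    // Any shuffle-inducing operation now goes through the Redis-backed writer.
    val sums = sc.parallelize(1 to 100).map(i => (i % 10, i)).reduceByKey(_ + _)
    println(sums.collect().toSeq)
    sc.stop()
  }
}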

Example 3: SharkCLIService

// Package declaration and imported dependencies
package shark.server

import org.apache.hive.service.cli.CLIService
import org.apache.hadoop.hive.conf.HiveConf
import org.apache.hadoop.hive.shims.ShimLoader
import org.apache.hive.service.auth.HiveAuthFactory
import java.io.IOException
import org.apache.hive.service.ServiceException
import javax.security.auth.login.LoginException
import org.apache.spark.SparkEnv
import shark.{SharkServer, Utils}

class SharkCLIService extends CLIService {
  override def init(hiveConf: HiveConf): Unit = {
    this.synchronized {
      Utils.setSuperField("hiveConf", hiveConf, this)
      val sharkSM = new SharkSessionManager
      Utils.setSuperField("sessionManager", sharkSM, this)
      addService(sharkSM)
      try {
        HiveAuthFactory.loginFromKeytab(hiveConf)
        val serverUserName = ShimLoader.getHadoopShims
          .getShortUserName(ShimLoader.getHadoopShims.getUGIForConf(hiveConf))
        Utils.setSuperField("serverUserName", serverUserName, this)
      } catch {
        // IOException and LoginException both signal a failed Kerberos login.
        case e @ (_: IOException | _: LoginException) =>
          throw new ServiceException("Unable to login to kerberos with given principal/keytab", e)
      }
      // Make sure the ThreadLocal SparkEnv reference is the same for all threads.
      SparkEnv.set(SharkServer.sparkEnv)
      sharkInit(hiveConf)
    }
  }
} 
Author: palmer0914, Project: shark, Lines: 39, Source: SharkCLIService.scala
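
The SparkEnv.set call above matters because, in the Spark versions Shark targeted, SparkEnv was tracked in a thread-local: threads spawned by the Hive service layer would otherwise see no environment. A minimal sketch of that propagation pattern follows, assuming an older Spark where SparkEnv.set is public API (newer releases no longer expose it).

import org.apache.spark.SparkEnv

object EnvPropagationSketch {
  def main(args: Array[String]): Unit = {
    // Capture the environment on a thread that already has one (e.g. the driver)...
    val env = SparkEnv.get
    val worker = new Thread(new Runnable {
      override def run(): Unit = {
        // ...and re-register it before this thread calls into Spark internals.
        SparkEnv.set(env)
        assert(SparkEnv.get eq env)
      }
    })
    worker.start()
    worker.join()
  }
}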


Note: the org.apache.spark.SparkEnv class examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from community-contributed open-source projects; copyright remains with the original authors, and distribution or reuse should follow each project's License. Do not repost without permission.