当前位置: 首页>>代码示例>>Scala>>正文


Scala SparkLauncher类代码示例

本文整理汇总了Scala中org.apache.spark.launcher.SparkLauncher的典型用法代码示例。如果您正苦于以下问题:Scala SparkLauncher类的具体用法?Scala SparkLauncher怎么用?Scala SparkLauncher使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。


在下文中一共展示了SparkLauncher类的3个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Scala代码示例。

示例1: SparkJobActor

//设置package包名称以及导入依赖的类
package com.example

import akka.actor.{Actor, ActorLogging, ActorRef}
import org.apache.spark.launcher.{SparkAppHandle, SparkLauncher}


/** Actor that launches a Spark application via [[SparkLauncher]] and reports
  * its terminal state back to the original requester.
  *
  * Protocol: receives one `Launch`, switches to the `launched` behavior, and
  * replies `Finished(state)` once the Spark app reaches a final state.
  */
class SparkJobActor extends Actor with ActorLogging {
  import SparkJobActor._

  /** Initial behavior: wait for a single Launch request. */
  def receive: Receive = {
    case Launch(appResource, mainClass, master, conf) =>
      // setConf returns the launcher, so a fold threads every configuration
      // entry through the builder (same calls as a loop of setConf).
      val base = new SparkLauncher()
        .setAppResource(appResource)
        .setMainClass(mainClass)
        .setMaster(master)
      val configured = conf.foldLeft(base) { case (b, (k, v)) => b.setConf(k, v) }

      // Bridge SparkAppHandle callbacks back into the mailbox so that all
      // state handling happens on the actor's own thread.
      val stateListener = new SparkAppHandle.Listener {
        override def infoChanged(h: SparkAppHandle): Unit = ()
        override def stateChanged(h: SparkAppHandle): Unit = self ! StateChanged
      }

      val appHandle = configured.startApplication(stateListener)
      // Capture the requester now; `sender` is only valid while this message
      // is being processed.
      context become launched(appHandle, sender)
  }

  /** Behavior while the Spark app runs: on each state change, check for a
    * terminal state, notify the original requester, then stop.
    */
  def launched(handle: SparkAppHandle, origSender: ActorRef): Receive = {
    case StateChanged =>
      if (handle.getState.isFinal) {
        origSender ! Finished(handle.getState)
        context stop self
      }
  }
}

/** Message protocol for [[SparkJobActor]]. */
object SparkJobActor {
  /** Request to launch a Spark application.
    *
    * @param appResource path to the application jar/resource
    * @param mainClass   fully qualified main class of the Spark app
    * @param master      Spark master URL (e.g. "local[*]")
    * @param conf        extra Spark configuration key/value pairs
    */
  case class Launch(appResource: String, mainClass: String, master: String, conf: Map[String, String])
  /** Internal signal sent to self from the SparkAppHandle listener whenever
    * the launched application's state changes. */
  case object StateChanged
  /** Reply sent to the original requester once the app reaches a final state. */
  case class Finished(state: SparkAppHandle.State)
} 
开发者ID:njrich28,项目名称:akka-spark,代码行数:45,代码来源:SparkJobActor.scala

示例2: LauncherA

//设置package包名称以及导入依赖的类
import org.apache.spark.launcher.SparkLauncher

/** Minimal example: launch a pre-built Spark job from a plain JVM process. */
object LauncherA {
  def main(args: Array[String]): Unit = {
    println("LauncherA")
    // startApplication() spawns the Spark app in a child process and returns
    // immediately with a handle to it (fire-and-forget here, as before).
    val handle = new SparkLauncher()
      //.setSparkHome(sys.env("SPARK_HOME"))
      .setAppResource("0003_parallelizedcollection_2.11-1.0.jar")
      //.setMainClass("ParallelizedCollection")
      .setMaster("local[*]")
      .startApplication()
    // NOTE(review): the handle is never inspected, so this JVM may exit before
    // the child finishes — presumably intentional for this demo; confirm.
  }
} 
开发者ID:joseedgardo2000,项目名称:BigData,代码行数:14,代码来源:LauncherA.scala

示例3: SparkController

//设置package包名称以及导入依赖的类
package controllers

import javax.inject.{Inject, Singleton}

import org.apache.spark.launcher.SparkLauncher
import org.apache.spark.sql.DataFrame
import play.api.mvc._
import play.api.mvc.Controller
import spark.SparkCommons

import play.api.Play.current

/** Play controller exposing a Spark-backed tweet dataset plus a Spark-job
  * launcher endpoint.
  *
  * NOTE(review): every action here does Spark work synchronously on the
  * request thread (count/collect/waitFor) — fine for a demo, not for
  * production traffic.
  */
@Singleton
class SparkController @Inject() extends Controller {

  // Tweet JSON source; the DataFrame is built lazily on first access.
  val dataFile = "resources/tweet-json"
  lazy val rdd = SparkCommons.sqlContext.read.json(dataFile)

  /** Render a DataFrame as a single JSON array string (collects the whole
    * frame to the driver). */
  def toJsonString(rdd: DataFrame): String =
    "[" + rdd.toJSON.collect.toList.mkString(",\n") + "]"

  /** Total row count of the dataset. */
  def count = Action {
    println(libs) // NOTE(review): debug leftover — dumps dist jar paths to stdout
    Ok(rdd.count.toString)
  }

  /** All rows as a JSON array. */
  def list = Action {
    Ok(toJsonString(rdd))
  }

  /** Rows whose `text` column contains the given substring. */
  def filter(text: String) = Action {
    Ok(toJsonString(rdd.filter(rdd("text").contains(text))))
  }

  //run activator dist to generate zip, and unzip it
  //import import play.api.Play.current,
  /** Absolute paths of the .jar files shipped in the unpacked dist's lib dir.
    *
    * @throws IllegalStateException if the dist has not been generated/unzipped
    */
  def libs: Seq[String] = {
    val libDir = play.api.Play.application.getFile("target/universal/first-player-1.0-SNAPSHOT/lib")
    // Last expression is the result; Scala needs no `return`.
    if (libDir.exists) {
      libDir.listFiles().map(_.getCanonicalFile().getAbsolutePath()).filter(_.endsWith(".jar"))
    } else {
      throw new IllegalStateException(s"lib dir is missing: $libDir")
    }
  }

  /** Launch a fixed Spark job on Mesos and block until it exits. */
  def launcher() = Action {
    val spark = new SparkLauncher()
      .setAppResource(SparkCommons.sparkLibPath)
      // FIX: setMainClass expects a fully qualified CLASS name; the original
      // "spark.SimpleApp.main" (a method reference) would make spark-submit
      // fail with ClassNotFoundException.
      .setMainClass("spark.SimpleApp")
      .setMaster("mesos://192.168.6.52:5050")
      .setSparkHome("/usr/install/spark-1.6.2-bin-2.4.1-new")
      .setConf(SparkLauncher.DRIVER_MEMORY, "1g")
      .launch()
    // NOTE(review): waitFor blocks this request thread until the job ends.
    spark.waitFor()
    Ok("...")
  }

} 
开发者ID:zqhxuyuan,项目名称:first-player,代码行数:59,代码来源:SparkController.scala


注:本文中的org.apache.spark.launcher.SparkLauncher类示例由纯净天空整理自Github/MSDocs等开源代码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。