

Scala PropertyConfigurator Class Code Examples

This article collects typical usage examples of org.apache.log4j.PropertyConfigurator in Scala. If you are wondering what the PropertyConfigurator class is for, how to use it, or what it looks like in real code, the selected examples below should help.


Seven code examples of the PropertyConfigurator class are shown below, sorted by popularity by default.
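
Before the individual examples, here is a minimal sketch of the three PropertyConfigurator.configure overloads the examples rely on: a classpath resource URL, a file-system path, and an in-memory InputStream. The object name, resource paths, and properties content are placeholders rather than code from any of the projects below; configure(InputStream) requires log4j 1.2.17 or later.

import java.io.ByteArrayInputStream

import org.apache.log4j.{Logger, PropertyConfigurator}

object ConfigureSketch {
  def main(args: Array[String]): Unit = {
    // 1. Configure from a classpath resource URL (as Example 1 does)
    Option(getClass.getResource("/log4j.properties"))
      .foreach(url => PropertyConfigurator.configure(url))

    // 2. Configure from a properties file path on disk (as Examples 2-6 do)
    PropertyConfigurator.configure("log4j.properties")

    // 3. Configure from an InputStream built in memory (as Example 7 does)
    val props =
      """log4j.rootLogger=INFO, stdout
        |log4j.appender.stdout=org.apache.log4j.ConsoleAppender
        |log4j.appender.stdout.layout=org.apache.log4j.SimpleLayout""".stripMargin
    PropertyConfigurator.configure(new ByteArrayInputStream(props.getBytes))

    Logger.getLogger(getClass).info("log4j configured")
  }
}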

Example 1: main

// Package name and imported dependencies
package com.aluxian.tweeather.scripts

import java.io.File

import org.apache.hadoop.fs.FileSystem
import org.apache.log4j.PropertyConfigurator
import org.apache.spark.sql.SQLContext
import org.apache.spark.streaming.{Minutes, Seconds}
import org.apache.spark.{SparkConf, SparkContext}


trait Script {

  protected lazy val streamingTimeout = sys.props.get("tw.streaming.timeout") // in seconds
    .map(_.toLong * 1000).getOrElse(-1L)
  protected lazy val streamingInterval = sys.props.get("tw.streaming.interval") // in seconds
    .map(s => Seconds(s.toLong)).getOrElse(Minutes(5))

  protected lazy val scriptName = "Tweeather_" + getClass.getSimpleName.stripSuffix("$")
  protected lazy val sc = new SparkContext(
    new SparkConf()
      .setIfMissing("spark.app.name", scriptName)
      .setIfMissing("spark.eventLog.dir", "tw/logs")
      .setIfMissing("spark.eventLog.enabled", "true")
      .setIfMissing("spark.streaming.stopGracefullyOnShutdown", "true")
      .setIfMissing("spark.streaming.blockInterval", "30s")
  )

  protected lazy val hdfs = FileSystem.get(sc.hadoopConfiguration)
  protected lazy val sqlc = new SQLContext(sc)

  def main(args: Array[String]) {
    // Log4j properties
    Option(getClass.getResource("/com/aluxian/tweeather/res/log4j.properties")) match {
      case Some(url) => PropertyConfigurator.configure(url)
      case None => System.err.println("Unable to load log4j.properties")
    }

    // Ensure the event log directory exists
    new File("tw/logs").mkdirs()
  }

} 
Developer: cnajeefa, Project: Tourism-Sentiment-Analysis, Lines of code: 44, Source file: Script.scala

Example 2: PairRDDOperations

// Package name and imported dependencies
import org.apache.log4j.PropertyConfigurator
import org.apache.spark.{SparkConf, SparkContext}


object PairRDDOperations {
  def main(args: Array[String]): Unit = {
    PropertyConfigurator.configure("file/log4j.properties")
    val conf = new SparkConf().setAppName("PairRDDOperations").setMaster("local")
    val sc = new SparkContext(conf)

    val lines = sc.textFile("file/test")
    // top N
    lines.flatMap(x => x.split(" "))
      .map(x => (x, 1)).reduceByKey((x, y) => x + y)
      .sortBy(x => x._2, false).take(5).foreach(println)

    val lines2 = sc.textFile("file/README.md")
    // top N
    lines2.filter(x => x != "").flatMap(x => x.split(" "))
      .map(x => (x, 1)).reduceByKey((x, y) => x + y).sortBy(x => x._2, false).take(5).foreach(println)

    //groupByKey
    lines.flatMap(x => x.split(" ")).map(x => (x,1)).groupByKey().foreach(println)


  }
} 
Developer: Larry3z, Project: SparkPractice, Lines of code: 28, Source file: PairRDDOperations.scala
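
Examples 2 through 6 call PropertyConfigurator.configure with a path to a properties file on disk. That file is not included in the snippets above; a minimal, assumed file/log4j.properties that keeps Spark's console output readable could look like this:

# Hypothetical file/log4j.properties for the Spark examples
log4j.rootLogger=WARN, console
log4j.appender.console=org.apache.log4j.ConsoleAppender
log4j.appender.console.layout=org.apache.log4j.PatternLayout
log4j.appender.console.layout.ConversionPattern=%d [%t] %-5p %c - %m%n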

Example 3: BasicOperations

// Package name and imported dependencies
import org.apache.log4j.PropertyConfigurator
import org.apache.spark.{SparkConf, SparkContext}


object BasicOperations {
  def main(args: Array[String]): Unit = {
    PropertyConfigurator.configure("file/log4j.properties")
    val conf = new SparkConf().setAppName("BasicOperations").setMaster("local")
    val sc = new SparkContext(conf)

    // Spark creates RDDs in two ways: by loading an external dataset or by parallelizing an in-memory collection
    val outerRDDs = sc.textFile("file/README.md")
    val innerRDDs = sc.parallelize(List("hello world", "hello scala"))

    // RDD operations fall into two kinds: transformations and actions
    val cached = innerRDDs.map(_.contains("hello")).cache() // transformation only; nothing runs until an action is called
    innerRDDs.map(x => x.toUpperCase).foreach(println)
    outerRDDs.map(x => x.toUpperCase).filter(x => !x.contains("SPARK") && x != "").foreach(println)
  }
} 
Developer: Larry3z, Project: SparkPractice, Lines of code: 21, Source file: BasicOperations.scala

Example 4: RDDActionOperations

// Package name and imported dependencies
import org.apache.log4j.PropertyConfigurator
import org.apache.spark.{SparkConf, SparkContext}


object RDDActionOperations {
  def main(args: Array[String]): Unit = {
    PropertyConfigurator.configure("file/log4j.properties")
    val conf = new SparkConf().setAppName("RDDActionOperations").setMaster("local")
    val sc = new SparkContext(conf)

    val lines = sc.parallelize(List("Hello world", "Hello scala"))
    // map reduce
    lines.flatMap(x => x.split(" ")).map(x => (x, 1))
      .reduceByKey((x, y) => x + y).foreach(println)
    //countByValue word count
    lines.flatMap(x => x.split(" ")).countByValue().foreach(println)
    // collect
    val lines2 = sc.parallelize(List(1, 2, 3, 4, 5))
    lines2.collect().foreach(println)
    // take
    lines2.take(2).foreach(println)
    // top
    lines2.top(2).foreach(println)
  }
} 
Developer: Larry3z, Project: SparkPractice, Lines of code: 26, Source file: RDDActionOperations.scala

Example 5: RecommendationExample

// Package name and imported dependencies
import org.apache.log4j.PropertyConfigurator
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.mllib.recommendation.ALS
import org.apache.spark.mllib.recommendation.MatrixFactorizationModel
import org.apache.spark.mllib.recommendation.Rating

object RecommendationExample {
  def main(args: Array[String]): Unit = {
    PropertyConfigurator.configure("file/log4j.properties")
    val conf = new SparkConf().setAppName("CollaborativeFilteringExample").setMaster("local")
    val sc = new SparkContext(conf)
    // Load and parse the data
    val data = sc.textFile("file/test.data")
    val ratings = data.map(_.split(',') match { case Array(user, item, rate) =>
      Rating(user.toInt, item.toInt, rate.toDouble)
    })

    // Build the recommendation model using ALS
    val rank = 10
    val numIterations = 10
    val model = ALS.train(ratings, rank, numIterations, 0.01)

    // Evaluate the model on rating data
    val usersProducts = ratings.map { case Rating(user, product, rate) =>
      (user, product)
    }
    val predictions =
      model.predict(usersProducts).map { case Rating(user, product, rate) =>
        ((user, product), rate)
      }
    val ratesAndPreds = ratings.map { case Rating(user, product, rate) =>
      ((user, product), rate)
    }.join(predictions)
    val MSE = ratesAndPreds.map { case ((user, product), (r1, r2)) =>
      val err = (r1 - r2)
      err * err
    }.mean()
    println("----------------------------------------")
    println("-------Mean Squared Error = " + MSE)
    println("----------------------------------------")

    // Save and load model
    model.save(sc, "target/tmp/myCollaborativeFilter")
    val sameModel = MatrixFactorizationModel.load(sc, "target/tmp/myCollaborativeFilter")
    sameModel.userFeatures.foreach(println)
    val proFCounts = sameModel.productFeatures.count()
    println(proFCounts)

  }
}
// scalastyle:on println 
Developer: Larry3z, Project: SparkPractice, Lines of code: 52, Source file: RecommendationExample.scala
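
Example 5 reads file/test.data and pattern-matches each line as a comma-separated user,item,rating triple. The data file itself is not part of the snippet; purely illustrative lines in the expected format would be:

1,1,5.0
1,2,1.0
2,1,4.0
2,2,2.0

Each such line becomes a Rating(user, item, rate), and ALS.train then factorizes the resulting rating matrix.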

Example 6: Durak

// Package name and imported dependencies
package de.htwg.se.durak

import de.htwg.se.durak.aview.tui.Tui
import de.htwg.se.durak.controller.impl.GameRound
import org.apache.log4j.PropertyConfigurator

import scala.io.StdIn._
import de.htwg.se.durak.aview.gui.SwingGui
import de.htwg.se.durak.controller.GameRoundController
import com.google.inject.Guice
import de.htwg.se.durak.controller.GameRoundControllerFactory

object Durak {
  def main(args: Array[String]): Unit = {

    PropertyConfigurator.configure("log4j.properties")
    
    enterPlayerNames

    val playerNames = processPlayerNamesInput(readLine())
    //val playerNames = processPlayerNamesInput("Jakob,Kathrin")
    val injector = Guice.createInjector(new DurakModule)
    val controllerFactory = injector.getInstance(classOf[GameRoundControllerFactory])
    val controller = controllerFactory.create(playerNames)
    val tui = new Tui(controller)
    val gui = new SwingGui(controller)
    tui.printTui
    while (tui.processInputLine(readLine())) {}
  }

  def enterPlayerNames = {
    println("Durak")
    println
    println("Please enter the names of the players, separed by comma:")
  }

  def processPlayerNamesInput(input: String): List[String] = input.split(",").map(_.trim).toList

} 
Developer: KatWol, Project: durak, Lines of code: 40, Source file: Durak.scala

Example 7: LogConfiguration

// Package name and imported dependencies
package org.dmonix.area51.akka

import java.io.ByteArrayInputStream
import java.util.Locale

import com.typesafe.config.ConfigFactory
import com.typesafe.config.Config

import org.apache.log4j.PropertyConfigurator

object LogConfiguration {
  def config: Config = config("on")
  def config(s: String):Config = ConfigFactory.parseString(
  s"""akka.loggers = [akka.testkit.TestEventListener] # makes both log-snooping and logging work
    |       akka.loglevel = "DEBUG"
    |       akka.actor.debug.receive = $s""".stripMargin)
}


trait LogConfiguration {
  // Configure language for proper logging outputs
  Locale.setDefault(Locale.US)
  System.setProperty("user.country", Locale.US.getCountry)
  System.setProperty("user.language", Locale.US.getLanguage)
  System.setProperty("user.variant", Locale.US.getVariant)

  PropertyConfigurator.configure(new ByteArrayInputStream(logCfg.getBytes))

  private def logCfg = """log4j.rootLogger=DEBUG, consoleAppender
                         |
                         |log4j.logger.com=INFO
                         |log4j.logger.org.dmonix=DEBUG
                         |log4j.logger.org.eclipse=WARN
                         |log4j.logger.org.apache=WARN
                         |
                         |log4j.appender.consoleAppender=org.apache.log4j.ConsoleAppender
                         |log4j.appender.consoleAppender.layout=org.apache.log4j.PatternLayout
                         |log4j.appender.consoleAppender.layout.ConversionPattern=%d [%15.15t] %-5p [%c] %m%n""".stripMargin

} 
Developer: pnerg, Project: area51-akka, Lines of code: 41, Source file: LogConfiguration.scala


Note: The org.apache.log4j.PropertyConfigurator class examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets come from open-source projects contributed by their respective authors, who retain copyright; consult each project's license before using or redistributing the code, and do not reproduce this article without permission.