This article collects typical usage examples of the org.apache.log4j.LogManager class in Scala. If you have been wondering how to use LogManager from Scala, or what it looks like in real code, the curated class examples below should help.
Twelve code examples of the LogManager class are shown, ordered by popularity by default.
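Before the examples, here is a minimal orientation sketch (not taken from any project below; the logger name and messages are illustrative) showing the two LogManager entry points the examples rely on: the root logger and a named logger. It assumes a console appender is configured, for example via log4j.properties.

import org.apache.log4j.{Level, LogManager, Logger}

object LogManagerSketch {
  def main(args: Array[String]): Unit = {
    // Root logger: controls the default threshold for the whole hierarchy.
    val root = LogManager.getRootLogger
    root.setLevel(Level.WARN)

    // Named logger: created on first request; it inherits the root level
    // unless a level is set on it explicitly.
    val log: Logger = LogManager.getLogger("mySketchLogger")
    log.warn("printed: WARN passes the inherited threshold")
    log.info("suppressed: INFO is below WARN")
  }
}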
Example 1: ModelBuilder
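ModelBuilder trains one regression model per target site in parallel: it obtains a class-scoped logger via LogManager.getLogger, logs each stage (data preparation, training, saving, evaluation), and logs per-site failures instead of aborting the whole run.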
// Package declaration and imports
package org.wikimedia.research.recommendation.job.translation

import java.io.File

import org.apache.log4j.{LogManager, Logger}
import org.apache.spark.sql.{DataFrame, SparkSession}

import scala.collection.parallel.mutable.ParArray

object ModelBuilder {
  val log: Logger = LogManager.getLogger(ModelBuilder.getClass)

  def buildModels(spark: SparkSession,
                  modelsOutputDir: Option[File],
                  sites: ParArray[String],
                  featureData: DataFrame): Unit = {
    log.info("Building Models")
    sites.foreach(target =>
      try {
        log.info("Building model for " + target)
        log.info("Getting work data for " + target)
        val workData: DataFrame = Utils.getWorkData(spark, featureData, target)
        val Array(trainingData, testData) = workData.randomSplit(Array(0.7, 0.3))
        log.info("Training model for " + target)
        val model = Utils.REGRESSOR.fit(trainingData)
        log.info("Writing model to file for " + target)
        modelsOutputDir.foreach(o => model.write.save(new File(o, target).getAbsolutePath))
        log.info("Testing model for " + target)
        val predictions = model.transform(testData)
        val rmse = Utils.EVALUATOR.evaluate(predictions)
        log.info("Root Mean Squared Error (RMSE) on test data for " + target + " = " + rmse)
      } catch {
        case unknown: Throwable => log.error("Build model for " + target + " failed", unknown)
      }
    )
  }
}
Example 2: ScorePredictor
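ScorePredictor is the counterpart to the previous example: it loads one saved RandomForestRegressionModel per site, scores the feature data, falls back to an empty DataFrame (with a logged error) when a site fails, outer-joins the per-site predictions on id, and writes the result as a single compressed CSV.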
// Package declaration and imports
package org.wikimedia.research.recommendation.job.translation

import java.io.File

import org.apache.log4j.{LogManager, Logger}
import org.apache.spark.ml.regression.RandomForestRegressionModel
import org.apache.spark.sql.types.{DoubleType, StringType, StructField, StructType}
import org.apache.spark.sql.{DataFrame, Row, SaveMode, SparkSession}

import scala.collection.parallel.mutable.ParArray

object ScorePredictor {
  val log: Logger = LogManager.getLogger(ScorePredictor.getClass)

  def predictScores(spark: SparkSession,
                    modelsInputDir: File,
                    predictionsOutputDir: Option[File],
                    sites: ParArray[String],
                    featureData: DataFrame): Unit = {
    log.info("Scoring items")
    val predictions: Array[DataFrame] = sites.map(target => {
      try {
        log.info("Scoring for " + target)
        log.info("Getting work data for " + target)
        val workData: DataFrame = Utils.getWorkData(spark, featureData, target, exists = false)
        log.info("Loading model for " + target)
        val model = RandomForestRegressionModel.load(
          new File(modelsInputDir, target).getAbsolutePath)
        log.info("Scoring data for " + target)
        val predictions = model
          .setPredictionCol(target)
          .transform(workData)
          .select("id", target)
        predictions
      } catch {
        case unknown: Throwable =>
          log.error("Score for " + target + " failed", unknown)
          val schema = StructType(Seq(
            StructField("id", StringType, nullable = false),
            StructField(target, DoubleType, nullable = true)))
          spark.createDataFrame(spark.sparkContext.emptyRDD[Row], schema)
      }
    }).toArray
    val predictedScores = predictions.reduce((left, right) => left.join(right, Seq("id"), "outer"))
    log.info("Saving predictions")
    predictionsOutputDir.foreach(o =>
      predictedScores.coalesce(1)
        .write
        .mode(SaveMode.ErrorIfExists)
        .option("header", value = true)
        .option("compression", "bzip2")
        .csv(new File(o, "allPredictions").getAbsolutePath))
  }
}
Example 3: MyLog1
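From a book chapter on serialization: the root logger's level is set to WARN and a named @transient lazy val logger is declared (but not used), while the foreach closure below captures the root logger itself; the caveat comment in the code explains why that matters on a cluster.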
// Package declaration and imports
package com.chapter14.Serilazition

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.log4j.LogManager
import org.apache.log4j.Level
import org.apache.log4j.Logger

object MyLog1 extends Serializable {
  def main(args: Array[String]): Unit = {
    // Setting the logger level to WARN
    val log = LogManager.getRootLogger
    log.setLevel(Level.WARN)
    @transient lazy val log2 = org.apache.log4j.LogManager.getLogger("myLogger")

    // Creating the Spark context
    val conf = new SparkConf().setAppName("My App").setMaster("local[*]")
    val sc = new SparkContext(conf)

    // Start the computation and log from inside the closure. Note that the
    // root logger is not serializable, so capturing `log` in the closure can
    // raise a task-not-serializable error on a cluster; the @transient lazy
    // val holder idiom in examples 4 and 8 is the safe alternative.
    val data = sc.parallelize(0 to 100000)
    data.foreach(i => log.info("My number " + i))
    data.collect()
    log.warn("Finished")
  }
}
Example 4: MyMapper
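Here the logger lives in a Serializable class as a @transient lazy val, so it is skipped during closure serialization and re-created lazily on each executor; this is the standard way to log from inside RDD transformations.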
// Package declaration and imports
package com.chapter16.SparkTesting

import org.apache.log4j.{ Level, LogManager }
import org.apache.spark._
import org.apache.spark.rdd.RDD
import org.apache.spark.sql.SparkSession

class MyMapper(n: Int) extends Serializable {
  @transient lazy val log = org.apache.log4j.LogManager.getLogger("myLogger")

  def logMapper(rdd: RDD[Int]): RDD[String] =
    rdd.map { i =>
      log.warn("mapping: " + i)
      (i + n).toString
    }
}

// Companion object
object MyMapper {
  def apply(n: Int): MyMapper = new MyMapper(n)
}

// Main object
object myCustomLogwithClosureSerializable {
  def main(args: Array[String]): Unit = {
    val log = LogManager.getRootLogger
    log.setLevel(Level.WARN)
    val spark = SparkSession
      .builder
      .master("local[*]")
      .config("spark.sql.warehouse.dir", "E:/Exp/")
      .appName("Testing")
      .getOrCreate()
    log.warn("Started")
    val data = spark.sparkContext.parallelize(1 to 100000)
    val mapper = MyMapper(1)
    val other = mapper.logMapper(data)
    other.collect()
    log.warn("Finished")
  }
}
Author: PacktPublishing · Project: Scala-and-Spark-for-Big-Data-Analytics · Lines: 41 · Source: myCustomLogwithClosureSerializable.scala
Example 5: myCustomLogwithoutSerializable
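This example stays entirely on the driver and demonstrates how changing the root logger's level controls which log calls actually print.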
// Package declaration and imports
package com.chapter16.SparkTesting

import org.apache.log4j.LogManager
import org.apache.log4j.Level
import org.apache.spark.sql.SparkSession

object myCustomLogwithoutSerializable {
  def main(args: Array[String]): Unit = {
    val log = LogManager.getRootLogger
    // Everything from INFO up is printed once the level is set to INFO,
    // until the level is raised (for example to WARN).
    log.setLevel(Level.INFO)
    log.info("Let's get started!")
    // Setting the logger level to WARN: after this, nothing below WARN prints
    log.setLevel(Level.WARN)
    // Creating the Spark session
    val spark = SparkSession
      .builder
      .master("local[*]")
      .config("spark.sql.warehouse.dir", "E:/Exp/")
      .appName("Logging")
      .getOrCreate()
    // These will not be printed!
    log.info("Get prepared!")
    log.trace("Show if there is any ERROR!")
    // Start the computation and print the logging information
    log.warn("Started")
    spark.sparkContext.parallelize(1 to 5).foreach(println)
    log.warn("Finished")
  }
}
Example 6: myCustomLogwithClosure
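The same level-switching walkthrough as example 5, but the final computation logs from inside a foreach closure over an RDD, which is where logger serialization starts to matter.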
// Package declaration and imports
package com.chapter14.Serilazition

import org.apache.log4j.LogManager
import org.apache.log4j.Level
import org.apache.log4j.Logger
import org.apache.spark.sql.SparkSession

object myCustomLogwithClosure extends Serializable {
  def main(args: Array[String]): Unit = {
    val log = LogManager.getRootLogger
    // Everything from INFO up is printed once the level is set to INFO,
    // until the level is raised (for example to WARN).
    log.setLevel(Level.INFO)
    log.info("Let's get started!")
    // Setting the logger level to WARN: after this, nothing below WARN prints
    log.setLevel(Level.WARN)
    // Creating the Spark session
    val spark = SparkSession
      .builder
      .master("local[*]")
      .config("spark.sql.warehouse.dir", "E:/Exp/")
      .appName("Logging")
      .getOrCreate()
    // These will not be printed!
    log.info("Get prepared!")
    log.trace("Show if there is any ERROR!")
    // Start the computation and log from inside the closure
    // (see the serialization caveat noted in example 3)
    log.warn("Started")
    val data = spark.sparkContext.parallelize(0 to 100000)
    data.foreach(i => log.info("My number " + i))
    data.collect()
    log.warn("Finished")
  }
}
Author: PacktPublishing · Project: Scala-and-Spark-for-Big-Data-Analytics · Lines: 39 · Source: myCustomLogwithClosure.scala
Example 7: Spark
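A small application-wide singleton that builds a SparkContext and SQLContext with a fixed parallelism of 8, and uses LogManager.getRootLogger to silence everything below ERROR.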
// Package declaration and imports
import org.apache.log4j.{Level, LogManager}
import org.apache.spark.sql.SQLContext
import org.apache.spark.{SparkConf, SparkContext}

object Spark {
  val conf = new SparkConf()
    .setAppName("MOT Test")
    .setMaster("local[8]")
    .set("spark.default.parallelism", "8")
    .set("spark.sql.shuffle.partitions", "8")
  val sc = new SparkContext(conf)
  LogManager.getRootLogger.setLevel(Level.ERROR)
  val sqlContext = new SQLContext(sc)
  sqlContext.setConf("spark.sql.shuffle.partitions", "8")
}
Example 8: Holder
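The minimal holder idiom: a Serializable object exposing a @transient lazy val logger, so that partitions of a native job can log without capturing a non-serializable Logger in the task closure.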
// Package declaration and imports
package com.liuzix.SparkNative

import org.apache.log4j.{LogManager, Level}

object Holder extends Serializable {
  @transient lazy val log = LogManager.getLogger("myLogger")
}

class TestNativeJob extends NativeJob[Int] {
  def applyPartition(index: Integer, iter: Iterator[Int]): Iterator[Int] = {
    val lib = new NativeLibraryLoader[Int](this.binaryPath)
    Holder.log.setLevel(Level.TRACE)
    val myList = iter.toArray
    myList.foreach { i =>
      // Holder.log.info(s"Pushing $i")
      lib.call.pushItem(i)
    }
    Holder.log.info("Pushed all data")
    val ret = new Array[Int](myList.size)
    lib.call.getResult(ret, myList.size)
    ret.iterator
  }
}
Example 9: WeiboyiSeedProcessor
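A crawler page processor that keeps a class-scoped logger for reporting non-JSON responses while it expands a seed URL into paginated requests based on a "total" count.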
// Package declaration and imports
package org.webant.plugin.weiboyi.processor

import com.google.gson.JsonParser
import org.apache.log4j.LogManager
import org.webant.plugin.weiboyi.data.WeiboyiDetailData
import org.webant.worker.processor.JsonPageProcessor

class WeiboyiSeedProcessor extends JsonPageProcessor[WeiboyiDetailData] {
  private val logger = LogManager.getLogger(classOf[WeiboyiSeedProcessor])

  regex = "http://chuanbo.weiboyi.com/hworder/weixin/filterlist/source/all"

  protected override def links(): Iterable[String] = {
    if (!isJson(content)) {
      logger.error("no json content!")
      return Iterable.empty
    }

    val parser = new JsonParser
    val result = parser.parse(content).getAsJsonObject
    if (!result.has("data"))
      return Iterable.empty

    val data = result.getAsJsonObject("data")
    if (!data.has("total"))
      return Iterable.empty

    val size = 200
    val total = data.get("total").getAsInt
    val totalPage = Math.ceil(total.toFloat / size).toInt
    (0 until totalPage).map(index => {
      val offset = index * size
      val url = s"http://chuanbo.weiboyi.com/hworder/weixin/filterlist/source/all?start=$offset&limit=$size"
      url
    })
  }
}
Example 10: ZhihuSeedProcessor
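A similar processor that loads user ids from a CSV file and expands them into API URLs, logging an error message when the file cannot be read.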
// Package declaration and imports
package org.webant.plugin.zhihu.processor

import org.apache.log4j.LogManager
import org.webant.plugin.weiboyi.data.WeiboyiDetailData
import org.webant.worker.processor.JsonPageProcessor

import scala.io.Source

class ZhihuSeedProcessor extends JsonPageProcessor[WeiboyiDetailData] {
  private val logger = LogManager.getLogger(classOf[ZhihuSeedProcessor])

  regex = "https://www.zhihu.com/"

  override def links(): Iterable[String] = {
    // val ids = loadIds("D:\\workspace\\webant\\data\\zhihu\\uid_idsn.csv")
    val ids = loadIds("./data/zhihu/uid_idsn.csv")
    if (ids == null || ids.isEmpty)
      return Iterable.empty

    ids.map(id =>
      s"https://www.zhihu.com/api/v4/members/$id/answers?offset=0&limit=20&sort_by=created&include=data[*].is_normal,is_collapsed,collapse_reason,suggest_edit,comment_count,can_comment,content,voteup_count,reshipment_settings,comment_permission,mark_infos,created_time,updated_time,review_info,relationship.is_authorized,voting,is_author,is_thanked,is_nothelp,upvoted_followees;data[*].author.badge[?(type=best_answerer)].topics")
  }

  private def loadIds(path: String): Iterable[String] = {
    try {
      Source.fromFile(path, "UTF-8").getLines().map(line => line.split(",")(1)).toSet
    } catch {
      case e: Exception =>
        logger.error(s"load ids file failed! error: ${e.getMessage}")
        Iterable.empty
    }
  }
}
Example 11: FunStore
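A MySQL-backed store whose upsert methods build ON DUPLICATE KEY UPDATE statements (batch and single-record variants) and log the exception message when the update fails.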
// Package declaration and imports
package org.webant.plugin.mahua.store

import org.apache.commons.lang3.StringUtils
import org.apache.log4j.LogManager
import org.webant.extension.store.MysqlStore
import org.webant.plugin.fun.data.FunData

class FunStore extends MysqlStore[FunData] {
  private val logger = LogManager.getLogger(classOf[FunStore])
  private val table = "wefun"

  override def upsert(list: Iterable[FunData]): Int = {
    require(conn != null)
    if (list == null || list.isEmpty) return 0

    val fieldNames = Array("bizId", "userName", "avatarUrl", "profileUrl", "title", "content", "publishTime", "imgUrl", "imgWith", "imgHeight", "likeNum", "hateNum", "commentNum", "funType", "source", "srcId", "srcUrl", "crawlTime", "dataVersion", "dataCreateTime", "dataUpdateTime", "dataDeleteTime")
    val columns = fieldNames.mkString("(", ",", ")")
    val placeholders = list.map(_ => fieldNames.map(_ => "?").mkString("(", ",", ")")).mkString(",")
    val sql = s"insert into $table $columns values $placeholders ON DUPLICATE KEY UPDATE dataVersion = dataVersion + 1, dataUpdateTime = now()"
    val values = list.flatMap(data => {
      val funType = if (StringUtils.isNotBlank(data.imgUrl)) "image" else "text"
      Array(data.id, data.userName, data.avatarUrl, data.profileUrl, data.title, data.content, data.publishTime, data.imgUrl, data.imgWith, data.imgHeight, data.likeNum, data.hateNum, data.commentNum, funType, data.source, data.srcId, data.srcUrl, data.crawlTime, data.dataVersion, data.dataCreateTime, data.dataUpdateTime, data.dataDeleteTime)
    }).toArray

    var affectRowCount = 0
    try {
      affectRowCount = runner.update(conn, sql, values: _*)
    } catch {
      case e: Exception =>
        logger.error(e.getMessage)
    }
    affectRowCount
  }

  override def upsert(data: FunData): Int = {
    require(conn != null)
    if (data == null) return 0

    val funType = if (StringUtils.isNotBlank(data.imgUrl)) "image" else "text"
    val fieldNames = Array("bizId", "userName", "avatarUrl", "profileUrl", "title", "content", "publishTime", "imgUrl", "imgWith", "imgHeight", "likeNum", "hateNum", "commentNum", "funType", "source", "srcId", "srcUrl", "crawlTime", "dataVersion", "dataCreateTime", "dataUpdateTime", "dataDeleteTime")
    val columns = fieldNames.mkString("(", ",", ")")
    val placeholders = fieldNames.map(_ => "?").mkString("(", ",", ")")
    val sql = s"insert into $table $columns values $placeholders ON DUPLICATE KEY UPDATE dataVersion = dataVersion + 1, dataUpdateTime = now()"
    val values = Array(data.id, data.userName, data.avatarUrl, data.profileUrl, data.title, data.content, data.publishTime, data.imgUrl, data.imgWith, data.imgHeight, data.likeNum, data.hateNum, data.commentNum, funType, data.source, data.srcId, data.srcUrl, data.crawlTime, data.dataVersion, data.dataCreateTime, data.dataUpdateTime, data.dataDeleteTime)

    var affectRowCount = 0
    try {
      affectRowCount = runner.update(conn, sql, values: _*)
    } catch {
      case e: Exception =>
        logger.error(e.getMessage)
    }
    affectRowCount
  }
}
Example 12: JokeDetailStore
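A variant of the previous store that routes each record to either an image table or a joke table, depending on whether an image URL is present, and logs SQL failures the same way.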
// Package declaration and imports
package org.webant.plugin.mahua.store

import org.apache.commons.lang3.StringUtils
import org.apache.log4j.LogManager
import org.webant.extension.store.MysqlStore
import org.webant.plugin.fun.data.FunData

class JokeDetailStore extends MysqlStore[FunData] {
  private val logger = LogManager.getLogger(classOf[JokeDetailStore])

  override def upsert(list: Iterable[FunData]): Int = {
    require(conn != null)
    if (list == null || list.isEmpty) return 0
    list.map(upsert).sum
  }

  override def upsert(data: FunData): Int = {
    require(conn != null)
    if (data == null) return 0

    var table: String = null
    var fieldNames: Array[String] = null
    var values: Array[Object] = null
    if (StringUtils.isNotBlank(data.imgUrl)) {
      table = "fun_image"
      fieldNames = Array("id", "biz_id", "image_url", "content", "name", "profile_image", "like_num", "hate_num", "source", "publish_time", "data_create_time", "data_update_time", "data_delete_time")
      values = Array(data.srcId, data.id, data.imgUrl, data.title, data.userName, data.avatarUrl, data.likeNum, data.hateNum, "mahua", data.publishTime, data.dataCreateTime, data.dataUpdateTime, data.dataDeleteTime)
    } else {
      table = "joke"
      fieldNames = Array("id", "biz_id", "title", "content", "name", "profile_image", "like_num", "hate_num", "source", "publish_time", "data_create_time", "data_update_time", "data_delete_time")
      values = Array(data.srcId, data.id, data.title, data.content, data.userName, data.avatarUrl, data.likeNum, data.hateNum, "mahua", data.publishTime, data.dataCreateTime, data.dataUpdateTime, data.dataDeleteTime)
    }

    val columns = fieldNames.mkString("(", ",", ")")
    val placeholders = fieldNames.map(_ => "?").mkString("(", ",", ")")
    val sql = s"insert into $table $columns values $placeholders ON DUPLICATE KEY UPDATE data_update_time = now()"

    var affectRowCount = 0
    try {
      affectRowCount = runner.update(conn, sql, values: _*)
    } catch {
      case e: Exception =>
        logger.error(e.getMessage)
    }
    affectRowCount
  }
}