本文整理汇总了Scala中java.text.SimpleDateFormat类的典型用法代码示例。如果您正苦于以下问题:Scala SimpleDateFormat类的具体用法?Scala SimpleDateFormat怎么用?Scala SimpleDateFormat使用的例子?那么, 这里精选的类代码示例或许可以为您提供帮助。
在下文中一共展示了SimpleDateFormat类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Scala代码示例。
示例1: DateUtil
//设置package包名称以及导入依赖的类
package com.github.youzp
import java.text.SimpleDateFormat
import java.util.{Calendar, Date}
/** Helpers for rendering the current date (and dates in the recent past)
  * as "yyyy-MM-dd" strings in the default time zone.
  */
object DateUtil {

  // Shared date pattern, e.g. "2024-01-31".
  private val dateFmt = "yyyy-MM-dd"

  // SimpleDateFormat is not thread-safe, so each call builds a fresh
  // instance instead of sharing one across threads.
  private def format(date: Date): String = new SimpleDateFormat(dateFmt).format(date)

  /** Today's date as "yyyy-MM-dd". */
  def today(): String = format(new Date)

  /** Yesterday's date as "yyyy-MM-dd". */
  def yesterday(): String = daysAgo(1)

  /** The date `days` days before today as "yyyy-MM-dd".
    *
    * Fix: the original used Calendar.roll, which does NOT propagate into
    * larger fields — rolling DAY_OF_YEAR by -1 on Jan 1 wraps to Dec 31
    * of the SAME year. Calendar.add performs true calendar arithmetic.
    */
  def daysAgo(days: Int): String = {
    val calendar = Calendar.getInstance()
    calendar.add(Calendar.DAY_OF_YEAR, -days)
    format(calendar.getTime)
  }
}
示例2: WSGitHubProjectSummary
//设置package包名称以及导入依赖的类
package models
import java.text.SimpleDateFormat
import java.util.Date
import play.api.libs.functional.syntax._
import play.api.libs.json._
/** Summary of one GitHub repository as surfaced by the search API.
  *
  * @param name        short repository name
  * @param fullName    "owner/name" form
  * @param description optional free-text description
  * @param updated     last-update timestamp
  * @param url         repository HTML URL
  */
case class WSGitHubProjectSummary(name: String,
                                  fullName: String,
                                  description: Option[String],
                                  updated: Date,
                                  url: String) {

  /** The `updated` timestamp rendered as e.g. "05 Mar 2017" (default locale/zone). */
  def getFormatedDate: String = {
    val pattern = "dd MMM yyyy"
    new SimpleDateFormat(pattern).format(updated)
  }
}
// JSON (de)serialisation for WSGitHubProjectSummary.
object WSGitHubProjectSummary {
// Reads GitHub search-API snake_case field names into the case class.
implicit val gitHubProjectSummaryReads: Reads[WSGitHubProjectSummary] = (
(JsPath \ "name").read[String] and
(JsPath \ "full_name").read[String] and
(JsPath \ "description").readNullable[String] and
(JsPath \ "updated_at").read[Date] and
(JsPath \ "html_url").read[String]
)(WSGitHubProjectSummary.apply _)
// Writes GitHub-style keys back out. NOTE(review): asymmetry — the URL is
// read from "html_url" but written under "url"; confirm this is intended.
implicit val gitHubProjectSummaryWriters = new Writes[WSGitHubProjectSummary] {
def writes(gitHubProjectSummary: WSGitHubProjectSummary) = Json.obj(
"name" -> JsString(gitHubProjectSummary.name),
"full_name" -> JsString(gitHubProjectSummary.fullName),
"description" -> Json.toJson(gitHubProjectSummary.description),
"updated_at" -> gitHubProjectSummary.updated,
"url" -> JsString(gitHubProjectSummary.url)
)
}
}
// A page of GitHub search results: total hit count plus the returned items.
case class WSGitHubProjectsSummary(totalCount: Int,
projects: Seq[WSGitHubProjectSummary])
// JSON (de)serialisation for WSGitHubProjectsSummary.
object WSGitHubProjectsSummary {
// Maps the search API's "total_count"/"items" envelope onto the case class.
implicit val gitHubProjectsSummaryReads: Reads[WSGitHubProjectsSummary] = (
(JsPath \ "total_count").read[Int] and
(JsPath \ "items").read[Seq[WSGitHubProjectSummary]]
)(WSGitHubProjectsSummary.apply _)
implicit val gitHubProjectsSummaryWriters = new Writes[WSGitHubProjectsSummary] {
def writes(gitHubProjectsSummary: WSGitHubProjectsSummary) = Json.obj(
"total_count" -> JsNumber(gitHubProjectsSummary.totalCount),
"items" -> gitHubProjectsSummary.projects
)
}
}
示例3: GMMClusteringPersist
//设置package包名称以及导入依赖的类
package org.sparksamples.gmm
import java.text.SimpleDateFormat
import org.apache.spark.SparkConf
import org.apache.spark.ml.clustering.{GaussianMixture}
import org.apache.spark.sql.SparkSession
// Fits Gaussian Mixture Models (k = 5, fixed seed) to user and item feature
// vectors loaded from MovieLens-derived libsvm files, and persists each row's
// predicted cluster under a timestamped output directory.
object GMMClusteringPersist {
val PATH = "/home/ubuntu/work/spark-2.0.0-bin-hadoop2.7/"
val BASE = "./data/movie_lens_libsvm_2f"
// Timestamp captured once at object initialisation; baked into the output
// path so repeated runs do not collide.
val time = System.currentTimeMillis()
// NOTE(review): pattern uses hh (1-12) with no AM/PM marker, so afternoon
// runs can collide with morning ones on the same day — HH was likely meant.
val formatter = new SimpleDateFormat("dd_MM_yyyy_hh_mm_ss")
import java.util.Calendar
val calendar = Calendar.getInstance()
calendar.setTimeInMillis(time)
val date_time = formatter.format(calendar.getTime())
def main(args: Array[String]): Unit = {
val spConfig = (new SparkConf).setMaster("local[1]").setAppName("SparkApp").
set("spark.driver.allowMultipleContexts", "true")
val spark = SparkSession
.builder()
.appName("Spark SQL Example")
.config(spConfig)
.getOrCreate()
// Users: load libsvm features, fit 5-component GMM, persist predictions.
val datasetUsers = spark.read.format("libsvm").load(
BASE + "/movie_lens_2f_users_libsvm/part-00000")
datasetUsers.show(3)
val gmmUsers = new GaussianMixture().setK(5).setSeed(1L)
gmmUsers.setMaxIter(20)
val modelUsers = gmmUsers.fit(datasetUsers)
val predictedDataSetUsers = modelUsers.transform(datasetUsers)
val predictionsUsers = predictedDataSetUsers.select("prediction").rdd.map(x=> x(0))
predictionsUsers.saveAsTextFile(BASE + "/prediction/" + date_time + "/gmm_2f_users")
// Items: same pipeline. NOTE(review): maxIter is left at its default here,
// unlike the user model above — confirm whether that asymmetry is intended.
val dataSetItems = spark.read.format("libsvm").load(BASE +
"/movie_lens_2f_items_libsvm/part-00000")
val gmmItems = new GaussianMixture().setK(5).setSeed(1L)
val modelItems = gmmItems.fit(dataSetItems)
val predictedDataSetItems = modelItems.transform(dataSetItems)
val predictionsItems = predictedDataSetItems.select("prediction").rdd.map(x=> x(0))
predictionsItems.saveAsTextFile(BASE + "/prediction/" + date_time + "/gmm_2f_items")
spark.stop()
}
}
开发者ID:PacktPublishing,项目名称:Machine-Learning-with-Spark-Second-Edition,代码行数:59,代码来源:GMMClusteringPersist.scala
示例4: Application
//设置package包名称以及导入依赖的类
import java.text.SimpleDateFormat
import java.util.concurrent.TimeUnit
import java.util.{Date, Properties}
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerRecord}
// Kafka load generator: endlessly publishes one simulated telemetry record per
// sim ID in 10000-99999 to topic "newTest", then sleeps 18 minutes before the
// next full sweep. The loop never exits; stop the process externally.
object Application extends App {
// Timestamp format embedded in each produced record.
val formatter = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss")
val simIDs = 10000 to 99999 //99000
val brokers = "192.168.100.211:6667,192.168.100.212:6667,192.168.100.213:6667";
val topic = "newTest";
val props = new Properties
props.put("bootstrap.servers", brokers)
props.put("client.id", "Producer")
props.put("key.serializer", "org.apache.kafka.common.serialization.IntegerSerializer")
props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer")
val producer = new KafkaProducer[Integer, String](props)
while (true) {
for (simID <- simIDs) {
// Data is a project type not visible in this file; presumably a telemetry
// record (device id, sim id, timestamp, lon/lat, speed, ...) — confirm.
val data = Data(
"64846867247",
"?D" + simID,
formatter.format(new Date()),
121.503,
31.3655,
78,
0,
42,
52806.7
)
// println(Data.getString(data))
// Sent with a null key: only the serialized value is supplied.
producer.send(new ProducerRecord[Integer, String](topic, Data.getString(data)))
// TimeUnit.NANOSECONDS.sleep(100)
}
println("-------------------------------"+new Date())
TimeUnit.MINUTES.sleep(18)
}
}
示例5: WSGitHubCommit
//设置package包名称以及导入依赖的类
package models
import java.text.SimpleDateFormat
import java.util.Date
import play.api.libs.functional.syntax._
import play.api.libs.json.{JsPath, Json, Reads, Writes}
/** One commit record: committer e-mail plus the commit timestamp. */
case class WSGitHubCommit(committer: String, date: Date) {

  /** The commit date rendered as ISO-style "yyyy-MM-dd" (default time zone). */
  def getFormatedDate: String = new SimpleDateFormat("yyyy-MM-dd").format(date)
}
// JSON (de)serialisation for WSGitHubCommit.
object WSGitHubCommit {
// Maps the API's "email"/"date" fields onto (committer, date).
implicit val gitHubProjectSummaryReads: Reads[WSGitHubCommit] = (
(JsPath \ "email").read[String] and
(JsPath \ "date").read[Date]
)(WSGitHubCommit.apply _)
implicit val gitHubProjectSummaryWriters = new Writes[WSGitHubCommit] {
def writes(gitHubProjectSummary: WSGitHubCommit) = Json.obj(
"email" -> gitHubProjectSummary.committer,
"date" -> gitHubProjectSummary.date
)
}
}
示例6: Total
//设置package包名称以及导入依赖的类
package sample.stream_actor
import akka.Done
import akka.actor.Actor
import sample.stream_actor.Total.Increment
import java.text.SimpleDateFormat
import java.util.{Date, TimeZone}
// Message protocol for the Total actor.
object Total {
// value: number of new measurements; avg: the average wind speed reported
// with them; id: identifier of the reporting source.
case class Increment(value: Long, avg: Double, id: String)
}
/** Actor that accumulates a running total of measurement counts.
  *
  * On each [[Total.Increment]] it logs the increment, adds it to the
  * running total, prints a Zurich-local timestamped summary, and acks
  * the sender with [[Done]].
  */
class Total extends Actor {
  // Running sum of all `value`s received so far.
  var total: Long = 0

  // Timestamp formatter pinned to the Zurich zone. SimpleDateFormat is not
  // thread-safe, but an actor handles one message at a time, so hoisting a
  // single instance out of receive (instead of allocating per message) is safe.
  private val df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss")
  df.setTimeZone(TimeZone.getTimeZone("Europe/Zurich"))

  override def receive: Receive = {
    case Increment(value, avg, id) =>
      // Fix: the original log line misspelled "Received" as "Recieved".
      println(s"Received $value new measurements from id: $id - Avg wind speed is: $avg")
      total = total + value
      println(s"${df.format(new Date())} - Current total of all measurements: $total")
      sender ! Done
  }
}
示例7: VeChallengeRealTime
//设置package包名称以及导入依赖的类
package io.github.adrianulbona.ve
import java.text.SimpleDateFormat
import java.util.Date
import org.apache.spark.sql.SparkSession
import org.apache.spark.streaming.dstream.DStream
import org.apache.spark.streaming.twitter.TwitterUtils
import org.apache.spark.streaming.{Minutes, StreamingContext}
import twitter4j.Place
// Spark Streaming job: tracks tweets matching "challenge" and, every 2-minute
// batch, prints the top 5 country codes among geo-tagged tweets.
object VeChallengeRealTime {
def main(args: Array[String]) {
val spark = SparkSession.builder
.master("local[*]")
.appName("ve-challenge")
.getOrCreate()
val ssc = new StreamingContext(spark.sparkContext, Minutes(2))
val stream = TwitterUtils.createStream(ssc, None, Seq("challenge"))
// Keep only statuses that carry place information (getPlace may be null).
val places: DStream[Place] = stream.map(status => Option(status.getPlace))
.filter(optionPlace => optionPlace.isDefined)
.map(place => place.get)
// Count tweets per country code and report the top 5 each batch.
places.map(place => place.getCountryCode)
.countByValue()
.foreachRDD(batch => printStats(batch.sortBy({ case (_, count) => count }, ascending = false).take(5)))
ssc.start()
// Blocks until the streaming context is stopped; spark.stop() runs after.
ssc.awaitTermination()
spark.stop()
}
// Prints a blank line, a timestamp header, then one (countryCode, count) row each.
def printStats(top5Countries: Array[(String, Long)]) {
println()
println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()))
top5Countries.foreach(println)
}
}
示例8: SRibbonClient
//设置package包名称以及导入依赖的类
package com.bob.scalatour.netflix
import java.text.SimpleDateFormat
import com.netflix.config.ConfigurationManager._
import feign.codec.ErrorDecoder
import feign.ribbon.LoadBalancingTarget
import feign._
import org.json4s._
import org.json4s.native.JsonMethods._
/** Ribbon-load-balanced Feign client for the remote "risk" service.
  *
  * Fix: in the original a stray '}' closed the object right after
  * SErrorDecoder, leaving `risk` and `run` outside any enclosing
  * definition (invalid Scala). The object now closes at the end.
  */
object SRibbonClient {

  // Logical service name: used both as the Ribbon client id for the
  // configuration keys below and as the virtual host in the target URL.
  private def client: String = "risk"

  // Feign interface describing the remote endpoints.
  private trait Risk {
    @RequestLine("GET /calculator/users/together?userId={uid}&date={udate}") def together(@Param("uid") owner: String, @Param("udate") udate: String): String
    @RequestLine("GET /calculator/users/together?userId={uid}") def justUidtogether(@Param("uid") owner: String): String
    @RequestLine("GET /") def index(): String
  }

  // Ribbon client configuration; executes once at object initialisation,
  // before the `risk` proxy below is built.
  getConfigInstance.setProperty(client + ".ribbon.listOfServers", "******")
  getConfigInstance.setProperty(client + ".ribbon.ReadTimeout", 5000)
  getConfigInstance.setProperty(client + ".ribbon.MaxAutoRetries", 1)
  getConfigInstance.setProperty(client + ".ribbon.MaxAutoRetriesNextServer", 1)
  getConfigInstance.setProperty(client + ".ribbon.OkToRetryOnAllOperations", true)
  getConfigInstance.setProperty(client + ".ribbon.ConnectTimeout", 5000)

  // NOTE(review): this class only installs a json4s Formats with an
  // ISO-8601 date pattern; it does not override ErrorDecoder.decode,
  // so error decoding presumably falls back to the default — confirm.
  class SErrorDecoder extends ErrorDecoder {
    implicit val formats: Formats = new DefaultFormats {
      override def dateFormatter = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'")
    }
  }

  // Load-balanced proxy for the Risk interface.
  private val risk: Risk = Feign.builder()
    .errorDecoder(new SErrorDecoder)
    .target(LoadBalancingTarget.create(classOf[Risk], "http://" + client))

  /** Queries the "together" endpoint; omits the date parameter when `udate` is empty. */
  def run(uid: String, udate: String): String = {
    if (udate.length() == 0) {
      risk.justUidtogether(uid)
    } else {
      risk.together(uid, udate)
    }
  }
}
示例9: CheckResult
//设置package包名称以及导入依赖的类
package it.agilelab.bigdata.DataQuality.checks
import java.text.SimpleDateFormat
import java.util.Calendar
/** Outcome of one data-quality check run.
  *
  * @param comparedMetric  optional second metric used in the comparison
  * @param execData        timestamp of when this result object was built;
  *                        defaults to "now" as "yyyy-MM-dd:HHmmss"
  */
case class CheckResult(
    checkId: String,
    checkName: String,
    checkDescription: String,
    checkedFile: String,
    baseMetric: String,
    comparedMetric: Option[String],
    comparedThreshold: Double,
    status: String,
    message: String,
    execData: String = {
      // Fix: original pattern used hh (1-12) with no AM/PM marker, making
      // afternoon timestamps indistinguishable from morning ones; HH is
      // the unambiguous 0-23 hour field.
      val formatDate = new SimpleDateFormat("yyyy-MM-dd:HHmmss")
      val now = Calendar.getInstance().getTime
      formatDate.format(now)
    }
)
示例10: getType
//设置package包名称以及导入依赖的类
package it.agilelab.bigdata.DataQuality.metrics
import java.text.SimpleDateFormat
import java.util.Calendar
// Common shape of a computed data-quality metric result.
trait MetricResult {
// Unique identifier of this metric instance.
val id: String
val name: String
val result: Double
// Execution date ("yyyy-MM-dd"), captured once when the implementing
// instance is constructed — not at access time.
val execDate: String = {
val format = new SimpleDateFormat("yyyy-MM-dd")
format.format(Calendar.getInstance().getTime)
}
val sourceId: String
// Discriminator naming the concrete metric kind (e.g. "Column", "File").
def getType: String
}
/** Metric computed over a single column of a source. */
case class ColumnMetricResult(id: String,
                              name: String,
                              sourceId: String,
                              columnName: String,
                              result: Double) extends MetricResult {
  override def getType: String = "Column"
}
/** Metric computed over an entire file/source. */
case class FileMetricResult(id: String,
                            name: String,
                            sourceId: String,
                            result: Double) extends MetricResult {
  override def getType: String = "File"
}
/** Metric derived from other metrics via the recorded `formula`. */
case class ComposedMetricResult(id: String,
                                name: String,
                                sourceId: String,
                                formula: String,
                                result: Double) extends MetricResult {
  override def getType: String = "Composed"
}
示例11: GcdssTransformSuite
//设置package包名称以及导入依赖的类
package com.github.xubo245.gcdss.adam.postProcessing
import java.text.SimpleDateFormat
import java.util.Date
import com.github.xubo245.gcdss.utils.ADAMFunSuite
// End-to-end tests driving GcdssTransform.main on two ADAM inputs of
// different sizes; output names embed a millisecond timestamp so repeated
// runs do not overwrite each other.
class GcdssTransformSuite extends ADAMFunSuite {
sparkTest("test:cloudBWAnewg38L50c10Nhs20Paired12time10num16k1.rg.adam") {
// NOTE(review): stops the suite-provided SparkContext before calling main —
// presumably GcdssTransform creates its own context; confirm.
sc.stop()
val iString = new SimpleDateFormat("yyyyMMddHHmmssSSS").format(new Date())
val fqFile = "file/callVariant/input/sam/cloudBWAnewg38L50c10Nhs20Paired12time10num16k1.rg.adam"
// NOTE(review): backslash separators here, unlike the forward slashes used
// for the other paths — verify this path resolves on non-Windows hosts.
val vcfFile = "file\\callVariant\\input\\vcf\\vcfSelectAddSequenceDictionaryWithChr.adam"
// val out = "file/callVariant/output/sam/orderedrecalibrate_base_qualities.sam"
val out = "file/callVariant/output/sam/orderedrecalibrate_base_qualitiescloudBWAnewg38L50c10Nhs20Paired12time10num16k1.rg" + iString + ".sam"
GcdssTransform.main(Array(fqFile, out, vcfFile))
}
sparkTest("test:cloudBWAnewg38L50c2000000Nhs20Paired12time10num16k1.rg.adam") {
sc.stop()
val iString = new SimpleDateFormat("yyyyMMddHHmmssSSS").format(new Date())
val fqFile = "file/callVariant/input/sam/cloudBWAnewg38L50c2000000Nhs20Paired12time10num16k1.rg.adam"
val vcfFile = "file\\callVariant\\input\\vcf\\vcfSelectAddSequenceDictionaryWithChr.adam"
// val out = "file/callVariant/output/sam/orderedrecalibrate_base_qualities.sam"
val out = "file/callVariant/output/sam/orderedrecalibrate_base_qualitiescloudBWAnewg38L50c2000000Nhs20Paired12time10num16k1.rg" + iString + ".sam"
GcdssTransform.main(Array(fqFile, out, vcfFile))
}
}
示例12: ReadPostProcessingSuite
//设置package包名称以及导入依赖的类
package com.github.xubo245.gcdss.adam.postProcessing
import java.text.SimpleDateFormat
import java.util.Date
import com.github.xubo245.gcdss.utils.GcdssAlignmentFunSuite
import org.bdgenomics.adam.cli.Transform
// Tests for the individual read post-processing stages (sort, BQSR,
// indel realignment, duplicate marking).
class ReadPostProcessingSuite extends GcdssAlignmentFunSuite {
// Millisecond timestamp shared by all tests in this run, used to make
// output file names unique across runs.
val iString = new SimpleDateFormat("yyyyMMddHHmmssSSS").format(new Date())
test("test sort") {
val fqFile = "file/callVariant/input/sam/unordered.sam"
// val out = "file/callVariant/output/sam/ordered.sam"+iString
val out = "file/callVariant/output/sam/ordered"+iString+".sam"
ReadPostProcessing.sort(sc, fqFile, out)
}
test("test recalibrate_base_qualities with known_snps") {
val fqFile = "file/callVariant/input/sam/unordered.chr.sam"
// NOTE(review): backslash path separators, unlike the other paths —
// verify this resolves on non-Windows hosts.
val vcfFile = "file\\callVariant\\input\\vcf\\vcfSelectAddSequenceDictionaryWithChr.adam"
// val out = "file/callVariant/output/sam/orderedrecalibrate_base_qualities.sam"
val out = "file/callVariant/output/sam/orderedrecalibrate_base_qualities.sam"
ReadPostProcessing.BQSR(sc, fqFile, out,vcfFile)
}
test("test realign_indels") {
val fqFile = "file/callVariant/input/sam/unordered.sam"
// val out = "file/callVariant/output/sam/realign_indels.sam"
val out = "file/callVariant/output/sam/realign_indels"+iString+".sam"
ReadPostProcessing.realignIndel(sc, fqFile, out)
}
test("test mark_duplicate_reads") {
val fqFile = "file/callVariant/input/sam/unordered.sam"
// val out = "file/callVariant/output/sam/mark_duplicate_reads.sam"
val out = "file/callVariant/output/sam/mark_duplicate_reads"+iString+".sam"
// NOTE(review): this test is named mark_duplicate_reads but calls
// ReadPostProcessing.sort — almost certainly a copy-paste bug; the
// duplicate-marking method isn't visible here, so fix at the source.
ReadPostProcessing.sort(sc, fqFile, out)
}
}
示例13: DatasenderRunResultWriter
//设置package包名称以及导入依赖的类
package org.hpi.esb.datasender.output.writers
import java.text.SimpleDateFormat
import java.util.Date
import org.apache.kafka.clients.producer.KafkaProducer
import org.hpi.esb.commons.config.Configs
import org.hpi.esb.commons.config.Configs.BenchmarkConfig
import org.hpi.esb.commons.output.{CSVOutput, Tabulator}
import org.hpi.esb.commons.util.Logging
import org.hpi.esb.datasender.config._
import org.hpi.esb.datasender.metrics.MetricHandler
import org.hpi.esb.datasender.output.model.{ConfigValues, DatasenderResultRow, ResultValues}
// Fetches datasender run metrics and writes them as a timestamped CSV file,
// also logging them as a formatted table.
// NOTE(review): the `config` and `benchmarkConfig` constructor parameters are
// unused here — outputResults reads ConfigHandler/Configs globals instead;
// confirm which source of configuration is intended.
class DatasenderRunResultWriter(config: Config, benchmarkConfig: BenchmarkConfig,
kafkaProducer: KafkaProducer[String, String]) extends Logging {
// Timestamp captured at construction; used to derive the result file name.
val currentTime = new SimpleDateFormat("yyyyMMddHHmmss").format(new Date())
def outputResults(topicOffsets: Map[String, Long], expectedRecordNumber: Int): Unit = {
val metricHandler = new MetricHandler(kafkaProducer, topicOffsets, expectedRecordNumber)
val metrics = metricHandler.fetchMetrics()
val configValues = ConfigValues.get(ConfigHandler.config, Configs.benchmarkConfig)
val resultValues = new ResultValues(metrics)
val dataSenderResultRow = DatasenderResultRow(configValues, resultValues)
val table = dataSenderResultRow.toTable()
CSVOutput.write(table, ConfigHandler.resultsPath, ConfigHandler.resultFileName(currentTime))
logger.info(Tabulator.format(table))
}
}
示例14: ValidatorRunResultWriter
//设置package包名称以及导入依赖的类
package org.hpi.esb.datavalidator.output.writers
import java.text.SimpleDateFormat
import java.util.Date
import org.hpi.esb.commons.config.Configs
import org.hpi.esb.commons.output.{CSVOutput, Tabulator}
import org.hpi.esb.commons.util.Logging
import org.hpi.esb.datavalidator.configuration.Config.{resultFileName, resultsPath}
import org.hpi.esb.datavalidator.output.model.{ConfigValues, ResultValues}
import org.hpi.esb.datavalidator.validation.QueryValidationState
// Writes validation results as a timestamped CSV file and logs them as a
// formatted table.
class ValidatorRunResultWriter extends Logging {
// Timestamp captured at construction; used to derive the result file name.
val currentTimeString: String = new SimpleDateFormat("yyyyMMddHHmmss").format(new Date())
def outputResults(queryValidationStates: List[QueryValidationState], startTime: Long): Unit = {
// endTime is in epoch seconds; presumably startTime is also seconds so
// runTime is a duration in seconds — confirm against the caller.
val endTime = System.currentTimeMillis() / 1000
val runTime = endTime - startTime
val table = createOutputTable(queryValidationStates, runTime)
CSVOutput.write(table, resultsPath, resultFileName(currentTimeString))
logger.info(Tabulator.format(table))
}
// Builds the full output table: one header row (config + result columns)
// followed by one row per query validation state.
def createOutputTable(queryValidationStates: List[QueryValidationState], runTime: Long): List[List[String]] = {
val configValuesHeader = ConfigValues.header
val configValues = ConfigValues.get(Configs.benchmarkConfig).toList()
val resultValuesHeader = QueryValidationState.getHeader ++ List(ResultValues.VALIDATOR_RUNTIME)
val resultValues = queryValidationStates.map(
queryValidationState => getResultValues(queryValidationState, runTime)
)
val header = configValuesHeader ++ resultValuesHeader
// Every data row repeats the (shared) config values before its results.
val rows = resultValues.map(resultValueRow => configValues ++ resultValueRow)
val table = header :: rows
table
}
// Measured results for one state, with the shared run time appended.
def getResultValues(queryValidationState: QueryValidationState, runTime: Long): List[String] = {
queryValidationState.getMeasuredResults ++ List(runTime.toString)
}
}
示例15: getCurrentTime
//设置package包名称以及导入依赖的类
package com.gochinatv.ytda
import java.text.SimpleDateFormat
import java.util.Calendar
// Renders the current wall-clock time using the caller-supplied formatter.
def getCurrentTime(format: SimpleDateFormat): String =
  format.format(Calendar.getInstance().getTime())
// Returns `date` shifted by `interval` days (negative values go backwards),
// both input and output in "yyyyMMdd" format.
// Fix: the original referenced an undefined `yyyyMMdd` formatter (lost with
// the enclosing object in this extract); a local SimpleDateFormat with that
// pattern — which the identifier literally named — is created here instead.
def dateAdd(date: String, interval: Int): String = {
  val fmt = new SimpleDateFormat("yyyyMMdd")
  val calendar = Calendar.getInstance()
  calendar.setTime(fmt.parse(date))
  calendar.add(Calendar.DATE, interval)
  fmt.format(calendar.getTime)
}
}