本文整理汇总了Scala中java.util.Properties类的典型用法代码示例。如果您正苦于以下问题:Scala Properties类的具体用法?Scala Properties怎么用?Scala Properties使用的例子?那么, 这里精选的类代码示例或许可以为您提供帮助。
在下文中一共展示了Properties类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Scala代码示例。
示例1: Access
//设置package包名称以及导入依赖的类
package hu.blackbelt.cd.bintray.deploy
import java.nio.file.{Files, StandardCopyOption}
import java.util.{Properties, UUID}
import awscala.s3.S3
import com.amazonaws.regions.Regions
import com.amazonaws.services.s3.model.GetObjectRequest
import hu.blackbelt.cd.bintray.VFS.FS
/** Fetches `bintray-deploy/access.properties` from the `blackbelt-secrets` S3 bucket
  * and publishes every entry as a JVM system property (`sys.props`).
  */
object Access {
// Property keys under which the credentials are exposed.
val bintray_organization = "bintray.organization"
val bintray_user = "bintray.user"
val bintray_apikey = "bintray.apikey"
val aws_accessKeyId = "aws.accessKeyId"
val aws_secretKey = "aws.secretKey"
// Downloads the secrets file and loads it into sys.props.
// Requires AWS credentials to be resolvable by the default provider chain.
def collect = {
implicit val s3 = S3()(com.amazonaws.regions.Region.getRegion(Regions.EU_CENTRAL_1))
// Unique scratch path on the custom FS. createDirectories makes an (empty) directory at
// `destination`; the subsequent Files.copy with REPLACE_EXISTING replaces that empty
// directory with the downloaded file.
// NOTE(review): this only works while the directory stays empty — confirm intended.
val destination = FS.getPath(s"/tmp/${UUID.randomUUID().toString}")
Files.createDirectories(destination)
val s3Object = s3.getObject(new GetObjectRequest("blackbelt-secrets", "bintray-deploy/access.properties"))
Files.copy(s3Object.getObjectContent, destination, StandardCopyOption.REPLACE_EXISTING)
import scala.collection.JavaConverters._
val prop = new Properties()
prop.load(Files.newInputStream(destination))
// Copy every loaded entry into the JVM-wide system properties.
prop.entrySet().asScala.foreach {
(entry) => {
sys.props += ((entry.getKey.asInstanceOf[String], entry.getValue.asInstanceOf[String]))
}
}
}
}
示例2: HelloWorldSpring
//设置package包名称以及导入依赖的类
package helloworld
import java.io.FileInputStream
import java.util.Properties
import org.springframework.beans.factory.BeanFactory
import org.springframework.beans.factory.support.{DefaultListableBeanFactory, PropertiesBeanDefinitionReader}
/** Spring DI demo: loads bean definitions from a properties file, wires a
  * MessageProvider into a MessageRenderer, and renders the message.
  */
object HelloWorldSpring extends App {
  @throws(classOf[Exception])
  val factory: BeanFactory = getBeanFactory
  val mr: MessageRenderer = factory.getBean("renderer").asInstanceOf[MessageRenderer]
  val mp: MessageProvider = factory.getBean("provider").asInstanceOf[MessageProvider]
  mr.setMessageProvider(mp)
  mr.render

  /** Builds a bean factory from `beans.properties`. */
  @throws(classOf[Exception])
  private def getBeanFactory: BeanFactory = {
    val factory: DefaultListableBeanFactory = new DefaultListableBeanFactory
    val rdr: PropertiesBeanDefinitionReader = new PropertiesBeanDefinitionReader(factory)
    val props: Properties = new Properties
    // fix: the input stream was never closed (file-handle leak)
    val in = new FileInputStream("springdi_scala/src/helloworld/beans.properties")
    try props.load(in)
    finally in.close()
    rdr.registerBeanDefinitions(props)
    factory // fix: dropped the non-idiomatic `return`
  }
}
示例3: SimpleKafkaProducer
//设置package包名称以及导入依赖的类
package com.example
import java.util.Properties
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerRecord}
import org.json4s.{DefaultFormats, jackson}
/** Publishes arbitrary messages as JSON to a single Kafka topic.
  * A fresh producer is created and closed on every `send` call.
  */
class SimpleKafkaProducer(kafkaSocket: Socket, topic: String, brokers: Int = 1) {
  private val serializer = "org.apache.kafka.common.serialization.StringSerializer"

  // Minimal producer settings; the broker address is the socket's toString.
  private def configuration = {
    val props = new Properties()
    props.put("bootstrap.servers", kafkaSocket.toString())
    props.put("key.serializer", serializer)
    props.put("value.serializer", serializer)
    props
  }

  /** Serializes `message` to JSON (json4s/jackson) and sends it to [[topic]]. */
  def send[T <: AnyRef](message: T) = {
    implicit val serialization = jackson.Serialization
    implicit val formats = DefaultFormats
    val producer = new KafkaProducer[String, String](configuration)
    try {
      val jsonMessage = serialization.write[T](message)
      val data = new ProducerRecord[String, String](topic, jsonMessage)
      producer.send(data)
      () // preserve the original Unit result
    } finally {
      // fix: close the producer even when serialization or send throws (resource leak)
      producer.close()
    }
  }
}
示例4: Mailer
//设置package包名称以及导入依赖的类
package org.kirhgoff.lastobot
import java.util.Properties
import javax.mail.{Message, Session}
import javax.mail.internet.{InternetAddress, MimeMessage}
import scala.io.Source
/** Sends HTML mail through Gmail SMTP using credentials from ~/.lastobot/.mail. */
object Mailer {
  val host = "smtp.gmail.com"
  val port = "587"
  val address = "[email protected]"
  val username = "lastobot"
  // Password is read once at object initialization.
  // NOTE(review): the Source is never closed; acceptable for a one-shot startup read.
  val password = Source.fromFile(System.getProperty("user.home")
    + "/.lastobot/.mail").getLines.mkString

  /** Sends `text` as a text/html mail with `subject` to [[address]]. */
  def sendMail(text: String, subject: String) = {
    val properties = new Properties()
    properties.put("mail.smtp.port", port)
    properties.put("mail.smtp.auth", "true")
    properties.put("mail.smtp.starttls.enable", "true")
    val session = Session.getDefaultInstance(properties, null)
    val message = new MimeMessage(session)
    message.addRecipient(Message.RecipientType.TO, new InternetAddress(address))
    message.setSubject(subject)
    message.setContent(text, "text/html")
    val transport = session.getTransport("smtp")
    try {
      transport.connect(host, username, password)
      transport.sendMessage(message, message.getAllRecipients)
    } finally {
      // fix: the transport was previously left open (SMTP connection leak)
      transport.close()
    }
  }

  def main(args: Array[String]) = {
    sendMail("aaaa", "bbb")
  }
}
示例5: ReadyKafkaProducer
//设置package包名称以及导入依赖的类
package com.bencassedy.readykafka.producer
import java.util.Properties
import java.util.concurrent.TimeUnit
import org.apache.kafka.clients.producer.{ProducerRecord, KafkaProducer}
import org.apache.kafka.common.serialization.{StringSerializer, StringDeserializer}
/** Thin wrapper around a string/string KafkaProducer targeting a local broker. */
class ReadyKafkaProducer {

  /** Producer settings; `brokerList` defaults to a single local broker. */
  case class KafkaProducerConfigs(brokerList: String = "127.0.0.1:9092") {
    val properties = new Properties()
    Seq(
      "bootstrap.servers" -> brokerList,
      "key.serializer" -> classOf[StringSerializer],
      "value.serializer" -> classOf[StringSerializer]
    ).foreach { case (name, value) => properties.put(name, value) }
    // Optional tunables kept from the original, still disabled:
    // serializer.class=classOf[StringDeserializer], batch.size=16384,
    // linger.ms=1, buffer.memory=33554432
  }

  val producer = new KafkaProducer[String, String](KafkaProducerConfigs().properties)

  /** Sends every message to `topic`, then closes the producer with a 100 ms grace period. */
  def produce(topic: String, messages: Iterable[String]): Unit = {
    for (message <- messages)
      producer.send(new ProducerRecord[String, String](topic, message))
    producer.close(100L, TimeUnit.MILLISECONDS)
  }
}
示例6: fillFromEnv
//设置package包名称以及导入依赖的类
package hu.blackbelt.cd.bintray.deploy
import java.io.{File, FileInputStream}
import java.util.Properties
import org.scalatest.{BeforeAndAfter, Suite}
/** Test mix-in that publishes Bintray/AWS credentials as JVM system properties
  * before each run, sourced from env.properties or environment variables.
  */
trait Creds extends BeforeAndAfter {
  this: Suite =>

  /** Copies the known credential keys from environment variables into `prop`.
    * The env-var name is the property key with '.' replaced by '_'
    * (e.g. "aws.accessKeyId" -> "aws_accessKeyId"); missing variables are skipped.
    */
  def fillFromEnv(prop: Properties) = {
    // fix: the original stored under key.replace('_', '.'), which is a no-op for
    // these dot-separated keys — store under the original key directly, and use
    // foreach (side effect) instead of map.
    def put(key: String) = sys.env.get(key.replace('.', '_')).foreach(prop.put(key, _))
    put(Access.aws_accessKeyId)
    put(Access.aws_secretKey)
    put(Access.bintray_organization)
    put(Access.bintray_user)
    put(Access.bintray_apikey)
  }

  // Prefer env.properties when it exists; fall back to environment variables.
  before {
    import scala.collection.JavaConverters._
    val prop = new Properties()
    val propsFile = new File("env.properties")
    if (propsFile.exists()) {
      // fix: the input stream was never closed (file-handle leak)
      val in = new FileInputStream(propsFile)
      try prop.load(in)
      finally in.close()
    } else {
      fillFromEnv(prop)
    }
    prop.entrySet().asScala.foreach { entry =>
      sys.props += ((entry.getKey.asInstanceOf[String], entry.getValue.asInstanceOf[String]))
    }
  }
}
示例7: Generator
//设置package包名称以及导入依赖的类
package data.processing.kafkagenerator
import java.util.Properties
import java.util.concurrent.TimeUnit
import com.typesafe.config.ConfigFactory
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerRecord}
import com.github.andr83.scalaconfig._
import com.yammer.metrics.core.{MetricName, MetricsRegistry}
import data.processing.avro.AvroEncoder
import scala.concurrent.forkjoin.ThreadLocalRandom
/** Generates random click events and publishes them to Kafka at a throttled rate. */
object Generator {
  val metricsRegistry = new MetricsRegistry
  val config = ConfigFactory.load()
  // Kafka client settings come straight from the "kafka-client" config section.
  val props = config.getConfig("kafka-client").as[Properties]
  val topic = config.getString("kafka-client.topic")
  val numberOfUsers = config.getInt("generator.number.of.users")
  val urls = config.getStringList("generator.urls")
  val eventTypes = config.getStringList("generator.event.types")
  val throughput = config.getInt("generator.throughput")
  val avroEncoder = new AvroEncoder("/event-record.json")

  /** Builds one random event as (id, Avro-encoded (id, ts, userId, url, eventType)). */
  def generateEvent() = {
    val id = ThreadLocalRandom.current().nextLong()
    val ts = java.lang.System.currentTimeMillis()
    val userId = ThreadLocalRandom.current().nextInt(numberOfUsers).toHexString
    val url = urls.get(ThreadLocalRandom.current().nextInt(urls.size()))
    val eventType = eventTypes.get(ThreadLocalRandom.current().nextInt(eventTypes.size()))
    (id, avroEncoder.encode((id, ts, userId, url, eventType)))
  }

  def main(args: Array[String]): Unit = {
    val meter = metricsRegistry.newMeter(new MetricName("", "", ""), "", TimeUnit.SECONDS)
    val producer = new KafkaProducer[String, Array[Byte]](props)
    // fix: flush()/close() were placed after the infinite loop and therefore
    // unreachable; drain and close the producer from a shutdown hook instead.
    sys.addShutdownHook {
      producer.flush()
      producer.close()
    }
    // Throttle to the configured mean rate (events/second); otherwise back off briefly.
    while (true) {
      if (meter.meanRate < throughput) {
        meter.mark()
        val event = generateEvent()
        producer.send(new ProducerRecord[String, Array[Byte]](topic, event._1.toString, event._2))
      } else {
        Thread.sleep(1)
      }
    }
  }
}
示例8: fromData
//设置package包名称以及导入依赖的类
package akka.stream.alpakka.geode.internal.pdx
import java.util.Properties
import org.apache.geode.cache.Declarable
import org.apache.geode.pdx.{PdxReader, PdxSerializer, PdxWriter}
// Deserializes an instance of `clazz` from the PDX stream.
// 1. Try the serializer registered for exactly `clazz`.
// 2. Otherwise scan all serializers for one whose registered class is PDX-compatible
//    with `clazz` (isPdxCompat); on a successful non-null read, cache that serializer
//    for `clazz` via register so the next lookup hits step 1.
// Returns null (orNull) when no serializer can produce a value.
// NOTE(review): `serializers`, `isPdxCompat`, and `register` belong to the enclosing
// class, which is outside this excerpt — semantics assumed from usage here.
override def fromData(clazz: Class[_], in: PdxReader): AnyRef =
serializers
.get(clazz)
.map(_.fromData(clazz, in))
.orElse(serializers.collectFirst {
case (c, ser) if isPdxCompat(c, clazz) =>
val v = ser.fromData(clazz, in)
if (v != null) register(ser, clazz)
v
})
.orNull
// Geode Declarable lifecycle hook — this serializer consumes no configuration.
override def init(props: Properties): Unit = {}
}
示例9: ClickhouseConnectionFactory
//设置package包名称以及导入依赖的类
package io.clickhouse.ext
import java.util.Properties
import ru.yandex.clickhouse.ClickHouseDataSource
import ru.yandex.clickhouse.settings.ClickHouseProperties
/** Caches one ClickHouse JDBC data source per (host, port) pair. */
object ClickhouseConnectionFactory extends Serializable{

  // Cache of created data sources keyed by (host, port).
  // NOTE(review): mutable.Map is not thread-safe — confirm single-threaded access.
  private val dataSources = scala.collection.mutable.Map[(String, Int), ClickHouseDataSource]()

  /** Returns the data source for (host, port), creating and caching it on first use. */
  def get(host: String, port: Int = 8123): ClickHouseDataSource =
    // fix: replaces the hand-rolled Some/None cache lookup with the idiomatic
    // getOrElseUpdate, which has identical get-then-insert behavior.
    dataSources.getOrElseUpdate((host, port), createDatasource(host, port = port))

  // Builds a JDBC data source; `dbO`, when present, pins the target database.
  private def createDatasource(host: String, dbO: Option[String] = None, port: Int = 8123) = {
    val props = new Properties()
    // fix: `map` used purely for a side effect — use foreach.
    dbO.foreach(db => props.setProperty("database", db))
    val clickHouseProps = new ClickHouseProperties(props)
    new ClickHouseDataSource(s"jdbc:clickhouse://$host:$port", clickHouseProps)
  }
}
示例10: DataDriver
//设置package包名称以及导入依赖的类
package org.hpi.esb.datasender
import java.util.Properties
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerConfig}
import org.hpi.esb.commons.config.Configs
import org.hpi.esb.commons.util.Logging
import org.hpi.esb.datasender.config._
import org.hpi.esb.datasender.output.writers.DatasenderRunResultWriter
import scala.io.Source
/** Wires the benchmark data pipeline: a file-backed DataReader feeding a Kafka
  * producer via a DataProducer, with results recorded by a DatasenderRunResultWriter.
  */
class DataDriver() extends Logging {
// Field initialization order matters: reader and producer properties are built
// first, then the producer, then the components that depend on it.
private val topics = Configs.benchmarkConfig.sourceTopics
private val config = ConfigHandler.config
private val dataReader = createDataReader(config.dataReaderConfig)
private val kafkaProducerProperties = createKafkaProducerProperties(config.kafkaProducerConfig)
private val kafkaProducer = new KafkaProducer[String, String](kafkaProducerProperties)
private val resultHandler = new DatasenderRunResultWriter(config, Configs.benchmarkConfig, kafkaProducer)
private val dataProducer = createDataProducer(kafkaProducer, dataReader, resultHandler)
// Runs the configured producer until it finishes its workload.
def run(): Unit = {
dataProducer.execute()
}
// Translates the app-level producer config into Kafka ProducerConfig properties.
// NOTE(review): the .get calls assume every Option in the config is populated;
// a missing value throws here — confirm config validation happens upstream.
def createKafkaProducerProperties(kafkaProducerConfig: KafkaProducerConfig): Properties = {
val props = new Properties()
props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaProducerConfig.bootstrapServers.get)
props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, kafkaProducerConfig.keySerializerClass.get)
props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, kafkaProducerConfig.valueSerializerClass.get)
props.put(ProducerConfig.ACKS_CONFIG, kafkaProducerConfig.acks.get)
props.put(ProducerConfig.BATCH_SIZE_CONFIG, kafkaProducerConfig.batchSize.get.toString)
props.put(ProducerConfig.LINGER_MS_CONFIG, kafkaProducerConfig.lingerTime.toString)
props.put(ProducerConfig.BUFFER_MEMORY_CONFIG, kafkaProducerConfig.bufferMemorySize.toString)
props
}
// Opens the input file and builds the column-oriented reader.
// NOTE(review): the Source opened here is owned (and presumably closed) by DataReader.
def createDataReader(dataReaderConfig: DataReaderConfig): DataReader = {
new DataReader(Source.fromFile(dataReaderConfig.dataInputPath.get),
dataReaderConfig.columns.get,
dataReaderConfig.columnDelimiter.get,
dataReaderConfig.dataColumnStart.get,
dataReaderConfig.readInRam)
}
// Assembles the DataProducer from benchmark timing config and sender settings.
def createDataProducer(kafkaProducer: KafkaProducer[String, String], dataReader: DataReader,
resultHandler: DatasenderRunResultWriter): DataProducer = {
val numberOfThreads = config.dataSenderConfig.numberOfThreads.get
val sendingInterval = Configs.benchmarkConfig.sendingInterval
val sendingIntervalTimeUnit = Configs.benchmarkConfig.getSendingIntervalTimeUnit()
val duration = Configs.benchmarkConfig.duration
val durationTimeUnit = Configs.benchmarkConfig.getDurationTimeUnit()
val singleColumnMode = config.dataSenderConfig.singleColumnMode
new DataProducer(resultHandler, kafkaProducer, dataReader, topics, numberOfThreads,
sendingInterval, sendingIntervalTimeUnit, duration, durationTimeUnit, singleColumnMode)
}
}
示例11: EmailParser
//设置package包名称以及导入依赖的类
package uk.pkerrigan.dmarcparser
import java.io.ByteArrayInputStream
import java.nio.charset.CodingErrorAction
import java.util.Properties
import java.util.zip.{GZIPInputStream, ZipInputStream}
import javax.activation.DataSource
import javax.mail.Session
import javax.mail.internet.MimeMessage
import scala.collection.JavaConverters._
import org.apache.commons.mail.util.MimeMessageParser
import uk.pkerrigan.dmarcparser.report.Feedback
import scala.io._
/** Extracts a DMARC aggregate report from a raw email: the first attachment is
  * un-(g)zipped and handed to the XML report parser.
  */
class EmailParser(parser: ParserTrait = new Parser()) extends EmailParserTrait{

  // Replace undecodable bytes instead of failing when reading attachment streams.
  implicit val codec = Codec("UTF-8")
  codec.onMalformedInput(CodingErrorAction.REPLACE)
  codec.onUnmappableCharacter(CodingErrorAction.REPLACE)

  /** Parses a raw RFC-822 message; returns the report from its first attachment, if any. */
  def parseEmail(email: String): Option[Feedback] = {
    val s = Session.getDefaultInstance(new Properties())
    // NOTE(review): getBytes uses the platform default charset — confirm inputs are ASCII/UTF-8.
    val is = new ByteArrayInputStream(email.getBytes)
    val message = new MimeMessage(s, is)
    val messageParser = new MimeMessageParser(message).parse()
    messageParser.getAttachmentList.asScala.headOption.flatMap(extract)
  }

  // Dispatch on the attachment's MIME type.
  // fix: the original used always-true back-quoted patterns (`case \`a\` if ...`),
  // which obscured that this is a plain dispatch on getContentType.
  private def extract(a: DataSource): Option[Feedback] = a.getContentType match {
    case "application/gzip" | "application/x-gzip" => extractGzip(a)
    case "application/zip" | "application/x-zip-compressed" => extractZip(a)
    case _ => None
  }

  // Reads the first zip entry as XML; empty content yields None.
  private def extractZip(a: DataSource): Option[Feedback] = {
    val zip = new ZipInputStream(a.getInputStream)
    zip.getNextEntry
    val rawXml = Source.fromInputStream(zip).mkString
    if (rawXml == "") None else Some(parser.parse(rawXml))
  }

  // Reads the gzip stream as XML; empty content yields None.
  private def extractGzip(a: DataSource): Option[Feedback] = {
    val zip = new GZIPInputStream(a.getInputStream)
    val rawXml = Source.fromInputStream(zip).mkString
    if (rawXml == "") None else Some(parser.parse(rawXml))
  }
}
示例12:
//设置package包名称以及导入依赖的类
package com.github.simonthecat.eventdrivenorders.productservice
import java.util.Properties
/** Kafka client configurations for the product service. */
package object kafka {

  /** Consumer settings for order messages (string keys/values, auto-commit every 1 s). */
  val orderConsumerCfg: Properties = {
    val cfg = new Properties()
    Seq(
      "bootstrap.servers" -> "localhost:9092",
      "group.id" -> "product.consumers",
      "enable.auto.commit" -> "true",
      "auto.commit.interval.ms" -> "1000",
      "session.timeout.ms" -> "30000",
      "key.deserializer" -> "org.apache.kafka.common.serialization.StringDeserializer",
      "value.deserializer" -> "org.apache.kafka.common.serialization.StringDeserializer"
    ).foreach { case (key, value) => cfg.put(key, value) }
    cfg
  }

  /** Producer settings for confirmation messages (acks=all, no retries). */
  val confirmationProducerCfg: Properties = {
    val cfg = new Properties()
    Seq(
      "bootstrap.servers" -> "localhost:9092",
      "acks" -> "all",
      "retries" -> "0",
      "batch.size" -> "16384",
      "linger.ms" -> "1",
      "buffer.memory" -> "33554432",
      "key.serializer" -> "org.apache.kafka.common.serialization.StringSerializer",
      "value.serializer" -> "org.apache.kafka.common.serialization.StringSerializer"
    ).foreach { case (key, value) => cfg.put(key, value) }
    cfg
  }
}
示例13:
//设置package包名称以及导入依赖的类
package com.github.simonthecat.eventdrivenorders.orderservice
import java.util.Properties
/** Kafka client configurations for the order service. */
package object kafka {

  // fix: the two consumer configs below were duplicated verbatim; both are now
  // built by this single helper. Each call returns a fresh Properties instance,
  // exactly as before.
  private def consumerCfg(groupId: String): Properties = {
    val props = new Properties()
    props.put("bootstrap.servers", "localhost:9092")
    props.put("group.id", groupId)
    props.put("enable.auto.commit", "true")
    props.put("auto.commit.interval.ms", "1000")
    props.put("session.timeout.ms", "30000")
    props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer")
    props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer")
    props
  }

  /** Consumer settings for order messages. */
  val orderConsumerCfg: Properties = consumerCfg("order.consumers")

  /** Consumer settings for store confirmations (same group id as the order consumer). */
  val storeConfirmationConsumer: Properties = consumerCfg("order.consumers")

  /** Producer settings (acks=all, no retries). */
  val producerCfg: Properties = {
    val props = new Properties()
    props.put("bootstrap.servers", "localhost:9092")
    props.put("acks", "all")
    props.put("retries", "0")
    props.put("batch.size", "16384")
    props.put("linger.ms", "1")
    props.put("buffer.memory", "33554432")
    props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer")
    props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer")
    props
  }
}
示例14:
//设置package包名称以及导入依赖的类
package com.github.eventdrivenorders.api
import java.util.Properties
/** Kafka client configurations for the API service. */
package object kafka {

  /** Producer settings (acks=all, no retries, string serializers). */
  val producerCfg: Properties = {
    val cfg = new Properties()
    Seq(
      "bootstrap.servers" -> "localhost:9092",
      "acks" -> "all",
      "retries" -> "0",
      "batch.size" -> "16384",
      "linger.ms" -> "1",
      "buffer.memory" -> "33554432",
      "key.serializer" -> "org.apache.kafka.common.serialization.StringSerializer",
      "value.serializer" -> "org.apache.kafka.common.serialization.StringSerializer"
    ).foreach { case (key, value) => cfg.put(key, value) }
    cfg
  }

  /** Consumer settings for order-status messages (auto-commit every 1 s). */
  val orderStatusConsumer: Properties = {
    val cfg = new Properties()
    Seq(
      "bootstrap.servers" -> "localhost:9092",
      "group.id" -> "api.consumers",
      "enable.auto.commit" -> "true",
      "auto.commit.interval.ms" -> "1000",
      "session.timeout.ms" -> "30000",
      "key.deserializer" -> "org.apache.kafka.common.serialization.StringDeserializer",
      "value.deserializer" -> "org.apache.kafka.common.serialization.StringDeserializer"
    ).foreach { case (key, value) => cfg.put(key, value) }
    cfg
  }
}
示例15: Helpers
//设置package包名称以及导入依赖的类
package com.github.mmolimar.vkitm.utils
import java.util.Properties
import java.util.concurrent.{CancellationException, TimeUnit, Future => JFuture}
import com.typesafe.config.Config
import org.jboss.netty.util.{HashedWheelTimer, Timeout, TimerTask}
import scala.concurrent.{Future, Promise}
import scala.util.Try
/** Misc adapters: java Future -> scala Future polling, and Config -> Properties. */
object Helpers {

  private val pollIntervalMs = 50L
  // Shared timer driving all asScala polls.
  private val timer = new HashedWheelTimer(pollIntervalMs, TimeUnit.MILLISECONDS)

  /** Adapts a java.util.concurrent.Future to a scala.concurrent.Future by
    * polling it every [[pollIntervalMs]] ms on the shared wheel timer.
    */
  implicit class JFutureHelpers[T](jf: JFuture[T]) {
    def asScala: Future[T] = {
      val promise = Promise[T]()

      // Completes the promise once the Java future is done/cancelled;
      // otherwise re-arms the timer for another poll.
      def checkCompletion(): Unit = {
        if (jf.isCancelled) {
          promise.failure(new CancellationException())
        } else if (jf.isDone) {
          // jf.get does not block here since isDone; Try captures ExecutionException.
          promise.complete(Try(jf.get))
        } else {
          scheduleTimeout()
        }
        ()
      }

      def scheduleTimeout(): Unit = {
        timer.newTimeout(new TimerTask {
          override def run(timeout: Timeout): Unit = checkCompletion()
        }, pollIntervalMs, TimeUnit.MILLISECONDS)
        ()
      }

      checkCompletion()
      promise.future
    }
  }

  /** Flattens a Typesafe Config into java.util.Properties (values unwrapped). */
  implicit def propsFromConfig(config: Config): Properties = {
    // fix: replaced the deprecated implicit scala.collection.JavaConversions and
    // collection.breakOut with explicit JavaConverters; same entries end up in props.
    import scala.collection.JavaConverters._
    val props = new Properties()
    config.entrySet().asScala.foreach { entry =>
      props.put(entry.getKey, entry.getValue.unwrapped())
    }
    props
  }
}