This article collects typical usage examples of the Scala class com.typesafe.config.ConfigValueFactory. If you are wondering what the ConfigValueFactory class is for, or how to use it in practice, the curated examples below should help.
Nine code examples of the ConfigValueFactory class are shown below, ordered by popularity by default.
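Before diving in, here is a minimal, self-contained sketch (not taken from any of the projects below) of the three ConfigValueFactory entry points the examples rely on: fromAnyRef for scalar values, fromIterable for lists, and fromMap for nested objects.

import scala.collection.JavaConverters._
import com.typesafe.config.{ConfigFactory, ConfigRenderOptions, ConfigValueFactory}

object ConfigValueFactoryBasics extends App {
  val config = ConfigFactory.empty()
    // fromAnyRef wraps scalars (String, Boolean, Number) into a ConfigValue
    .withValue("app.name", ConfigValueFactory.fromAnyRef("demo"))
    // fromIterable wraps a java.lang.Iterable into a ConfigList
    .withValue("app.ports", ConfigValueFactory.fromIterable(Seq(8080, 8081).map(Int.box).asJava))
    // fromMap wraps a java.util.Map into a ConfigObject
    .withValue("app.owner", ConfigValueFactory.fromMap(Map("name" -> "alice").asJava))

  println(config.root.render(ConfigRenderOptions.concise))
}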
Example 1: CrawlerConfig
// Package declaration and imported dependencies
package com.crawler.core.runners

import java.util

import akka.cluster.Member
import com.typesafe.config.{Config, ConfigFactory, ConfigValueFactory}

import collection.JavaConversions._

object CrawlerConfig {

  def getConfig(clusterName: String = "crawler",
                masterIp: String = "127.0.0.1",
                myIp: String = "127.0.0.1",
                role: String): Config = {

    // The master binds to a fixed port; all other nodes pick a random free port.
    val port = if (role.equals(CrawlerMaster.role)) 2551 else 0
    val master = s"akka.tcp://$clusterName@$masterIp:2551"

    val actorRole = role match {
      case CrawlerMaster.role => role
      case CrawlerAgent.role => role
      case _ => clientRole(role)
    }

    ConfigFactory.empty()
      .withValue("akka.actor.provider", ConfigValueFactory.fromAnyRef("cluster"))
      .withValue("akka.remote.netty.tcp.hostname", ConfigValueFactory.fromAnyRef(myIp))
      .withValue("akka.remote.netty.tcp.port", ConfigValueFactory.fromAnyRef(port))
      .withValue("akka.cluster.seed-nodes", ConfigValueFactory.fromIterable(util.Arrays.asList(master)))
      .withValue("akka.cluster.roles", ConfigValueFactory.fromIterable(util.Arrays.asList(actorRole)))
      .withValue("akka.remote.maximum-payload-bytes", ConfigValueFactory.fromAnyRef("30000000 bytes"))
      .withValue("akka.remote.netty.tcp.message-frame-size", ConfigValueFactory.fromAnyRef("30000000b"))
      .withValue("akka.remote.netty.tcp.send-buffer-size", ConfigValueFactory.fromAnyRef("30000000b"))
      .withValue("akka.remote.netty.tcp.receive-buffer-size", ConfigValueFactory.fromAnyRef("30000000b"))
      .withValue("akka.remote.netty.tcp.maximum-frame-size", ConfigValueFactory.fromAnyRef("30000000b"))
      .withValue("akka.cluster.auto-down-unreachable-after", ConfigValueFactory.fromAnyRef("10s"))
      .withValue("akka.actor.warn-about-java-serializer-usage", ConfigValueFactory.fromAnyRef("false"))
  }

  def clientRole(role: String) = s"$role-${CrawlerClient.role}"
  def parseClientRole(role: String) = role.split("-")(0)

  def isCrawlerAgent(member: Member): Boolean = member.hasRole(CrawlerAgent.role)
  def isCrawlerClient(member: Member): Boolean = member.roles.head.contains(CrawlerClient.role)
}
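A hedged usage sketch (not part of the original project): role is the only parameter without a default value, so it is passed by name here, and the ActorSystem name should match clusterName because the seed-node address is built from it.

import akka.actor.ActorSystem

val masterConfig = CrawlerConfig.getConfig(role = CrawlerMaster.role)
val system = ActorSystem("crawler", masterConfig) // name must equal clusterName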
Example 2: MyConfluentRegistry
// Package declaration and imported dependencies
package io.amient.affinity.systemtests.avro

import akka.actor.ActorSystem
import akka.serialization.SerializationExtension
import com.typesafe.config.{Config, ConfigFactory, ConfigValueFactory}
import io.amient.affinity.core.serde.avro._
import io.amient.affinity.core.serde.avro.schema.CfAvroSchemaRegistry
import io.amient.affinity.testutil.SystemTestBaseWithConfluentRegistry
import org.scalatest.FlatSpec

class MyConfluentRegistry(config: Config) extends CfAvroSchemaRegistry(config) {
  register(classOf[ID])
  register(classOf[Base])
  register(classOf[Composite])
}

class CfRegistryTest extends FlatSpec with SystemTestBaseWithConfluentRegistry {

  val config = configure(ConfigFactory.defaultReference)
    .withValue(AvroSerde.CONFIG_PROVIDER_CLASS, ConfigValueFactory.fromAnyRef(classOf[MyConfluentRegistry].getName))

  assert(config.getString(CfAvroSchemaRegistry.CONFIG_CF_REGISTRY_URL_BASE) == registryUrl)

  override def numPartitions = 2

  "Confluent Schema Registry Provider" should "be available via akka SerializationExtension" in {
    val system = ActorSystem.create("CfTest", config)
    try {
      val serialization = SerializationExtension(system)
      val serde = serialization.serializerFor(classOf[ID])
      assert(serde.fromBinary(serde.toBinary(ID(101))) == ID(101))
    } finally {
      system.terminate()
    }
  }
}
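The withValue(AvroSerde.CONFIG_PROVIDER_CLASS, ... classOf[...].getName) line is a common way to make an implementation class pluggable through configuration. A minimal sketch of the pattern, using a hypothetical key name rather than the real AvroSerde constant:

import com.typesafe.config.{ConfigFactory, ConfigValueFactory}

val cfg = ConfigFactory.empty()
  .withValue("registry.provider.class",
    ConfigValueFactory.fromAnyRef(classOf[MyConfluentRegistry].getName))

// The consuming library typically resolves the class reflectively at startup:
val providerClass = Class.forName(cfg.getString("registry.provider.class"))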
Example 3: CsvFileDataManagerSpec
// Package declaration and imported dependencies
package org.alghimo.sparkPipelines.dataManager

import com.typesafe.config.{ConfigFactory, ConfigValueFactory}
import org.alghimo.sparkPipelines.WithSharedSparkSession
import org.scalatest.{FlatSpec, Matchers}

/**
 * Created by D-KR99TU on 14/03/2017.
 */
class CsvFileDataManagerSpec extends FlatSpec with Matchers with WithSharedSparkSession {

  lazy val dataManager: CsvFileDataManager = {
    val originalConfig = ConfigFactory.load("files_test")
    val filePath = getClass.getResource("/files/test_file.csv").getPath
    val file2Path = getClass.getResource("/files/test_separator.csv").getPath
    val newConfig = originalConfig
      .withValue("files.file.no_options.path", ConfigValueFactory.fromAnyRef(filePath))
      .withValue("files.file.options.path", ConfigValueFactory.fromAnyRef(file2Path))
      .withValue("files.file.with_filter.path", ConfigValueFactory.fromAnyRef(filePath))
      .withValue("files.file.with_select.path", ConfigValueFactory.fromAnyRef(filePath))

    new CsvFileDataManager(spark, Map("file_config" -> "files_test")) {
      override lazy val fileConfig = newConfig
    }
  }

  "CsvFileDataManager" should "be able to load files with no options in config" in {
    val fileDf = dataManager.get("file.no_options")
    fileDf.columns should contain allOf("col1", "col2", "col3")
    fileDf.count shouldBe 2
  }

  it should "be able to load files with options in config" in {
    val fileDf = dataManager.get("file.options")
    fileDf.columns should contain allOf("col1", "col2", "col3")
    fileDf.count shouldBe 2
  }

  it should "be able to load files with filters in config" in {
    val fileDf = dataManager.get("file.with_filter")
    fileDf.columns should contain allOf("col1", "col2", "col3")
    fileDf.count shouldBe 1
  }

  it should "be able to load files with select in config" in {
    val fileDf = dataManager.get("file.with_select")
    fileDf.columns should contain allOf("new_col1", "col3")
    fileDf.count shouldBe 2
  }
}
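The spec shows a common testing idiom: load a checked-in configuration, then patch environment-specific values (here, absolute file paths only known at runtime) with withValue. A standalone sketch of the same idiom, with a made-up key:

import com.typesafe.config.{ConfigFactory, ConfigValueFactory}

val base = ConfigFactory.parseString("files.file.no_options.path = placeholder")
val patched = base.withValue(
  "files.file.no_options.path",
  ConfigValueFactory.fromAnyRef("/tmp/test_file.csv"))

assert(patched.getString("files.file.no_options.path") == "/tmp/test_file.csv")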
Example 4: FStorageSpec
// Package declaration and imported dependencies
package io.hydrosphere.mist.master.data

import java.nio.file.Paths

import com.typesafe.config.{Config, ConfigValueFactory}
import io.hydrosphere.mist.master.models.NamedConfig
import org.apache.commons.io.FileUtils
import org.scalatest._

class FStorageSpec extends FunSpec with Matchers with BeforeAndAfter {

  case class TestEntry(
    name: String,
    value: Int
  ) extends NamedConfig

  val testEntryConfigRepr = new ConfigRepr[TestEntry] {
    import scala.collection.JavaConverters._

    override def fromConfig(config: Config): TestEntry = {
      TestEntry(config.getString("name"), config.getInt("value"))
    }

    override def toConfig(a: TestEntry): Config = {
      val map = Map("value" -> ConfigValueFactory.fromAnyRef(a.value))
      ConfigValueFactory.fromMap(map.asJava).toConfig
    }
  }

  val path = "./target/file_store_test"

  before {
    val f = Paths.get(path).toFile
    if (f.exists()) FileUtils.deleteDirectory(f)
  }

  it("should store files") {
    val storage = FsStorage.create(path, testEntryConfigRepr)
    storage.write("one", TestEntry("one", 1))
    storage.write("two", TestEntry("two", 2))

    storage.entries should contain allOf(
      TestEntry("one", 1),
      TestEntry("two", 2)
    )
  }
}
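The toConfig side of the ConfigRepr leans on ConfigValueFactory.fromMap(...).toConfig to turn a Scala Map into a Config. A minimal roundtrip, independent of the Mist classes:

import scala.collection.JavaConverters._
import com.typesafe.config.ConfigValueFactory

val asConfig = ConfigValueFactory.fromMap(Map("name" -> "one", "value" -> Int.box(1)).asJava).toConfig

assert(asConfig.getString("name") == "one")
assert(asConfig.getInt("value") == 1)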
Example 5: ConfigurableBM25Similarity
// Package declaration and imported dependencies
package it.agilelab.bigdata.spark.search.impl.similarities

import com.typesafe.config.{Config, ConfigFactory, ConfigValueFactory}
import it.agilelab.bigdata.spark.search.impl.LuceneConfig
import org.apache.lucene.search.similarities.{BM25Similarity, Similarity}

class ConfigurableBM25Similarity(conf: LuceneConfig) extends ConfigurableSimilarity(conf) {
  import ConfigurableBM25Similarity._

  override def getSimilarity: Similarity = {
    val similarityConf = conf.getConfigurableSimilarityConfig

    // Fall back to the standard BM25 defaults when a parameter is absent.
    val k1 = if (similarityConf.hasPath(K1Subproperty)) similarityConf.getDouble(K1Subproperty) else DefaultK1
    val b = if (similarityConf.hasPath(BSubproperty)) similarityConf.getDouble(BSubproperty) else DefaultB

    new BM25Similarity(k1.toFloat, b.toFloat)
  }
}

object ConfigurableBM25Similarity {
  val K1Subproperty = "k1"
  val BSubproperty = "b"
  val DefaultK1 = 1.2d
  val DefaultB = 0.75d

  def makeConf(k1: Double, b: Double): Config = {
    ConfigFactory.empty()
      .withValue(K1Subproperty, ConfigValueFactory.fromAnyRef(k1))
      .withValue(BSubproperty, ConfigValueFactory.fromAnyRef(b))
  }
}
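A usage sketch for makeConf; reading the values back goes through the plain Config getters:

val bm25Conf = ConfigurableBM25Similarity.makeConf(k1 = 1.2, b = 0.75)
assert(bm25Conf.getDouble("k1") == 1.2)
assert(bm25Conf.getDouble("b") == 0.75)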
Example 6: CheckpointWriter
// Package declaration and imported dependencies
package com.criteo.dev.cluster.config

import com.typesafe.config.{Config, ConfigFactory, ConfigRenderOptions, ConfigValueFactory}

import collection.JavaConverters._

object CheckpointWriter {
  def apply(checkpoint: Checkpoint): Config = {
    import checkpoint._
    ConfigFactory.empty
      .withValue("created", ConfigValueFactory.fromAnyRef(created.toEpochMilli))
      .withValue("updated", ConfigValueFactory.fromAnyRef(updated.toEpochMilli))
      .withValue("todo", ConfigValueFactory.fromIterable(todo.asJava))
      .withValue("finished", ConfigValueFactory.fromIterable(finished.asJava))
      .withValue("failed", ConfigValueFactory.fromIterable(failed.asJava))
  }

  def render(checkpoint: Checkpoint, configRenderOptions: ConfigRenderOptions = ConfigRenderOptions.concise): String = {
    apply(checkpoint).root.render(configRenderOptions)
  }
}
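render turns the assembled Config back into HOCON/JSON text via root.render. An equivalent standalone sketch without the Checkpoint type (output formatting is approximate):

import scala.collection.JavaConverters._
import com.typesafe.config.{ConfigFactory, ConfigRenderOptions, ConfigValueFactory}

val cfg = ConfigFactory.empty
  .withValue("todo", ConfigValueFactory.fromIterable(Seq("db1.table1").asJava))

println(cfg.root.render(ConfigRenderOptions.concise)) // prints something like {"todo":["db1.table1"]}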
Example 7: ConfigurationSpec
// Package declaration and imported dependencies
package com.gu.cm

import com.typesafe.config.{ConfigValueFactory, ConfigFactory, Config}
import org.specs2.mutable.Specification
import org.specs2.specification.Scope

class ConfigurationSpec extends Specification {

  "a Configuration object" should {
    "compose from multiple sources" in new ConfigurationScope() {
      val source1 = config(Map("src.1.a" -> "a", "src.1.b" -> 2))
      val source2 = config(Map("src.2.a" -> "z", "src.2.b" -> 54))
      val configuration = new Configuration(List(source1, source2)).load
      configuration.getString("src.1.a") shouldEqual "a"
      configuration.getInt("src.1.b") shouldEqual 2
      configuration.getString("src.2.a") shouldEqual "z"
      configuration.getInt("src.2.b") shouldEqual 54
    }

    "respect the order of the sources" in new ConfigurationScope() {
      val source1 = config(Map("src.1.a" -> "a", "src.1.b" -> 2))
      val source2 = config(Map("src.1.a" -> "ignore-me", "src.2.b" -> 54))
      val configuration = new Configuration(List(source1, source2)).load
      configuration.getString("src.1.a") shouldEqual "a"
      configuration.getInt("src.1.b") shouldEqual 2
      configuration.getInt("src.2.b") shouldEqual 54
    }
  }

  trait ConfigurationScope extends Scope {
    def config(values: Map[String, Any]): ConfigurationSource = new ConfigurationSource {
      override def load: Config = values.foldLeft(ConfigFactory.empty()) {
        case (agg, (path, value)) => agg.withValue(path, ConfigValueFactory.fromAnyRef(value, "Unit test"))
      }
    }
  }
}
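Note the two-argument overload fromAnyRef(value, "Unit test"): the second argument becomes the value's origin description, which shows up in Config error messages and can be inspected directly. A small sketch:

import com.typesafe.config.{ConfigFactory, ConfigValueFactory}

val cfg = ConfigFactory.empty()
  .withValue("src.1.a", ConfigValueFactory.fromAnyRef("a", "Unit test"))

println(cfg.getValue("src.1.a").origin.description) // "Unit test"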
Example 8: RetryScheduleSpec
// Package declaration and imported dependencies
package oriana

import com.typesafe.config.{ConfigValueFactory, ConfigFactory}
import org.scalatest.{Matchers, FlatSpec}

import scala.concurrent.duration._
import scala.collection.JavaConversions._

class RetryScheduleSpec extends FlatSpec with Matchers {
  "The no-retry schedule" should "always return None" in {
    NoRetrySchedule.retryDelay(1) shouldEqual None
  }

  "the default schedule" should "schedule retries at 100, 200, ... 500 ms" in {
    (0 to 5).map(DefaultSchedule.retryDelay) should contain theSameElementsInOrderAs List(Some(100.millis), Some(200.millis), Some(300.millis), Some(400.millis), Some(500.millis), None)
  }

  "the config schedule" should "evaluate 'retry_db_delay_millis' from the config" in {
    val config = ConfigFactory.empty.withValue("retry_db_delay_millis", ConfigValueFactory.fromIterable(Seq(100, 500, 1000).map(Integer.valueOf)))
    val schedule = new ConfiguredRetrySchedule(config)
    (0 to 3).map(schedule.retryDelay) should contain theSameElementsInOrderAs List(Some(100.millis), Some(500.millis), Some(1.second), None)
  }
}
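fromIterable needs a java.lang.Iterable of boxed values, hence the explicit Integer.valueOf mapping (the JavaConversions import supplies the Seq-to-Iterable conversion). Parsing an equivalent HOCON string avoids the boxing entirely:

import com.typesafe.config.ConfigFactory

val config = ConfigFactory.parseString("retry_db_delay_millis = [100, 500, 1000]")
// config.getIntList("retry_db_delay_millis") yields [100, 500, 1000]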
Example 9: RootActorSystem
// Package declaration and imported dependencies
package com.wavesplatform.actor

import java.io.File

import akka.actor.{ActorSystem, AllForOneStrategy, SupervisorStrategy, SupervisorStrategyConfigurator}
import com.typesafe.config.{ConfigFactory, ConfigValueFactory}
import com.wavesplatform.matcher.MatcherSettings
import scorex.utils.ScorexLogging

import scala.concurrent.Await
import scala.concurrent.duration.Duration

object RootActorSystem extends ScorexLogging {
  @volatile private var failed = false

  final class EscalatingStrategy extends SupervisorStrategyConfigurator {
    override def create(): SupervisorStrategy = AllForOneStrategy(loggingEnabled = false) {
      case t: Throwable =>
        failed = true
        log.error("Root actor got exception, escalate", t)
        SupervisorStrategy.Escalate
    }
  }

  def start(id: String, settings: MatcherSettings)(init: ActorSystem => Unit): Unit = {
    val journalDir = new File(settings.journalDataDir)
    val snapshotDir = new File(settings.snapshotsDataDir)
    journalDir.mkdirs()
    snapshotDir.mkdirs()
    checkDirectory(journalDir)
    checkDirectory(snapshotDir)

    val system = ActorSystem(id, ConfigFactory.load()
      .withValue("akka.actor.guardian-supervisor-strategy",
        ConfigValueFactory.fromAnyRef("com.wavesplatform.actor.RootActorSystem$EscalatingStrategy"))
      .withValue("akka.persistence.journal.leveldb.dir", ConfigValueFactory.fromAnyRef(settings.journalDataDir))
      .withValue("akka.persistence.snapshot-store.local.dir", ConfigValueFactory.fromAnyRef(settings.snapshotsDataDir)))

    try {
      init(system)
    } catch {
      case e: Exception =>
        log.error(s"Error while initializing actor system $id", e)
        sys.exit(1)
    }

    Await.result(system.whenTerminated, Duration.Inf)
    if (failed) {
      sys.exit(1)
    } else {
      sys.exit(0)
    }
  }

  private def checkDirectory(directory: File): Unit = if (!directory.exists()) {
    log.error(s"Failed to create directory '${directory.getPath}'")
    sys.exit(1)
  }
}
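Unlike the earlier examples that start from ConfigFactory.empty, this one layers overrides on top of ConfigFactory.load(), so everything from reference.conf and application.conf is kept and only the named paths are replaced. A minimal sketch of that layering:

import com.typesafe.config.{ConfigFactory, ConfigValueFactory}

val cfg = ConfigFactory.load()
  .withValue("akka.persistence.journal.leveldb.dir", ConfigValueFactory.fromAnyRef("/tmp/journal"))

assert(cfg.getString("akka.persistence.journal.leveldb.dir") == "/tmp/journal")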