This article collects typical usage examples of the java.util.concurrent.Future class in Scala. If you have been wondering what the Future class is for, how to use it, or what working examples look like, the curated class examples below should help.
Four code examples of the Future class are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your votes help the system recommend better Scala code examples.
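Before diving in, here is a minimal, self-contained sketch of the pattern all four examples build on: submitting a Callable to an ExecutorService and blocking on the returned java.util.concurrent.Future. The object name and pool size are illustrative, not taken from the examples below.

import java.util.concurrent.{ Callable, Executors, Future }

object FutureBasics extends App {
  val executor = Executors.newFixedThreadPool(2)
  // submit returns immediately with a Future handle to the pending result
  val answer: Future[Int] = executor.submit(new Callable[Int] {
    override def call(): Int = 21 * 2
  })
  println(answer.get()) // get() blocks until the computation completes; prints 42
  executor.shutdown()
}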
Example 1: ExecutorServiceBackboneCoordinator
// Set the package name and import the required dependencies
package ie.zalando.pipeline.backbone.concurrent

import java.util.concurrent.{ Callable, ExecutorService, Future }

import scala.util.Try
import scala.util.control.NonFatal

import org.slf4j.LoggerFactory

import cats.data.Xor

import ie.zalando.pipeline.backbone.Backbone
import ie.zalando.pipeline.backbone.Phases.{ LocalReleasePhase, TransformationPipelineFailure }

// Runs a Backbone transformation pipeline on an ExecutorService, returning a
// java.util.concurrent.Future for each submitted datum.
class ExecutorServiceBackboneCoordinator[DA](backbone: Backbone[DA], executor: ExecutorService) {
  import ExecutorServiceBackboneCoordinator._

  val localInitPhases = backbone.initializeTopLevelContexts

  private class BackboneCallable(datum: DA) extends Callable[Xor[TransformationPipelineFailure, DA]] {
    override def call(): Xor[TransformationPipelineFailure, DA] = {
      val (dataPhases, releasePhases) = backbone.initializeInLocalContext(-1, localInitPhases).unzip
      try {
        backbone.transformDatum(backbone.createStateMonad(dataPhases), datum)
      } finally {
        // Release thread-local resources even if the transformation throws;
        // a failed release is logged but does not mask the primary result.
        releasePhases.foreach((phase: LocalReleasePhase) => {
          Try({ phase.releaseLocalResources() }).recover { case NonFatal(ex) => log.warn(s"Release phase $phase failed:", ex) }
        })
      }
    }
  }

  def process(datum: DA): Future[Xor[TransformationPipelineFailure, DA]] = {
    executor.submit(new BackboneCallable(datum))
  }
}

object ExecutorServiceBackboneCoordinator {
  val log = LoggerFactory.getLogger(classOf[ExecutorServiceBackboneCoordinator[_]])
}
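A hypothetical call site for the coordinator, assuming an existing Backbone[String] value named backbone (the original snippet does not construct one):

import java.util.concurrent.Executors

// Sketch only: `backbone` is an assumed Backbone[String]; it is not defined here.
val pool = Executors.newFixedThreadPool(4)
val coordinator = new ExecutorServiceBackboneCoordinator(backbone, pool)
// process submits the work and returns immediately; get() blocks for the result
val outcome: Xor[TransformationPipelineFailure, String] = coordinator.process("some datum").get()
pool.shutdown()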
Example 2: KProducer
// Set the package name and import the required dependencies
package org.parsec

import java.util.Properties
import java.util.concurrent.Future

import com.sksamuel.avro4s.{ FromRecord, RecordFormat, ToRecord }
import io.confluent.kafka.serializers.KafkaAvroSerializer
import org.apache.avro.generic.GenericRecord
import org.apache.kafka.clients.producer.{ KafkaProducer, ProducerConfig, ProducerRecord, RecordMetadata }

// Convenient Kafka producer using avro4s
class KProducer[K <: Product, V <: Product] {
  val kafkaProps = new Properties()
  kafkaProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092")
  kafkaProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, classOf[KafkaAvroSerializer].getCanonicalName)
  kafkaProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, classOf[KafkaAvroSerializer].getCanonicalName)
  kafkaProps.put("schema.registry.url", "http://localhost:8081")

  private lazy val producer = new KafkaProducer[GenericRecord, GenericRecord](kafkaProps)

  // Serialize key and value to Avro GenericRecords and send them; send is
  // asynchronous and returns a java.util.concurrent.Future[RecordMetadata].
  def produce(topic: String, key: K, value: V, partition: Int = 0)(implicit toRecordKey: ToRecord[K], fromRecordKey: FromRecord[K], toRecord: ToRecord[V], fromRecord: FromRecord[V]): Future[RecordMetadata] = {
    val keyRec = RecordFormat[K].to(key)
    val valueRec = RecordFormat[V].to(value)
    val data: ProducerRecord[GenericRecord, GenericRecord] = new ProducerRecord(topic, partition, keyRec, valueRec)
    producer.send(data)
  }
}
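A hypothetical usage of KProducer. Customer is an illustrative case class (avro4s derives the Avro record conversions for case classes), and the example assumes a Kafka broker and schema registry are reachable at the addresses configured above:

case class Customer(id: Int, name: String)

object KProducerExample extends App {
  val producer = new KProducer[Customer, Customer]
  // send is asynchronous; get() blocks until the broker acknowledges the write
  val metadata = producer.produce("customers", Customer(1, "key"), Customer(1, "Alice")).get()
  println(s"Wrote to ${metadata.topic()}/${metadata.partition()} at offset ${metadata.offset()}")
}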
Example 3: FailingKafkaStorage
// Set the package name and import the required dependencies
package io.amient.affinity.testutil.storage

import java.nio.ByteBuffer
import java.util.concurrent.{ Callable, Executors, Future }

import com.typesafe.config.Config
import io.amient.affinity.core.storage.kafka.KafkaStorage
import org.apache.kafka.clients.producer.{ ProducerRecord, RecordMetadata }

// Test double that wraps KafkaStorage so that roughly one write in ten fails,
// letting consumers of the returned Future be tested against error paths.
class FailingKafkaStorage(config: Config, partition: Int) extends KafkaStorage(config, partition) {

  val executor = Executors.newFixedThreadPool(1)

  override def write(key: ByteBuffer, value: ByteBuffer): Future[RecordMetadata] = {
    val javaFuture: Future[RecordMetadata] = kafkaProducer.send(new ProducerRecord(topic, partition, key, value))
    // Wrap the producer's Future in a second task that either throws a
    // simulated failure or blocks on the real send.
    executor.submit(new Callable[RecordMetadata]() {
      override def call(): RecordMetadata = {
        if (System.currentTimeMillis() % 10 == 0) {
          throw new RuntimeException("Simulated Exception in FailingKafkaStorage")
        } else {
          javaFuture.get
        }
      }
    })
  }
}
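The same wrap-and-fail idea in isolation, stripped of the Kafka dependencies so it compiles on its own; all names here are illustrative:

import java.util.concurrent.{ Callable, Executors, Future }

object FailureInjection {
  private val executor = Executors.newFixedThreadPool(1)

  // Wraps an existing Future so that roughly one call in ten throws,
  // mirroring the simulated failure in FailingKafkaStorage above.
  def failing[T](inner: Future[T]): Future[T] =
    executor.submit(new Callable[T] {
      override def call(): T =
        if (System.currentTimeMillis() % 10 == 0)
          throw new RuntimeException("Simulated failure")
        else inner.get()
    })
}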
Example 4: KProducer
// Set the package name and import the required dependencies
package org.parsec

import java.util.Properties
import java.util.concurrent.Future

import com.sksamuel.avro4s.{ FromRecord, RecordFormat, ToRecord }
import io.confluent.kafka.serializers.KafkaAvroSerializer
import org.apache.avro.generic.GenericRecord
import org.apache.kafka.clients.producer.{ KafkaProducer, ProducerConfig, ProducerRecord, RecordMetadata }

// Convenient Kafka producer using avro4s; identical to Example 2 except that
// it points at a remote broker and schema registry instead of localhost.
class KProducer[K <: Product, V <: Product] {
  val kafkaProps = new Properties()
  kafkaProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "parsec.playground.landoop.com:49092")
  kafkaProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, classOf[KafkaAvroSerializer].getCanonicalName)
  kafkaProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, classOf[KafkaAvroSerializer].getCanonicalName)
  kafkaProps.put("schema.registry.url", "http://parsec.playground.landoop.com:48081")

  private lazy val producer = new KafkaProducer[GenericRecord, GenericRecord](kafkaProps)

  // Serialize key and value to Avro GenericRecords and send them; send is
  // asynchronous and returns a java.util.concurrent.Future[RecordMetadata].
  def produce(topic: String, key: K, value: V, partition: Int = 0)(implicit toRecordKey: ToRecord[K], fromRecordKey: FromRecord[K], toRecord: ToRecord[V], fromRecord: FromRecord[V]): Future[RecordMetadata] = {
    val keyRec = RecordFormat[K].to(key)
    val valueRec = RecordFormat[V].to(value)
    val data: ProducerRecord[GenericRecord, GenericRecord] = new ProducerRecord(topic, partition, keyRec, valueRec)
    producer.send(data)
  }
}