This article collects and summarizes typical usage examples of the java.util.concurrent.Future class in Scala. If you have been wondering what the Future class is for and how it is used in practice, the selected class code examples below may help.
Four code examples of the Future class are shown below, sorted by popularity by default.
Example 1: ExecutorServiceBackboneCoordinator
// Package declaration and imported dependency classes
package ie.zalando.pipeline.backbone.concurrent

import java.util.concurrent.{ Callable, ExecutorService, Future }

import scala.util.Try
import scala.util.control.NonFatal

import org.slf4j.LoggerFactory

import cats.data.Xor

import ie.zalando.pipeline.backbone.Backbone
import ie.zalando.pipeline.backbone.Phases.{ LocalReleasePhase, TransformationPipelineFailure }

class ExecutorServiceBackboneCoordinator[DA](backbone: Backbone[DA], executor: ExecutorService) {
  import ExecutorServiceBackboneCoordinator._

  val localInitPhases = backbone.initializeTopLevelContexts

  private class BackboneCallable(datum: DA) extends Callable[Xor[TransformationPipelineFailure, DA]] {
    override def call(): Xor[TransformationPipelineFailure, DA] = {
      // Initialize phases in the local (worker-thread) context and split them into
      // the data-transformation phases and their corresponding release phases.
      val (dataPhases, releasePhases) = backbone.initializeInLocalContext(-1, localInitPhases).unzip
      try {
        backbone.transformDatum(backbone.createStateMonad(dataPhases), datum)
      } finally {
        // Always release local resources; log non-fatal failures instead of propagating them.
        releasePhases.foreach((phase: LocalReleasePhase) => {
          Try({ phase.releaseLocalResources() }).recover { case NonFatal(ex) => log.warn(s"Release phase $phase failed:", ex) }
        })
      }
    }
  }

  def process(datum: DA): Future[Xor[TransformationPipelineFailure, DA]] = {
    executor.submit(new BackboneCallable(datum))
  }
}

object ExecutorServiceBackboneCoordinator {
  val log = LoggerFactory.getLogger(classOf[ExecutorServiceBackboneCoordinator[_]])
}
Developer: retnuh, Project: pipeline-backbone, Lines of code: 41, Source file: ExecutorServiceBackboneCoordinator.scala
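As a minimal usage sketch (the construction of a Backbone is not part of the snippet above, so myBackbone here is an assumed, pre-built Backbone[String]): submit a datum via process and block on the returned java.util.concurrent.Future.

import java.util.concurrent.{ Executors, TimeUnit }

// ASSUMPTION: `myBackbone: Backbone[String]` is built elsewhere; its
// construction API is not shown in the snippet above.
val executor = Executors.newFixedThreadPool(4)
val coordinator = new ExecutorServiceBackboneCoordinator(myBackbone, executor)

// process() submits the datum to the thread pool and returns immediately.
val future = coordinator.process("some datum")

// java.util.concurrent.Future is blocking: get() waits for completion
// (here with a timeout) and yields the Xor of failure or transformed datum.
val result = future.get(30, TimeUnit.SECONDS)
println(result)

executor.shutdown()

Note that, unlike scala.concurrent.Future, the Java Future offers no callbacks; retrieving the result always means blocking a thread, which is why the coordinator confines that blocking to an explicitly supplied ExecutorService.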
Example 2: KProducer
// Package declaration and imported dependency classes
package org.parsec

import java.util.Properties
import java.util.concurrent.Future

import com.sksamuel.avro4s.{ FromRecord, RecordFormat, ToRecord }
import io.confluent.kafka.serializers.KafkaAvroSerializer
import org.apache.avro.generic.GenericRecord
import org.apache.kafka.clients.producer.{ KafkaProducer, ProducerConfig, ProducerRecord, RecordMetadata }

// Convenient Kafka producer using avro4s
class KProducer[K <: Product, V <: Product] {
  val kafkaProps = new Properties()
  kafkaProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092")
  kafkaProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, classOf[KafkaAvroSerializer].getCanonicalName)
  kafkaProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, classOf[KafkaAvroSerializer].getCanonicalName)
  kafkaProps.put("schema.registry.url", "http://localhost:8081")

  private lazy val producer = new KafkaProducer[GenericRecord, GenericRecord](kafkaProps)

  def produce(topic: String, key: K, value: V, partition: Int = 0)(implicit toRecordKey: ToRecord[K], fromRecordKey: FromRecord[K], toRecord: ToRecord[V], fromRecord: FromRecord[V]): Future[RecordMetadata] = {
    // Convert the case-class key and value to Avro GenericRecords via avro4s.
    val keyRec = RecordFormat[K].to(key)
    val valueRec = RecordFormat[V].to(value)
    val data: ProducerRecord[GenericRecord, GenericRecord] = new ProducerRecord(topic, partition, keyRec, valueRec)
    // send() is asynchronous and returns a java.util.concurrent.Future.
    producer.send(data)
  }
}
Developer: cryptocurrencyindia, Project: Parsec, Lines of code: 29, Source file: KProducer.scala
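A hedged usage sketch, assuming the local broker and schema registry configured above are running; UserKey and UserEvent are hypothetical case classes, not part of the original project:

import java.util.concurrent.Future

import org.apache.kafka.clients.producer.RecordMetadata

// ASSUMPTION: hypothetical key/value types; any case classes that avro4s
// can derive Avro schemas for will work here.
case class UserKey(id: String)
case class UserEvent(name: String, timestamp: Long)

val kproducer = new KProducer[UserKey, UserEvent]
val future: Future[RecordMetadata] =
  kproducer.produce("user-events", UserKey("42"), UserEvent("login", System.currentTimeMillis()))

// send() only enqueues the record; get() blocks until the broker acknowledges it.
val metadata = future.get()
println(s"wrote to partition ${metadata.partition()} at offset ${metadata.offset()}")

Blocking with get() immediately after every send() sacrifices the producer's batching throughput, so in practice one would either collect the futures and resolve them later or pass a callback to send().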
Example 3: FailingKafkaStorage
// Package declaration and imported dependency classes
package io.amient.affinity.testutil.storage

import java.nio.ByteBuffer
import java.util.concurrent.{ Callable, Executors, Future }

import com.typesafe.config.Config
import io.amient.affinity.core.storage.kafka.KafkaStorage
import org.apache.kafka.clients.producer.{ ProducerRecord, RecordMetadata }

class FailingKafkaStorage(config: Config, partition: Int) extends KafkaStorage(config, partition) {

  val executor = Executors.newFixedThreadPool(1)

  override def write(key: ByteBuffer, value: ByteBuffer): Future[RecordMetadata] = {
    val javaFuture: Future[RecordMetadata] = kafkaProducer.send(new ProducerRecord(topic, partition, key, value))
    // Wrap the producer's future in a second future that fails on purpose
    // for roughly one in ten calls, to simulate storage errors in tests.
    executor.submit(new Callable[RecordMetadata]() {
      override def call(): RecordMetadata = {
        if (System.currentTimeMillis() % 10 == 0) {
          throw new RuntimeException("Simulated Exception in FailingKafkaStorage")
        } else {
          javaFuture.get
        }
      }
    })
  }
}
Developer: amient, Project: affinity, Lines of code: 30, Source file: FailingKafkaStorage.scala
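A caller-side sketch, assuming a FailingKafkaStorage instance named storage wired up with a real Config and partition (which the snippet above does not show). It illustrates a general java.util.concurrent.Future fact: an exception thrown inside the Callable surfaces from get() wrapped in an ExecutionException.

import java.nio.ByteBuffer
import java.util.concurrent.ExecutionException

// ASSUMPTION: `storage` is a configured FailingKafkaStorage instance.
val key = ByteBuffer.wrap("some-key".getBytes("UTF-8"))
val value = ByteBuffer.wrap("some-value".getBytes("UTF-8"))

try {
  // get() blocks on the wrapping future; the simulated RuntimeException
  // is rethrown here wrapped in an ExecutionException.
  val metadata = storage.write(key, value).get()
  println(s"wrote at offset ${metadata.offset()}")
} catch {
  case e: ExecutionException => println(s"simulated failure: ${e.getCause.getMessage}")
}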
Example 4: KProducer (nearly identical to Example 2; only the broker and schema-registry endpoints differ)
// Package declaration and imported dependency classes
package org.parsec

import java.util.Properties
import java.util.concurrent.Future

import com.sksamuel.avro4s.{ FromRecord, RecordFormat, ToRecord }
import io.confluent.kafka.serializers.KafkaAvroSerializer
import org.apache.avro.generic.GenericRecord
import org.apache.kafka.clients.producer.{ KafkaProducer, ProducerConfig, ProducerRecord, RecordMetadata }

// Convenient Kafka producer using avro4s
class KProducer[K <: Product, V <: Product] {
  val kafkaProps = new Properties()
  kafkaProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "parsec.playground.landoop.com:49092")
  kafkaProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, classOf[KafkaAvroSerializer].getCanonicalName)
  kafkaProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, classOf[KafkaAvroSerializer].getCanonicalName)
  kafkaProps.put("schema.registry.url", "http://parsec.playground.landoop.com:48081")

  private lazy val producer = new KafkaProducer[GenericRecord, GenericRecord](kafkaProps)

  def produce(topic: String, key: K, value: V, partition: Int = 0)(implicit toRecordKey: ToRecord[K], fromRecordKey: FromRecord[K], toRecord: ToRecord[V], fromRecord: FromRecord[V]): Future[RecordMetadata] = {
    val keyRec = RecordFormat[K].to(key)
    val valueRec = RecordFormat[V].to(value)
    val data: ProducerRecord[GenericRecord, GenericRecord] = new ProducerRecord(topic, partition, keyRec, valueRec)
    producer.send(data)
  }
}
Developer: parsec-network, Project: parsec, Lines of code: 29, Source file: KProducer.scala
Note: The java.util.concurrent.Future class examples in this article were collected from source code and documentation platforms such as GitHub/MSDocs. The snippets were selected from open-source projects contributed by their authors; copyright remains with the original authors, and distribution and use should follow each project's License. Please do not reproduce without permission.