Scala StringSerializer Class Code Examples


This article compiles typical usage examples of the org.apache.kafka.common.serialization.StringSerializer class in Scala. If you are wondering what the StringSerializer class does, how to use it, or what working examples look like, the curated class code examples below may help.



The following presents 20 code examples of the StringSerializer class, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Scala code examples.
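Before the framework-level examples below, here is a minimal, self-contained sketch of the StringSerializer API itself (the topic name and strings are illustrative only; StringSerializer encodes a String to UTF-8 bytes by default):

import java.nio.charset.StandardCharsets
import org.apache.kafka.common.serialization.{StringDeserializer, StringSerializer}

object StringSerializerRoundTrip extends App {
  val serializer = new StringSerializer      // String -> Array[Byte], UTF-8 by default
  val deserializer = new StringDeserializer  // Array[Byte] -> String

  // The topic argument is required by the Serializer interface but does not affect the encoding here.
  val bytes: Array[Byte] = serializer.serialize("example-topic", "hello kafka")
  val text: String = deserializer.deserialize("example-topic", bytes)

  println(new String(bytes, StandardCharsets.UTF_8)) // hello kafka
  println(text)                                      // hello kafka

  serializer.close()
  deserializer.close()
}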

Example 1: ProcessingKafkaApplication

// Set the package name and import the required classes
package com.packt.chapter8

import akka.actor.ActorSystem
import akka.kafka.scaladsl.{Consumer, Producer}
import akka.kafka.{ConsumerSettings, ProducerSettings, Subscriptions}
import akka.stream.{ActorMaterializer, ClosedShape}
import akka.stream.scaladsl.{Flow, GraphDSL, RunnableGraph, Sink, Source}
import org.apache.kafka.clients.consumer.{ConsumerConfig, ConsumerRecord}
import org.apache.kafka.clients.producer.ProducerRecord
import org.apache.kafka.common.TopicPartition
import org.apache.kafka.common.serialization.{ByteArrayDeserializer, ByteArraySerializer, StringDeserializer, StringSerializer}

import scala.concurrent.duration._

object ProcessingKafkaApplication extends App {
  implicit val actorSystem = ActorSystem("SimpleStream")
  implicit val actorMaterializer = ActorMaterializer()

  val bootstrapServers = "localhost:9092"
  val kafkaTopic = "akka_streams_topic"
  val partition = 0
  val subscription = Subscriptions.assignment(new TopicPartition(kafkaTopic, partition))

  val consumerSettings = ConsumerSettings(actorSystem, new ByteArrayDeserializer, new StringDeserializer)
    .withBootstrapServers(bootstrapServers)
    .withGroupId("akka_streams_group")
    .withProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest")

  val producerSettings = ProducerSettings(actorSystem, new ByteArraySerializer, new StringSerializer)
    .withBootstrapServers(bootstrapServers)

  val runnableGraph = RunnableGraph.fromGraph(GraphDSL.create() { implicit builder =>
    import GraphDSL.Implicits._

    val tickSource = Source.tick(0 seconds, 5 seconds, "Hello from Akka Streams using Kafka!")
    val kafkaSource = Consumer.plainSource(consumerSettings, subscription)
    val kafkaSink = Producer.plainSink(producerSettings)
    val printlnSink = Sink.foreach(println)

    val mapToProducerRecord = Flow[String].map(elem => new ProducerRecord[Array[Byte], String](kafkaTopic, elem))
    val mapFromConsumerRecord = Flow[ConsumerRecord[Array[Byte], String]].map(record => record.value())

    tickSource  ~> mapToProducerRecord   ~> kafkaSink
    kafkaSource ~> mapFromConsumerRecord ~> printlnSink

    ClosedShape
  })

  runnableGraph.run()
} 
Developer: PacktPublishing | Project: Akka-Cookbook | Lines: 51 | Source: ProcessingKafkaApplication.scala


Example 2: Settings

// Set the package name and import the required classes
package com.scalaio.kafka.consumer

import akka.actor.ActorSystem
import akka.kafka.ConsumerMessage.CommittableMessage
import akka.kafka.scaladsl.Consumer
import akka.kafka.{ConsumerSettings, ProducerSettings, Subscriptions}
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.Sink
import com.scalaio.kafka.consumer.Settings.consumerSettings
import org.apache.kafka.clients.consumer.ConsumerConfig
import org.apache.kafka.common.serialization.{ByteArrayDeserializer, ByteArraySerializer, StringDeserializer, StringSerializer}

import scala.concurrent.Future

object Settings {
  def consumerSettings(implicit system: ActorSystem) =
    ConsumerSettings(system, new ByteArrayDeserializer, new StringDeserializer)
      .withBootstrapServers("localhost:9092")
      .withGroupId("CommittableSourceConsumer")
      .withProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest")

  def producerSettings(implicit system: ActorSystem) =
    ProducerSettings(system, new ByteArraySerializer, new StringSerializer)
      .withBootstrapServers("localhost:9092")
}

object CommittableSource extends App {

  type KafkaMessage = CommittableMessage[Array[Byte], String]

  implicit val system = ActorSystem("CommittableSourceConsumerMain")
  implicit val materializer = ActorMaterializer()

  implicit val ec = system.dispatcher

  // explicit commit
  Consumer
    .committableSource(consumerSettings, Subscriptions.topics("topic1"))
    .mapAsync(1) { msg =>
      BusinessController.handleMessage(msg.record.value)
        .flatMap(response => msg.committableOffset.commitScaladsl())
        .recoverWith { case e => msg.committableOffset.commitScaladsl() }
    }
    .runWith(Sink.ignore)

}

object BusinessController {

  type Service[A, B] = A => Future[B]

  val handleMessage: Service[String, String] =
    (message) => Future.successful(message.toUpperCase)

} 
Developer: fagossa | Project: scalaio_akka | Lines: 56 | Source: CommittableSource.scala


Example 3: ReadyKafkaProducer

// Set the package name and import the required classes
package com.bencassedy.readykafka.producer

import java.util.Properties
import java.util.concurrent.TimeUnit

import org.apache.kafka.clients.producer.{ProducerRecord, KafkaProducer}
import org.apache.kafka.common.serialization.{StringSerializer, StringDeserializer}


class ReadyKafkaProducer {
  case class KafkaProducerConfigs(brokerList: String = "127.0.0.1:9092") {
    val properties = new Properties()
    properties.put("bootstrap.servers", brokerList)
    properties.put("key.serializer", classOf[StringSerializer])
    properties.put("value.serializer", classOf[StringSerializer])
//    properties.put("serializer.class", classOf[StringDeserializer])
//    properties.put("batch.size", 16384)
//    properties.put("linger.ms", 1)
//    properties.put("buffer.memory", 33554432)
  }

  val producer = new KafkaProducer[String, String](KafkaProducerConfigs().properties)

  def produce(topic: String, messages: Iterable[String]): Unit = {
    messages.foreach { m =>
      producer.send(new ProducerRecord[String, String](topic, m))
    }
    producer.close(100L, TimeUnit.MILLISECONDS)
  }
} 
Developer: bencassedy | Project: ready-kafka | Lines: 31 | Source: ReadyKafkaProducer.scala
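A possible way to exercise the ReadyKafkaProducer above is sketched here; the topic name and messages are illustrative, and note that produce() closes the underlying KafkaProducer after sending, so each instance is effectively single-use:

import com.bencassedy.readykafka.producer.ReadyKafkaProducer

object ReadyKafkaProducerUsage extends App {
  val producer = new ReadyKafkaProducer
  // Sends both messages to the (illustrative) topic, then closes the producer.
  producer.produce("example-topic", Seq("first message", "second message"))
}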


Example 4: KafkaFeedsExporter

// Set the package name and import the required classes
package ru.fediq.scrapingkit.backend

import cakesolutions.kafka.KafkaProducer
import org.apache.kafka.clients.producer.{ProducerRecord, RecordMetadata}
import org.apache.kafka.common.serialization.StringSerializer
import ru.fediq.scrapingkit.scraper.ScrapedEntity

import scala.concurrent.Future

class KafkaFeedsExporter(
  val bootstrapServer: String,
  val topic: String
) extends FeedExporter {
  val producer = KafkaProducer(KafkaProducer.Conf(new StringSerializer(), new StringSerializer, bootstrapServer))

  override def store[T <: ScrapedEntity](entity: T): Future[RecordMetadata] = {
    producer.send(new ProducerRecord(topic, entity.dump))
  }

  override def close() = producer.close()
} 
Developer: fediq | Project: scraping-kit | Lines: 22 | Source: KafkaFeedsExporter.scala


Example 5: EventSerialiser

// Set the package name and import the required classes
package serialisation

import java.util

import model.Event
import org.apache.kafka.common.serialization.{Serializer, StringSerializer}
import org.json4s.native.Serialization
import org.json4s.native.Serialization.write
import org.json4s.{Formats, NoTypeHints}

class EventSerialiser extends Serializer[Event] {

  implicit val formats: Formats = Serialization.formats(NoTypeHints)

  val stringSerialiser = new StringSerializer

  override def configure(configs: util.Map[String, _], isKey: Boolean): Unit = {
    stringSerialiser.configure(configs, isKey)
  }

  override def serialize(topic: String, data: Event): Array[Byte] = {
    val stringValue = write(data)
    stringSerialiser.serialize(topic, stringValue)
  }

  override def close(): Unit = {
    stringSerialiser.close()
  }
} 
Developer: benwheeler | Project: kafka-streams-poc | Lines: 30 | Source: EventSerialiser.scala
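For context, a custom Serializer such as the EventSerialiser above is typically handed to the producer either through the value.serializer property or directly as an instance. The following is a minimal sketch, assuming the example project's model.Event type is on the classpath and a locally reachable broker; the topic name is illustrative:

import java.util.Properties

import model.Event
import org.apache.kafka.clients.producer.{KafkaProducer, ProducerRecord}
import org.apache.kafka.common.serialization.StringSerializer
import serialisation.EventSerialiser

object EventProducerSketch {
  val props = new Properties()
  props.put("bootstrap.servers", "localhost:9092") // assumption: local broker

  // Passing serializer instances directly avoids registering them via *.serializer properties;
  // EventSerialiser writes the Event as JSON and delegates the byte encoding to a StringSerializer.
  val producer = new KafkaProducer[String, Event](props, new StringSerializer, new EventSerialiser)

  def sendEvent(event: Event): Unit = {
    // "events" is an illustrative topic name, not taken from the original project.
    producer.send(new ProducerRecord[String, Event]("events", event))
    ()
  }
}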


Example 6: running

// Set the package name and import the required classes
package producers

import akka.kafka.ProducerSettings
import akka.kafka.scaladsl.Producer
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{Keep, Source}
import akka.{Done, NotUsed}
import broker.ActorBroker
import config.AppConfig
import org.apache.kafka.clients.producer.ProducerRecord
import org.apache.kafka.common.serialization.{ByteArraySerializer, StringSerializer}

import scala.concurrent.Future

trait Producerable extends ActorBroker {
  val config: AppConfig
  implicit val materializer = ActorMaterializer()

  val producerSettings = ProducerSettings(context.system, new ByteArraySerializer, new StringSerializer)
    .withBootstrapServers(s"${config.kafkaConfig.uri}:${config.kafkaConfig.port}")

  def running(): Receive = {
    case Stop =>
      log.info("Stopping Kafka producer stream and actor")
      context.stop(self)
  }

  def sendToSink(message: String): Unit = {
    log.info(s"Attempting to produce message on topic $topicName")
    val kafkaSink = Producer.plainSink(producerSettings)

    val stringToProducerRecord: ProducerRecord[Array[Byte], String] = new ProducerRecord[Array[Byte], String](topicName, message)
    val (a, future): (NotUsed, Future[Done]) = Source.fromFuture(Future(stringToProducerRecord))
      .toMat(kafkaSink)(Keep.both)
      .run()
    future.onFailure {
      case ex =>
        log.error("Stream failed due to error, restarting", ex)
        throw ex
    }
    context.become(running())
    log.info(s"Writer now running, writing random numbers to topic $topicName")
  }


  case object Stop
} 
Developer: jguido | Project: reactive-kafka-registration | Lines: 48 | Source: Producerable.scala


Example 7: Main

// Set the package name and import the required classes
import java.util.concurrent.TimeUnit.SECONDS

import akka.actor.ActorSystem
import akka.kafka.ProducerSettings
import akka.kafka.scaladsl.Producer
import akka.stream.scaladsl.Source
import akka.stream.{ActorMaterializer, ThrottleMode}
import org.apache.kafka.clients.producer.ProducerRecord
import org.apache.kafka.common.serialization.{ByteArraySerializer, StringSerializer}

import scala.concurrent.duration.FiniteDuration
import scala.language.postfixOps

object Main {

  def main(args: Array[String]): Unit = {
    implicit val system = ActorSystem.apply("akka-stream-kafka")
    implicit val materializer = ActorMaterializer()

    val producerSettings = ProducerSettings(system, new ByteArraySerializer, new StringSerializer)
      .withBootstrapServers("localhost:9092;localhost:9093")

    Source.repeat(0)
      .scan(0)((next, _) => next + 1)
      .throttle(1, FiniteDuration(2L, SECONDS), 1, ThrottleMode.Shaping)
      .map(nextInt => {
        val topicName = "topic1"
        val partitionCount = 2
        val partition = nextInt % partitionCount

        new ProducerRecord[Array[Byte], String](topicName, nextInt.toString.getBytes, nextInt.toString)
//        new ProducerRecord[Array[Byte], String](topicName, partition, null, nextInt.toString)
      })
      .runWith(Producer.plainSink(producerSettings))
  }
} 
Developer: kczulko | Project: akka-streams-kafka | Lines: 37 | Source: Main.scala


Example 8: self

// Set the package name and import the required classes
package com.omearac.producers

import akka.actor.{ActorRef, ActorSystem}
import akka.kafka.ProducerSettings
import akka.kafka.scaladsl.Producer
import akka.stream.OverflowStrategy
import akka.stream.scaladsl.{Flow, Source}
import com.omearac.shared.JsonMessageConversion.Conversion
import com.omearac.shared.{AkkaStreams, EventSourcing}
import org.apache.kafka.clients.producer.ProducerRecord
import org.apache.kafka.common.serialization.{ByteArraySerializer, StringSerializer}



trait ProducerStream extends AkkaStreams with EventSourcing {
    implicit val system: ActorSystem
    def self: ActorRef

    def createStreamSource[msgType] = {
        Source.queue[msgType](Int.MaxValue,OverflowStrategy.backpressure)
    }

    def createStreamSink(producerProperties: Map[String, String]) = {
        val kafkaMBAddress = producerProperties("bootstrap-servers")
        val producerSettings = ProducerSettings(system, new ByteArraySerializer, new StringSerializer).withBootstrapServers(kafkaMBAddress)

        Producer.plainSink(producerSettings)
    }

    def createStreamFlow[msgType: Conversion](producerProperties: Map[String, String]) = {
        val numberOfPartitions = producerProperties("num.partitions").toInt -1
        val topicToPublish = producerProperties("publish-topic")
        val rand = new scala.util.Random
        val range = 0 to numberOfPartitions

        Flow[msgType].map { msg =>
            val partition = range(rand.nextInt(range.length))
            val stringJSONMessage = Conversion[msgType].convertToJson(msg)
            new ProducerRecord[Array[Byte], String](topicToPublish, partition, null, stringJSONMessage)
        }
    }
} 
Developer: omearac | Project: reactive-kafka-microservice-template | Lines: 43 | Source: ProducerStream.scala


Example 9: ReactiveKafkaSingleConsumerMultipleProducerScala

// Set the package name and import the required classes
package org.rgcase.reactivekafka

import akka.actor.ActorSystem
import akka.kafka.ConsumerMessage.{ CommittableMessage, CommittableOffsetBatch }
import akka.kafka.ProducerMessage.Message
import akka.kafka.scaladsl.{ Consumer, Producer }
import akka.kafka.{ ConsumerSettings, ProducerSettings, Subscriptions }
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{ Flow, Sink }
import org.apache.kafka.clients.consumer.ConsumerConfig
import org.apache.kafka.clients.producer.ProducerRecord
import org.apache.kafka.common.serialization.{ ByteArrayDeserializer, ByteArraySerializer, StringDeserializer, StringSerializer }

class ReactiveKafkaSingleConsumerMultipleProducerScala extends App {

  implicit val system = ActorSystem("reactivekafkascala")
  implicit val mat = ActorMaterializer()

  val consumerSettings = ConsumerSettings(system, new ByteArrayDeserializer, new StringDeserializer)
    .withBootstrapServers("localhost:9092")
    .withGroupId("group1")
    .withProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest")

  val producerSettings = ProducerSettings(system, new ByteArraySerializer, new StringSerializer)
    .withBootstrapServers("localhost:9093")

  val kafkaSource =
    Consumer.committableSource(consumerSettings, Subscriptions.topics("sourcetopic"))

  def toProducerMessage(topic: String) = (msg: CommittableMessage[Array[Byte], String]) =>
    Message[Array[Byte], String, CommittableMessage[Array[Byte], String]](new ProducerRecord(topic, msg.record.value), msg)

  val producerFlow1 =
    Flow.fromFunction(toProducerMessage("targettopic1")).via(Producer.flow(producerSettings)).map(_.message.passThrough)

  val producerFlow2 =
    Flow.fromFunction(toProducerMessage("targettopic2")).via(Producer.flow(producerSettings)).map(_.message.passThrough)

  val producerFlow3 =
    Flow.fromFunction(toProducerMessage("targettopic3")).via(Producer.flow(producerSettings)).map(_.message.passThrough)

  kafkaSource
    .via(producerFlow1)
    .via(producerFlow2)
    .via(producerFlow3)
    .batch(max = 20, first => CommittableOffsetBatch.empty.updated(first.committableOffset)) { (batch, elem) =>
      batch.updated(elem.committableOffset)
    }.mapAsync(3)(_.commitScaladsl())
    .runWith(Sink.ignore)

} 
Developer: rgcase | Project: testplayground | Lines: 52 | Source: ReactiveKafkaSingleConsumerMultipleProducerScala.scala


Example 10: config

// Set the package name and import the required classes
package com.example

import akka.actor.Actor
import akka.event.LoggingAdapter
import cakesolutions.kafka.akka.KafkaConsumerActor.Subscribe
import cakesolutions.kafka.akka.{ConsumerRecords, KafkaConsumerActor, KafkaProducerActor, ProducerRecords}
import cakesolutions.kafka.{KafkaProducer, KafkaProducerRecord}
import com.example.PingPongProtocol.PingPongMessage
import com.typesafe.config.Config
import org.apache.kafka.common.serialization.{StringDeserializer, StringSerializer}

import scala.util.Random
trait KafkaConfig{
  def config:Config
  def log: LoggingAdapter
  def randomString(len: Int= 5): String = Random.alphanumeric.take(len).mkString("")
}

trait PingPongConsumer extends KafkaConfig{
  this: Actor =>

  //for pattern matching in our receive method
  val msgExtractor = ConsumerRecords.extractor[java.lang.String, PingPongMessage]

  val kafkaConsumerActor = context.actorOf(
    KafkaConsumerActor.props(config,new StringDeserializer(), new JsonDeserializer[PingPongMessage], self),
    "PingKafkaConsumerActor"
  )

  def subscribe(topics: List[String]) =
     kafkaConsumerActor ! Subscribe.AutoPartition(topics)

}

trait PingPongProducer  extends KafkaConfig{
  this: Actor =>

  val kafkaProducerConf = KafkaProducer.Conf(
    bootstrapServers = config.getString("bootstrap.servers"),
    keySerializer = new StringSerializer(),
    valueSerializer = new JsonSerializer[PingPongMessage])


  val kafkaProducerActor = context.actorOf(KafkaProducerActor.props( kafkaProducerConf))

  def submitMsg(topics: List[String], msg: PingPongMessage) = {
    log.info(s"Placing $msg on ${topics.mkString(",")}")
    topics.foreach(topic => kafkaProducerActor ! ProducerRecords(List(KafkaProducerRecord(topic, randomString(3), msg))))
  }
} 
Developer: 123avi | Project: kafka-akka-example | Lines: 51 | Source: PingPongConsumer.scala


Example 11: SampleSubmitter

// Set the package name and import the required classes
package com.example.smaple

import cakesolutions.kafka.{KafkaProducer, KafkaProducerRecord}
import com.example.JsonSerializer
import com.typesafe.config.Config
import org.apache.kafka.common.serialization.StringSerializer

class SampleSubmitter(config: Config) {

  private val producer = KafkaProducer(
    KafkaProducer.Conf(
      config,
      keySerializer = new StringSerializer,
      valueSerializer = new JsonSerializer[SubmitSampleCommand])
  )

  private val topic = config.getString("topic")

  def submitSample(meterId: MeterId, submitSampleCommand: SubmitSampleCommand) = producer.send(
    KafkaProducerRecord(topic, meterId.id.toString, submitSampleCommand)
  )

  def close() = producer.close()

} 
Developer: 123avi | Project: kafka-akka-example | Lines: 26 | Source: SampleSubmitter.scala


Example 12: ReactiveProducer

// Set the package name and import the required classes
package co.s4n.reactiveKafka

import akka.actor.ActorSystem
import akka.kafka.ProducerMessage
import akka.kafka.ProducerSettings
import akka.kafka.scaladsl.Producer
import akka.stream.scaladsl.Source
import org.apache.kafka.clients.producer.ProducerRecord
import org.apache.kafka.common.serialization.ByteArraySerializer
import org.apache.kafka.common.serialization.StringSerializer
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.Sink
import scala.concurrent.Future
import akka.Done
import scala.util.{ Failure, Success }


object ReactiveProducer {

  val system = ActorSystem("example")
  implicit val ec = system.dispatcher
  implicit val materializer = ActorMaterializer.create(system)

  val producerSettings = ProducerSettings(system, new ByteArraySerializer, new StringSerializer)
    .withBootstrapServers("localhost:9092")
  val kafkaProducer = producerSettings.createKafkaProducer()

  def produce(msg: String): Unit = {
    val done = Source(1 to 1)
      .map(_.toString)
      .map { elem =>
        println("\n" + msg);
        new ProducerRecord[Array[Byte], String]("UsersTopic", msg)
      }
      .runWith(Producer.plainSink(producerSettings, kafkaProducer))
    // #plainSinkWithProducer

    //    terminateWhenDone(done)
  }

  def terminateWhenDone(result: Future[Done]): Unit = {
    result.onComplete {
      case Failure(e) =>
        system.log.error(e, e.getMessage)
        system.terminate()
      case Success(_) => system.terminate()
    }
  }

} 
Developer: bazzo03 | Project: users-api | Lines: 51 | Source: ReactiveProducer.scala


Example 13: FlowProducerMain

// Set the package name and import the required classes
package com.example.producer

import akka.actor.ActorSystem
import akka.kafka.scaladsl.Producer
import akka.kafka.{ProducerMessage, ProducerSettings}
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{Sink, Source}
import org.apache.kafka.clients.producer.ProducerRecord
import org.apache.kafka.common.serialization.{ByteArraySerializer, StringSerializer}


object FlowProducerMain extends App {
  implicit val system = ActorSystem("FlowProducerMain")
  implicit val materializer = ActorMaterializer()

  val producerSettings = ProducerSettings(system, new ByteArraySerializer, new StringSerializer)
    .withBootstrapServers("localhost:9092")

  val done = Source(1 to 10)
    .map { n =>
      // val partition = math.abs(n) % 2
      val partition = 0
      ProducerMessage.Message(new ProducerRecord[Array[Byte], String](
        "topic1", partition, null, n.toString
      ), n)
    }
    .via(Producer.flow(producerSettings))
    .map { result =>
      val record = result.message.record
      println(s"${record.topic}/${record.partition} ${result.offset}: ${record.value}" +
        s"(${result.message.passThrough})")
      result
    }
    .runWith(Sink.ignore)
} 
Developer: makersu | Project: reactive-kafka-scala-example | Lines: 36 | Source: FlowProducerMain.scala


Example 14: CommitConsumerToFlowProducerMain

// Set the package name and import the required classes
package com.example.producer

import akka.actor.ActorSystem
import akka.kafka.scaladsl.{Consumer, Producer}
import akka.kafka.{ConsumerSettings, ProducerMessage, ProducerSettings, Subscriptions}
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.Sink
import org.apache.kafka.clients.consumer.ConsumerConfig
import org.apache.kafka.clients.producer.ProducerRecord
import org.apache.kafka.common.serialization.{ByteArrayDeserializer, ByteArraySerializer, StringDeserializer, StringSerializer}


object CommitConsumerToFlowProducerMain extends App {
  implicit val system = ActorSystem("CommitConsumerToFlowProducerMain")
  implicit val materializer = ActorMaterializer()

  val consumerSettings =
    ConsumerSettings(system, new ByteArrayDeserializer, new StringDeserializer)
      .withBootstrapServers("localhost:9092")
      .withGroupId("CommitConsumerToFlowProducer")
      .withProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest")

  val producerSettings = ProducerSettings(system, new ByteArraySerializer, new StringSerializer)
    .withBootstrapServers("localhost:9092")

  val done =
    Consumer.committableSource(consumerSettings, Subscriptions.topics("topic1"))
      .map { msg =>
        println(s"topic1 -> topic2: $msg")
        ProducerMessage.Message(new ProducerRecord[Array[Byte], String](
          "topic2",
          msg.record.value
        ), msg.committableOffset)
      }
      .via(Producer.flow(producerSettings))
      .mapAsync(producerSettings.parallelism) { result =>
        result.message.passThrough.commitScaladsl()
      }
      .runWith(Sink.ignore)
} 
Developer: makersu | Project: reactive-kafka-scala-example | Lines: 41 | Source: CommitConsumerToFlowProducerMain.scala


Example 15: PlainSinkProducerMain

// Set the package name and import the required classes
package com.example.producer

import akka.actor.ActorSystem
import akka.kafka.ProducerSettings
import akka.kafka.scaladsl.Producer
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.Source
import org.apache.kafka.clients.producer.ProducerRecord
import org.apache.kafka.common.serialization.{ByteArraySerializer, StringSerializer}


object PlainSinkProducerMain extends App {

  implicit val system = ActorSystem("PlainSinkProducerMain")
  implicit val materializer = ActorMaterializer()

  val producerSettings = ProducerSettings(system, new ByteArraySerializer, new StringSerializer)
    .withBootstrapServers("localhost:9092")

  val done = Source(1 to 10)
    .map(_.toString)
    .map { elem =>
      println(s"PlainSinkProducer produce: ${elem}")
      new ProducerRecord[Array[Byte], String]("topic1", elem)
    }
    .runWith(Producer.plainSink(producerSettings))

} 
Developer: makersu | Project: reactive-kafka-scala-example | Lines: 29 | Source: PlainSinkProducerMain.scala


Example 16: ConsumerToCommitableSinkProducerMain

// Set the package name and import the required classes
package com.example.producer

import akka.actor.ActorSystem
import akka.kafka.ConsumerMessage.CommittableOffsetBatch
import akka.kafka.scaladsl.{Consumer, Producer}
import akka.kafka.{ConsumerSettings, ProducerMessage, ProducerSettings, Subscriptions}
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.Sink
import org.apache.kafka.clients.consumer.ConsumerConfig
import org.apache.kafka.clients.producer.ProducerRecord
import org.apache.kafka.common.serialization.{ByteArrayDeserializer, ByteArraySerializer, StringDeserializer, StringSerializer}


object ConsumerToCommitableSinkProducerMain extends App {

  implicit val system = ActorSystem("Consumer2ProducerMain")
  implicit val materializer = ActorMaterializer()

  //TODO: move to configuration application.conf
  val consumerSettings =
    ConsumerSettings(system, new ByteArrayDeserializer, new StringDeserializer)
      .withBootstrapServers("localhost:9092")
      .withGroupId("Consumer2Producer")
      .withProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest")

  //TODO: move to configuration application.conf
  val producerSettings =
    ProducerSettings(system, new ByteArraySerializer, new StringSerializer)
      .withBootstrapServers("localhost:9092")

  Consumer.committableSource(consumerSettings, Subscriptions.topics("topic1"))
    .map { msg =>
      println(s"topic1 -> topic2: $msg")
      ProducerMessage.Message(new ProducerRecord[Array[Byte], String](
        "topic2",
        msg.record.value
      ), msg.committableOffset)
    }
    .runWith(Producer.commitableSink(producerSettings))

} 
Developer: makersu | Project: reactive-kafka-scala-example | Lines: 42 | Source: ConsumerToCommitableSinkProducerMain.scala


Example 17: BaseStation2Kafka

// Set the package name and import the required classes
package com.darienmt.airplaneadventures.basestation.collector.streams

import akka.Done
import akka.actor.ActorSystem
import akka.kafka.ProducerSettings
import akka.kafka.scaladsl.Producer
import akka.stream.ActorMaterializer
import org.apache.kafka.clients.producer.ProducerRecord
import org.apache.kafka.common.serialization.{ ByteArraySerializer, StringSerializer }

import scala.concurrent.Future
import io.circe._
import io.circe.generic.auto._
import io.circe.parser._
import io.circe.syntax._
import io.circe.java8.time.{ encodeLocalDateDefault, encodeZonedDateTimeDefault }
import com.darienmt.airplaneadventures.basestation.collector.parsing.CirceEncoders._

object BaseStation2Kafka {

  case class SourceConfig(address: String, port: Int)
  case class SinkConfig(address: String, port: Int, topic: String)

  def apply(sourceConfig: SourceConfig, sinkConfig: SinkConfig)(implicit system: ActorSystem, materializer: ActorMaterializer): Future[Done] =
    BaseStationSource(sourceConfig.address, sourceConfig.port)
      .map(_.asJson.noSpaces)
      .map(m => new ProducerRecord[Array[Byte], String](sinkConfig.topic, m))
      .runWith(
        Producer.plainSink(
          ProducerSettings(system, new ByteArraySerializer, new StringSerializer)
            .withBootstrapServers(s"${sinkConfig.address}:${sinkConfig.port}")
        )
      )
} 
Developer: darienmt | Project: airplane-adventures | Lines: 35 | Source: BaseStation2Kafka.scala


Example 18: Main

// Set the package name and import the required classes
package com.darienmt.airplaneadventures.basestation.rawcollector

import java.net.InetSocketAddress

import akka.Done
import akka.io.Inet.SocketOption
import akka.kafka.ProducerSettings
import akka.kafka.scaladsl.Producer
import akka.stream.scaladsl.{ Framing, Source, Tcp }
import akka.util.ByteString
import com.darienmt.keepers.{ Generator, KeepThisUp, MainCommons }
import org.apache.kafka.clients.producer.ProducerRecord
import org.apache.kafka.common.serialization.{ ByteArraySerializer, StringSerializer }

import scala.collection.immutable
import scala.collection.immutable.IndexedSeq
import scala.util.Success
import scala.concurrent.duration._

object Main extends App with MainCommons {

  implicit def asFiniteDuration(d: java.time.Duration): FiniteDuration =
    scala.concurrent.duration.Duration.fromNanos(d.toNanos)

  val bsAddress = config.getString("station.address")
  val bsPort = config.getInt("station.port")
  val connectTimeout: Duration = config.getDuration("station.connectTimeout")
  val idleTimeout: Duration = config.getDuration("station.idleTimeout")

  val kafkaAddress = config.getString("kafka.address")
  val kafkaPort = config.getInt("kafka.port")
  val kafkaTopic = config.getString("kafka.topic")


  val generator: Generator = () => Source(IndexedSeq(ByteString.empty))
    .via(
      Tcp().outgoingConnection(
        remoteAddress = InetSocketAddress.createUnresolved(bsAddress, bsPort),
        connectTimeout = connectTimeout,
        idleTimeout = idleTimeout
      )
        .via(Framing.delimiter(ByteString("\n"), 256))
        .map(_.utf8String)
    )
    .map(m => new ProducerRecord[Array[Byte], String](kafkaTopic, m))
    .runWith(
      Producer.plainSink(
        ProducerSettings(system, new ByteArraySerializer, new StringSerializer)
          .withBootstrapServers(s"${kafkaAddress}:${kafkaPort}")
      )
    )

  val keeper = KeepThisUp(config)
  keeper(generator)
} 
Developer: darienmt | Project: airplane-adventures | Lines: 56 | Source: Main.scala


Example 19: Config

// Set the package name and import the required classes
package com.github.kliewkliew.cornucopia.kafka

import akka.actor.ActorSystem
import akka.kafka.scaladsl.{Producer, Consumer => ConsumerDSL}
import akka.kafka.{ConsumerSettings, ProducerSettings, Subscriptions}
import akka.stream.{ActorMaterializer, ActorMaterializerSettings, Supervision}
import com.typesafe.config.ConfigFactory
import org.apache.kafka.common.serialization.{StringDeserializer, StringSerializer}
import org.slf4j.LoggerFactory

import scala.concurrent.duration._

object Config {
  object Cornucopia {
    private val config = ConfigFactory.load().getConfig("cornucopia")
    val minReshardWait = config.getInt("reshard.interval").seconds
    val gracePeriod = config.getInt("grace.period") * 1000
    val refreshTimeout = config.getInt("refresh.timeout") * 1000
    val batchPeriod = config.getInt("batch.period").seconds
  }

  object Consumer {
    private val kafkaConfig = ConfigFactory.load().getConfig("kafka")
    private val kafkaServers = kafkaConfig.getString("bootstrap.servers")
    private val kafkaConsumerConfig = kafkaConfig.getConfig("consumer")
    private val topic = kafkaConsumerConfig.getString("topic")
    private val groupId = kafkaConsumerConfig.getString("group.id")

    implicit val actorSystem = ActorSystem()
    // Log failures and resume processing
    private val decider: Supervision.Decider = { e =>
      LoggerFactory.getLogger(this.getClass).error("Failed to process event", e)
      Supervision.Resume
    }
    private val materializerSettings = ActorMaterializerSettings(actorSystem).withSupervisionStrategy(decider)

    private val sourceSettings = ConsumerSettings(actorSystem, new StringDeserializer, new StringDeserializer)
      .withBootstrapServers(kafkaServers)
      .withGroupId(groupId)
    private val subscription = Subscriptions.topics(topic)

    private val sinkSettings = ProducerSettings(actorSystem, new StringSerializer, new StringSerializer)
      .withBootstrapServers(kafkaServers)

    implicit val materializer = ActorMaterializer(materializerSettings)(actorSystem)
    val cornucopiaSource = ConsumerDSL.plainSource(sourceSettings, subscription)
    val cornucopiaSink = Producer.plainSink(sinkSettings)
  }

} 
Developer: kliewkliew | Project: cornucopia | Lines: 51 | Source: Config.scala


Example 20: Module

// Set the package name and import the required classes
package com.github.dnvriend

import com.google.inject.{AbstractModule, Provides}
import org.apache.kafka.clients.consumer.{ConsumerConfig, ConsumerRecord}
import org.apache.kafka.clients.producer.ProducerConfig
import org.apache.kafka.common.serialization.{StringDeserializer, StringSerializer}
import org.springframework.kafka.core.{DefaultKafkaConsumerFactory, DefaultKafkaProducerFactory, KafkaTemplate}
import org.springframework.kafka.listener.config.ContainerProperties
import org.springframework.kafka.listener.{KafkaMessageListenerContainer, MessageListener}

import scala.collection.JavaConversions._

class Module extends AbstractModule {
  protected def configure(): Unit = {
  }

  @Provides
  def createProducerTemplate: KafkaTemplate[String, String] = {
    val senderProps: java.util.Map[String, Any] = Map(
      ProducerConfig.BOOTSTRAP_SERVERS_CONFIG -> "localhost:9092",
      ProducerConfig.RETRIES_CONFIG -> 0,
      ProducerConfig.BATCH_SIZE_CONFIG -> 16384,
      ProducerConfig.LINGER_MS_CONFIG -> 1,
      ProducerConfig.BUFFER_MEMORY_CONFIG -> 33554432,
      ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG -> classOf[StringSerializer],
      ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG -> classOf[StringSerializer]
    )
    val producerFactory = new DefaultKafkaProducerFactory[String, String](senderProps.mapValues(_.asInstanceOf[AnyRef]))
    new KafkaTemplate[String, String](producerFactory)
  }

  @Provides
  def createKafkaMessageListenerContainer(messageListener: MessageListener[String, String]): KafkaMessageListenerContainer[String, String] = {
    val consumerProps: java.util.Map[String, Any] = Map(
      ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG -> "localhost:9092",
      ConsumerConfig.GROUP_ID_CONFIG -> "group",
      ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG -> true,
      ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG -> "100",
      ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG -> "15000",
      ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG -> classOf[StringDeserializer],
      ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG -> classOf[StringDeserializer]
    )

    val containerProperties = new ContainerProperties("test")
    containerProperties.setMessageListener(messageListener)

    val consumerFactory = new DefaultKafkaConsumerFactory[String, String](consumerProps.mapValues(_.asInstanceOf[AnyRef]))
    val container = new KafkaMessageListenerContainer[String, String](consumerFactory, containerProperties)
    container.setBeanName("testAuto")
    container.start()
    container
  }

  @Provides
  def messageListener: MessageListener[String, String] = new MessageListener[String, String] {
    override def onMessage(message: ConsumerRecord[String, String]): Unit = {
      println(s"received: $message")
    }
  }
} 
Developer: dnvriend | Project: spring-kafka-test | Lines: 61 | Source: Module.scala



Note: The org.apache.kafka.common.serialization.StringSerializer class examples in this article were collected from source-code and documentation platforms such as GitHub and MSDocs. The code snippets are drawn from open-source projects contributed by various developers; copyright remains with the original authors. Please consult the corresponding project's license before distributing or using the code, and do not reproduce it without permission.

