Scala ByteArrayDeserializer Class Code Examples


This article collects typical usage examples of the Scala class org.apache.kafka.common.serialization.ByteArrayDeserializer. If you have been wondering what ByteArrayDeserializer is for, how to use it in Scala, or what real-world usage looks like, the curated examples below should help.



A total of 18 code examples of the ByteArrayDeserializer class are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Scala code samples.
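Before diving in, note what the class actually does: ByteArrayDeserializer is Kafka's trivial deserializer and simply hands back the record's raw bytes unchanged. A minimal standalone sketch (the topic name is an arbitrary placeholder):

import org.apache.kafka.common.serialization.ByteArrayDeserializer

object ByteArrayDeserializerDemo extends App {
  val deserializer = new ByteArrayDeserializer
  // ByteArrayDeserializer is effectively the identity function on bytes
  val payload: Array[Byte] = "hello".getBytes("UTF-8")
  val roundTripped: Array[Byte] = deserializer.deserialize("any-topic", payload)
  println(new String(roundTripped, "UTF-8")) // prints: hello
}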

Example 1: ProcessingKafkaApplication

// Package declaration and imported dependencies
package com.packt.chapter8

import akka.actor.ActorSystem
import akka.kafka.scaladsl.{Consumer, Producer}
import akka.kafka.{ConsumerSettings, ProducerSettings, Subscriptions}
import akka.stream.{ActorMaterializer, ClosedShape}
import akka.stream.scaladsl.{Flow, GraphDSL, RunnableGraph, Sink, Source}
import org.apache.kafka.clients.consumer.{ConsumerConfig, ConsumerRecord}
import org.apache.kafka.clients.producer.ProducerRecord
import org.apache.kafka.common.TopicPartition
import org.apache.kafka.common.serialization.{ByteArrayDeserializer, ByteArraySerializer, StringDeserializer, StringSerializer}

import scala.concurrent.duration._

object ProcessingKafkaApplication extends App {
  implicit val actorSystem = ActorSystem("SimpleStream")
  implicit val actorMaterializer = ActorMaterializer()

  val bootstrapServers = "localhost:9092"
  val kafkaTopic = "akka_streams_topic"
  val partition = 0
  val subscription = Subscriptions.assignment(new TopicPartition(kafkaTopic, partition))

  val consumerSettings = ConsumerSettings(actorSystem, new ByteArrayDeserializer, new StringDeserializer)
    .withBootstrapServers(bootstrapServers)
    .withGroupId("akka_streams_group")
    .withProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest")

  val producerSettings = ProducerSettings(actorSystem, new ByteArraySerializer, new StringSerializer)
    .withBootstrapServers(bootstrapServers)

  val runnableGraph = RunnableGraph.fromGraph(GraphDSL.create() { implicit builder =>
    import GraphDSL.Implicits._

    val tickSource = Source.tick(0 seconds, 5 seconds, "Hello from Akka Streams using Kafka!")
    val kafkaSource = Consumer.plainSource(consumerSettings, subscription)
    val kafkaSink = Producer.plainSink(producerSettings)
    val printlnSink = Sink.foreach(println)

    val mapToProducerRecord = Flow[String].map(elem => new ProducerRecord[Array[Byte], String](kafkaTopic, elem))
    val mapFromConsumerRecord = Flow[ConsumerRecord[Array[Byte], String]].map(record => record.value())

    tickSource  ~> mapToProducerRecord   ~> kafkaSink
    kafkaSource ~> mapFromConsumerRecord ~> printlnSink

    ClosedShape
  })

  runnableGraph.run()
} 
Author: PacktPublishing | Project: Akka-Cookbook | Lines: 51 | Source: ProcessingKafkaApplication.scala
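This and the other Akka Streams examples below target the pre-1.0 akka-stream-kafka (reactive-kafka) API — note the 0.x-era calls such as Producer.flow and commitScaladsl(). A build.sbt sketch along these lines should compile them; the version numbers are assumptions, not taken from the original project:

libraryDependencies ++= Seq(
  "com.typesafe.akka" %% "akka-stream-kafka" % "0.17",    // assumed 0.x version
  "org.apache.kafka"  %  "kafka-clients"     % "0.11.0.0" // assumed matching client
)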


Example 2: Settings

// Package declaration and imported dependencies
package com.scalaio.kafka.consumer

import akka.actor.ActorSystem
import akka.kafka.ConsumerMessage.CommittableMessage
import akka.kafka.scaladsl.Consumer
import akka.kafka.{ConsumerSettings, ProducerSettings, Subscriptions}
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.Sink
import com.scalaio.kafka.consumer.Settings.consumerSettings
import org.apache.kafka.clients.consumer.ConsumerConfig
import org.apache.kafka.common.serialization.{ByteArrayDeserializer, ByteArraySerializer, StringDeserializer, StringSerializer}

import scala.concurrent.Future

object Settings {
  def consumerSettings(implicit system: ActorSystem) =
    ConsumerSettings(system, new ByteArrayDeserializer, new StringDeserializer)
      .withBootstrapServers("localhost:9092")
      .withGroupId("CommittableSourceConsumer")
      .withProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest")

  def producerSettings(implicit system: ActorSystem) =
    ProducerSettings(system, new ByteArraySerializer, new StringSerializer)
      .withBootstrapServers("localhost:9092")
}

object CommittableSource extends App {

  type KafkaMessage = CommittableMessage[Array[Byte], String]

  implicit val system = ActorSystem("CommittableSourceConsumerMain")
  implicit val materializer = ActorMaterializer()

  implicit val ec = system.dispatcher

  // explicit commit
  Consumer
    .committableSource(consumerSettings, Subscriptions.topics("topic1"))
    .mapAsync(1) { msg =>
      BusinessController.handleMessage(msg.record.value)
        .flatMap(response => msg.committableOffset.commitScaladsl())
        .recoverWith { case e => msg.committableOffset.commitScaladsl() }
    }
    .runWith(Sink.ignore)

}

object BusinessController {

  type Service[A, B] = A => Future[B]

  val handleMessage: Service[String, String] =
    (message) => Future.successful(message.toUpperCase)

} 
Author: fagossa | Project: scalaio_akka | Lines: 56 | Source: CommittableSource.scala
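Note that the recoverWith branch above commits the offset even when handleMessage fails, which yields roughly at-most-once processing. If at-least-once delivery is wanted instead, a hedged variant (reusing the same consumerSettings) commits only on success and lets the stream fail otherwise:

Consumer
  .committableSource(consumerSettings, Subscriptions.topics("topic1"))
  .mapAsync(1) { msg =>
    BusinessController.handleMessage(msg.record.value)
      .flatMap(_ => msg.committableOffset.commitScaladsl()) // commit only after success
  }
  .runWith(Sink.ignore)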


Example 3: StreamConsumer

// Package declaration and imported dependencies
package consumers

import akka.Done
import akka.actor.ActorSystem
import akka.kafka.scaladsl.Consumer
import akka.kafka.{ConsumerSettings, Subscriptions}
import akka.stream.scaladsl.Sink
import akka.stream.{ActorMaterializer, ActorMaterializerSettings}
import cats.data.Xor
import com.typesafe.config.ConfigFactory
import org.apache.kafka.clients.consumer.{ConsumerConfig, ConsumerRecord}
import org.apache.kafka.common.serialization.{ByteArrayDeserializer, StringDeserializer}
import io.circe._
import io.circe.generic.auto._
import cats.data.Xor.{Left, Right}
import model.Employee

import scala.concurrent.Future

object StreamConsumer extends App {
  implicit val actorSystem = ActorSystem("consumer-actors", ConfigFactory.load())
  implicit val materializer = ActorMaterializer(ActorMaterializerSettings(actorSystem))

  lazy val consumerSettings = ConsumerSettings(actorSystem, new ByteArrayDeserializer, new StringDeserializer)
    .withBootstrapServers("localhost:9092")
    .withGroupId("group13")
    .withProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest") // alternative: "latest"
    .withProperty(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true")
    .withProperty(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, "1000")

  lazy val subscription = Subscriptions.topics("raw-data-1")
  lazy val db = new Processor()
  Consumer.plainSource(consumerSettings, subscription)
    .mapAsync(4)(db.processMessage)
    .runWith(Sink.ignore)

}

class Processor {
  def processMessage(record: ConsumerRecord[Array[Byte], String]): Future[Done] ={
    println(s"DB.save: ${record.value()}")

    Option(record.value()).foreach{ jsonString =>
      val mayBeEmp: Xor[Error, Employee] = jawn.decode[Employee](jsonString)
      mayBeEmp match {
        case Left(error) => println(error)
        case Right(emp) => println(s"employee name: ${emp.name}")
      }
    }
    Future.successful(Done)
  }
} 
Author: ajit-scala | Project: kafka-consumers | Lines: 55 | Source: StreamConsumer.scala
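A compatibility note: cats.data.Xor was removed in cats 0.8, and current circe versions decode into the standard Either. Against a newer circe (an assumption, not the version this project pinned), the decode step would read:

import io.circe.parser.decode

decode[Employee](jsonString) match {
  case Left(error) => println(error)
  case Right(emp)  => println(s"employee name: ${emp.name}")
}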


Example 4: Main

// Package declaration and imported dependencies
import akka.actor.ActorSystem
import akka.kafka.{ConsumerSettings, Subscriptions}
import akka.kafka.scaladsl.Consumer
import akka.stream.ActorMaterializer
import org.apache.kafka.clients.consumer.ConsumerConfig
import org.apache.kafka.common.serialization.{ByteArrayDeserializer, StringDeserializer}


object Main {
  def main(args: Array[String]): Unit = {
    implicit val system = ActorSystem.apply("akka-stream-kafka")
    implicit val materializer = ActorMaterializer()

    val consumerSettings = ConsumerSettings(system, new ByteArrayDeserializer, new StringDeserializer)
      .withBootstrapServers("localhost:9092;localhost:9093")
      .withGroupId("group1")
      .withProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest")

    Consumer.committableSource(consumerSettings, Subscriptions.topics("topic1"))
      .mapAsync(1) { msg =>
        // wait for the commit to complete instead of discarding its Future
        msg.committableOffset.commitScaladsl().map(_ => msg)(system.dispatcher)
      }
      .runForeach(msg => println(s"partition: ${msg.record.partition}; value: ${msg.record.value}"))
  }
} 
Author: kczulko | Project: akka-streams-kafka | Lines: 28 | Source: Main.scala


Example 5: ReactiveKafkaSingleConsumerMultipleProducerScala

// Package declaration and imported dependencies
package org.rgcase.reactivekafka

import akka.actor.ActorSystem
import akka.kafka.ConsumerMessage.{ CommittableMessage, CommittableOffsetBatch }
import akka.kafka.ProducerMessage.Message
import akka.kafka.scaladsl.{ Consumer, Producer }
import akka.kafka.{ ConsumerSettings, ProducerSettings, Subscriptions }
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{ Flow, Sink }
import org.apache.kafka.clients.consumer.ConsumerConfig
import org.apache.kafka.clients.producer.ProducerRecord
import org.apache.kafka.common.serialization.{ ByteArrayDeserializer, ByteArraySerializer, StringDeserializer, StringSerializer }

object ReactiveKafkaSingleConsumerMultipleProducerScala extends App {

  implicit val system = ActorSystem("reactivekafkascala")
  implicit val mat = ActorMaterializer()

  val consumerSettings = ConsumerSettings(system, new ByteArrayDeserializer, new StringDeserializer)
    .withBootstrapServers("localhost:9092")
    .withGroupId("group1")
    .withProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest")

  val producerSettings = ProducerSettings(system, new ByteArraySerializer, new StringSerializer)
    .withBootstrapServers("localhost:9093")

  val kafkaSource =
    Consumer.committableSource(consumerSettings, Subscriptions.topics("sourcetopic"))

  def toProducerMessage(topic: String) = (msg: CommittableMessage[Array[Byte], String]) =>
    Message[Array[Byte], String, CommittableMessage[Array[Byte], String]](new ProducerRecord(topic, msg.record.value), msg)

  val producerFlow1 =
    Flow.fromFunction(toProducerMessage("targettopic1")).via(Producer.flow(producerSettings)).map(_.message.passThrough)

  val producerFlow2 =
    Flow.fromFunction(toProducerMessage("targettopic2")).via(Producer.flow(producerSettings)).map(_.message.passThrough)

  val producerFlow3 =
    Flow.fromFunction(toProducerMessage("targettopic3")).via(Producer.flow(producerSettings)).map(_.message.passThrough)

  kafkaSource
    .via(producerFlow1)
    .via(producerFlow2)
    .via(producerFlow3)
    .batch(max = 20, first => CommittableOffsetBatch.empty.updated(first.committableOffset)) { (batch, elem) =>
      batch.updated(elem.committableOffset)
    }.mapAsync(3)(_.commitScaladsl())
    .runWith(Sink.ignore)

} 
Author: rgcase | Project: testplayground | Lines: 52 | Source: ReactiveKafkaSingleConsumerMultipleProducerScala.scala


Example 6: CommitConsumerToFlowProducerMain

// Package declaration and imported dependencies
package com.example.producer

import akka.actor.ActorSystem
import akka.kafka.scaladsl.{Consumer, Producer}
import akka.kafka.{ConsumerSettings, ProducerMessage, ProducerSettings, Subscriptions}
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.Sink
import org.apache.kafka.clients.consumer.ConsumerConfig
import org.apache.kafka.clients.producer.ProducerRecord
import org.apache.kafka.common.serialization.{ByteArrayDeserializer, ByteArraySerializer, StringDeserializer, StringSerializer}


object CommitConsumerToFlowProducerMain extends App {
  implicit val system = ActorSystem("CommitConsumerToFlowProducerMain")
  implicit val materializer = ActorMaterializer()

  val consumerSettings =
    ConsumerSettings(system, new ByteArrayDeserializer, new StringDeserializer)
      .withBootstrapServers("localhost:9092")
      .withGroupId("CommitConsumerToFlowProducer")
      .withProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest")

  val producerSettings = ProducerSettings(system, new ByteArraySerializer, new StringSerializer)
    .withBootstrapServers("localhost:9092")

  val done =
    Consumer.committableSource(consumerSettings, Subscriptions.topics("topic1"))
      .map { msg =>
        println(s"topic1 -> topic2: $msg")
        ProducerMessage.Message(new ProducerRecord[Array[Byte], String](
          "topic2",
          msg.record.value
        ), msg.committableOffset)
      }
      .via(Producer.flow(producerSettings))
      .mapAsync(producerSettings.parallelism) { result =>
        result.message.passThrough.commitScaladsl()
      }
      .runWith(Sink.ignore)
} 
Author: makersu | Project: reactive-kafka-scala-example | Lines: 41 | Source: CommitConsumerToFlowProducerMain.scala


Example 7: ConsumerToCommitableSinkProducerMain

// Package declaration and imported dependencies
package com.example.producer

import akka.actor.ActorSystem
import akka.kafka.ConsumerMessage.CommittableOffsetBatch
import akka.kafka.scaladsl.{Consumer, Producer}
import akka.kafka.{ConsumerSettings, ProducerMessage, ProducerSettings, Subscriptions}
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.Sink
import org.apache.kafka.clients.consumer.ConsumerConfig
import org.apache.kafka.clients.producer.ProducerRecord
import org.apache.kafka.common.serialization.{ByteArrayDeserializer, ByteArraySerializer, StringDeserializer, StringSerializer}


object ConsumerToCommitableSinkProducerMain extends App {

  implicit val system = ActorSystem("Consumer2ProducerMain")
  implicit val materializer = ActorMaterializer()

  //TODO: move to configuration application.conf
  val consumerSettings =
    ConsumerSettings(system, new ByteArrayDeserializer, new StringDeserializer)
      .withBootstrapServers("localhost:9092")
      .withGroupId("Consumer2Producer")
      .withProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest")

  //TODO: move to configuration application.conf
  val producerSettings =
    ProducerSettings(system, new ByteArraySerializer, new StringSerializer)
      .withBootstrapServers("localhost:9092")

  Consumer.committableSource(consumerSettings, Subscriptions.topics("topic1"))
    .map { msg =>
      println(s"topic1 -> topic2: $msg")
      ProducerMessage.Message(new ProducerRecord[Array[Byte], String](
        "topic2",
        msg.record.value
      ), msg.committableOffset)
    }
    .runWith(Producer.commitableSink(producerSettings))

} 
Author: makersu | Project: reactive-kafka-scala-example | Lines: 42 | Source: ConsumerToCommitableSinkProducerMain.scala


Example 8: Config

// Package declaration and imported dependencies
package com.kissthinker.kafka

import java.util.Properties
import org.apache.kafka.common.serialization.{ByteArrayDeserializer, ByteArraySerializer}

abstract class Config(kafkaAddress: String) extends Properties {
  put("bootstrap.servers", kafkaAddress)
}

class PublisherConfig(kafkaAddress: String) extends Config(kafkaAddress) {
  put("metadata.broker.list", kafkaAddress)
  //put("client.id", "kafka-publisher")
  put("key.serializer", classOf[ByteArraySerializer].getName)
  put("value.serializer", classOf[ByteArraySerializer].getName)
  put("producer.type", "async")
}

class SubscriberConfig(zookeeperAddress: String, kafkaAddress: String) extends Config(kafkaAddress) {
  //put("zookeeper.connect", "127.0.0.1:2181")
  put("zookeeper.connect", zookeeperAddress)
  //put("group.id", "1")
  put("group.id", "2")
  put("auto.offset.reset", "largest")
  put("zookeeper.session.timeout.ms", "400")
  put("zookeeper.sync.time.ms", "200")
  put("auto.commit.interval.ms", "1000")
  put("key.deserializer", classOf[ByteArrayDeserializer].getName)
  put("value.deserializer", classOf[ByteArrayDeserializer].getName)
} 
Author: davidainslie | Project: kafka-kissthinker | Lines: 30 | Source: Config.scala
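Because PublisherConfig extends java.util.Properties, it can be handed straight to a plain KafkaProducer. A minimal usage sketch (broker address and topic name are placeholders):

import org.apache.kafka.clients.producer.{KafkaProducer, ProducerRecord}

val producer = new KafkaProducer[Array[Byte], Array[Byte]](new PublisherConfig("localhost:9092"))
producer.send(new ProducerRecord[Array[Byte], Array[Byte]]("some-topic", "payload".getBytes("UTF-8")))
producer.close()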


Example 9: Main

// Package declaration and imported dependencies
package connector

import akka.actor.ActorSystem
import akka.kafka.{ConsumerSettings, Subscriptions}
import akka.kafka.scaladsl.Consumer
import akka.stream.ActorMaterializer
import org.apache.kafka.clients.consumer.{ConsumerConfig, ConsumerRecord}
import org.apache.kafka.common.serialization.{ByteArrayDeserializer, StringDeserializer}

import scala.concurrent.duration._

object Main {
  def main(args: Array[String]): Unit = {
    implicit val system = ActorSystem("QuickStart")
    implicit val materializer = ActorMaterializer()

    val kafkaConsumerSettings = ConsumerSettings(system, new ByteArrayDeserializer, new StringDeserializer)
      .withBootstrapServers("localhost:9092")
      .withGroupId("group1")
      .withProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest")
      .withProperty(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "true")

    val kafkaSubscription = Subscriptions.topics("input")
    val maxItemsInBatch = 10

    Consumer.plainSource(kafkaConsumerSettings, kafkaSubscription)
      .groupedWithin(maxItemsInBatch, 10000.milliseconds)
      .runForeach(batch => persist(batch))
  }

//  TODO: send the batch to Elasticsearch
  def persist(batch: Seq[ConsumerRecord[Array[Byte], String]]): Unit = {
    batch foreach println
  }
} 
Author: jozi-k | Project: kafka-to-es-akka | Lines: 36 | Source: Main.scala


Example 10: throttolableConsumerFlow

// Package declaration and imported dependencies
package com.github.everpeace

import akka.Done
import akka.actor.ActorSystem
import akka.kafka.ConsumerMessage.CommittableOffsetBatch
import akka.kafka.scaladsl.Consumer
import akka.kafka.{ConsumerSettings, Subscriptions}
import akka.stream.scaladsl.Source
import akka.stream.{ActorMaterializer, ThrottleMode}
import org.apache.kafka.clients.consumer.ConsumerConfig
import org.apache.kafka.common.serialization.{ByteArrayDeserializer, StringDeserializer}

import scala.concurrent.duration.Duration

package object reactive_kafka {

  def throttolableConsumerFlow(implicit system: ActorSystem, mat: ActorMaterializer): Source[Done, Consumer.Control] = {
    val c = system.settings.config.getConfig("throttolable-consumer")
    implicit val ec = system.dispatcher

    val bootstrapServers = c getString "bootstrap-servers"
    val topic = c getString "topic"
    val autoResetConfig = c getString "auto-offset-reset"
    val groupId = c getString "group-id"
    val throttle = c getInt "throttle"
    val throttlePer = Duration.fromNanos((c getDuration "throttle-per").toNanos)
    val throttleBurst = c getInt "throttle-burst"
    val logPer = c getInt "log-per"
    val offsetCommitBatchSize = c getInt "offset-commit-batch-size"
    val offsetCommitParallelism = c getInt "offset-commit-parallelism"

    val consumerSettings =
      ConsumerSettings(system, new ByteArrayDeserializer, new StringDeserializer)
        .withBootstrapServers(bootstrapServers)
        .withGroupId(groupId)
        .withProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, autoResetConfig)

    val source = Consumer.committableSource(consumerSettings, Subscriptions.topics(Set(topic)))

    val throttled = if (throttle > 0)
      source.throttle(throttle, throttlePer, throttleBurst, ThrottleMode.shaping)
    else source

    throttled.statefulMapConcat(() => {
      var counter = 0
      msg => {
        if (counter % logPer == 0) {
          system.log.info(s"FakeConsumer consume: $msg")
          counter = 0
        }
        counter += 1
        msg :: Nil
      }
    }).batch(max = offsetCommitBatchSize, m => CommittableOffsetBatch.empty.updated(m.committableOffset))((batch, m) => batch.updated(m.committableOffset))
      .mapAsync(offsetCommitParallelism) { batch =>
      batch.commitScaladsl()
    }
  }
} 
Author: everpeace | Project: throttolable-perf-consumer | Lines: 60 | Source: package.scala
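Everything above is driven by a throttolable-consumer block in the actor system's configuration. An application.conf sketch matching the keys the code reads (all values are illustrative assumptions):

throttolable-consumer {
  bootstrap-servers = "localhost:9092"
  topic = "some-topic"
  auto-offset-reset = "earliest"
  group-id = "throttolable-group"
  throttle = 100            # messages per window; <= 0 disables throttling
  throttle-per = 1s
  throttle-burst = 10
  log-per = 1000
  offset-commit-batch-size = 20
  offset-commit-parallelism = 3
}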


Example 11: FutureToTry

// Package declaration and imported dependencies
package com.github.dnvriend

import akka.NotUsed
import akka.actor._
import akka.event.{Logging, LoggingAdapter}
import akka.kafka.{ConsumerSettings, ProducerSettings}
import org.apache.kafka.common.serialization.StringDeserializer
import org.apache.kafka.common.serialization.ByteArrayDeserializer
import org.apache.kafka.clients.consumer.ConsumerConfig
import akka.stream.scaladsl.Source
import akka.stream.{ActorMaterializer, Materializer}
import org.apache.kafka.common.serialization.{ByteArraySerializer, StringSerializer}
import org.scalatest.concurrent.ScalaFutures
import org.scalatest.{BeforeAndAfterAll, FlatSpec, Matchers}

import scala.concurrent.duration._
import scala.concurrent.{ExecutionContext, Future}
import scala.util.Try

trait TestSpec extends FlatSpec with Matchers with ScalaFutures with BeforeAndAfterAll {
  implicit val system: ActorSystem = ActorSystem()
  implicit val ec: ExecutionContext = system.dispatcher
  implicit val mat: Materializer = ActorMaterializer()
  implicit val log: LoggingAdapter = Logging(system, this.getClass)
  implicit val pc: PatienceConfig = PatienceConfig(timeout = 50.seconds)

  val producerSettings = ProducerSettings(system, new ByteArraySerializer, new StringSerializer)
    .withBootstrapServers("boot2docker:9092")

  val consumerSettings = ConsumerSettings(system, new ByteArrayDeserializer, new StringDeserializer,
    Set("topic1"))
    .withBootstrapServers("boot2docker:9092")
    .withGroupId("group1")
    .withProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest")

  implicit class FutureToTry[T](f: Future[T]) {
    def toTry: Try[T] = Try(f.futureValue)
  }

  
  def withIterator[T](start: Int = 0)(f: Source[Int, NotUsed] => T): T =
    f(Source.fromIterator(() => Iterator from start))

  override protected def afterAll(): Unit = {
    system.terminate()
    system.whenTerminated.toTry should be a 'success
  }
} 
Author: dnvriend | Project: reactive-kafka-test | Lines: 49 | Source: TestSpec.scala


Example 12: AnalyzerRunner

// Package declaration and imported dependencies
package io.scalac.newspaper.analyzer

import akka.actor.ActorSystem
import akka.kafka.ProducerMessage
import akka.kafka.scaladsl.{Consumer, Producer}
import akka.kafka.{Subscriptions, ConsumerSettings, ProducerSettings}
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{Source, Sink}
import org.apache.kafka.clients.consumer.ConsumerConfig
import org.apache.kafka.clients.producer.{ProducerRecord}
import org.apache.kafka.common.serialization.{ByteArrayDeserializer, ByteArraySerializer}

import io.scalac.newspaper.events._

object AnalyzerRunner extends App {
  implicit val system = ActorSystem("Newspaper-Analyzer-System")
  implicit val materializer = ActorMaterializer()

  val consumerSettings = ConsumerSettings(system, new ByteArrayDeserializer, new ContentFetchedDeserializer)
    .withGroupId("Newspaper-Analyzer")
    .withProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest")

  val producerSettings = ProducerSettings(system, new ByteArraySerializer, new ChangeDetectedSerializer)

  val subscription = Subscriptions.topics("newspaper-content")
  Consumer.committableSource(consumerSettings, subscription)
    .map { msg =>
      // Do something with msg.record.value
      println(s"[ANALYZING] ${msg.record.value}")
      val input = msg.record.value
      val output = ChangeDetected(input.pageUrl, input.pageContent)
      val record = new ProducerRecord[Array[Byte], ChangeDetected]("newspaper", output)
      ProducerMessage.Message(record, msg.committableOffset)
    }
    .via(Producer.flow(producerSettings))
    .map(_.message.passThrough)
    .mapAsync(1)(_.commitScaladsl())
    .runWith(Sink.ignore)

} 
Author: ScalaConsultants | Project: newspaper | Lines: 41 | Source: AnalyzerRunner.scala


Example 13: KafkaPublisherConfig

// Package declaration and imported dependencies
package com.pragmasoft.eventaggregator.streams

import akka.kafka.scaladsl.Consumer
import akka.kafka.scaladsl.Consumer.Control
import akka.kafka.{ConsumerSettings, Subscriptions}
import akka.stream.scaladsl.Source
import com.pragmasoft.eventaggregator.ActorSystemProvider
import com.typesafe.scalalogging.LazyLogging
import org.apache.kafka.clients.consumer.{ConsumerConfig, ConsumerRecord}
import org.apache.kafka.common.serialization.ByteArrayDeserializer

case class KafkaPublisherConfig(reactiveKafkaDispatcher: String, bootstrapBrokers: String, topicRegex: String, groupId: String, readFromBeginning: Boolean)

trait SourceProvider[T, Mat] {
  def source: Source[T, Mat]
}

trait KafkaSourceProvider extends SourceProvider[ConsumerRecord[Array[Byte], Array[Byte]], Control] with LazyLogging {
  self: ActorSystemProvider =>

  def kafkaConfig: KafkaPublisherConfig

  lazy val consumerProperties = {
    ConsumerSettings(actorSystem, new ByteArrayDeserializer, new ByteArrayDeserializer)
      .withBootstrapServers(kafkaConfig.bootstrapBrokers)
      .withGroupId(kafkaConfig.groupId)
      .withProperty(
        ConsumerConfig.AUTO_OFFSET_RESET_CONFIG,
        if (kafkaConfig.readFromBeginning)
          "earliest"
        else
          "latest"
      )
      .withProperty(
        ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG,
        "true"
      )
      .withDispatcher(kafkaConfig.reactiveKafkaDispatcher)
  }

  override lazy val source: Source[ConsumerRecord[Array[Byte], Array[Byte]], Control] =
    Consumer.atMostOnceSource(consumerProperties, Subscriptions.topicPattern(kafkaConfig.topicRegex))
} 
Author: galarragas | Project: event-aggregator | Lines: 44 | Source: KafkaSourceProvider.scala


Example 14: KafkaEventBus

// Package declaration and imported dependencies
package io.corbel.event.kafka

import akka.NotUsed
import akka.actor.ActorSystem
import akka.kafka.scaladsl.{Consumer, Producer}
import akka.kafka.{ConsumerSettings, ProducerMessage, ProducerSettings, Subscriptions}
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.{Flow, Sink, Source}
import grizzled.slf4j.Logging
import io.corbel.event.{Event, EventBus, SubscriptionProperties}
import org.apache.kafka.clients.consumer.ConsumerConfig
import org.apache.kafka.clients.producer.ProducerRecord
import org.apache.kafka.common.serialization.{ByteArrayDeserializer, ByteArraySerializer, StringDeserializer, StringSerializer}
import org.json4s.DefaultFormats
import org.json4s.JsonAST.JObject
import org.json4s.native.Serialization._


class KafkaEventBus(kafkaServers: String)(implicit system: ActorSystem) extends EventBus with Logging {

  implicit val materializer = ActorMaterializer()
  implicit val format = DefaultFormats

  val producerSettings = ProducerSettings(system, new ByteArraySerializer, new StringSerializer)
    .withBootstrapServers(kafkaServers)

  val consumerSettings = ConsumerSettings(system, new ByteArrayDeserializer, new StringDeserializer)
    .withBootstrapServers(kafkaServers)
    .withProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest")

  override def dispatcher(event: Event): Sink[JObject, NotUsed] = {
      Flow[JObject]
        .map(e => ProducerMessage.Message(new ProducerRecord[Array[Byte], String](event.id, write(e)), e))
        .via(Producer.flow(producerSettings))
        .map(result => {
          val record = result.message.record
          println(s"${record.topic}/${record.partition} ${result.offset}: ${record.value} (${result.message.passThrough}")
          result
        })
        .recover({
          case e: Throwable => e.printStackTrace()
        })
        .to(Sink.last)
    }

  override def subscribe(event: Event, props: SubscriptionProperties): Source[(Event, JObject), _] = {
    val settings = props.toConsumerSettings(consumerSettings)
    val subscription = Subscriptions.topics(event.id)
    Consumer.atMostOnceSource(settings, subscription).map(message => (event, deserialize(message.value)))
  }

  def deserialize(data:String): JObject = {
    read[JObject](data)
  }
} 
Author: alexdeleon | Project: corbel-2.0 | Lines: 56 | Source: KafkaEventBus.scala


Example 15: SparkStreamMain

// Package declaration and imported dependencies
package io.bigfast.tracking.grpc

import io.bigfast.tracking.Event
import org.apache.kafka.common.serialization.ByteArrayDeserializer
import org.apache.spark.SparkConf
import org.apache.spark.streaming.kafka010.ConsumerStrategies.Subscribe
import org.apache.spark.streaming.kafka010.LocationStrategies.PreferConsistent
import org.apache.spark.streaming.kafka010._
import org.apache.spark.streaming.{Seconds, StreamingContext}


object SparkStreamMain {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setMaster("spark://spark-master:7077").setAppName("SparkStreamMain")
    val ssc = new StreamingContext(conf, Seconds(1))

    val kafkaParams = Map[String, Object](
      "bootstrap.servers" -> "kafka:9092",
      "key.deserializer" -> classOf[ByteArrayDeserializer],
      "value.deserializer" -> classOf[ByteArrayDeserializer],
      "group.id" -> "my.spark.stream",
      "auto.offset.reset" -> "earliest",
      "enable.auto.commit" -> (false: java.lang.Boolean)
    )

    val topics = Array("event")
    val stream = KafkaUtils.createDirectStream[Array[Byte], Array[Byte]](
      ssc,
      PreferConsistent,
      Subscribe[Array[Byte], Array[Byte]](topics, kafkaParams)
    )

    stream.map(record => {
      val id = if (record != null && record.key != null) new String(record.key) else "empty"
      val event = Event.parseFrom(record.value)
      println(s"id: ${id} event: ${event.toString}")
      (id, event)
    }).print()

    ssc.start()
    ssc.awaitTermination()
  }
} 
Author: kykl | Project: pba | Lines: 44 | Source: SparkStreamMain.scala
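This example relies on Spark's kafka-0-10 streaming integration. A build.sbt sketch (the artifact names are real; the version numbers are assumptions and should match the cluster's Spark version):

libraryDependencies ++= Seq(
  "org.apache.spark" %% "spark-streaming"            % "2.2.0" % "provided",
  "org.apache.spark" %% "spark-streaming-kafka-0-10" % "2.2.0"
)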


Example 16: KafkaUtil

// Package declaration and imported dependencies
package io.plasmap.geo.util

import akka.NotUsed
import akka.actor.ActorSystem
import akka.kafka.scaladsl.Consumer.Control
import akka.kafka.{ConsumerSettings, ProducerSettings}
import akka.kafka.scaladsl.{Consumer, Producer}
import akka.stream.scaladsl.{Sink, Source}
import org.apache.kafka.clients.consumer.{ConsumerConfig, ConsumerRecord}
import org.apache.kafka.clients.producer.ProducerRecord
import org.apache.kafka.common.serialization.{ByteArrayDeserializer, ByteArraySerializer}

object KafkaUtil {

  

  val bytesToProducerRecord: (String) => (Array[Byte]) => ProducerRecord[Array[Byte], Array[Byte]] =
    topic => elem => new ProducerRecord[Array[Byte], Array[Byte]](topic, elem)

  val consumerRecordToBytes: (ConsumerRecord[Array[Byte],Array[Byte]]) => Array[Byte] =
    _.value()

  def kafkaSink(host: String)(implicit system: ActorSystem): Sink[ProducerRecord[Array[Byte], Array[Byte]], NotUsed] = {
    val producerSettings =
      ProducerSettings(system, new ByteArraySerializer, new ByteArraySerializer)
        .withBootstrapServers(host)
        .withParallelism(4)

    Producer.plainSink(producerSettings)
  }

  def kafkaSource(host:String)(topic:String,group:String)(implicit system:ActorSystem): Source[ConsumerRecord[Array[Byte], Array[Byte]], Control] = {

    val consumerSettings = ConsumerSettings(system, new ByteArrayDeserializer, new ByteArrayDeserializer,
      Set(topic))
      .withBootstrapServers(host)
      .withGroupId(group)
      .withProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest")

    Consumer.plainSource(consumerSettings)
  }
} 
Author: plasmap | Project: plasmap | Lines: 43 | Source: KafkaUtil.scala


Example 17: KafkaConsumerBuilder

// Package declaration and imported dependencies
package com.landoop.kafka.ws.core

import java.util.Properties

import com.landoop.kafka.ws.KafkaWSContext
import org.apache.kafka.clients.consumer.{ConsumerConfig, KafkaConsumer}
import org.apache.kafka.common.serialization.ByteArrayDeserializer


object KafkaConsumerBuilder {
  def apply(group: String)(implicit context: KafkaWSContext): KafkaConsumer[Array[Byte], Array[Byte]] = {
    val properties = new Properties()
    properties.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, context.config.brokers)
    //properties.put(ConsumerConfig.GROUP_ID_CONFIG, group)
    properties.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, "false")
    properties.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, "30000")
    properties.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, context.config.maxPollRecords.toString)
    properties.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest")
    properties.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, classOf[ByteArrayDeserializer].getName)
    properties.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, classOf[ByteArrayDeserializer].getName)
    context.config.consumerProperties.foreach { case (k, v) =>
      properties.put(k, v)
    }
    new KafkaConsumer[Array[Byte], Array[Byte]](properties)
  }
} 
Author: Landoop | Project: kafka-ws | Lines: 27 | Source: KafkaConsumerBuilder.scala
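KafkaConsumerBuilder returns a plain KafkaConsumer with auto-commit disabled, so the caller drives polling and committing. A hedged usage sketch — note that GROUP_ID_CONFIG is commented out in the builder, so this assumes that line is restored; the implicit KafkaWSContext (a project-specific type) is assumed in scope, and topic/group names are placeholders:

import java.util.Collections
import scala.collection.JavaConverters._

val consumer = KafkaConsumerBuilder("some-group")
consumer.subscribe(Collections.singletonList("some-topic"))
val records = consumer.poll(1000L).asScala // poll(long), as in pre-2.0 kafka-clients
records.foreach(r => println(s"offset=${r.offset}, ${r.value.length} bytes"))
consumer.commitSync() // explicit commit, since enable.auto.commit = false
consumer.close()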


Example 18: CommittableSourceConsumerMain

// Package declaration and imported dependencies
package com.example.consumer

import akka.actor.ActorSystem
import akka.kafka.scaladsl.Consumer
import akka.kafka.{ConsumerSettings, Subscriptions}
import akka.stream.ActorMaterializer
import akka.stream.scaladsl.Sink
import org.apache.kafka.clients.consumer.ConsumerConfig
import org.apache.kafka.common.serialization.{ByteArrayDeserializer, StringDeserializer}


object CommittableSourceConsumerMain extends App {

  implicit val system = ActorSystem("CommittableSourceConsumerMain")
  implicit val materializer = ActorMaterializer()

  //TODO: move to configuration application.conf
  val consumerSettings =
    ConsumerSettings(system, new ByteArrayDeserializer, new StringDeserializer)
      .withBootstrapServers("localhost:9092")
      .withGroupId("CommittableSourceConsumer")
      .withProperty(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest")

  val done =
    Consumer.committableSource(consumerSettings, Subscriptions.topics("topic1"))
      .mapAsync(1) { msg =>
        println(s"CommittableSourceConsumer consume: $msg")
        msg.committableOffset.commitScaladsl()
      }
      .runWith(Sink.ignore)

} 
Author: makersu | Project: reactive-kafka-scala-example | Lines: 33 | Source: CommittableSourceConsumerMain.scala



Note: the org.apache.kafka.common.serialization.ByteArrayDeserializer examples in this article were collected from open-source projects hosted on GitHub, MSDocs, and similar source-code platforms. Copyright of each snippet remains with its original author; consult the corresponding project's license before using or redistributing the code, and do not republish without permission.

