Produce Kafka records at once
oguzhanunlu committed May 6, 2024
1 parent 2bea1a4 commit 54e1a97
Showing 1 changed file with 5 additions and 10 deletions.
@@ -12,16 +12,13 @@
 package com.snowplowanalytics.snowplow.enrich.kafka
 
 import java.util.UUID
-
 import cats.Parallel
 import cats.implicits._
-
 import cats.effect.kernel.{Async, Resource, Sync}
-
 import fs2.kafka._
-
 import com.snowplowanalytics.snowplow.enrich.common.fs2.{AttributedByteSink, AttributedData, ByteSink}
 import com.snowplowanalytics.snowplow.enrich.common.fs2.config.io.Output
+import fs2.Chunk
 
 object Sink {
 
@@ -40,12 +37,10 @@ object Sink {
     output match {
       case k: Output.Kafka =>
         mkProducer(k, authCallbackClass).map { producer => records =>
-          records.parTraverse_ { record =>
-            producer
-              .produceOne_(toProducerRecord(k.topicName, record))
-              .flatten
-              .void
-          }
+          producer
+            .produce(Chunk.from(records.map(toProducerRecord(k.topicName, _))))
+            .flatten
+            .void
         }
       case o => Resource.eval(Sync[F].raiseError(new IllegalArgumentException(s"Output $o is not Kafka")))
     }

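For context, below is a minimal, self-contained sketch of the batched produce pattern the commit switches to. It assumes fs2-kafka 3.x, where ProducerRecords[K, V] is an alias for Chunk[ProducerRecord[K, V]], so the producer accepts the whole chunk in one produce call; the outer effect completes once the batch is handed to the Kafka client, and flattening waits for broker acknowledgement. The broker address, topic name, record values, and object name are hypothetical; only the produce-then-flatten-then-void pattern mirrors the diff.

// Minimal sketch (not part of this commit) of producing a batch of records
// in a single call, assuming fs2-kafka 3.x. Broker address, topic name and
// values are made up for illustration.
import cats.effect.{IO, IOApp}
import cats.syntax.all._
import fs2.Chunk
import fs2.kafka.{KafkaProducer, ProducerRecord, ProducerSettings}

object BatchedProduceSketch extends IOApp.Simple {

  val settings: ProducerSettings[IO, String, String] =
    ProducerSettings[IO, String, String]
      .withBootstrapServers("localhost:9092")

  def run: IO[Unit] =
    KafkaProducer.resource(settings).use { producer =>
      val records = List("a", "b", "c").map(v => ProducerRecord("example-topic", v, v))
      producer
        .produce(Chunk.from(records)) // enqueue the whole batch on the producer
        .flatten                      // wait for broker acknowledgement of every record
        .void
    }
}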