|
| 1 | +package integration |
| 2 | + |
| 3 | +import base.KafkaSpecBase |
| 4 | +import cats.data.NonEmptyList |
| 5 | +import cats.effect.{Async, IO, Ref} |
| 6 | +import cats.syntax.all.* |
| 7 | +import fs2.kafka.* |
| 8 | +import io.github.embeddedkafka.EmbeddedKafkaConfig |
| 9 | +import load.LoadExample |
| 10 | +import org.scalatest.Assertion |
| 11 | +import org.typelevel.log4cats.LoggerFactory |
| 12 | +import org.typelevel.log4cats.slf4j.Slf4jFactory |
| 13 | +import utils.RandomPort |
| 14 | + |
| 15 | +import scala.concurrent.duration.* |
| 16 | + |
/** Integration tests for [[load.LoadExample]] against an embedded Kafka broker.
  *
  * Each test runs inside [[withKafkaContext]], which stands up a fresh broker on random
  * ports and hands the test a [[TestContext]] exposing `runApp` / `runAppAndDiscard`
  * plus the messaging helpers inherited from `KafkaSpecBase`.
  */
class LoadExampleIntSpec extends KafkaSpecBase[IO] {
  val inputTopic  = "test-topic-1"
  val outputTopic = "output-topic-1"

  // How long each app run is allowed to stream before being interrupted.
  private val timeout = 10.seconds

  "LoadExample" should {
    "load previously seen messages into the store" in withKafkaContext { env =>
      import env.*

      // Publish one message, run the app once, publish a second message, run again:
      // the second run must reload the first message from Kafka as well as the new one.
      publishStringMessage(inputTopic, "key1", "value1") >>
        runAppAndDiscard >>
        publishStringMessage(inputTopic, "key2", "value2") >>
        runApp.map(_ should contain theSameElementsInOrderAs List("value1", "value2"))
    }

    "not publish previously committed messages" in withKafkaContext { env =>
      import env.*

      // First cycle: publish, run, and consume (committing the offset). After a second
      // publish-and-run, only the uncommitted "value2" should be waiting on the output topic.
      val primeAndRerun =
        publishStringMessage(inputTopic, "key1", "value1") >>
          runAppAndDiscard >>
          consumeStringMessage(outputTopic, autoCommit = true) >>
          publishStringMessage(inputTopic, "key2", "value2") >>
          runAppAndDiscard

      primeAndRerun >>
        consumeStringMessage(outputTopic, autoCommit = true).map(_ shouldBe "value2")
    }
  }

  /** Per-test fixture: embedded-broker config, consumer/producer settings, and the
    * wiring needed to run the example app against that broker.
    */
  private abstract class TestContext[F[_] : Async] {
    // Effect that allocates a NEW empty store on each evaluation — every `runApp`
    // deliberately starts from a blank store so it must reload state from Kafka.
    private val freshStore: F[Ref[F, List[String]]] = Ref[F].of(List.empty)

    private implicit val loggerFactory: LoggerFactory[F] = Slf4jFactory.create[F]

    // Random ports keep parallel test runs from colliding; the tiny log.roll.ms
    // forces frequent segment rolls so log-retention behavior is exercised.
    implicit val kafkaConfig: EmbeddedKafkaConfig =
      EmbeddedKafkaConfig(
        kafkaPort = RandomPort(),
        zooKeeperPort = RandomPort(),
        customBrokerProperties = Map("log.roll.ms" -> "10")
      )

    private val consumerSettings: ConsumerSettings[F, String, String] =
      ConsumerSettings[F, String, String]
        .withBootstrapServers(s"localhost:${kafkaConfig.kafkaPort}")
        .withAutoOffsetReset(AutoOffsetReset.Earliest)
        .withGroupId("load-example-consumer-group")

    private val producerSettings: ProducerSettings[F, String, String] =
      ProducerSettings[F, String, String]
        .withBootstrapServers(s"localhost:${kafkaConfig.kafkaPort}")

    /** Runs the example app for [[timeout]], then returns the store's final contents. */
    val runApp: F[List[String]] =
      freshStore.flatMap { ref =>
        val example =
          LoadExample.kafka[F](
            topics = NonEmptyList.one(inputTopic),
            outputTopic = outputTopic,
            consumerSettings = consumerSettings,
            producerSettings = producerSettings,
            store = ref
          )
        example.stream.interruptAfter(timeout).compile.drain *> ref.get
      }

    /** As [[runApp]], discarding the resulting store contents. */
    val runAppAndDiscard: F[Unit] = runApp.void
  }

  /** Allocates a fixture, boots the embedded broker (picking up the fixture's implicit
    * `kafkaConfig`), and runs the test while the broker resource is held open.
    */
  private def withKafkaContext(test: TestContext[IO] => IO[Assertion]): IO[Assertion] = {
    object fixture extends TestContext[IO]
    import fixture.*
    embeddedKafka.use(_ => test(fixture))
  }
}
0 commit comments