From b2a220ec7804fa9485015b4c5e57f4a04f3740c0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lo=C3=AFc=20GREFFIER?= Date: Sun, 4 Aug 2024 13:25:31 +0200 Subject: [PATCH] Add interactive queries for Key-Value stores (#204) * Refactor * Continue refacto * First interactive queries commits * update doc * handle some http errors * Handle key and value * improvements * Improve tests * update * update tests * Add IQ avro test * Fix unit tests * Add IQ doc * Update sonar * Update tests * Update tests * Fix interrupted exceptions * Add integration test on timestamped key value store * Improve unit tests coverage * Improve coverage * sonar * Start updating core module with IQ * sonar * Add hosts info in core * Handle get by key on core http server * Clean core impl of IQ * Updates * Handle timestamped key value stores * Refactor for Key-Value * Refactor testcontainers * Wrong store stype request * Simplify dtos * Fix tests * Update README * Fix javadoc * Improve tests * checkstyle * sonar * Updates * Last fixes * Update readme * Fix tests * Sonar --- CONTRIBUTING.md | 2 +- README.md | 364 ++++++------- .../kstreamplify/KafkaStreamsStarterTest.java | 6 +- .../TopologyErrorHandlerTest.java | 16 +- .../src/main/avro/kafka-error.avsc | 1 + .../constants/HttpServerConstants.java | 40 -- .../constants/InitializerConstants.java | 30 - .../constants/PropertyConstants.java | 45 -- .../context/KafkaStreamsExecutionContext.java | 6 +- .../converter/AvroToJsonConverter.java | 73 ++- .../converter/JsonToAvroConverter.java | 28 +- .../deduplication/DedupKeyProcessor.java | 15 +- .../deduplication/DedupKeyValueProcessor.java | 4 +- .../DedupWithPredicateProcessor.java | 14 +- .../deduplication/DeduplicationUtils.java | 35 +- .../DlqDeserializationExceptionHandler.java | 3 +- .../error/DlqProductionExceptionHandler.java | 3 +- .../error/GenericErrorProcessor.java | 22 +- .../kstreamplify/error/ProcessingResult.java | 14 +- .../error/TopologyErrorHandler.java | 15 +- .../exception/HttpServerException.java | 17 + .../OtherInstanceResponseException.java | 17 + .../exception/PropertiesFileException.java | 17 + .../exception/UnknownKeyException.java | 17 + .../initializer/KafkaStreamsInitializer.java | 69 ++- .../initializer/KafkaStreamsStarter.java | 2 +- .../michelin/kstreamplify/model/DlqTopic.java | 16 - .../model/RestServiceResponse.java | 27 - .../PropertiesUtils.java | 50 +- .../rest/DefaultProbeController.java | 118 ---- .../kstreamplify/serde/SerdesUtils.java | 45 ++ .../kstreamplify/serde/TopicWithSerde.java | 153 ++++++ .../server/KafkaStreamsHttpServer.java | 192 +++++++ .../service/InteractiveQueriesService.java | 314 +++++++++++ .../service/KubernetesService.java | 85 +++ .../kstreamplify/service/TopologyService.java | 36 ++ .../kstreamplify/services/ProbeService.java | 96 ---- .../{properties => store}/RocksDbConfig.java | 2 +- .../kstreamplify/store/StateStoreRecord.java | 44 ++ .../kstreamplify/store/StreamsMetadata.java | 43 ++ .../store/WindowStateStoreUtils.java | 47 ++ .../kstreamplify/topic/TopicUtils.java | 66 +++ .../kstreamplify/utils/SerdesUtils.java | 5 +- .../kstreamplify/utils/TopicUtils.java | 11 +- .../kstreamplify/utils/TopicWithSerde.java | 10 +- .../utils/WindowStateStoreUtils.java | 5 +- .../src/test/avro/kafka-person.avsc | 37 ++ .../{kafka-test.avsc => kafka-record.avsc} | 6 +- .../converter/AvroToJsonConverterTest.java | 114 ++-- .../converter/JsonToAvroConverterTest.java | 18 +- .../deduplication/DedupKeyProcessorTest.java | 17 +- .../DedupKeyValueProcessorTest.java | 23 +- 
.../DedupWithPredicateProcessorTest.java | 23 +- ...lqDeserializationExceptionHandlerTest.java | 4 +- .../DlqProductionExceptionHandlerTest.java | 6 +- .../error/ProcessingResultTest.java | 3 - .../KafkaStreamsInitializerTest.java | 12 +- .../initializer/KafkaStreamsStarterTest.java | 63 +-- .../InteractiveQueriesIntegrationTest.java | 512 ++++++++++++++++++ .../integration/KafkaIntegrationTest.java | 137 +++++ ...afkaStreamsInitializerIntegrationTest.java | 89 +-- .../PropertiesUtilsTest.java | 2 +- .../RocksDbConfigTest.java | 32 +- .../rest/DefaultProbeControllerTest.java | 17 - .../TopicWithSerdeTest.java} | 6 +- .../server/KafkaStreamsHttpServerTest.java | 18 + .../InteractiveQueriesServiceTest.java | 451 +++++++++++++++ .../service/KubernetesServiceTest.java | 157 ++++++ .../service/TopologyServiceTest.java | 57 ++ .../services/ProbeServiceTest.java | 134 ----- .../WindowStateStoreUtilsTest.java | 13 +- .../kstreamplify/topic/TopicUtilsTest.java | 35 ++ .../src/test/resources/application.yml | 4 +- kstreamplify-spring-boot/pom.xml | 9 + .../kstreamplify/config/BeanConfig.java | 50 ++ .../ControllerExceptionHandler.java | 74 +++ .../InteractiveQueriesController.java | 109 ++++ .../controller/KubernetesController.java | 48 ++ .../controller/TopologyController.java | 36 ++ ...=> SpringBootKafkaStreamsInitializer.java} | 6 +- .../KafkaProperties.java | 2 +- .../rest/SpringProbeController.java | 65 --- ...ot.autoconfigure.AutoConfiguration.imports | 12 +- .../src/test/avro/kafka-person.avsc | 37 ++ .../ControllerExceptionHandlerTest.java | 32 ++ .../InteractiveQueriesControllerTest.java | 103 ++++ .../controller/KubernetesControllerTest.java | 42 ++ .../controller/TopologyControllerTest.java | 33 ++ ...pringBootKafkaStreamsInitializerTest.java} | 8 +- .../InteractiveQueriesIntegrationTest.java | 454 ++++++++++++++++ .../integration/KafkaIntegrationTest.java | 99 ++++ ...fkaStreamsInitializerIntegrationTest.java} | 102 ++-- .../KafkaPropertiesTest.java | 3 +- .../rest/SpringProbeControllerTest.java | 55 -- .../application-interactive-queries.yml | 9 + .../src/test/resources/application.yml | 6 +- pom.xml | 3 +- 97 files changed, 4322 insertions(+), 1305 deletions(-) delete mode 100644 kstreamplify-core/src/main/java/com/michelin/kstreamplify/constants/HttpServerConstants.java delete mode 100644 kstreamplify-core/src/main/java/com/michelin/kstreamplify/constants/InitializerConstants.java delete mode 100644 kstreamplify-core/src/main/java/com/michelin/kstreamplify/constants/PropertyConstants.java create mode 100644 kstreamplify-core/src/main/java/com/michelin/kstreamplify/exception/HttpServerException.java create mode 100644 kstreamplify-core/src/main/java/com/michelin/kstreamplify/exception/OtherInstanceResponseException.java create mode 100644 kstreamplify-core/src/main/java/com/michelin/kstreamplify/exception/PropertiesFileException.java create mode 100644 kstreamplify-core/src/main/java/com/michelin/kstreamplify/exception/UnknownKeyException.java delete mode 100644 kstreamplify-core/src/main/java/com/michelin/kstreamplify/model/DlqTopic.java delete mode 100644 kstreamplify-core/src/main/java/com/michelin/kstreamplify/model/RestServiceResponse.java rename kstreamplify-core/src/main/java/com/michelin/kstreamplify/{properties => property}/PropertiesUtils.java (66%) delete mode 100644 kstreamplify-core/src/main/java/com/michelin/kstreamplify/rest/DefaultProbeController.java create mode 100644 kstreamplify-core/src/main/java/com/michelin/kstreamplify/serde/SerdesUtils.java create mode 100644 
kstreamplify-core/src/main/java/com/michelin/kstreamplify/serde/TopicWithSerde.java create mode 100644 kstreamplify-core/src/main/java/com/michelin/kstreamplify/server/KafkaStreamsHttpServer.java create mode 100644 kstreamplify-core/src/main/java/com/michelin/kstreamplify/service/InteractiveQueriesService.java create mode 100644 kstreamplify-core/src/main/java/com/michelin/kstreamplify/service/KubernetesService.java create mode 100644 kstreamplify-core/src/main/java/com/michelin/kstreamplify/service/TopologyService.java delete mode 100644 kstreamplify-core/src/main/java/com/michelin/kstreamplify/services/ProbeService.java rename kstreamplify-core/src/main/java/com/michelin/kstreamplify/{properties => store}/RocksDbConfig.java (99%) create mode 100644 kstreamplify-core/src/main/java/com/michelin/kstreamplify/store/StateStoreRecord.java create mode 100644 kstreamplify-core/src/main/java/com/michelin/kstreamplify/store/StreamsMetadata.java create mode 100644 kstreamplify-core/src/main/java/com/michelin/kstreamplify/store/WindowStateStoreUtils.java create mode 100644 kstreamplify-core/src/main/java/com/michelin/kstreamplify/topic/TopicUtils.java create mode 100644 kstreamplify-core/src/test/avro/kafka-person.avsc rename kstreamplify-core/src/test/avro/{kafka-test.avsc => kafka-record.avsc} (97%) create mode 100644 kstreamplify-core/src/test/java/com/michelin/kstreamplify/integration/InteractiveQueriesIntegrationTest.java create mode 100644 kstreamplify-core/src/test/java/com/michelin/kstreamplify/integration/KafkaIntegrationTest.java rename kstreamplify-core/src/test/java/com/michelin/kstreamplify/{integrations => integration}/KafkaStreamsInitializerIntegrationTest.java (59%) rename kstreamplify-core/src/test/java/com/michelin/kstreamplify/{properties => property}/PropertiesUtilsTest.java (94%) rename kstreamplify-core/src/test/java/com/michelin/kstreamplify/{properties => property}/RocksDbConfigTest.java (69%) delete mode 100644 kstreamplify-core/src/test/java/com/michelin/kstreamplify/rest/DefaultProbeControllerTest.java rename kstreamplify-core/src/test/java/com/michelin/kstreamplify/{utils/TopicWithSerdesTest.java => serde/TopicWithSerdeTest.java} (95%) create mode 100644 kstreamplify-core/src/test/java/com/michelin/kstreamplify/server/KafkaStreamsHttpServerTest.java create mode 100644 kstreamplify-core/src/test/java/com/michelin/kstreamplify/service/InteractiveQueriesServiceTest.java create mode 100644 kstreamplify-core/src/test/java/com/michelin/kstreamplify/service/KubernetesServiceTest.java create mode 100644 kstreamplify-core/src/test/java/com/michelin/kstreamplify/service/TopologyServiceTest.java delete mode 100644 kstreamplify-core/src/test/java/com/michelin/kstreamplify/services/ProbeServiceTest.java rename kstreamplify-core/src/test/java/com/michelin/kstreamplify/{utils => store}/WindowStateStoreUtilsTest.java (85%) create mode 100644 kstreamplify-core/src/test/java/com/michelin/kstreamplify/topic/TopicUtilsTest.java create mode 100644 kstreamplify-spring-boot/src/main/java/com/michelin/kstreamplify/config/BeanConfig.java create mode 100644 kstreamplify-spring-boot/src/main/java/com/michelin/kstreamplify/controller/ControllerExceptionHandler.java create mode 100644 kstreamplify-spring-boot/src/main/java/com/michelin/kstreamplify/controller/InteractiveQueriesController.java create mode 100644 kstreamplify-spring-boot/src/main/java/com/michelin/kstreamplify/controller/KubernetesController.java create mode 100644 
kstreamplify-spring-boot/src/main/java/com/michelin/kstreamplify/controller/TopologyController.java rename kstreamplify-spring-boot/src/main/java/com/michelin/kstreamplify/initializer/{SpringKafkaStreamsInitializer.java => SpringBootKafkaStreamsInitializer.java} (94%) rename kstreamplify-spring-boot/src/main/java/com/michelin/kstreamplify/{properties => property}/KafkaProperties.java (94%) delete mode 100644 kstreamplify-spring-boot/src/main/java/com/michelin/kstreamplify/rest/SpringProbeController.java create mode 100644 kstreamplify-spring-boot/src/test/avro/kafka-person.avsc create mode 100644 kstreamplify-spring-boot/src/test/java/com/michelin/kstreamplify/controller/ControllerExceptionHandlerTest.java create mode 100644 kstreamplify-spring-boot/src/test/java/com/michelin/kstreamplify/controller/InteractiveQueriesControllerTest.java create mode 100644 kstreamplify-spring-boot/src/test/java/com/michelin/kstreamplify/controller/KubernetesControllerTest.java create mode 100644 kstreamplify-spring-boot/src/test/java/com/michelin/kstreamplify/controller/TopologyControllerTest.java rename kstreamplify-spring-boot/src/test/java/com/michelin/kstreamplify/initializer/{SpringKafkaStreamsInitializerTest.java => SpringBootKafkaStreamsInitializerTest.java} (92%) create mode 100644 kstreamplify-spring-boot/src/test/java/com/michelin/kstreamplify/integration/InteractiveQueriesIntegrationTest.java create mode 100644 kstreamplify-spring-boot/src/test/java/com/michelin/kstreamplify/integration/KafkaIntegrationTest.java rename kstreamplify-spring-boot/src/test/java/com/michelin/kstreamplify/{integrations/SpringKafkaStreamsInitializerIntegrationTest.java => integration/SpringBootKafkaStreamsInitializerIntegrationTest.java} (61%) rename kstreamplify-spring-boot/src/test/java/com/michelin/kstreamplify/{properties => property}/KafkaPropertiesTest.java (94%) delete mode 100644 kstreamplify-spring-boot/src/test/java/com/michelin/kstreamplify/rest/SpringProbeControllerTest.java create mode 100644 kstreamplify-spring-boot/src/test/resources/application-interactive-queries.yml diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index b4cc1c36..559612fe 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -39,7 +39,7 @@ an Issue to discuss your proposal first. 
This is not required but can save time
 
 In general, we follow the ["fork-and-pull" Git workflow](https://github.com/susam/gitpr)
 
-- Fork the repository to your own Github account
+- Fork the repository to your own GitHub account
 - Clone the project to your machine
 - Create a branch locally from master with a succinct but descriptive name
 - Commit changes to the branch
diff --git a/README.md b/README.md
index d34bdbb5..fb3ca27e 100644
--- a/README.md
+++ b/README.md
@@ -23,45 +23,56 @@ need to do:
 ## Table of Contents
 
-* [Features](#features)
+* [Overview](#overview)
 * [Dependencies](#dependencies)
   * [Java](#java)
   * [Spring Boot](#spring-boot)
   * [Unit Test](#unit-test)
-* [Getting Started](#getting-started)
+* [Features](#features)
+  * [Bootstrapping](#bootstrapping)
+    * [Create your first Kstreamplify application](#create-your-first-kstreamplify-application)
   * [Properties Injection](#properties-injection)
-  * [Avro Serializer and Deserializer](#avro-serializer-and-deserializer)
-  * [Error Handling](#error-handling)
-  * [Topology](#topology)
-  * [Production and Deserialization](#production-and-deserialization)
-  * [Avro Schema](#avro-schema)
-  * [Uncaught Exception Handler](#uncaught-exception-handler)
-  * [REST Endpoints](#rest-endpoints)
-  * [Hooks](#hooks)
-  * [On Start](#on-start)
-  * [Deduplication](#deduplication)
-  * [By Key](#by-key)
-  * [By Key and Value](#by-key-and-value)
-  * [By Predicate](#by-predicate)
-  * [Interactive Queries](#interactive-queries)
-  * [Open Telemetry](#open-telemetry)
-  * [Testing](#testing)
+  * [Avro Serializer and Deserializer](#avro-serializer-and-deserializer)
+  * [Error Handling](#error-handling)
+    * [Topology](#topology)
+    * [Production and Deserialization](#production-and-deserialization)
+    * [Avro Schema](#avro-schema)
+    * [Uncaught Exception Handler](#uncaught-exception-handler)
+  * [Kubernetes](#kubernetes)
+  * [Hooks](#hooks)
+    * [On Start](#on-start)
+  * [Interactive Queries](#interactive-queries)
+    * [Application Server Configuration](#application-server-configuration)
+    * [Web Services](#web-services)
+      * [Service](#service)
+      * [Topology](#topology-2)
+  * [Deduplication](#deduplication)
+    * [By Key](#by-key)
+    * [By Key and Value](#by-key-and-value)
+    * [By Predicate](#by-predicate)
+  * [Open Telemetry](#open-telemetry)
+  * [Testing](#testing)
 * [Motivation](#motivation)
 * [Contribution](#contribution)
 
-## Features
+## Overview
+
+Wondering what makes Kstreamplify stand out? Here are some of the key features that make it a must-have for Kafka Streams:
+
+- **🚀 Bootstrapping**: Automatic startup, configuration, and initialization of Kafka Streams is handled for you. Focus on
+business implementation rather than the setup.
+
+- **📝 Avro Serializer and Deserializer**: Common serializers and deserializers for Avro.
 
-- **Easy bootstrapping**: Kafka Streams application bootstrapping is handled for you, allowing you to focus on topology
-  implementation.
+- **⛑️ Error Handling**: Catch and route errors to a dead-letter queue (DLQ) topic.
 
-- **Avro Schema Serializer and Deserializer**: Common serializers and deserializers for all your Avro specific records.
+- **☸️ Kubernetes**: Accurate readiness and liveness probes for Kubernetes deployment.
 
-- **Error Handling**: A strong error handling mechanism is provided for topology, production, and deserialization
-  errors, and it also allows routing them into a dead letter queue (DLQ) topic.
+- **🤿 Interactive Queries**: Dive into Kafka Streams state stores.
-- **REST endpoints**: Some useful REST endpoints, including Kubernetes liveness and readiness probes.
+- **🫧 Deduplication**: Remove duplicate events from a stream.
 
-- **Testing**: The library eases the use of Topology Test Driver, making it easier to write your tests.
+- **🧪 Testing**: Automatic Topology Test Driver setup. Start writing your tests with minimal effort.
 
 ## Dependencies
 
@@ -85,7 +96,7 @@ To include the core Kstreamplify library in your project, add the following depe
 
 [![javadoc](https://javadoc.io/badge2/com.michelin/kstreamplify-spring-boot/javadoc.svg?style=for-the-badge&)](https://javadoc.io/doc/com.michelin/kstreamplify-spring-boot)
 
-If you're using Spring Boot, you can integrate Kstreamplify with your Spring Boot application by adding the following
+If you are using Spring Boot, you can integrate Kstreamplify with your Spring Boot application by adding the following
 dependency:
 
 ```xml
@@ -113,12 +124,19 @@ For both Java and Spring Boot dependencies, a testing dependency is available to
 ```
 
-## Getting Started
+## Features
+
+Kstreamplify offers a wide range of features to simplify the development of Kafka Streams applications.
+
+### Bootstrapping
 
-To begin using Kstreamplify, you simply need to set up a `KafkaStreamsStarter` bean within you Spring Boot context,
-overriding the `topology` method.
+Kstreamplify simplifies the bootstrapping of Kafka Streams applications by handling the startup, configuration, and
+initialization of Kafka Streams for you.
 
-For instance, you can start by creating a class annotated with `@Component`:
+#### Create your first Kstreamplify application
+
+To create a Kstreamplify application, define a `KafkaStreamsStarter` bean within your Spring Boot context and
+override the `KafkaStreamsStarter#topology()` method:
 
 ```java
 @Component
@@ -130,31 +148,12 @@ public class MyKafkaStreams extends KafkaStreamsStarter {
 
     @Override
     public String dlqTopic() {
-        return "dlqTopic";
+        return "DLQ_TOPIC";
     }
 }
 ```
 
-Alternatively, you can annotate a method that returns a `KafkaStreamsStarter` with `@Bean`:
-
-```java
-@Bean
-public KafkaStreamsStarter kafkaStreamsStarter() {
-    return new KafkaStreamsStarter() {
-        @Override
-        public void topology(StreamsBuilder streamsBuilder) {
-            // Your topology
-        }
-
-        @Override
-        public String dlqTopic() {
-            return "dlqTopic";
-        }
-    };
-}
-```
-
-### Properties Injection
+#### Properties Injection
 
 You can define all your Kafka Streams properties directly from the `application.yml` file as follows:
 
@@ -171,20 +170,20 @@ kafka:
     avro.remove.java.properties: true
 ```
 
-Note that all the properties have been moved under `kafka.properties`.
+Note that all the Kafka Streams properties have been moved under `kafka.properties`.
### Avro Serializer and Deserializer
 
-Whenever you need to serialize or deserialize records with Avro schemas, you can use the `SerdesUtils` class as follows:
+Whenever you need to serialize or deserialize records with Avro schemas, you can use the `SerdesUtils` class as follows:
 
 ```java
-SerdesUtils.getSerdesForValue()
+SerdesUtils.getValueSerdes()
 ```
 
 or
 
 ```java
-SerdesUtils.getSerdesForKey()
+SerdesUtils.getKeySerdes()
 ```
 
 Here is an example of using these methods in your topology:
 
@@ -195,42 +194,40 @@ public class MyKafkaStreams extends KafkaStreamsStarter {
     @Override
     public void topology(StreamsBuilder streamsBuilder) {
         streamsBuilder
-            .stream("inputTopic", Consumed.with(Serdes.String(), SerdesUtils.getSerdesForValue()))
-            .to("outputTopic", Produced.with(Serdes.String(), SerdesUtils.getSerdesForValue()));
+            .stream("INPUT_TOPIC", Consumed.with(Serdes.String(), SerdesUtils.getValueSerdes()))
+            .to("OUTPUT_TOPIC", Produced.with(Serdes.String(), SerdesUtils.getValueSerdes()));
     }
 }
 ```
 
 ### Error Handling
 
-The library provides the ability to handle errors that may occur in your topology as well as during the production or
+Kstreamplify provides the ability to handle errors that may occur in your topology as well as during the production or
 deserialization of records and route them to a dead-letter queue (DLQ) topic.
 
-To do this, the first step is to override the `dlqTopic` method and return the name of your DLQ topic:
+To do so, start by overriding the `dlqTopic` method to return the name of your DLQ topic:
 
 ```java
 @Component
 public class MyKafkaStreams extends KafkaStreamsStarter {
     @Override
-    public void topology(StreamsBuilder streamsBuilder) { }
+    public void topology(StreamsBuilder streamsBuilder) {
+    }
 
     @Override
     public String dlqTopic() {
-        return "dlqTopic";
+        return "DLQ_TOPIC";
     }
 }
 ```
 
 #### Topology
 
-Kstreamplify provides utilities to handle all the unexpected errors that can occur in your topologies and route them to
-a dead-letter queue (DLQ) topic automatically.
+Kstreamplify provides utilities to handle errors that occur in your topology and route them to a DLQ topic
+automatically.
 
-The principle is simple: whenever you perform transformations on stream values, you can encapsulate the result as either
-success or failure. Failed records will be routed to your DLQ topic, while successful records will still be up for
-further processing.
-
-Here is a complete example of how to do this:
+The processing result is encapsulated and marked as either success or failure.
+Failed records will be routed to the DLQ topic, while successful records will still be up for further processing.
```java
 @Component
 public class MyKafkaStreams extends KafkaStreamsStarter {
     @Override
     public void topology(StreamsBuilder streamsBuilder) {
         KStream<String, KafkaPerson> stream = streamsBuilder
-            .stream("inputTopic", Consumed.with(Serdes.String(), SerdesUtils.getSerdesForValue()));
+            .stream("INPUT_TOPIC", Consumed.with(Serdes.String(), SerdesUtils.getValueSerdes()));
 
         TopologyErrorHandler
-            .catchErrors(stream.mapValues(MyKafkaStreams::toUpperCase))
-            .to("outputTopic", Produced.with(Serdes.String(), SerdesUtils.getSerdesForValue()));
+            .catchErrors(stream.mapValues(MyKafkaStreams::toUpperCase))
+            .to("OUTPUT_TOPIC", Produced.with(Serdes.String(), SerdesUtils.getValueSerdes()));
     }
 
     @Override
     public String dlqTopic() {
-        return "dlqTopic";
+        return "DLQ_TOPIC";
     }
 
     private static ProcessingResult<KafkaPerson, KafkaPerson> toUpperCase(KafkaPerson value) {
         try {
             value.setLastName(value.getLastName().toUpperCase());
             return ProcessingResult.success(value);
         } catch (Exception e) {
             return ProcessingResult.fail(e, value, "Something bad happened...");
         }
     }
 }
 ```
 
-The first step is during the map values processing. The operation should return a new value of
-type `ProcessingResult<V, V2>`.
-- The first templatized parameter is the type of the new value after a successful transformation.
-- The second templatized parameter is the type of the current value for which the transformation failed.
+The map values processing returns a `ProcessingResult<V, V2>`, where:
+
+- The first parameter is the type of the new value after a successful transformation.
+- The second parameter is the type of the current value for which the transformation failed.
 
 You can use the following to mark the result as successful:
 
 ```java
-return ProcessingResult.success(value);
+ProcessingResult.success(value);
 ```
 
 Or the following in a catch clause to mark the result as failed:
 
 ```java
-return ProcessingResult.fail(e, value, "Something bad happened...");
+ProcessingResult.fail(e, value, "Something bad happened...");
 ```
 
-The `ProcessingResult.fail()` method takes the exception, the record that failed and a custom error message.
-
-The second step is sending the new stream of `ProcessingResult<V, V2>` to the `TopologyErrorHandler.catchErrors()`
-method, which will split the
-stream into two branches:
-
-- The first branch will contain the `ProcessingError` and will be routed to the DLQ topic as a `KafkaError` Avro objects
-  that contains
-  multiple useful information such as the topic, the partition, the offsets, the exception, and the custom error message
-  of the failed record.
-- The second branch will only contain the successful records and will be returned to continue the processing.
+The stream of `ProcessingResult<V, V2>` then needs to be stripped of its failed records by sending them to the DLQ topic.
+This is done by invoking the `TopologyErrorHandler#catchErrors()` method.
+A healthy stream is then returned and can be further processed.
 
 #### Production and Deserialization
 
-The library provides handlers for production and deserialization errors, which can be used to route these errors to the
-configured DLQ topic.
-
-Here's how to use them:
+Kstreamplify provides production and deserialization handlers that send errors to the DLQ topic.
 
 ```yml
 kafka:
   properties:
-    ...
     default.production.exception.handler: com.michelin.kstreamplify.error.DlqProductionExceptionHandler
     default.deserialization.exception.handler: com.michelin.kstreamplify.error.DlqDeserializationExceptionHandler
-    ...
```
 
 #### Avro Schema
 
@@ -313,40 +297,43 @@ available [here](https://github.com/michelin/kstreamplify/blob/main/kstreamplify
 
 #### Uncaught Exception Handler
 
-You may bring your own uncaught exception handler if you choose to do so. This provides an ability to
-override the default behavior - for instance, there might be a special requirement to treat and handle certain
-exception types differently.
+Kstreamplify defines a default uncaught exception handler that catches all uncaught exceptions and shuts down the client.
 
-To do this, simply override the `uncaughtExceptionHandler` method and return your own custom uncaught
-exception handler that implements the standard `StreamsUncaughtExceptionHandler` interface.
+If you want to override this behavior, you can override the `KafkaStreamsStarter#uncaughtExceptionHandler()` method and return your own
+uncaught exception handler.
 
 ```java
 @Override
 public StreamsUncaughtExceptionHandler uncaughtExceptionHandler() {
     return throwable -> {
-        // Do something when an uncaught exception occurs
-        return StreamsUncaughtExceptionHandler.StreamThreadExceptionResponse.SHUTDOWN_CLIENT;
+        return StreamsUncaughtExceptionHandler.StreamThreadExceptionResponse.SHUTDOWN_APPLICATION;
     };
 }
 ```
 
-### REST endpoints
+### Kubernetes
 
-The Kstreamplify library provides several REST endpoints, which are listed below:
+Kstreamplify provides readiness and liveness probes for Kubernetes deployment based on the Kafka Streams state.
 
-- `GET /ready`: This endpoint is used as a readiness probe for Kubernetes deployment.
-- `GET /liveness`: This endpoint is used as a liveness probe for Kubernetes deployment.
-- `GET /topology`: This endpoint returns the Kafka Streams topology as JSON.
+By default, the endpoints are available at `/ready` and `/liveness`.
+
+The path can be customized by setting the following properties:
+
+```yml
+kubernetes:
+  readiness:
+    path: custom-readiness
+  liveness:
+    path: custom-liveness
+```
 
 ### Hooks
 
-Kstreamplify offers the flexibility to execute custom code through hooks. These hooks can be defined by overriding
-specific methods.
+Kstreamplify offers the flexibility to execute custom code through hooks.
 
 #### On Start
 
-The `On Start` hook allows you to execute code right after the Kafka Streams instantiation. It provides the Kafka
-Streams instance as a parameter.
+The `On Start` hook allows you to execute code before starting the Kafka Streams instance.
 
 ```java
 @Component
@@ -358,35 +345,83 @@ public class MyKafkaStreams extends KafkaStreamsStarter {
     }
 }
 ```
 
-You can use this hook to perform any custom initialization or setup tasks for your Kafka Streams application.
+### Interactive Queries
+
+Kstreamplify eases the use of [interactive queries](https://docs.confluent.io/platform/current/streams/developer-guide/interactive-queries.html) in Kafka Streams applications.
 
-### Deduplication
+#### Application Server Configuration
+
+The "[application.server](https://docs.confluent.io/platform/current/streams/developer-guide/config-streams.html#application-server)" property value is determined from different sources, in the following order of priority:
+
+1. The value of an environment variable whose name is defined by the `application.server.var.name` property.
+
+```yml
+kafka:
+  properties:
+    application.server.var.name: MY_APPLICATION_SERVER
+```
+
+2. The value of a default environment variable named `APPLICATION_SERVER`.
+3. `localhost`.
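+
+For example, on Kubernetes, the default `APPLICATION_SERVER` environment variable can be populated from the pod IP,
+mirroring the former `MY_POD_IP` setup (a sketch; adapt the source of the host:port value to your deployment):
+
+```yml
+containers:
+  env:
+    - name: APPLICATION_SERVER
+      valueFrom:
+        fieldRef:
+          fieldPath: status.podIP
+```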
-Kstreamplify facilitates deduplication of a stream based on various criteria using window stores within a specified time
-frame.
+#### Web Services
 
-The `DeduplicationUtils` class provides three deduplication implementations. Each deduplication method takes a duration
-parameter that specifies how long a record will be kept in the window store for deduplication.
+Kstreamplify provides web services to query the state stores of your Kafka Streams application.
+It handles state stores being on different Kafka Streams instances by providing an [RPC layer](https://docs.confluent.io/platform/current/streams/developer-guide/interactive-queries.html#adding-an-rpc-layer-to-your-application).
 
-All deduplication methods return a `KStream<String, ProcessingResult<V, V2>>`. You may want to direct the result to
-the `TopologyErrorHandler.catchErrors()` method.
+Here is the list of supported state store types:
+- Key-Value
+
+Only state stores with String keys are supported.
+
+#### Service
+
+You can leverage the interactive queries service used by the web services layer to serve your own needs.
+
+```java
+@Component
+public class MyService {
+    @Autowired
+    InteractiveQueriesService interactiveQueriesService;
+}
+```
+
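+As a minimal sketch of such a use, assuming a key-value store named `STORE_NAME` (the method name and return type
+below are assumptions for illustration, not a documented API):
+
+```java
+@Component
+public class MyService {
+    @Autowired
+    InteractiveQueriesService interactiveQueriesService;
+
+    public List<StateStoreRecord> allRecords() {
+        // Hypothetical call: fetch every record of the key-value store across all instances
+        return interactiveQueriesService.getAll("STORE_NAME");
+    }
+}
+```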

+#### Topology
+
+Kstreamplify provides a web service to retrieve the Kafka Streams topology as JSON.
+
+By default, the endpoint is available at `/topology`.
+
+The path can be customized by setting the following properties:
+
+```yml
+topology:
+  path: custom-topology
+```
+
+### Deduplication
+
+Kstreamplify facilitates deduplication of a stream through the `DeduplicationUtils` class, based on various criteria
+and within a specified time frame.
+
+All deduplication methods return a `KStream<String, ProcessingResult<V, V2>>` so you can redirect the result to the
+`TopologyErrorHandler#catchErrors()`.
 
 **Note**: Only streams with String keys and Avro values are supported.
 
 #### By Key
 
 ```java
-
 @Component
 public class MyKafkaStreams extends KafkaStreamsStarter {
     @Override
     public void topology(StreamsBuilder streamsBuilder) {
         KStream<String, KafkaPerson> myStream = streamsBuilder
-            .stream("inputTopic");
+            .stream("INPUT_TOPIC");
 
         DeduplicationUtils
             .deduplicateKeys(streamsBuilder, myStream, Duration.ofDays(60))
-            .to("outputTopicDeduplicated");
+            .to("OUTPUT_TOPIC");
     }
 }
 ```
 
@@ -394,17 +429,16 @@ public class MyKafkaStreams extends KafkaStreamsStarter {
 #### By Key and Value
 
 ```java
-
 @Component
 public class MyKafkaStreams extends KafkaStreamsStarter {
     @Override
     public void topology(StreamsBuilder streamsBuilder) {
         KStream<String, KafkaPerson> myStream = streamsBuilder
-            .stream("inputTopic");
+            .stream("INPUT_TOPIC");
 
         DeduplicationUtils
             .deduplicateKeyValues(streamsBuilder, myStream, Duration.ofDays(60))
-            .to("outputTopicDeduplicated");
+            .to("OUTPUT_TOPIC");
     }
 }
 ```
 
@@ -412,95 +446,57 @@ public class MyKafkaStreams extends KafkaStreamsStarter {
 #### By Predicate
 
 ```java
-
 @Component
 public class MyKafkaStreams extends KafkaStreamsStarter {
     @Override
     public void topology(StreamsBuilder streamsBuilder) {
         KStream<String, KafkaPerson> myStream = streamsBuilder
-            .stream("inputTopic");
+            .stream("INPUT_TOPIC");
 
         DeduplicationUtils
             .deduplicateWithPredicate(streamsBuilder, myStream, Duration.ofDays(60),
                 value -> value.getFirstName() + "#" + value.getLastName())
-            .to("outputTopicDeduplicated");
+            .to("OUTPUT_TOPIC");
     }
 }
 ```
 
 The given predicate will be used as a key in the window store. The stream will be deduplicated based on the predicate.
-
 ### Open Telemetry
 
-The Kstreamplify Spring Boot module simplifies the integration of [Open Telemetry](https://opentelemetry.io/) into your Kafka Streams application
-by binding all the metrics of the Kafka Streams instance to the Spring Boot registry which is used by the Open Telemetry Java agent.
+The Kstreamplify Spring Boot module simplifies the integration of [Open Telemetry](https://opentelemetry.io/)
+and its Java agent in Kafka Streams applications by binding all Kafka Streams metrics to the Spring Boot registry.
 
 You can run your application with the Open Telemetry Java agent by including the following JVM options:
 
-```shell
+```console
 -javaagent:/opentelemetry-javaagent.jar -Dotel.traces.exporter=otlp -Dotel.logs.exporter=otlp -Dotel.metrics.exporter=otlp
 ```
 
-It also facilitates the addition of custom tags to the metrics, allowing you to use them to organize your metrics in your Grafana dashboard.
+It also facilitates the addition of custom tags to the metrics, allowing you to use them to organize your metrics in
+your Grafana dashboard.
 
-```shell
--Dotel.resource.attributes=environment=production,service.name=myNamespace,service.name=myKafkaStreams,category=orders
+```console
+-Dotel.resource.attributes=environment=production,service.namespace=myNamespace,service.name=myKafkaStreams,category=orders
 ```
 
-All the tags specified in the `otel.resource.attributes` property will be included in the metrics and can be observed in the logs
-during the application startup.
+All the tags specified in the `otel.resource.attributes` property will be included in the metrics and can be observed in
+the logs during the application startup.
 
 ### Testing
 
-For testing, you can create a test class that extends `KafkaStreamsStarterTest` and override the `getKafkaStreamsStarter` method to return your `KafkaStreamsStarter` class.
+Kstreamplify eases the use of the Topology Test Driver for testing Kafka Streams applications.
 
-Here is an example:
+You can create a test class that extends `KafkaStreamsStarterTest`, override
+the `KafkaStreamsStarterTest#getKafkaStreamsStarter()` method to provide your `KafkaStreamsStarter` implementation,
+and start writing your tests.
```java
 public class MyKafkaStreamsTest extends KafkaStreamsStarterTest {
     private TestInputTopic<String, KafkaPerson> inputTopic;
     private TestOutputTopic<String, KafkaPerson> outputTopic;
-    
+
     @Override
     protected KafkaStreamsStarter getKafkaStreamsStarter() {
         return new MyKafkaStreams();
     }
 
     @BeforeEach
     void setUp() {
-        inputTopic = testDriver.createInputTopic("inputTopic", new StringSerializer(),
-            SerdesUtils.getSerdesForValue().serializer());
+        inputTopic = testDriver.createInputTopic("INPUT_TOPIC", new StringSerializer(),
+            SerdesUtils.getValueSerdes().serializer());
 
-        outputTopic = testDriver.createOutputTopic("outputTopic", new StringDeserializer(),
-            SerdesUtils.getSerdesForValue().deserializer());
+        outputTopic = testDriver.createOutputTopic("OUTPUT_TOPIC", new StringDeserializer(),
+            SerdesUtils.getValueSerdes().deserializer());
     }
 
     @Test
diff --git a/kstreamplify-core-test/src/main/java/com/michelin/kstreamplify/KafkaStreamsStarterTest.java b/kstreamplify-core-test/src/main/java/com/michelin/kstreamplify/KafkaStreamsStarterTest.java
index 51ab78d2..c2d21736 100644
--- a/kstreamplify-core-test/src/main/java/com/michelin/kstreamplify/KafkaStreamsStarterTest.java
+++ b/kstreamplify-core-test/src/main/java/com/michelin/kstreamplify/KafkaStreamsStarterTest.java
@@ -3,8 +3,8 @@
 import com.michelin.kstreamplify.avro.KafkaError;
 import com.michelin.kstreamplify.context.KafkaStreamsExecutionContext;
 import com.michelin.kstreamplify.initializer.KafkaStreamsStarter;
-import com.michelin.kstreamplify.utils.SerdesUtils;
-import com.michelin.kstreamplify.utils.TopicWithSerde;
+import com.michelin.kstreamplify.serde.SerdesUtils;
+import com.michelin.kstreamplify.serde.TopicWithSerde;
 import io.confluent.kafka.schemaregistry.testutil.MockSchemaRegistry;
 import io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig;
 import java.io.IOException;
@@ -65,7 +65,7 @@ void generalSetUp() {
             new TopologyTestDriver(streamsBuilder.build(), properties, getInitialWallClockTime());
 
         dlqTopic = testDriver.createOutputTopic(KafkaStreamsExecutionContext.getDlqTopicName(),
-            new StringDeserializer(), SerdesUtils.getSerdesForValue().deserializer());
+            new StringDeserializer(), SerdesUtils.getValueSerdes().deserializer());
     }
 
     /**
diff --git a/kstreamplify-core-test/src/test/java/com/michelin/kstreamplify/TopologyErrorHandlerTest.java b/kstreamplify-core-test/src/test/java/com/michelin/kstreamplify/TopologyErrorHandlerTest.java
index 3af13ac7..d7ada103 100644
--- a/kstreamplify-core-test/src/test/java/com/michelin/kstreamplify/TopologyErrorHandlerTest.java
+++ b/kstreamplify-core-test/src/test/java/com/michelin/kstreamplify/TopologyErrorHandlerTest.java
@@ -6,8 +6,8 @@
 import com.michelin.kstreamplify.error.ProcessingResult;
 import com.michelin.kstreamplify.error.TopologyErrorHandler;
 import com.michelin.kstreamplify.initializer.KafkaStreamsStarter;
-import com.michelin.kstreamplify.utils.SerdesUtils;
-import com.michelin.kstreamplify.utils.TopicWithSerde;
+import com.michelin.kstreamplify.serde.SerdesUtils;
+import com.michelin.kstreamplify.serde.TopicWithSerde;
 import java.util.List;
 import org.apache.kafka.common.serialization.Serdes;
 import org.apache.kafka.common.serialization.StringDeserializer;
@@ -55,15 +55,13 @@ public void topology(StreamsBuilder streamsBuilder) {
 
                 KStream<String, ProcessingResult<KafkaError, KafkaError>> avroStream = streamsBuilder
-                    .stream(AVRO_TOPIC, Consumed.with(Serdes.String(),
-                        SerdesUtils.getSerdesForValue()))
+                    .stream(AVRO_TOPIC, Consumed.with(Serdes.String(), 
SerdesUtils.getValueSerdes())) .mapValues(value -> value == null ? ProcessingResult.fail(new NullPointerException(), null) : ProcessingResult.success(value)); TopologyErrorHandler.catchErrors(avroStream) - .to(OUTPUT_AVRO_TOPIC, - Produced.with(Serdes.String(), SerdesUtils.getSerdesForValue())); + .to(OUTPUT_AVRO_TOPIC, Produced.with(Serdes.String(), SerdesUtils.getValueSerdes())); } }; } @@ -73,16 +71,16 @@ void setUp() { stringInputTopic = testDriver.createInputTopic(STRING_TOPIC, new StringSerializer(), new StringSerializer()); avroInputTopic = testDriver.createInputTopic(AVRO_TOPIC, new StringSerializer(), - SerdesUtils.getSerdesForValue().serializer()); + SerdesUtils.getValueSerdes().serializer()); stringOutputTopic = testDriver.createOutputTopic(OUTPUT_STRING_TOPIC, new StringDeserializer(), new StringDeserializer()); avroOutputTopic = testDriver.createOutputTopic(OUTPUT_AVRO_TOPIC, new StringDeserializer(), - SerdesUtils.getSerdesForValue().deserializer()); + SerdesUtils.getValueSerdes().deserializer()); dlqTopic = testDriver.createOutputTopic(DLQ_TOPIC, new StringDeserializer(), - SerdesUtils.getSerdesForValue().deserializer()); + SerdesUtils.getValueSerdes().deserializer()); } @Test diff --git a/kstreamplify-core/src/main/avro/kafka-error.avsc b/kstreamplify-core/src/main/avro/kafka-error.avsc index 71b0e13d..81471ce1 100644 --- a/kstreamplify-core/src/main/avro/kafka-error.avsc +++ b/kstreamplify-core/src/main/avro/kafka-error.avsc @@ -2,6 +2,7 @@ "namespace": "com.michelin.kstreamplify.avro", "type": "record", "name": "KafkaError", + "doc": "Kafka error message", "fields": [ { "name": "cause", diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/constants/HttpServerConstants.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/constants/HttpServerConstants.java deleted file mode 100644 index 9ee74b92..00000000 --- a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/constants/HttpServerConstants.java +++ /dev/null @@ -1,40 +0,0 @@ -package com.michelin.kstreamplify.constants; - -import lombok.AccessLevel; -import lombok.NoArgsConstructor; - -/** - * HTTP server constants. - */ -@NoArgsConstructor(access = AccessLevel.PRIVATE) -public final class HttpServerConstants { - /** - * Readiness probe path property name. - */ - public static final String READINESS_PROPERTY = "readiness_path"; - - /** - * Liveness probe path property name. - */ - public static final String LIVENESS_PROPERTY = "liveness_path"; - - /** - * Topology property name. - */ - public static final String TOPOLOGY_PROPERTY = "expose_topology_path"; - - /** - * Readiness default path. - */ - public static final String READINESS_DEFAULT_PATH = "ready"; - - /** - * Liveness default path. - */ - public static final String LIVENESS_DEFAULT_PATH = "liveness"; - - /** - * Topology default path. - */ - public static final String TOPOLOGY_DEFAULT_PATH = "topology"; -} diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/constants/InitializerConstants.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/constants/InitializerConstants.java deleted file mode 100644 index 1d278b7c..00000000 --- a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/constants/InitializerConstants.java +++ /dev/null @@ -1,30 +0,0 @@ -package com.michelin.kstreamplify.constants; - -import lombok.AccessLevel; -import lombok.NoArgsConstructor; - -/** - * Kafka Streams initialization constants. 
- */ -@NoArgsConstructor(access = AccessLevel.PRIVATE) -public final class InitializerConstants { - /** - * Server port property name. - */ - public static final String SERVER_PORT_PROPERTY = "server.port"; - - /** - * Default host. - */ - public static final String LOCALHOST = "localhost"; - - /** - * Name of the property containing of the name of the var env containing the IP. - */ - public static final String IP_SYSTEM_VARIABLE_PROPERTY = "ip.env.var.name"; - - /** - * Default var env name containing the IP. - */ - public static final String IP_SYSTEM_VARIABLE_DEFAULT = "MY_POD_IP"; -} diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/constants/PropertyConstants.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/constants/PropertyConstants.java deleted file mode 100644 index bb462dcd..00000000 --- a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/constants/PropertyConstants.java +++ /dev/null @@ -1,45 +0,0 @@ -package com.michelin.kstreamplify.constants; - -import lombok.AccessLevel; -import lombok.NoArgsConstructor; - -/** - * Property constants. - */ -@NoArgsConstructor(access = AccessLevel.PRIVATE) -public final class PropertyConstants { - /** - * Property separator. - */ - public static final String PROPERTY_SEPARATOR = "."; - - /** - * Kafka properties prefix. - */ - public static final String KAFKA_PROPERTIES_PREFIX = "kafka.properties"; - - /** - * Default property file name. - */ - public static final String DEFAULT_PROPERTY_FILE = "application.yml"; - - /** - * Prefix property name. - */ - public static final String PREFIX_PROPERTY_NAME = "prefix"; - - /** - * Topic property name. - */ - public static final String TOPIC_PROPERTY_NAME = "topic"; - - /** - * Remap property name. - */ - public static final String REMAP_PROPERTY_NAME = "remap"; - - /** - * Default prefix property name. 
- */ - public static final String SELF = "self"; -} diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/context/KafkaStreamsExecutionContext.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/context/KafkaStreamsExecutionContext.java index 6371ea36..f9b5d50d 100644 --- a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/context/KafkaStreamsExecutionContext.java +++ b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/context/KafkaStreamsExecutionContext.java @@ -1,8 +1,8 @@ package com.michelin.kstreamplify.context; -import static com.michelin.kstreamplify.constants.PropertyConstants.PREFIX_PROPERTY_NAME; -import static com.michelin.kstreamplify.constants.PropertyConstants.PROPERTY_SEPARATOR; -import static com.michelin.kstreamplify.constants.PropertyConstants.SELF; +import static com.michelin.kstreamplify.property.PropertiesUtils.PROPERTY_SEPARATOR; +import static com.michelin.kstreamplify.serde.TopicWithSerde.SELF; +import static com.michelin.kstreamplify.topic.TopicUtils.PREFIX_PROPERTY_NAME; import java.util.Map; import java.util.Properties; diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/converter/AvroToJsonConverter.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/converter/AvroToJsonConverter.java index 9ea6d541..34c577b9 100644 --- a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/converter/AvroToJsonConverter.java +++ b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/converter/AvroToJsonConverter.java @@ -26,6 +26,7 @@ import com.google.gson.JsonPrimitive; import com.google.gson.JsonSerializationContext; import com.google.gson.JsonSerializer; +import com.google.gson.ToNumberPolicy; import java.lang.reflect.Type; import java.time.Instant; import java.time.LocalDate; @@ -35,6 +36,8 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import lombok.AccessLevel; +import lombok.NoArgsConstructor; import org.apache.avro.Schema.Field; import org.apache.avro.generic.GenericRecord; import org.apache.avro.util.Utf8; @@ -42,16 +45,46 @@ /** * The class to convert Avro to Json. */ +@NoArgsConstructor(access = AccessLevel.PRIVATE) public class AvroToJsonConverter { - private AvroToJsonConverter() { + private static final Gson gson = new GsonBuilder() + .registerTypeAdapter(LocalDate.class, new LocalDateTypeAdapter()) + .registerTypeAdapter(LocalDateTime.class, new LocalDateTimeTypeAdapter()) + .registerTypeAdapter(LocalTime.class, new LocalTimeTypeAdapter()) + .setObjectToNumberStrategy(ToNumberPolicy.LONG_OR_DOUBLE) + .setPrettyPrinting() + .create(); + + /** + * Convert the value to JSON. + * + * @param value The value + * @return The JSON + */ + public static String convertObject(Object value) { + if (value == null) { + return null; + } + + if (value instanceof GenericRecord genericRecord) { + return convertRecord(genericRecord); + } + + return gson.toJson(value); } - private static final Gson gson = new GsonBuilder() - .registerTypeAdapter(LocalDate.class, new LocalDateTypeAdapter()) - .registerTypeAdapter(LocalDateTime.class, new LocalDateTimeTypeAdapter()) - .registerTypeAdapter(LocalTime.class, new LocalTimeTypeAdapter()) - .setPrettyPrinting() - .create(); + /** + * Convert the values to JSON. + * + * @param values The values + * @return The JSON + */ + public static String convertObject(List values) { + return values.stream() + .map(AvroToJsonConverter::convertObject) + .toList() + .toString(); + } /** * Convert the record from avro format to json format. 
@@ -75,21 +108,21 @@ private static Map recordAsMap(GenericRecord inputRecord) { for (Field field : inputRecord.getSchema().getFields()) { Object recordValue = inputRecord.get(field.name()); - if ((recordValue instanceof Utf8 || recordValue instanceof Instant)) { + if (recordValue instanceof Utf8 || recordValue instanceof Instant) { recordValue = recordValue.toString(); } if (recordValue instanceof List recordValueAsList) { recordValue = recordValueAsList - .stream() - .map(value -> { - if (value instanceof GenericRecord genericRecord) { - return recordAsMap(genericRecord); - } else { - return value.toString(); - } - }) - .toList(); + .stream() + .map(value -> { + if (value instanceof GenericRecord genericRecord) { + return recordAsMap(genericRecord); + } else { + return value.toString(); + } + }) + .toList(); } if (recordValue instanceof Map recordValueAsMap) { @@ -133,12 +166,12 @@ public LocalDate deserialize(JsonElement json, Type typeOfT, } private static class LocalDateTimeTypeAdapter implements JsonSerializer, - JsonDeserializer { + JsonDeserializer { private static final DateTimeFormatter formatter = - DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss.SSSSSS"); + DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss.SSSSSS"); private static final DateTimeFormatter formatterNano = - DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss.SSSSSSSSS"); + DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss.SSSSSSSSS"); @Override public JsonElement serialize(LocalDateTime localDateTime, Type srcType, diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/converter/JsonToAvroConverter.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/converter/JsonToAvroConverter.java index 2ca2c587..6f496a57 100644 --- a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/converter/JsonToAvroConverter.java +++ b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/converter/JsonToAvroConverter.java @@ -1,9 +1,12 @@ package com.michelin.kstreamplify.converter; +import com.google.gson.Gson; +import com.google.gson.GsonBuilder; import com.google.gson.JsonArray; import com.google.gson.JsonElement; import com.google.gson.JsonObject; import com.google.gson.JsonParser; +import com.google.gson.ToNumberPolicy; import java.math.BigDecimal; import java.math.MathContext; import java.math.RoundingMode; @@ -32,6 +35,25 @@ */ @NoArgsConstructor(access = AccessLevel.PRIVATE) public class JsonToAvroConverter { + private static final Gson gson = new GsonBuilder() + .setObjectToNumberStrategy(ToNumberPolicy.LONG_OR_DOUBLE) + .setPrettyPrinting() + .create(); + + /** + * Convert a json string to an object. + * + * @param json the json string + * @return the object + */ + public static Object jsonToObject(String json) { + if (json == null) { + return null; + } + + return gson.fromJson(json, Object.class); + } + /** * Convert a file in json to avro. 
* @@ -52,9 +74,8 @@ public static SpecificRecordBase jsonToAvro(String file, Schema schema) { */ public static SpecificRecordBase jsonToAvro(JsonObject jsonEvent, Schema schema) { try { - SpecificRecordBase message = - baseClass(schema.getNamespace(), schema.getName()).getDeclaredConstructor() - .newInstance(); + SpecificRecordBase message = baseClass(schema.getNamespace(), schema.getName()).getDeclaredConstructor() + .newInstance(); populateGenericRecordFromJson(jsonEvent, message); return message; } catch (Exception e) { @@ -204,6 +225,7 @@ private static Object populateFieldWithCorrespondingType(JsonElement jsonElement * @param fieldName the name to populate * @param result the avro record populated */ + @SuppressWarnings("unchecked") private static void populateFieldInRecordWithCorrespondingType(JsonObject jsonObject, String fieldName, GenericRecord result) { diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/deduplication/DedupKeyProcessor.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/deduplication/DedupKeyProcessor.java index 20c47a4c..e9cb864c 100644 --- a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/deduplication/DedupKeyProcessor.java +++ b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/deduplication/DedupKeyProcessor.java @@ -15,7 +15,7 @@ * @param The type of the value */ public class DedupKeyProcessor - implements Processor> { + implements Processor> { /** * Kstream context for this transformer. @@ -33,14 +33,14 @@ public class DedupKeyProcessor private final String windowStoreName; /** - * Retention window for the statestore. Used for fetching data. + * Retention window for the state store. Used for fetching data. */ private final Duration retentionWindowDuration; /** * Constructor. 
* - * @param windowStoreName The name of the constructor + * @param windowStoreName The name of the constructor * @param retentionWindowDuration The retentionWindow Duration */ public DedupKeyProcessor(String windowStoreName, Duration retentionWindowDuration) { @@ -56,15 +56,14 @@ public void init(ProcessorContext> context) { @Override public void process(Record message) { - try { // Get the record timestamp var currentInstant = Instant.ofEpochMilli(message.timestamp()); // Retrieve all the matching keys in the stateStore and return null if found it (signaling a duplicate) try (var resultIterator = dedupWindowStore.backwardFetch(message.key(), - currentInstant.minus(retentionWindowDuration), - currentInstant.plus(retentionWindowDuration))) { + currentInstant.minus(retentionWindowDuration), + currentInstant.plus(retentionWindowDuration))) { while (resultIterator != null && resultIterator.hasNext()) { var currentKeyValue = resultIterator.next(); if (message.key().equals(currentKeyValue.value)) { @@ -78,8 +77,8 @@ public void process(Record message) { processorContext.forward(ProcessingResult.wrapRecordSuccess(message)); } catch (Exception e) { processorContext.forward(ProcessingResult.wrapRecordFailure(e, message, - "Couldn't figure out what to do with the current payload: " - + "An unlikely error occurred during deduplication transform")); + "Could not figure out what to do with the current payload: " + + "An unlikely error occurred during deduplication transform")); } } } diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/deduplication/DedupKeyValueProcessor.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/deduplication/DedupKeyValueProcessor.java index cc8a48da..f43335d7 100644 --- a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/deduplication/DedupKeyValueProcessor.java +++ b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/deduplication/DedupKeyValueProcessor.java @@ -34,7 +34,7 @@ public class DedupKeyValueProcessor private final String windowStoreName; /** - * Retention window for the statestore. Used for fetching data. + * Retention window for the state store. Used for fetching data. */ private final Duration retentionWindowDuration; @@ -79,7 +79,7 @@ public void process(Record message) { processorContext.forward(ProcessingResult.wrapRecordSuccess(message)); } catch (Exception e) { processorContext.forward(ProcessingResult.wrapRecordFailure(e, message, - "Couldn't figure out what to do with the current payload: " + "Could not figure out what to do with the current payload: " + "An unlikely error occurred during deduplication transform")); } } diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/deduplication/DedupWithPredicateProcessor.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/deduplication/DedupWithPredicateProcessor.java index ce3e1dee..b2ee8793 100644 --- a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/deduplication/DedupWithPredicateProcessor.java +++ b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/deduplication/DedupWithPredicateProcessor.java @@ -19,7 +19,7 @@ * @param The type of the value */ public class DedupWithPredicateProcessor - implements Processor> { + implements Processor> { /** * Kstream context for this transformer. @@ -37,7 +37,7 @@ public class DedupWithPredicateProcessor private final String windowStoreName; /** - * Retention window for the statestore. Used for fetching data. + * Retention window for the state store. Used for fetching data. 
*/ private final Duration retentionWindowDuration; @@ -63,13 +63,11 @@ public DedupWithPredicateProcessor(String windowStoreName, Duration retentionWin @Override public void init(ProcessorContext> context) { this.processorContext = context; - dedupWindowStore = this.processorContext.getStateStore(windowStoreName); } @Override public void process(Record message) { - try { // Get the record timestamp var currentInstant = Instant.ofEpochMilli(message.timestamp()); @@ -77,8 +75,8 @@ public void process(Record message) { // Retrieve all the matching keys in the stateStore and return null if found it (signaling a duplicate) try (var resultIterator = dedupWindowStore.backwardFetch(identifier, - currentInstant.minus(retentionWindowDuration), - currentInstant.plus(retentionWindowDuration))) { + currentInstant.minus(retentionWindowDuration), + currentInstant.plus(retentionWindowDuration))) { while (resultIterator != null && resultIterator.hasNext()) { var currentKeyValue = resultIterator.next(); if (identifier.equals(deduplicationKeyExtractor.apply(currentKeyValue.value))) { @@ -93,8 +91,8 @@ public void process(Record message) { } catch (Exception e) { processorContext.forward(ProcessingResult.wrapRecordFailure(e, message, - "Couldn't figure out what to do with the current payload: " - + "An unlikely error occurred during deduplication transform")); + "Could not figure out what to do with the current payload: " + + "An unlikely error occurred during deduplication transform")); } } } diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/deduplication/DeduplicationUtils.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/deduplication/DeduplicationUtils.java index cb6460a2..2fa456e1 100644 --- a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/deduplication/DeduplicationUtils.java +++ b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/deduplication/DeduplicationUtils.java @@ -1,9 +1,10 @@ package com.michelin.kstreamplify.deduplication; import com.michelin.kstreamplify.error.ProcessingResult; -import com.michelin.kstreamplify.utils.SerdesUtils; +import com.michelin.kstreamplify.serde.SerdesUtils; import java.time.Duration; import java.util.function.Function; +import lombok.NoArgsConstructor; import org.apache.avro.specific.SpecificRecord; import org.apache.kafka.common.serialization.Serdes; import org.apache.kafka.streams.StreamsBuilder; @@ -16,12 +17,8 @@ /** * Deduplication utility class. Only streams with String keys are supported. */ +@NoArgsConstructor(access = lombok.AccessLevel.PRIVATE) public final class DeduplicationUtils { - - private DeduplicationUtils() { - - } - /** * Default values for the topic names. It should be noted that if used multiple times, this dedup will not work */ @@ -55,7 +52,7 @@ public static KStream> * * @param streamsBuilder Stream builder instance for topology editing * @param initialStream Stream containing the events that should be deduplicated - * @param storeName Statestore name + * @param storeName State store name * @param repartitionName Repartition topic name * @param windowDuration Window of time to keep in the window store * @param Generic Type of the Stream value. 
@@ -69,15 +66,15 @@ public static KStream> String repartitionName, Duration windowDuration) { StoreBuilder> dedupWindowStore = Stores.windowStoreBuilder( - Stores.persistentWindowStore(storeName, windowDuration, windowDuration, false), - Serdes.String(), Serdes.String()); + Stores.persistentWindowStore(storeName, windowDuration, windowDuration, false), + Serdes.String(), Serdes.String()); streamsBuilder.addStateStore(dedupWindowStore); var repartitioned = initialStream.repartition( - Repartitioned.with(Serdes.String(), SerdesUtils.getSerdesForValue()) - .withName(repartitionName)); + Repartitioned.with(Serdes.String(), SerdesUtils.getValueSerdes()) + .withName(repartitionName)); return repartitioned.process(() -> new DedupKeyProcessor<>(storeName, windowDuration), - storeName); + storeName); } /** @@ -107,7 +104,7 @@ public static KStream> * * @param streamsBuilder Stream builder instance for topology editing * @param initialStream Stream containing the events that should be deduplicated - * @param storeName Statestore name + * @param storeName State store name * @param repartitionName Repartition topic name * @param windowDuration Window of time to keep in the window store * @param Generic Type of the Stream value. @@ -122,11 +119,11 @@ public static KStream> StoreBuilder> dedupWindowStore = Stores.windowStoreBuilder( Stores.persistentWindowStore(storeName, windowDuration, windowDuration, false), - Serdes.String(), SerdesUtils.getSerdesForValue()); + Serdes.String(), SerdesUtils.getValueSerdes()); streamsBuilder.addStateStore(dedupWindowStore); var repartitioned = initialStream.repartition( - Repartitioned.with(Serdes.String(), SerdesUtils.getSerdesForValue()) + Repartitioned.with(Serdes.String(), SerdesUtils.getValueSerdes()) .withName(repartitionName)); return repartitioned.process(() -> new DedupKeyValueProcessor<>(storeName, windowDuration), storeName); @@ -172,7 +169,7 @@ public static KStream> * * @param streamsBuilder Stream builder instance for topology editing * @param initialStream Stream containing the events that should be deduplicated - * @param storeName Statestore name + * @param storeName State store name * @param repartitionName Repartition topic name * @param windowDuration Window of time to keep in the window store * @param deduplicationKeyExtractor Function that should extract a deduplication key in String format. 
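// Sketch of the predicate-based variant (assumed value type and extractor): records
// are deduplicated on a string extracted from the value instead of the record key.
KStream<String, ProcessingResult<MyAvroValue, MyAvroValue>> deduplicated =
    DeduplicationUtils.deduplicateWithPredicate(streamsBuilder, inputStream,
        "dedup-predicate-store", "dedup-predicate-repartition",
        Duration.ofHours(1), value -> value.getTransactionId());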
@@ -191,12 +188,12 @@ public static KStream> Function deduplicationKeyExtractor) { StoreBuilder> dedupWindowStore = Stores.windowStoreBuilder( - Stores.persistentWindowStore(storeName, windowDuration, windowDuration, false), - Serdes.String(), SerdesUtils.getSerdesForValue()); + Stores.persistentWindowStore(storeName, windowDuration, windowDuration, false), + Serdes.String(), SerdesUtils.getValueSerdes()); streamsBuilder.addStateStore(dedupWindowStore); var repartitioned = initialStream.repartition( - Repartitioned.with(Serdes.String(), SerdesUtils.getSerdesForValue()) + Repartitioned.with(Serdes.String(), SerdesUtils.getValueSerdes()) .withName(repartitionName)); return repartitioned.process( () -> new DedupWithPredicateProcessor<>(storeName, windowDuration, diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/DlqDeserializationExceptionHandler.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/DlqDeserializationExceptionHandler.java index b3fda449..be9a2f67 100644 --- a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/DlqDeserializationExceptionHandler.java +++ b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/DlqDeserializationExceptionHandler.java @@ -18,8 +18,7 @@ */ @Slf4j @NoArgsConstructor -public class DlqDeserializationExceptionHandler extends DlqExceptionHandler - implements DeserializationExceptionHandler { +public class DlqDeserializationExceptionHandler extends DlqExceptionHandler implements DeserializationExceptionHandler { private static final Object GUARD = new Object(); /** diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/DlqProductionExceptionHandler.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/DlqProductionExceptionHandler.java index a75e6807..931fd7f6 100644 --- a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/DlqProductionExceptionHandler.java +++ b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/DlqProductionExceptionHandler.java @@ -16,8 +16,7 @@ */ @Slf4j @NoArgsConstructor -public class DlqProductionExceptionHandler extends DlqExceptionHandler - implements ProductionExceptionHandler { +public class DlqProductionExceptionHandler extends DlqExceptionHandler implements ProductionExceptionHandler { private static final Object GUARD = new Object(); /** diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/GenericErrorProcessor.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/GenericErrorProcessor.java index 73e60f5f..fcce0bf0 100644 --- a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/GenericErrorProcessor.java +++ b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/GenericErrorProcessor.java @@ -3,8 +3,7 @@ import com.michelin.kstreamplify.avro.KafkaError; import java.io.PrintWriter; import java.io.StringWriter; -import org.apache.kafka.streams.processor.api.FixedKeyProcessor; -import org.apache.kafka.streams.processor.api.FixedKeyProcessorContext; +import org.apache.kafka.streams.processor.api.ContextualFixedKeyProcessor; import org.apache.kafka.streams.processor.api.FixedKeyRecord; import org.apache.kafka.streams.processor.api.RecordMetadata; @@ -13,20 +12,7 @@ * * @param The type of the failed record */ -public class GenericErrorProcessor - implements FixedKeyProcessor, KafkaError> { - private FixedKeyProcessorContext context; - - /** - * Init context. 
- * - * @param context the context to init - */ - @Override - public void init(FixedKeyProcessorContext context) { - this.context = context; - } - +class GenericErrorProcessor extends ContextualFixedKeyProcessor, KafkaError> { /** * Process the error. * @@ -38,7 +24,7 @@ public void process(FixedKeyRecord> fixedKeyRecord) { PrintWriter pw = new PrintWriter(sw); fixedKeyRecord.value().getException().printStackTrace(pw); - RecordMetadata recordMetadata = context.recordMetadata().orElse(null); + RecordMetadata recordMetadata = context().recordMetadata().orElse(null); KafkaError error = KafkaError.newBuilder() .setCause(fixedKeyRecord.value().getException().getMessage()) @@ -52,7 +38,7 @@ public void process(FixedKeyRecord> fixedKeyRecord) { .setValue(fixedKeyRecord.value().getKafkaRecord()) .build(); - context.forward(fixedKeyRecord.withValue(error)); + context().forward(fixedKeyRecord.withValue(error)); } /** diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/ProcessingResult.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/ProcessingResult.java index 0c9a5c5b..2ed5ef85 100644 --- a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/ProcessingResult.java +++ b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/ProcessingResult.java @@ -26,7 +26,7 @@ public class ProcessingResult { * * @param value The success value */ - public ProcessingResult(V value) { + private ProcessingResult(V value) { this.value = value; } @@ -35,7 +35,7 @@ public ProcessingResult(V value) { * * @param error the ProcessingError containing the */ - public ProcessingResult(ProcessingError error) { + private ProcessingResult(ProcessingError error) { this.error = error; } @@ -51,7 +51,6 @@ public static ProcessingResult success(V value) { return new ProcessingResult<>(value); } - /** *

<p>Wraps a record's value with ProcessingResult.success(V value).</p>
     *
     * <p>
The resulting stream needs to be handled with TopologyErrorHandler#catchErrors(KStream) @@ -63,8 +62,7 @@ public static ProcessingResult success(V value) { * @param The type of the ProcessingResult error value * @return The initial Record, with value wrapped in a ProcessingResult */ - public static Record> wrapRecordSuccess( - Record message) { + public static Record> wrapRecordSuccess(Record message) { return new Record<>(message.key(), ProcessingResult.success(message.value()), message.timestamp()); } @@ -86,8 +84,7 @@ public static Record> wrapRecordSuccess( * @param The type of the ProcessingResult error value * @return A Record with value wrapped in a {@link ProcessingResult} */ - public static Record> wrapRecordSuccess(K key, V value, - long timestamp) { + public static Record> wrapRecordSuccess(K key, V value, long timestamp) { return new Record<>(key, ProcessingResult.success(value), timestamp); } @@ -127,8 +124,7 @@ public static ProcessingResult fail(Exception e, V2 failedRecordV * @param The type of the failed record * @return A processing result containing the failed record */ - public static ProcessingResult fail(Exception e, V2 failedRecordValue, - String contextMessage) { + public static ProcessingResult fail(Exception e, V2 failedRecordValue, String contextMessage) { return new ProcessingResult<>(new ProcessingError<>(e, contextMessage, failedRecordValue)); } diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/TopologyErrorHandler.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/TopologyErrorHandler.java index e8e6ee06..4df8600b 100644 --- a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/TopologyErrorHandler.java +++ b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/error/TopologyErrorHandler.java @@ -1,8 +1,9 @@ package com.michelin.kstreamplify.error; import com.michelin.kstreamplify.context.KafkaStreamsExecutionContext; -import com.michelin.kstreamplify.utils.SerdesUtils; +import com.michelin.kstreamplify.serde.SerdesUtils; import java.util.Map; +import lombok.NoArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.StringUtils; import org.apache.kafka.common.serialization.Serdes; @@ -16,12 +17,10 @@ * The topology error handler class. */ @Slf4j +@NoArgsConstructor(access = lombok.AccessLevel.PRIVATE) public class TopologyErrorHandler { private static final String BRANCHING_NAME_NOMINAL = "branch-nominal"; - private TopologyErrorHandler() { - } - /** * Catch the errors from the given stream. * @@ -82,16 +81,14 @@ public static KStream catchErrors(KStream void handleErrors(KStream> errorsStream) { if (StringUtils.isBlank(KafkaStreamsExecutionContext.getDlqTopicName())) { - log.warn( - "Failed to route topology error to the designated DLQ (Dead Letter Queue) topic. " - + - "Please make sure to define a DLQ topic in your KafkaStreamsStarter bean configuration."); + log.warn("Failed to route topology error to the designated DLQ (Dead Letter Queue) topic. " + + "Please make sure to define a DLQ topic in your KafkaStreamsStarter bean configuration."); return; } errorsStream .map((key, value) -> new KeyValue<>(key == null ? 
"null" : key.toString(), value)) .processValues(GenericErrorProcessor::new) .to(KafkaStreamsExecutionContext.getDlqTopicName(), Produced.with(Serdes.String(), - SerdesUtils.getSerdesForValue())); + SerdesUtils.getValueSerdes())); } } diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/exception/HttpServerException.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/exception/HttpServerException.java new file mode 100644 index 00000000..2b8a7211 --- /dev/null +++ b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/exception/HttpServerException.java @@ -0,0 +1,17 @@ +package com.michelin.kstreamplify.exception; + +/** + * Exception thrown when the HTTP server cannot be created. + */ +public class HttpServerException extends RuntimeException { + private static final String FAIL_TO_CREATE = "Fail to create the HTTP server"; + + /** + * Constructor. + * + * @param cause The cause of the exception + */ + public HttpServerException(Throwable cause) { + super(FAIL_TO_CREATE, cause); + } +} diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/exception/OtherInstanceResponseException.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/exception/OtherInstanceResponseException.java new file mode 100644 index 00000000..9ba27113 --- /dev/null +++ b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/exception/OtherInstanceResponseException.java @@ -0,0 +1,17 @@ +package com.michelin.kstreamplify.exception; + +/** + * Exception thrown when a response from another instance cannot be read. + */ +public class OtherInstanceResponseException extends RuntimeException { + private static final String OTHER_INSTANCE_RESPONSE = "Fail to read other instance response"; + + /** + * Constructor. + * + * @param cause The cause of the exception + */ + public OtherInstanceResponseException(Throwable cause) { + super(OTHER_INSTANCE_RESPONSE, cause); + } +} diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/exception/PropertiesFileException.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/exception/PropertiesFileException.java new file mode 100644 index 00000000..cd602710 --- /dev/null +++ b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/exception/PropertiesFileException.java @@ -0,0 +1,17 @@ +package com.michelin.kstreamplify.exception; + +/** + * Exception thrown when a properties file cannot be read. + */ +public class PropertiesFileException extends RuntimeException { + private static final String CANNOT_READ_PROPERTIES_FILE = "Cannot read properties file"; + + /** + * Constructor. + * + * @param cause The cause of the exception + */ + public PropertiesFileException(Throwable cause) { + super(CANNOT_READ_PROPERTIES_FILE, cause); + } +} diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/exception/UnknownKeyException.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/exception/UnknownKeyException.java new file mode 100644 index 00000000..a3c29f9a --- /dev/null +++ b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/exception/UnknownKeyException.java @@ -0,0 +1,17 @@ +package com.michelin.kstreamplify.exception; + +/** + * Exception thrown when a key is not found. + */ +public class UnknownKeyException extends RuntimeException { + private static final String UNKNOWN_KEY = "Key %s not found"; + + /** + * Constructor. 
+ * + * @param key The key that was not found + */ + public UnknownKeyException(String key) { + super(String.format(UNKNOWN_KEY, key)); + } +} diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/initializer/KafkaStreamsInitializer.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/initializer/KafkaStreamsInitializer.java index 72604ab2..7c4afb64 100644 --- a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/initializer/KafkaStreamsInitializer.java +++ b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/initializer/KafkaStreamsInitializer.java @@ -1,12 +1,10 @@ package com.michelin.kstreamplify.initializer; -import static com.michelin.kstreamplify.constants.InitializerConstants.SERVER_PORT_PROPERTY; import static java.util.Optional.ofNullable; -import com.michelin.kstreamplify.constants.InitializerConstants; import com.michelin.kstreamplify.context.KafkaStreamsExecutionContext; -import com.michelin.kstreamplify.properties.PropertiesUtils; -import com.michelin.kstreamplify.rest.DefaultProbeController; +import com.michelin.kstreamplify.property.PropertiesUtils; +import com.michelin.kstreamplify.server.KafkaStreamsHttpServer; import java.util.HashMap; import java.util.Properties; import java.util.stream.Collectors; @@ -26,6 +24,21 @@ @Slf4j @Getter public class KafkaStreamsInitializer { + /** + * The application server property name. + */ + public static final String APPLICATION_SERVER_PROPERTY_NAME = "application.server.var.name"; + + /** + * The server port property name. + */ + public static final String SERVER_PORT_PROPERTY_NAME = "server.port"; + + /** + * The default application server variable name. + */ + public static final String DEFAULT_APPLICATION_SERVER_VARIABLE_NAME = "APPLICATION_SERVER"; + /** * The Kafka Streams instance. */ @@ -76,7 +89,7 @@ public void init(KafkaStreamsStarter streamsStarter) { initProperties(); - initSerdesConfig(); + initSerdeConfig(); initDlq(); @@ -97,24 +110,24 @@ public void init(KafkaStreamsStarter streamsStarter) { Runtime.getRuntime().addShutdownHook(new Thread(kafkaStreams::close)); - // Inject a BYO (Bring Your Own) uncaught exception handler if it is provided kafkaStreams.setUncaughtExceptionHandler( - ofNullable(kafkaStreamsStarter.uncaughtExceptionHandler()) - .orElse(this::onStreamsUncaughtException)); + ofNullable(kafkaStreamsStarter.uncaughtExceptionHandler()) + .orElse(this::onStreamsUncaughtException)); kafkaStreams.setStateListener(this::onStateChange); kafkaStreams.start(); - initHttpServer(); + startHttpServer(); } /** * Init the Kafka Streams execution context. */ - private void initSerdesConfig() { + private void initSerdeConfig() { KafkaStreamsExecutionContext.setSerdesConfig( - kafkaProperties.entrySet().stream().collect( - Collectors.toMap( + kafkaProperties.entrySet() + .stream() + .collect(Collectors.toMap( e -> String.valueOf(e.getKey()), e -> String.valueOf(e.getValue()), (prev, next) -> next, HashMap::new @@ -134,12 +147,12 @@ private void initDlq() { * Init the host information. */ private void initHostInfo() { - String ipEnvVarName = (String) kafkaProperties.get(InitializerConstants.IP_SYSTEM_VARIABLE_PROPERTY); - if (StringUtils.isBlank(ipEnvVarName)) { - ipEnvVarName = InitializerConstants.IP_SYSTEM_VARIABLE_DEFAULT; - } - String myIp = System.getenv(ipEnvVarName); - String host = StringUtils.isNotBlank(myIp) ? 
myIp : InitializerConstants.LOCALHOST; + String applicationServerVarName = + (String) kafkaProperties.getOrDefault(APPLICATION_SERVER_PROPERTY_NAME, + DEFAULT_APPLICATION_SERVER_VARIABLE_NAME); + + String applicationServer = System.getenv(applicationServerVarName); + String host = StringUtils.isNotBlank(applicationServer) ? applicationServer : "localhost"; hostInfo = new HostInfo(host, serverPort); @@ -155,8 +168,9 @@ private void initHostInfo() { /** * Init the HTTP server. */ - protected void initHttpServer() { - new DefaultProbeController(this); + protected void startHttpServer() { + KafkaStreamsHttpServer server = new KafkaStreamsHttpServer(this); + server.start(); } /** @@ -164,7 +178,7 @@ protected void initHttpServer() { */ protected void initProperties() { properties = PropertiesUtils.loadProperties(); - serverPort = (Integer) properties.get(SERVER_PORT_PROPERTY); + serverPort = (Integer) properties.get(SERVER_PORT_PROPERTY_NAME); kafkaProperties = PropertiesUtils.loadKafkaProperties(properties); KafkaStreamsExecutionContext.registerProperties(kafkaProperties); } @@ -197,9 +211,20 @@ protected void onStateChange(KafkaStreams.State newState, KafkaStreams.State old } /** - * Register metrics. + * Register the metrics. + * + * @param kafkaStreams The Kafka Streams instance */ protected void registerMetrics(KafkaStreams kafkaStreams) { // Nothing to do here } + + /** + * Check if the Kafka Streams is running. + * + * @return True if the Kafka Streams is running + */ + public boolean isNotRunning() { + return !kafkaStreams.state().equals(KafkaStreams.State.RUNNING); + } } diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/initializer/KafkaStreamsStarter.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/initializer/KafkaStreamsStarter.java index fa67487a..52181d74 100644 --- a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/initializer/KafkaStreamsStarter.java +++ b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/initializer/KafkaStreamsStarter.java @@ -34,7 +34,7 @@ public void onStart(KafkaStreams kafkaStreams) { /** * Register a custom uncaught exception handler. * - * @return StreamsUncaughtExceptionHandler - uncaught exception handler + * @return StreamsUncaughtExceptionHandler The custom uncaught exception handler */ public StreamsUncaughtExceptionHandler uncaughtExceptionHandler() { return null; diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/model/DlqTopic.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/model/DlqTopic.java deleted file mode 100644 index 9bd57a1b..00000000 --- a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/model/DlqTopic.java +++ /dev/null @@ -1,16 +0,0 @@ -package com.michelin.kstreamplify.model; - -import lombok.Builder; -import lombok.Getter; - -/** - * The dead letter queue (DLQ) topic. - */ -@Getter -@Builder -public class DlqTopic { - /** - * The DLQ topic name. - */ - private String name; -} diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/model/RestServiceResponse.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/model/RestServiceResponse.java deleted file mode 100644 index eb61978f..00000000 --- a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/model/RestServiceResponse.java +++ /dev/null @@ -1,27 +0,0 @@ -package com.michelin.kstreamplify.model; - -import lombok.AllArgsConstructor; -import lombok.Builder; -import lombok.Getter; -import lombok.Setter; - -/** - * Rest service response. 
- * - * @param The body type - */ -@Getter -@Setter -@AllArgsConstructor -@Builder -public class RestServiceResponse { - /** - * The HTTP status. - */ - private int status; - - /** - * The request body. - */ - private T body; -} diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/properties/PropertiesUtils.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/property/PropertiesUtils.java similarity index 66% rename from kstreamplify-core/src/main/java/com/michelin/kstreamplify/properties/PropertiesUtils.java rename to kstreamplify-core/src/main/java/com/michelin/kstreamplify/property/PropertiesUtils.java index 879e19e9..961e3bbb 100644 --- a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/properties/PropertiesUtils.java +++ b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/property/PropertiesUtils.java @@ -1,9 +1,6 @@ -package com.michelin.kstreamplify.properties; - -import static com.michelin.kstreamplify.constants.PropertyConstants.DEFAULT_PROPERTY_FILE; -import static com.michelin.kstreamplify.constants.PropertyConstants.KAFKA_PROPERTIES_PREFIX; -import static com.michelin.kstreamplify.constants.PropertyConstants.PROPERTY_SEPARATOR; +package com.michelin.kstreamplify.property; +import com.michelin.kstreamplify.exception.PropertiesFileException; import java.io.IOException; import java.io.InputStream; import java.util.LinkedHashMap; @@ -18,6 +15,21 @@ */ @NoArgsConstructor(access = AccessLevel.PRIVATE) public final class PropertiesUtils { + /** + * The Kafka properties prefix. + */ + public static final String KAFKA_PROPERTIES_PREFIX = "kafka.properties"; + + /** + * The default property file. + */ + public static final String DEFAULT_PROPERTY_FILE = "application.yml"; + + /** + * The property separator. + */ + public static final String PROPERTY_SEPARATOR = "."; + /** * Load the properties from the default properties file. * @@ -31,7 +43,7 @@ public static Properties loadProperties() { LinkedHashMap propsMap = yaml.load(inputStream); return parsePropertiesMap(propsMap); } catch (IOException e) { - throw new RuntimeException(e); + throw new PropertiesFileException(e); } } @@ -65,26 +77,28 @@ private static Properties parsePropertiesMap(LinkedHashMap map) /** * Parse a given key. 
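     *
     * <p>As a sketch, the YAML below flattens to
     * {@code kafka.properties.bootstrap.servers=localhost:9092}:</p>
     *
     * <pre>{@code
     * kafka:
     *   properties:
     *     bootstrap.servers: "localhost:9092"
     * }</pre>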
* - * @param key The key - * @param map The underlying map - * @param props The properties + * @param key The key + * @param map The underlying map + * @param properties The properties * @return The properties */ - private static Properties parseKey(String key, Object map, Properties props) { - if (props == null) { - props = new Properties(); + private static Properties parseKey(String key, Object map, Properties properties) { + if (properties == null) { + properties = new Properties(); } - String sep = PROPERTY_SEPARATOR; + + String separator = PROPERTY_SEPARATOR; if (StringUtils.isBlank(key)) { - sep = ""; + separator = ""; } + if (map instanceof LinkedHashMap) { - for (Object k : ((LinkedHashMap) map).keySet()) { - parseKey(key + sep + k, ((LinkedHashMap) map).get(k), props); + for (Object mapKey : ((LinkedHashMap) map).keySet()) { + parseKey(key + separator + mapKey, ((LinkedHashMap) map).get(mapKey), properties); } } else { - props.put(key, map); + properties.put(key, map); } - return props; + return properties; } } diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/rest/DefaultProbeController.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/rest/DefaultProbeController.java deleted file mode 100644 index 323f2cb5..00000000 --- a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/rest/DefaultProbeController.java +++ /dev/null @@ -1,118 +0,0 @@ -package com.michelin.kstreamplify.rest; - -import com.michelin.kstreamplify.constants.HttpServerConstants; -import com.michelin.kstreamplify.initializer.KafkaStreamsInitializer; -import com.michelin.kstreamplify.services.ProbeService; -import com.sun.net.httpserver.HttpServer; -import java.io.IOException; -import java.net.InetSocketAddress; -import org.apache.commons.lang3.StringUtils; - -/** - * Default probe controller. - */ -public class DefaultProbeController { - /** - * HTTP server. - */ - protected HttpServer server; - - /** - * Constructor. - * - * @param kafkaStreamsInitializer The Kafka Streams initializer - */ - public DefaultProbeController(KafkaStreamsInitializer kafkaStreamsInitializer) { - try { - server = - HttpServer.create(new InetSocketAddress(kafkaStreamsInitializer.getServerPort()), - 0); - } catch (IOException e) { - throw new RuntimeException(e); - } - - var readinessPath = (String) kafkaStreamsInitializer.getProperties() - .get(HttpServerConstants.READINESS_PROPERTY); - var livenessPath = (String) kafkaStreamsInitializer.getProperties() - .get(HttpServerConstants.LIVENESS_PROPERTY); - var exposeTopologyPath = (String) kafkaStreamsInitializer.getProperties() - .get(HttpServerConstants.TOPOLOGY_PROPERTY); - - readinessProbe(kafkaStreamsInitializer, '/' - + (StringUtils.isBlank(readinessPath) ? HttpServerConstants.READINESS_DEFAULT_PATH : - readinessPath)); - livenessProbe(kafkaStreamsInitializer, '/' - + (StringUtils.isBlank(livenessPath) ? HttpServerConstants.LIVENESS_DEFAULT_PATH : - livenessPath)); - exposeTopology(kafkaStreamsInitializer, '/' - + (StringUtils.isBlank(exposeTopologyPath) ? HttpServerConstants.TOPOLOGY_DEFAULT_PATH : - exposeTopologyPath)); - endpointCaller(kafkaStreamsInitializer); - server.start(); - } - - /** - * Kubernetes' readiness probe. - * - * @param kafkaStreamsInitializer The Kafka Streams initializer. - * @param readinessPath The readiness path. 
- */ - private void readinessProbe(KafkaStreamsInitializer kafkaStreamsInitializer, - String readinessPath) { - server.createContext(readinessPath, (exchange -> { - exchange.sendResponseHeaders( - ProbeService.readinessProbe(kafkaStreamsInitializer).getStatus(), 0); - var output = exchange.getResponseBody(); - output.close(); - exchange.close(); - })); - } - - - /** - * Kubernetes' liveness probe. - * - * @param kafkaStreamsInitializer The Kafka Streams initializer. - * @param livenessPath The liveness path. - */ - private void livenessProbe(KafkaStreamsInitializer kafkaStreamsInitializer, - String livenessPath) { - server.createContext(livenessPath, (exchange -> { - exchange.sendResponseHeaders( - ProbeService.livenessProbe(kafkaStreamsInitializer).getStatus(), 0); - var output = exchange.getResponseBody(); - output.close(); - exchange.close(); - })); - } - - /** - * Expose the topology tree. - * - * @param kafkaStreamsInitializer The Kafka Streams initializer. - * @param exposeTopologyPath The expose topology path. - */ - private void exposeTopology(KafkaStreamsInitializer kafkaStreamsInitializer, - String exposeTopologyPath) { - server.createContext(exposeTopologyPath, (exchange -> { - var restServiceResponse = ProbeService.exposeTopology(kafkaStreamsInitializer); - - exchange.sendResponseHeaders(restServiceResponse.getStatus(), 0); - - var output = exchange.getResponseBody(); - output.write((restServiceResponse.getBody()).getBytes()); - output.close(); - exchange.close(); - })); - } - - - /** - * Callback to override in case of custom endpoint definition. - * - * @param kafkaStreamsInitializer The Kafka Streams initializer - */ - protected void endpointCaller(KafkaStreamsInitializer kafkaStreamsInitializer) { - // Nothing to do here - } -} diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/serde/SerdesUtils.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/serde/SerdesUtils.java new file mode 100644 index 00000000..0512073e --- /dev/null +++ b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/serde/SerdesUtils.java @@ -0,0 +1,45 @@ +package com.michelin.kstreamplify.serde; + +import com.michelin.kstreamplify.context.KafkaStreamsExecutionContext; +import io.confluent.kafka.streams.serdes.avro.SpecificAvroSerde; +import lombok.NoArgsConstructor; +import org.apache.avro.specific.SpecificRecord; + +/** + * The Serde utils class. + */ +@NoArgsConstructor(access = lombok.AccessLevel.PRIVATE) +public final class SerdesUtils { + /** + * Return a key serde for a requested class. + * + * @param The class of requested serdes + * @return a serdes for requested class + */ + public static SpecificAvroSerde getKeySerdes() { + return getSerdes(true); + } + + /** + * Return a value serdes for a requested class. + * + * @param The class of requested serdes + * @return a serdes for requested class + */ + public static SpecificAvroSerde getValueSerdes() { + return getSerdes(false); + } + + /** + * Return a serdes for a requested class. 
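+     *
+     * <p>Usage sketch through the public accessors (the Avro type is an assumption
+     * for illustration):</p>
+     *
+     * <pre>{@code
+     * SpecificAvroSerde<KafkaPerson> valueSerdes = SerdesUtils.getValueSerdes();
+     * SpecificAvroSerde<KafkaPerson> keySerdes = SerdesUtils.getKeySerdes();
+     * }</pre>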
+ * + * @param isSerdeForKey Is the serdes for a key or a value + * @param The class of requested serdes + * @return a serdes for requested class + */ + private static SpecificAvroSerde getSerdes(boolean isSerdeForKey) { + SpecificAvroSerde serde = new SpecificAvroSerde<>(); + serde.configure(KafkaStreamsExecutionContext.getSerdesConfig(), isSerdeForKey); + return serde; + } +} diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/serde/TopicWithSerde.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/serde/TopicWithSerde.java new file mode 100644 index 00000000..a2c7ad8f --- /dev/null +++ b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/serde/TopicWithSerde.java @@ -0,0 +1,153 @@ +package com.michelin.kstreamplify.serde; + + +import com.michelin.kstreamplify.topic.TopicUtils; +import lombok.AccessLevel; +import lombok.AllArgsConstructor; +import lombok.Getter; +import org.apache.kafka.common.serialization.Serde; +import org.apache.kafka.common.utils.Bytes; +import org.apache.kafka.streams.StreamsBuilder; +import org.apache.kafka.streams.kstream.Consumed; +import org.apache.kafka.streams.kstream.GlobalKTable; +import org.apache.kafka.streams.kstream.KStream; +import org.apache.kafka.streams.kstream.KTable; +import org.apache.kafka.streams.kstream.Materialized; +import org.apache.kafka.streams.kstream.Produced; +import org.apache.kafka.streams.state.KeyValueStore; + +/** + * Wrapper class for simplifying topics interactions and their behaviors. + * + * @param The model used as the key avro of the topic. Can be String (Recommended) + * @param The model used as the value avro of the topic. + */ +@AllArgsConstructor(access = AccessLevel.PUBLIC) +public class TopicWithSerde { + /** + * Default prefix property name. + */ + public static final String SELF = "self"; + + /** + * Name of the topic. + */ + private final String topicName; + + /** + *

<p>Name of the property key defined under kafka.properties.prefix.
+     * Used to prefix the topicName dynamically at runtime.</p>
+     *
+     * <p>For instance, given the following configuration:</p>
+     *
+     * <pre>{@code
+     * kafka:
+     *   properties:
+     *     prefix:
+     *       nsKey: "myNamespacePrefix."
+     * }</pre>
+     *
+     * <p>If the topic name is {@code myTopic}, at stream initialization the topic name will
+     * resolve to {@code myNamespacePrefix.myTopic}.</p>
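+     *
+     * <p>Sketch of targeting a custom prefix key through the all-args constructor
+     * (names are illustrative only):</p>
+     *
+     * <pre>{@code
+     * new TopicWithSerde<>("myTopic", "nsKey", Serdes.String(), SerdesUtils.getValueSerdes());
+     * }</pre>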

+ */ + private final String prefixPropertyKey; + + /** + * Key serde for the topic. + */ + @Getter + private final Serde keySerde; + + /** + * Value serde for the topic. + */ + @Getter + private final Serde valueSerde; + + /** + *

<p>Additional constructor which uses default parameter "self" for prefixPropertyKey.</p>
+     *
+     * <p>For instance, given the following configuration:</p>
+     *
+     * <pre>{@code
+     * kafka:
+     *   properties:
+     *     prefix:
+     *       self: "myNamespacePrefix."
+     * }</pre>
+     *
+     * <p>If the topic name is {@code myTopic}, at stream initialization the topic name will
+     * resolve to {@code myNamespacePrefix.myTopic}.</p>
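+     *
+     * <p>A minimal usage sketch (the topic name and Avro type are assumptions for
+     * illustration):</p>
+     *
+     * <pre>{@code
+     * TopicWithSerde<String, KafkaPerson> topic =
+     *     new TopicWithSerde<>("myTopic", Serdes.String(), SerdesUtils.getValueSerdes());
+     * KStream<String, KafkaPerson> stream = topic.stream(streamsBuilder);
+     * }</pre>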

+ * + * @param topicName Name of the topic + * @param keySerde Key serde for the topic + * @param valueSerde Value serde for the topic + */ + public TopicWithSerde(String topicName, Serde keySerde, Serde valueSerde) { + this.topicName = topicName; + this.prefixPropertyKey = SELF; + this.keySerde = keySerde; + this.valueSerde = valueSerde; + } + + /** + * Get the un-prefixed name of the Topic for specific usage. + * + * @return The name of the topic, as defined during initialization + */ + public String getUnPrefixedName() { + return topicName; + } + + /** + * Override of the toString method, dynamically builds the topicName based on springBoot + * properties for environment/application. + * + * @return The prefixed name of the topic + */ + @Override + public String toString() { + return TopicUtils.remapAndPrefix(topicName, prefixPropertyKey); + } + + /** + * Wrapper for the .stream method of KafkaStreams. + * Allows simple usage of a topic with type inference + * + * @param sb The streamsBuilder + * @return a Kstream from the given topic + */ + public KStream stream(StreamsBuilder sb) { + return sb.stream(this.toString(), Consumed.with(keySerde, valueSerde)); + } + + /** + * Wrapper for the .table method of KafkaStreams. Allows simple usage of a topic with type inference + * + * @param sb The streamsBuilder + * @param storeName The StoreName + * @return a KTable from the given topic + */ + public KTable table(StreamsBuilder sb, String storeName) { + return sb.table(this.toString(), Consumed.with(keySerde, valueSerde), + Materialized.>as(storeName).withKeySerde(keySerde) + .withValueSerde(valueSerde)); + } + + /** + * Wrapper for the .globalTable method of KafkaStreams. Allows simple usage of a topic with type inference + * + * @param sb The streamsBuilder + * @param storeName The StoreName + * @return a GlobalKTable from the given topic + */ + public GlobalKTable globalTable(StreamsBuilder sb, String storeName) { + return sb.globalTable(this.toString(), Consumed.with(keySerde, valueSerde), + Materialized.>as(storeName).withKeySerde(keySerde) + .withValueSerde(valueSerde)); + } + + /** + * Wrapper for the .to method of Kafka streams. 
Allows simple usage of a topic with type inference + * + * @param stream The stream to produce in the topic + */ + public void produce(KStream stream) { + stream.to(this.toString(), Produced.with(keySerde, valueSerde)); + } +} diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/server/KafkaStreamsHttpServer.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/server/KafkaStreamsHttpServer.java new file mode 100644 index 00000000..c9d72e5f --- /dev/null +++ b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/server/KafkaStreamsHttpServer.java @@ -0,0 +1,192 @@ +package com.michelin.kstreamplify.server; + +import static com.michelin.kstreamplify.service.InteractiveQueriesService.DEFAULT_STORE_PATH; +import static com.michelin.kstreamplify.service.KubernetesService.DEFAULT_LIVENESS_PATH; +import static com.michelin.kstreamplify.service.KubernetesService.DEFAULT_READINESS_PATH; +import static com.michelin.kstreamplify.service.KubernetesService.LIVENESS_PATH_PROPERTY_NAME; +import static com.michelin.kstreamplify.service.KubernetesService.READINESS_PATH_PROPERTY_NAME; +import static com.michelin.kstreamplify.service.TopologyService.TOPOLOGY_DEFAULT_PATH; +import static com.michelin.kstreamplify.service.TopologyService.TOPOLOGY_PROPERTY; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.google.common.net.HttpHeaders; +import com.google.common.net.MediaType; +import com.michelin.kstreamplify.exception.HttpServerException; +import com.michelin.kstreamplify.exception.UnknownKeyException; +import com.michelin.kstreamplify.initializer.KafkaStreamsInitializer; +import com.michelin.kstreamplify.service.InteractiveQueriesService; +import com.michelin.kstreamplify.service.KubernetesService; +import com.michelin.kstreamplify.service.TopologyService; +import com.michelin.kstreamplify.store.StreamsMetadata; +import com.sun.net.httpserver.HttpExchange; +import com.sun.net.httpserver.HttpServer; +import java.io.OutputStream; +import java.net.HttpURLConnection; +import java.net.InetSocketAddress; +import java.util.Properties; +import java.util.function.IntSupplier; +import org.apache.kafka.streams.errors.StreamsNotStartedException; +import org.apache.kafka.streams.errors.UnknownStateStoreException; + +/** + * Kafka Streams HTTP server. + */ +public class KafkaStreamsHttpServer { + private final KafkaStreamsInitializer kafkaStreamsInitializer; + private final ObjectMapper objectMapper; + private final KubernetesService kubernetesService; + private final TopologyService topologyService; + private final InteractiveQueriesService interactiveQueriesService; + + /** + * The HTTP server. + */ + protected HttpServer server; + + /** + * Constructor. + * + * @param kafkaStreamsInitializer The Kafka Streams initializer + */ + public KafkaStreamsHttpServer(KafkaStreamsInitializer kafkaStreamsInitializer) { + this.kafkaStreamsInitializer = kafkaStreamsInitializer; + this.objectMapper = new ObjectMapper(); + this.kubernetesService = new KubernetesService(kafkaStreamsInitializer); + this.topologyService = new TopologyService(kafkaStreamsInitializer); + this.interactiveQueriesService = new InteractiveQueriesService(kafkaStreamsInitializer); + } + + /** + * Start the HTTP server. 
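+     *
+     * <p>With default properties, the endpoints created below resolve to (paths are
+     * overridable, defaults shown as a sketch):</p>
+     *
+     * <pre>{@code
+     * GET /ready     // Kubernetes readiness probe
+     * GET /liveness  // Kubernetes liveness probe
+     * GET /topology  // Kafka Streams topology description
+     * GET /store     // interactive queries on state stores
+     * }</pre>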
+ */ + public void start() { + try { + server = HttpServer.create(new InetSocketAddress(kafkaStreamsInitializer.getServerPort()), 0); + + Properties properties = kafkaStreamsInitializer.getProperties(); + + createKubernetesEndpoint( + (String) properties.getOrDefault(READINESS_PATH_PROPERTY_NAME, DEFAULT_READINESS_PATH), + kubernetesService::getReadiness); + + createKubernetesEndpoint( + (String) properties.getOrDefault(LIVENESS_PATH_PROPERTY_NAME, DEFAULT_LIVENESS_PATH), + kubernetesService::getLiveness); + + createTopologyEndpoint(); + createStoreEndpoints(); + + addEndpoint(kafkaStreamsInitializer); + server.start(); + } catch (Exception e) { + throw new HttpServerException(e); + } + } + + private void createKubernetesEndpoint(String path, IntSupplier kubernetesSupplier) { + server.createContext("/" + path, + (exchange -> { + int code = kubernetesSupplier.getAsInt(); + exchange.sendResponseHeaders(code, 0); + exchange.close(); + })); + } + + private void createTopologyEndpoint() { + String topologyEndpointPath = (String) kafkaStreamsInitializer.getProperties() + .getOrDefault(TOPOLOGY_PROPERTY, TOPOLOGY_DEFAULT_PATH); + + server.createContext("/" + topologyEndpointPath, + (exchange -> { + String response = topologyService.getTopology(); + exchange.sendResponseHeaders(HttpURLConnection.HTTP_OK, response.length()); + exchange.getResponseHeaders().set(HttpHeaders.CONTENT_TYPE, MediaType.PLAIN_TEXT_UTF_8.toString()); + + OutputStream output = exchange.getResponseBody(); + output.write(response.getBytes()); + + exchange.close(); + })); + } + + private void createStoreEndpoints() { + server.createContext("/" + DEFAULT_STORE_PATH, + (exchange -> { + try { + Object response = getResponseForStoreEndpoints(exchange); + String jsonResponse = objectMapper.writeValueAsString(response); + + exchange.sendResponseHeaders(HttpURLConnection.HTTP_OK, jsonResponse.length()); + exchange.getResponseHeaders().set(HttpHeaders.CONTENT_TYPE, MediaType.JSON_UTF_8.toString()); + + OutputStream output = exchange.getResponseBody(); + output.write(jsonResponse.getBytes()); + } catch (StreamsNotStartedException e) { + exchange.sendResponseHeaders(HttpURLConnection.HTTP_UNAVAILABLE, e.getMessage().length()); + OutputStream output = exchange.getResponseBody(); + output.write(e.getMessage().getBytes()); + } catch (UnknownStateStoreException | UnknownKeyException e) { + exchange.sendResponseHeaders(HttpURLConnection.HTTP_NOT_FOUND, e.getMessage().length()); + OutputStream output = exchange.getResponseBody(); + output.write(e.getMessage().getBytes()); + } catch (IllegalArgumentException e) { + exchange.sendResponseHeaders(HttpURLConnection.HTTP_BAD_REQUEST, e.getMessage().length()); + OutputStream output = exchange.getResponseBody(); + output.write(e.getMessage().getBytes()); + } finally { + exchange.close(); + } + })); + } + + private Object getResponseForStoreEndpoints(HttpExchange exchange) { + if (exchange.getRequestURI().toString().equals("/" + DEFAULT_STORE_PATH)) { + return interactiveQueriesService.getStateStores(); + } + + String store; + if (exchange.getRequestURI().toString().matches("/" + DEFAULT_STORE_PATH + "/metadata/.*")) { + store = parsePathParam(exchange, 3); + return interactiveQueriesService.getStreamsMetadataForStore(store) + .stream() + .map(streamsMetadata -> new StreamsMetadata( + streamsMetadata.stateStoreNames(), + streamsMetadata.hostInfo(), + streamsMetadata.topicPartitions())) + .toList(); + } + + if (exchange.getRequestURI().toString().matches("/" + DEFAULT_STORE_PATH + "/local/.*")) { + 
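+                // Note: /store/local/{store} only reads this instance's state stores;
+                // getAll() on a peer calls it so cross-instance fetches do not fan out again.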
store = parsePathParam(exchange, 3); + return interactiveQueriesService.getAllOnLocalhost(store); + } + + store = parsePathParam(exchange, 2); + if (exchange.getRequestURI().toString().matches("/" + DEFAULT_STORE_PATH + "/.*/.*")) { + String key = parsePathParam(exchange, 3); + return interactiveQueriesService.getByKey(store, key); + } + + if (exchange.getRequestURI().toString().matches("/" + DEFAULT_STORE_PATH + "/.*")) { + return interactiveQueriesService.getAll(store); + } + + return null; + } + + private String parsePathParam(HttpExchange exchange, int index) { + return exchange.getRequestURI() + .toString() + .split("\\?")[0] + .split("/")[index]; + } + + /** + * Callback to override in case of adding endpoints. + * + * @param kafkaStreamsInitializer The Kafka Streams initializer + */ + protected void addEndpoint(KafkaStreamsInitializer kafkaStreamsInitializer) { + // Nothing to do here + } +} diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/service/InteractiveQueriesService.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/service/InteractiveQueriesService.java new file mode 100644 index 00000000..a1609cad --- /dev/null +++ b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/service/InteractiveQueriesService.java @@ -0,0 +1,314 @@ +package com.michelin.kstreamplify.service; + +import com.fasterxml.jackson.core.type.TypeReference; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.michelin.kstreamplify.exception.OtherInstanceResponseException; +import com.michelin.kstreamplify.exception.UnknownKeyException; +import com.michelin.kstreamplify.initializer.KafkaStreamsInitializer; +import com.michelin.kstreamplify.store.StateStoreRecord; +import java.net.URI; +import java.net.URISyntaxException; +import java.net.http.HttpClient; +import java.net.http.HttpRequest; +import java.net.http.HttpResponse; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.Set; +import java.util.concurrent.ExecutionException; +import java.util.stream.Collectors; +import lombok.AllArgsConstructor; +import lombok.Getter; +import lombok.extern.slf4j.Slf4j; +import org.apache.kafka.common.serialization.Serializer; +import org.apache.kafka.common.serialization.StringSerializer; +import org.apache.kafka.streams.KafkaStreams; +import org.apache.kafka.streams.KeyQueryMetadata; +import org.apache.kafka.streams.StreamsMetadata; +import org.apache.kafka.streams.errors.StreamsNotStartedException; +import org.apache.kafka.streams.errors.UnknownStateStoreException; +import org.apache.kafka.streams.query.KeyQuery; +import org.apache.kafka.streams.query.RangeQuery; +import org.apache.kafka.streams.query.StateQueryRequest; +import org.apache.kafka.streams.query.StateQueryResult; +import org.apache.kafka.streams.state.HostInfo; +import org.apache.kafka.streams.state.KeyValueIterator; +import org.apache.kafka.streams.state.ValueAndTimestamp; + +/** + * Interactive queries service. + */ +@Slf4j +@AllArgsConstructor +public class InteractiveQueriesService { + private static final String STREAMS_NOT_STARTED = "Cannot process request while instance is in %s state"; + private static final String UNKNOWN_STATE_STORE = "State store %s not found"; + private final ObjectMapper objectMapper = new ObjectMapper(); + private final HttpClient httpClient; + + @Getter + private final KafkaStreamsInitializer kafkaStreamsInitializer; + + /** + * The default store endpoint path. 
+ */ + public static final String DEFAULT_STORE_PATH = "store"; + + /** + * Constructor. + * + * @param kafkaStreamsInitializer The Kafka Streams initializer + */ + public InteractiveQueriesService(KafkaStreamsInitializer kafkaStreamsInitializer) { + this.kafkaStreamsInitializer = kafkaStreamsInitializer; + this.httpClient = HttpClient.newHttpClient(); + } + + /** + * Get the stores. + * + * @return The stores + */ + public Set getStateStores() { + checkStreamsRunning(); + + final Collection metadata = kafkaStreamsInitializer + .getKafkaStreams() + .metadataForAllStreamsClients(); + + if (metadata == null || metadata.isEmpty()) { + return Collections.emptySet(); + } + + return metadata + .stream() + .flatMap(streamsMetadata -> streamsMetadata.stateStoreNames().stream()) + .collect(Collectors.toSet()); + } + + /** + * Get the hosts of the store. + * + * @param store The store + * @return The hosts + */ + public Collection getStreamsMetadataForStore(final String store) { + checkStreamsRunning(); + + return kafkaStreamsInitializer + .getKafkaStreams() + .streamsMetadataForStore(store); + } + + /** + * Get all values from the store. + * + * @param store The store + * @return The values + */ + public List getAll(String store) { + final Collection streamsMetadata = getStreamsMetadataForStore(store); + + if (streamsMetadata == null || streamsMetadata.isEmpty()) { + throw new UnknownStateStoreException(String.format(UNKNOWN_STATE_STORE, store)); + } + + List results = new ArrayList<>(); + streamsMetadata.forEach(metadata -> { + if (isNotCurrentHost(metadata.hostInfo())) { + log.debug("Fetching data on other instance ({}:{})", metadata.host(), metadata.port()); + + results.addAll(getAllOnOtherHost(metadata.hostInfo(), "store/local/" + store)); + } else { + log.debug("Fetching data on this instance ({}:{})", metadata.host(), metadata.port()); + + results.addAll(queryAllRecords(store)); + } + }); + + return results; + } + + /** + * Get all values from the store on the local host. + * + * @param store The store + * @return The values + */ + public List getAllOnLocalhost(String store) { + final Collection streamsMetadata = getStreamsMetadataForStore(store); + + if (streamsMetadata == null || streamsMetadata.isEmpty()) { + throw new UnknownStateStoreException(String.format(UNKNOWN_STATE_STORE, store)); + } + + return queryAllRecords(store); + } + + @SuppressWarnings("unchecked") + private List queryAllRecords(String store) { + RangeQuery rangeQuery = RangeQuery.withNoBounds(); + StateQueryResult> result = kafkaStreamsInitializer + .getKafkaStreams() + .query(StateQueryRequest + .inStore(store) + .withQuery(rangeQuery)); + + List partitionsResult = new ArrayList<>(); + result.getPartitionResults().forEach((key, queryResult) -> + queryResult.getResult().forEachRemaining(kv -> { + if (kv.value instanceof ValueAndTimestamp) { + ValueAndTimestamp valueAndTimestamp = (ValueAndTimestamp) kv.value; + + partitionsResult.add( + new StateStoreRecord( + kv.key, + valueAndTimestamp.value(), + valueAndTimestamp.timestamp() + ) + ); + } else { + partitionsResult.add(new StateStoreRecord(kv.key, kv.value)); + } + })); + + return new ArrayList<>(partitionsResult); + } + + /** + * Get the value by key from the store. 
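+     *
+     * <p>The owning instance is located with {@code queryMetadataForKey}. If the active
+     * host is another instance, the call is forwarded to its {@code /store/{store}/{key}}
+     * endpoint; otherwise a local {@code KeyQuery} runs on the owning partition.</p>
+     *
+     * <pre>{@code
+     * // Sketch, assuming a store named "myStore":
+     * StateStoreRecord record = interactiveQueriesService.getByKey("myStore", "myKey");
+     * }</pre>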
+ * + * @param store The store name + * @param key The key + * @return The value + */ + @SuppressWarnings("unchecked") + public StateStoreRecord getByKey(String store, String key) { + KeyQueryMetadata keyQueryMetadata = getKeyQueryMetadata(store, key, new StringSerializer()); + + if (keyQueryMetadata == null) { + throw new UnknownStateStoreException(String.format(UNKNOWN_STATE_STORE, store)); + } + + HostInfo host = keyQueryMetadata.activeHost(); + if (isNotCurrentHost(host)) { + log.debug("The key {} has been located on another instance ({}:{})", key, + host.host(), host.port()); + + return getByKeyOnOtherHost(host, "store/" + store + "/" + key); + } + + log.debug("The key {} has been located on the current instance ({}:{})", key, + host.host(), host.port()); + + KeyQuery keyQuery = KeyQuery.withKey(key); + StateQueryResult result = kafkaStreamsInitializer + .getKafkaStreams() + .query(StateQueryRequest + .inStore(store) + .withQuery(keyQuery) + .withPartitions(Collections.singleton(keyQueryMetadata.partition()))); + + if (result.getOnlyPartitionResult() == null) { + throw new UnknownKeyException(key); + } + + if (result.getOnlyPartitionResult().getResult() instanceof ValueAndTimestamp) { + ValueAndTimestamp valueAndTimestamp = (ValueAndTimestamp) result.getOnlyPartitionResult() + .getResult(); + + return new StateStoreRecord(key, valueAndTimestamp.value(), valueAndTimestamp.timestamp()); + } + + return new StateStoreRecord(key, result.getOnlyPartitionResult().getResult()); + } + + /** + * Get the host by store and key. + * + * @param store The store + * @param key The key + * @return The host + */ + private KeyQueryMetadata getKeyQueryMetadata(String store, K key, Serializer serializer) { + checkStreamsRunning(); + + return kafkaStreamsInitializer + .getKafkaStreams() + .queryMetadataForKey(store, key, serializer); + } + + /** + * Request other instance. + * + * @param host The host instance + * @param endpointPath The endpoint path to request + * @return The response + */ + private List getAllOnOtherHost(HostInfo host, String endpointPath) { + try { + String jsonResponse = sendRequest(host, endpointPath); + return objectMapper.readValue(jsonResponse, new TypeReference<>() {}); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + return Collections.emptyList(); + } catch (Exception e) { + throw new OtherInstanceResponseException(e); + } + } + + /** + * Request other instance. + * + * @param host The host instance + * @param endpointPath The endpoint path to request + * @return The response + */ + private StateStoreRecord getByKeyOnOtherHost(HostInfo host, String endpointPath) { + try { + String jsonResponse = sendRequest(host, endpointPath); + return objectMapper.readValue(jsonResponse, StateStoreRecord.class); + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + return null; + } catch (Exception e) { + throw new OtherInstanceResponseException(e); + } + } + + private String sendRequest(HostInfo host, String endpointPath) + throws URISyntaxException, ExecutionException, InterruptedException { + HttpRequest request = HttpRequest.newBuilder() + .header("Accept", "application/json") + .uri(new URI(String.format("http://%s:%d/%s", host.host(), host.port(), endpointPath))) + .GET() + .build(); + + return httpClient + .sendAsync(request, HttpResponse.BodyHandlers.ofString()) + .thenApply(HttpResponse::body) + .get(); + } + + /** + * Check if given host is equals to the current stream host. 
+ * + * @param compareHostInfo The host to compare + * @return True if the host is not the current host + */ + private boolean isNotCurrentHost(HostInfo compareHostInfo) { + return !kafkaStreamsInitializer.getHostInfo().host().equals(compareHostInfo.host()) + || kafkaStreamsInitializer.getHostInfo().port() != compareHostInfo.port(); + } + + /** + * Check if the streams are started. + */ + private void checkStreamsRunning() { + if (kafkaStreamsInitializer.isNotRunning()) { + KafkaStreams.State state = kafkaStreamsInitializer.getKafkaStreams().state(); + throw new StreamsNotStartedException(String.format(STREAMS_NOT_STARTED, state)); + } + } +} diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/service/KubernetesService.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/service/KubernetesService.java new file mode 100644 index 00000000..b5525fcb --- /dev/null +++ b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/service/KubernetesService.java @@ -0,0 +1,85 @@ +package com.michelin.kstreamplify.service; + +import com.michelin.kstreamplify.context.KafkaStreamsExecutionContext; +import com.michelin.kstreamplify.initializer.KafkaStreamsInitializer; +import java.net.HttpURLConnection; +import lombok.AllArgsConstructor; +import lombok.extern.slf4j.Slf4j; +import org.apache.kafka.streams.KafkaStreams; +import org.apache.kafka.streams.StreamsConfig; +import org.apache.kafka.streams.processor.internals.StreamThread; + +/** + * Kafka Streams Kubernetes service. + */ +@Slf4j +@AllArgsConstructor +public final class KubernetesService { + /** + * The readiness path property name. + */ + public static final String READINESS_PATH_PROPERTY_NAME = "kubernetes.readiness.path"; + + /** + * The liveness path property name. + */ + public static final String LIVENESS_PATH_PROPERTY_NAME = "kubernetes.liveness.path"; + + /** + * The default readiness path. + */ + public static final String DEFAULT_READINESS_PATH = "ready"; + + /** + * The default liveness path. + */ + public static final String DEFAULT_LIVENESS_PATH = "liveness"; + + /** + * The Kafka Streams initializer. + */ + private final KafkaStreamsInitializer kafkaStreamsInitializer; + + /** + * Kubernetes' readiness probe. + * + * @return An HTTP response code + */ + public int getReadiness() { + if (kafkaStreamsInitializer.getKafkaStreams() != null) { + log.debug("Kafka Stream \"{}\" state: {}", + KafkaStreamsExecutionContext.getProperties().getProperty(StreamsConfig.APPLICATION_ID_CONFIG), + kafkaStreamsInitializer.getKafkaStreams().state()); + + if (kafkaStreamsInitializer.getKafkaStreams().state() == KafkaStreams.State.REBALANCING) { + long startingThreadCount = kafkaStreamsInitializer.getKafkaStreams().metadataForLocalThreads() + .stream() + .filter(t -> StreamThread.State.STARTING.name() + .compareToIgnoreCase(t.threadState()) == 0 || StreamThread.State.CREATED.name() + .compareToIgnoreCase(t.threadState()) == 0) + .count(); + + if (startingThreadCount == kafkaStreamsInitializer.getKafkaStreams().metadataForLocalThreads().size()) { + return HttpURLConnection.HTTP_NO_CONTENT; + } + } + + return kafkaStreamsInitializer.getKafkaStreams().state().equals(KafkaStreams.State.RUNNING) + ? HttpURLConnection.HTTP_OK : HttpURLConnection.HTTP_UNAVAILABLE; + } + return HttpURLConnection.HTTP_BAD_REQUEST; + } + + /** + * Kubernetes' liveness probe. 
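+     *
+     * <p>Sketch of the mapping implemented below: any state other than
+     * {@code NOT_RUNNING} returns 200, {@code NOT_RUNNING} returns 500, and a
+     * missing Kafka Streams instance returns 204.</p>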
+ * + * @return An HTTP response code + */ + public int getLiveness() { + if (kafkaStreamsInitializer.getKafkaStreams() != null) { + return kafkaStreamsInitializer.getKafkaStreams().state() != KafkaStreams.State.NOT_RUNNING + ? HttpURLConnection.HTTP_OK : HttpURLConnection.HTTP_INTERNAL_ERROR; + } + return HttpURLConnection.HTTP_NO_CONTENT; + } +} diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/service/TopologyService.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/service/TopologyService.java new file mode 100644 index 00000000..a6d6eb77 --- /dev/null +++ b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/service/TopologyService.java @@ -0,0 +1,36 @@ +package com.michelin.kstreamplify.service; + +import com.michelin.kstreamplify.initializer.KafkaStreamsInitializer; +import lombok.AllArgsConstructor; +import lombok.extern.slf4j.Slf4j; + +/** + * Kafka Streams topology service. + */ +@Slf4j +@AllArgsConstructor +public class TopologyService { + /** + * The topology path property. + */ + public static final String TOPOLOGY_PROPERTY = "topology.topology-path"; + + /** + * The default topology path. + */ + public static final String TOPOLOGY_DEFAULT_PATH = "topology"; + + /** + * The Kafka Streams initializer. + */ + private final KafkaStreamsInitializer kafkaStreamsInitializer; + + /** + * Get the Kafka Streams topology. + * + * @return The Kafka Streams topology + */ + public String getTopology() { + return kafkaStreamsInitializer.getTopology().describe().toString(); + } +} diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/services/ProbeService.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/services/ProbeService.java deleted file mode 100644 index 283a143e..00000000 --- a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/services/ProbeService.java +++ /dev/null @@ -1,96 +0,0 @@ -package com.michelin.kstreamplify.services; - -import com.michelin.kstreamplify.context.KafkaStreamsExecutionContext; -import com.michelin.kstreamplify.initializer.KafkaStreamsInitializer; -import com.michelin.kstreamplify.model.RestServiceResponse; -import java.net.HttpURLConnection; -import lombok.AccessLevel; -import lombok.NoArgsConstructor; -import lombok.extern.slf4j.Slf4j; -import org.apache.kafka.streams.KafkaStreams; -import org.apache.kafka.streams.StreamsConfig; -import org.apache.kafka.streams.processor.internals.StreamThread; - -/** - * Kafka Streams probe service. - */ -@Slf4j -@NoArgsConstructor(access = AccessLevel.PRIVATE) -public final class ProbeService { - /** - * Kubernetes' readiness probe. 
- * - * @param kafkaStreamsInitializer The Kafka Streams initializer - * @return An HTTP response based on the Kafka Streams state - */ - public static RestServiceResponse readinessProbe( - KafkaStreamsInitializer kafkaStreamsInitializer) { - if (kafkaStreamsInitializer.getKafkaStreams() != null) { - log.debug("Kafka Stream \"{}\" state: {}", - KafkaStreamsExecutionContext.getProperties() - .getProperty(StreamsConfig.APPLICATION_ID_CONFIG), - kafkaStreamsInitializer.getKafkaStreams().state()); - - if (kafkaStreamsInitializer.getKafkaStreams().state() - == KafkaStreams.State.REBALANCING) { - long startingThreadCount = - kafkaStreamsInitializer.getKafkaStreams().metadataForLocalThreads() - .stream() - .filter(t -> StreamThread.State.STARTING.name() - .compareToIgnoreCase(t.threadState()) == 0 - || StreamThread.State.CREATED.name() - .compareToIgnoreCase(t.threadState()) == 0) - .count(); - - if (startingThreadCount - == kafkaStreamsInitializer.getKafkaStreams().metadataForLocalThreads().size()) { - return RestServiceResponse.builder() - .status(HttpURLConnection.HTTP_NO_CONTENT).build(); - } - } - - return kafkaStreamsInitializer.getKafkaStreams().state().equals(KafkaStreams.State.RUNNING) - ? RestServiceResponse.builder().status(HttpURLConnection.HTTP_OK).build() : - RestServiceResponse.builder().status(HttpURLConnection.HTTP_UNAVAILABLE) - .build(); - } - return RestServiceResponse.builder().status(HttpURLConnection.HTTP_BAD_REQUEST) - .build(); - } - - /** - * Kubernetes' liveness probe. - * - * @param kafkaStreamsInitializer The Kafka Streams initializer - * @return An HTTP response based on the Kafka Streams state - */ - public static RestServiceResponse livenessProbe( - KafkaStreamsInitializer kafkaStreamsInitializer) { - if (kafkaStreamsInitializer.getKafkaStreams() != null) { - return kafkaStreamsInitializer.getKafkaStreams().state() - != KafkaStreams.State.NOT_RUNNING - ? RestServiceResponse.builder().status(HttpURLConnection.HTTP_OK).build() - : - RestServiceResponse.builder().status(HttpURLConnection.HTTP_INTERNAL_ERROR) - .build(); - } - return RestServiceResponse.builder().status(HttpURLConnection.HTTP_NO_CONTENT) - .build(); - } - - /** - * Get the Kafka Streams topology. 
- * - * @param kafkaStreamsInitializer The Kafka Streams initializer - * @return The Kafka Streams topology - */ - public static RestServiceResponse exposeTopology( - KafkaStreamsInitializer kafkaStreamsInitializer) { - if (kafkaStreamsInitializer.getTopology() != null) { - return RestServiceResponse.builder().status(HttpURLConnection.HTTP_OK) - .body(kafkaStreamsInitializer.getTopology().describe().toString()).build(); - } - return RestServiceResponse.builder().status(HttpURLConnection.HTTP_NO_CONTENT) - .build(); - } -} diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/properties/RocksDbConfig.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/store/RocksDbConfig.java similarity index 99% rename from kstreamplify-core/src/main/java/com/michelin/kstreamplify/properties/RocksDbConfig.java rename to kstreamplify-core/src/main/java/com/michelin/kstreamplify/store/RocksDbConfig.java index 85ce2884..37169d8a 100644 --- a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/properties/RocksDbConfig.java +++ b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/store/RocksDbConfig.java @@ -1,4 +1,4 @@ -package com.michelin.kstreamplify.properties; +package com.michelin.kstreamplify.store; import com.michelin.kstreamplify.context.KafkaStreamsExecutionContext; import java.util.Map; diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/store/StateStoreRecord.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/store/StateStoreRecord.java new file mode 100644 index 00000000..1f5e32a2 --- /dev/null +++ b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/store/StateStoreRecord.java @@ -0,0 +1,44 @@ +package com.michelin.kstreamplify.store; + +import static com.michelin.kstreamplify.converter.AvroToJsonConverter.convertObject; +import static com.michelin.kstreamplify.converter.JsonToAvroConverter.jsonToObject; + +import com.fasterxml.jackson.annotation.JsonInclude; +import lombok.Getter; +import lombok.NoArgsConstructor; + +/** + * The state store record class. + */ +@Getter +@NoArgsConstructor +@JsonInclude(JsonInclude.Include.NON_NULL) +public class StateStoreRecord { + private String key; + private Object value; + private Long timestamp; + + /** + * Constructor. + * + * @param key The key + * @param value The value + */ + public StateStoreRecord(String key, Object value) { + this.key = key; + // Convert the value to JSON then to object to avoid issue between Avro and Jackson + this.value = jsonToObject(convertObject(value)); + } + + /** + * Constructor. + * + * @param key The key + * @param value The value + * @param timestamp The timestamp + */ + public StateStoreRecord(String key, Object value, Long timestamp) { + this(key, value); + this.timestamp = timestamp; + } +} diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/store/StreamsMetadata.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/store/StreamsMetadata.java new file mode 100644 index 00000000..73189b57 --- /dev/null +++ b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/store/StreamsMetadata.java @@ -0,0 +1,43 @@ +package com.michelin.kstreamplify.store; + +import java.util.Set; +import java.util.stream.Collectors; +import lombok.Getter; +import lombok.NoArgsConstructor; +import org.apache.kafka.common.TopicPartition; +import org.apache.kafka.streams.state.HostInfo; + +/** + * State store metadata. 
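+ * A serialization-friendly view of the Kafka Streams metadata: the state store names, the host
+ * information, and the topic partitions are exposed as plain strings.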
+ */
+@Getter
+@NoArgsConstructor
+public class StreamsMetadata {
+    private Set<String> stateStoreNames;
+    private StreamsHostInfo hostInfo;
+    private Set<String> topicPartitions;
+
+    /**
+     * Constructor.
+     *
+     * @param stateStoreNames The state store names
+     * @param host            The host
+     * @param topicPartitions The topic partitions
+     */
+    public StreamsMetadata(Set<String> stateStoreNames, HostInfo host, Set<TopicPartition> topicPartitions) {
+        this.stateStoreNames = stateStoreNames;
+        this.hostInfo = new StreamsHostInfo(host.host(), host.port());
+        this.topicPartitions = topicPartitions
+            .stream()
+            .map(topicPartition -> topicPartition.topic() + "-" + topicPartition.partition())
+            .collect(Collectors.toSet());
+    }
+
+    /**
+     * State store host information.
+     *
+     * @param host The host
+     * @param port The port
+     */
+    public record StreamsHostInfo(String host, Integer port) {}
+}
diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/store/WindowStateStoreUtils.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/store/WindowStateStoreUtils.java
new file mode 100644
index 00000000..52875ac5
--- /dev/null
+++ b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/store/WindowStateStoreUtils.java
@@ -0,0 +1,47 @@
+package com.michelin.kstreamplify.store;
+
+import java.time.Duration;
+import java.time.Instant;
+import lombok.NoArgsConstructor;
+import org.apache.kafka.streams.state.WindowStore;
+
+/**
+ * The window state store utils.
+ */
+@NoArgsConstructor(access = lombok.AccessLevel.PRIVATE)
+public final class WindowStateStoreUtils {
+    /**
+     * Put the key/value into the state store.
+     *
+     * @param stateStore The stateStore
+     * @param key        The key
+     * @param value      The value
+     * @param <K>        The template for the key
+     * @param <V>        The template for the value
+     */
+    public static <K, V> void put(WindowStore<K, V> stateStore, K key, V value) {
+        stateStore.put(key, value, Instant.now().toEpochMilli());
+    }
+
+    /**
+     * Get the value by the key from the state store.
+     *
+     * @param stateStore    The stateStore
+     * @param key           The key
+     * @param retentionDays The delay of retention
+     * @param <K>           The template for the key
+     * @param <V>           The template for the value
+     * @return The last value inserted in the state store for the key
+     */
+    public static <K, V> V get(WindowStore<K, V> stateStore, K key, int retentionDays) {
+        var resultIterator =
+            stateStore.backwardFetch(key, Instant.now().minus(Duration.ofDays(retentionDays)),
+                Instant.now());
+
+        if (resultIterator != null && resultIterator.hasNext()) {
+            return resultIterator.next().value;
+        }
+
+        return null;
+    }
+}
diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/topic/TopicUtils.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/topic/TopicUtils.java
new file mode 100644
index 00000000..0ae22e80
--- /dev/null
+++ b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/topic/TopicUtils.java
@@ -0,0 +1,66 @@
+package com.michelin.kstreamplify.topic;
+
+import static com.michelin.kstreamplify.property.PropertiesUtils.PROPERTY_SEPARATOR;
+
+import com.michelin.kstreamplify.context.KafkaStreamsExecutionContext;
+import java.util.Properties;
+import lombok.NoArgsConstructor;
+
+/**
+ * The topic utils class.
+ */
+@NoArgsConstructor(access = lombok.AccessLevel.PRIVATE)
+public final class TopicUtils {
+    /**
+     * The topic property name.
+     */
+    public static final String TOPIC_PROPERTY_NAME = "topic";
+
+    /**
+     * The prefix property name.
+     */
+    public static final String PREFIX_PROPERTY_NAME = "prefix";
+
+    /**
+     * The remap property name.
+     */
+    public static final String REMAP_PROPERTY_NAME = "remap";
+
+    /**
+     * <p>Prefix the given topic name with the configured prefix and apply the dynamic remap.</p>
+     *
+     * <p>The prefix is retrieved at runtime from kafka.properties.prefix.[prefixPropertyKey]:</p>
+     * <pre>{@code
+     * kafka:
+     *   properties:
+     *     prefix:
+     *       self: "myNamespacePrefix."
+     * }</pre>
+     *
+     * <p>This allows interactions with multiple topics from different owners/namespaces.
+     * If no prefix is provided, prefixing will not occur.</p>
+     *
+     * <p>The dynamic remap is retrieved from the configuration like so:</p>
+     * <pre>{@code
+     * kafka:
+     *   properties:
+     *     topic:
+     *       remap:
+     *         myInitialTopicName: "myRemappedTopicName"
+     * }</pre>
+     * <p>It can be applied to both input and output topics.</p>
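+     *
+     * <p>For illustration, a hypothetical call assuming the two configuration snippets above are in place:</p>
+     * <pre>{@code
+     * // "myInitialTopicName" is first remapped to "myRemappedTopicName",
+     * // then prefixed with the "self" prefix
+     * String topic = TopicUtils.remapAndPrefix("myInitialTopicName", "self");
+     * // topic == "myNamespacePrefix.myRemappedTopicName"
+     * }</pre>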
+ * + * @param topicName The topicName that needs to be prefixed and remapped + * @param prefixPropertyKey The prefixPropertyKey matching the configuration file + * @return The prefixed and/or remapped topic. + */ + public static String remapAndPrefix(String topicName, String prefixPropertyKey) { + Properties properties = KafkaStreamsExecutionContext.getProperties(); + + // Check for dynamic remap in properties + String resultTopicName = properties.getProperty(TOPIC_PROPERTY_NAME + + PROPERTY_SEPARATOR + REMAP_PROPERTY_NAME + PROPERTY_SEPARATOR + topicName, topicName); + + // Check if topic prefix property exists + String prefix = properties.getProperty(PREFIX_PROPERTY_NAME + PROPERTY_SEPARATOR + prefixPropertyKey, ""); + return prefix.concat(resultTopicName); + } +} diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/utils/SerdesUtils.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/utils/SerdesUtils.java index d7086849..31b82de0 100644 --- a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/utils/SerdesUtils.java +++ b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/utils/SerdesUtils.java @@ -6,7 +6,10 @@ /** * The Serdes utils class. + * + * @deprecated Use {@link com.michelin.kstreamplify.serde.SerdesUtils}. */ +@Deprecated(forRemoval = true, since = "1.1.0") public final class SerdesUtils { private SerdesUtils() { } @@ -44,4 +47,4 @@ private static SpecificAvroSerde getSerdes( serde.configure(KafkaStreamsExecutionContext.getSerdesConfig(), isSerdeForKey); return serde; } -} +} \ No newline at end of file diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/utils/TopicUtils.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/utils/TopicUtils.java index 3e56e3a7..03e566da 100644 --- a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/utils/TopicUtils.java +++ b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/utils/TopicUtils.java @@ -1,15 +1,18 @@ package com.michelin.kstreamplify.utils; -import static com.michelin.kstreamplify.constants.PropertyConstants.PREFIX_PROPERTY_NAME; -import static com.michelin.kstreamplify.constants.PropertyConstants.PROPERTY_SEPARATOR; -import static com.michelin.kstreamplify.constants.PropertyConstants.REMAP_PROPERTY_NAME; -import static com.michelin.kstreamplify.constants.PropertyConstants.TOPIC_PROPERTY_NAME; +import static com.michelin.kstreamplify.property.PropertiesUtils.PROPERTY_SEPARATOR; +import static com.michelin.kstreamplify.topic.TopicUtils.PREFIX_PROPERTY_NAME; +import static com.michelin.kstreamplify.topic.TopicUtils.REMAP_PROPERTY_NAME; +import static com.michelin.kstreamplify.topic.TopicUtils.TOPIC_PROPERTY_NAME; import com.michelin.kstreamplify.context.KafkaStreamsExecutionContext; /** * The topic utils class. + * + * @deprecated Use {@link com.michelin.kstreamplify.topic.TopicUtils}. 
*/ +@Deprecated(forRemoval = true, since = "1.1.0") public final class TopicUtils { private TopicUtils() { } diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/utils/TopicWithSerde.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/utils/TopicWithSerde.java index 2adce2d3..cbf8d430 100644 --- a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/utils/TopicWithSerde.java +++ b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/utils/TopicWithSerde.java @@ -1,8 +1,8 @@ package com.michelin.kstreamplify.utils; +import static com.michelin.kstreamplify.serde.TopicWithSerde.SELF; -import static com.michelin.kstreamplify.constants.PropertyConstants.SELF; - +import com.michelin.kstreamplify.topic.TopicUtils; import lombok.AccessLevel; import lombok.AllArgsConstructor; import lombok.Getter; @@ -22,7 +22,9 @@ * * @param The model used as the key avro of the topic. Can be String (Recommended) * @param The model used as the value avro of the topic. + * @deprecated Use {@link com.michelin.kstreamplify.serde.TopicWithSerde}. */ +@Deprecated(forRemoval = true, since = "1.1.0") @AllArgsConstructor(access = AccessLevel.PUBLIC) public class TopicWithSerde { /** @@ -98,7 +100,7 @@ public String getUnPrefixedName() { */ @Override public String toString() { - return TopicUtils.prefixAndDynamicRemap(topicName, prefixPropertyKey); + return TopicUtils.remapAndPrefix(topicName, prefixPropertyKey); } /** @@ -146,4 +148,4 @@ public GlobalKTable globalTable(StreamsBuilder sb, String storeName) { public void produce(KStream stream) { stream.to(this.toString(), Produced.with(keySerde, valueSerde)); } -} +} \ No newline at end of file diff --git a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/utils/WindowStateStoreUtils.java b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/utils/WindowStateStoreUtils.java index 3de9d109..170c81f8 100644 --- a/kstreamplify-core/src/main/java/com/michelin/kstreamplify/utils/WindowStateStoreUtils.java +++ b/kstreamplify-core/src/main/java/com/michelin/kstreamplify/utils/WindowStateStoreUtils.java @@ -6,8 +6,11 @@ /** * The window state store utils. + * + * @deprecated Use {@link com.michelin.kstreamplify.store.WindowStateStoreUtils}. 
*/ -public final class WindowStateStoreUtils { +@Deprecated(forRemoval = true, since = "1.1.0") +public class WindowStateStoreUtils { private WindowStateStoreUtils() { } diff --git a/kstreamplify-core/src/test/avro/kafka-person.avsc b/kstreamplify-core/src/test/avro/kafka-person.avsc new file mode 100644 index 00000000..72ba8866 --- /dev/null +++ b/kstreamplify-core/src/test/avro/kafka-person.avsc @@ -0,0 +1,37 @@ +{ + "namespace": "com.michelin.kstreamplify.avro", + "type": "record", + "name": "KafkaPersonStub", + "fields": [ + { + "name": "id", + "type": ["null", "long"], + "default": null, + "doc": "Person id" + }, + { + "name": "firstName", + "type": ["null", "string"], + "default": null, + "doc": "Person first name" + }, + { + "name": "lastName", + "type": ["null", "string"], + "default": null, + "doc": "Person last name" + }, + { + "name": "birthDate", + "type": [ + "null", + { + "type": "long", + "logicalType": "timestamp-millis" + } + ], + "default": null, + "doc": "Person date of birth" + } + ] +} \ No newline at end of file diff --git a/kstreamplify-core/src/test/avro/kafka-test.avsc b/kstreamplify-core/src/test/avro/kafka-record.avsc similarity index 97% rename from kstreamplify-core/src/test/avro/kafka-test.avsc rename to kstreamplify-core/src/test/avro/kafka-record.avsc index adc2488f..7423b08a 100644 --- a/kstreamplify-core/src/test/avro/kafka-test.avsc +++ b/kstreamplify-core/src/test/avro/kafka-record.avsc @@ -1,7 +1,7 @@ { "namespace": "com.michelin.kstreamplify.avro", "type": "record", - "name": "KafkaTestAvro", + "name": "KafkaRecordStub", "fields": [ { "name": "stringField", @@ -165,7 +165,7 @@ "type": "array", "items": { "type": "record", - "name": "SubKafkaTestAvro", + "name": "SubKafkaRecordStub", "fields": [ { "name": "subField", @@ -181,7 +181,7 @@ "type": "array", "items": { "type": "record", - "name": "SubSubKafkaTestAvro", + "name": "SubSubKafkaRecordStub", "fields": [ { "name": "subSubDateField", diff --git a/kstreamplify-core/src/test/java/com/michelin/kstreamplify/converter/AvroToJsonConverterTest.java b/kstreamplify-core/src/test/java/com/michelin/kstreamplify/converter/AvroToJsonConverterTest.java index 0ba3bad5..813ab029 100644 --- a/kstreamplify-core/src/test/java/com/michelin/kstreamplify/converter/AvroToJsonConverterTest.java +++ b/kstreamplify-core/src/test/java/com/michelin/kstreamplify/converter/AvroToJsonConverterTest.java @@ -1,14 +1,13 @@ package com.michelin.kstreamplify.converter; import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNull; -import com.google.gson.Gson; -import com.google.gson.JsonObject; import com.michelin.kstreamplify.avro.EnumField; -import com.michelin.kstreamplify.avro.KafkaTestAvro; +import com.michelin.kstreamplify.avro.KafkaRecordStub; import com.michelin.kstreamplify.avro.MapElement; -import com.michelin.kstreamplify.avro.SubKafkaTestAvro; -import com.michelin.kstreamplify.avro.SubSubKafkaTestAvro; +import com.michelin.kstreamplify.avro.SubKafkaRecordStub; +import com.michelin.kstreamplify.avro.SubSubKafkaRecordStub; import java.math.BigDecimal; import java.time.Instant; import java.time.LocalDate; @@ -23,41 +22,78 @@ @Slf4j class AvroToJsonConverterTest { @Test - void shouldConvertAvroToJson() { - KafkaTestAvro avro = getKafkaTest(); - - String jsonString = AvroToJsonConverter.convertRecord(avro); - - var gson = new Gson(); - var jsonObject = gson.fromJson(jsonString, JsonObject.class); - - assertEquals("false", jsonObject.get("booleanField").getAsString()); 
- assertEquals("2024-03-27", jsonObject.get("dateField").getAsString()); - assertEquals("10", jsonObject.get("decimalField").getAsString()); - assertEquals("test", jsonObject.get("stringField").getAsString()); - assertEquals("20:51:01.815", jsonObject.get("timeMillisField").getAsString()); - assertEquals("20:51:01.815832", jsonObject.get("timeMicrosField").getAsString()); - assertEquals("2024-03-27T20:51:01.815832", jsonObject.get("localTimestampMillisField").getAsString()); - assertEquals("2024-03-27T20:51:01.815832123", jsonObject.get("localTimestampMicrosField").getAsString()); - assertEquals("2024-03-27T19:51:01.815Z", jsonObject.get("timestampMillisField").getAsString()); - assertEquals("2024-03-27T19:51:01.815832Z", jsonObject.get("timestampMicrosField").getAsString()); - assertEquals(EnumField.b.toString(), jsonObject.get("enumField").getAsString()); - assertEquals("1970-01-01T00:00:00.002Z", - jsonObject.getAsJsonArray("split").get(0).getAsJsonObject().getAsJsonArray("subSplit") - .get(0).getAsJsonObject().get("subSubDateField").getAsString()); - assertEquals("1970-01-01T00:00:00.003Z", - jsonObject.getAsJsonObject("members").getAsJsonObject("key1").get("mapDateField") - .getAsString()); - assertEquals("val1", jsonObject.getAsJsonObject("membersString").get("key1").getAsString()); - assertEquals("val1", jsonObject.getAsJsonArray("listString").get(0).getAsString()); - assertEquals("val2", jsonObject.getAsJsonArray("listString").get(1).getAsString()); + void shouldConvertObjectNull() { + assertNull(AvroToJsonConverter.convertObject((Object) null)); + } - log.info(jsonString); + @Test + void shouldConvertObject() { + String json = AvroToJsonConverter.convertObject(new PersonStub("John", "Doe")); + assertEquals(""" + { + "firstName": "John", + "lastName": "Doe" + }""", json); } + @Test + void shouldConvertGenericRecord() { + String json = AvroToJsonConverter.convertRecord(buildKafkaRecordStub()); + assertEquals(""" + { + "localTimestampMillisField": "2024-03-27T20:51:01.815832", + "membersString": { + "key1": "val1" + }, + "decimalField": 10, + "timeMillisField": "20:51:01.815", + "booleanField": false, + "dateField": "2024-03-27", + "timestampMillisField": "2024-03-27T19:51:01.815Z", + "intField": 5, + "localTimestampMicrosField": "2024-03-27T20:51:01.815832123", + "listString": [ + "val1", + "val2" + ], + "timestampMicrosField": "2024-03-27T19:51:01.815832Z", + "uuidField": "dc306935-d720-427f-9ecd-ff87c0b15189", + "split": [ + { + "subSplit": [ + { + "subSubIntField": 8, + "subSubDateField": "1970-01-01T00:00:00.002Z", + "subSubField": "subSubTest" + } + ], + "subField": "subTest" + } + ], + "members": { + "key1": { + "mapDateField": "1970-01-01T00:00:00.003Z", + "mapQuantityField": 1 + } + }, + "timeMicrosField": "20:51:01.815832", + "stringField": "test", + "enumField": "b" + }""", json); + } - private KafkaTestAvro getKafkaTest() { - return KafkaTestAvro.newBuilder() + @Test + void shouldConvertListObject() { + String json = AvroToJsonConverter.convertObject(List.of(new PersonStub("John", "Doe"))); + assertEquals(""" + [{ + "firstName": "John", + "lastName": "Doe" + }]""", json); + } + + private KafkaRecordStub buildKafkaRecordStub() { + return KafkaRecordStub.newBuilder() .setDecimalField(BigDecimal.TEN) .setIntField(5) .setStringField("test") @@ -78,10 +114,10 @@ private KafkaTestAvro getKafkaTest() { .setMembersString(Map.of("key1", "val1")) .setListString(List.of("val1", "val2")) .setSplit(List.of( - SubKafkaTestAvro.newBuilder() + SubKafkaRecordStub.newBuilder() 
.setSubField("subTest") .setSubSplit(List.of( - SubSubKafkaTestAvro.newBuilder() + SubSubKafkaRecordStub.newBuilder() .setSubSubField("subSubTest") .setSubSubDateField(Instant.ofEpochMilli(2)) .setSubSubIntField(8) @@ -89,4 +125,6 @@ private KafkaTestAvro getKafkaTest() { .build())) .build(); } + + record PersonStub(String firstName, String lastName) { } } diff --git a/kstreamplify-core/src/test/java/com/michelin/kstreamplify/converter/JsonToAvroConverterTest.java b/kstreamplify-core/src/test/java/com/michelin/kstreamplify/converter/JsonToAvroConverterTest.java index d3dba2b7..78b95cd9 100644 --- a/kstreamplify-core/src/test/java/com/michelin/kstreamplify/converter/JsonToAvroConverterTest.java +++ b/kstreamplify-core/src/test/java/com/michelin/kstreamplify/converter/JsonToAvroConverterTest.java @@ -2,9 +2,11 @@ import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; +import static org.junit.jupiter.api.Assertions.assertNull; import com.michelin.kstreamplify.avro.EnumField; -import com.michelin.kstreamplify.avro.KafkaTestAvro; +import com.michelin.kstreamplify.avro.KafkaRecordStub; +import java.util.Map; import lombok.extern.slf4j.Slf4j; import org.junit.jupiter.api.Test; @@ -33,9 +35,21 @@ class JsonToAvroConverterTest { + "\"listString\":[\"val1\",\"val2\"]" + "}"; + @Test + void shouldConvertJsonToObject() { + assertEquals(Map.of("firstName", "John", "lastName", "Doe"), + JsonToAvroConverter.jsonToObject("{\"firstName\":\"John\",\"lastName\":\"Doe\"}")); + } + + @Test + void shouldConvertJsonToObjectNull() { + assertNull(JsonToAvroConverter.jsonToObject(null)); + } + @Test void shouldConvertJsonToAvro() { - KafkaTestAvro kafkaTest = (KafkaTestAvro) JsonToAvroConverter.jsonToAvro(JSON, KafkaTestAvro.getClassSchema()); + KafkaRecordStub kafkaTest = (KafkaRecordStub) JsonToAvroConverter + .jsonToAvro(JSON, KafkaRecordStub.getClassSchema()); assertEquals("val1", kafkaTest.getMembersString().get("key1")); assertEquals(8, kafkaTest.getSplit().get(0).getSubSplit().get(0).getSubSubIntField()); assertEquals("subSubTest", kafkaTest.getSplit().get(0).getSubSplit().get(0).getSubSubField()); diff --git a/kstreamplify-core/src/test/java/com/michelin/kstreamplify/deduplication/DedupKeyProcessorTest.java b/kstreamplify-core/src/test/java/com/michelin/kstreamplify/deduplication/DedupKeyProcessorTest.java index aa26546f..2902ee7e 100644 --- a/kstreamplify-core/src/test/java/com/michelin/kstreamplify/deduplication/DedupKeyProcessorTest.java +++ b/kstreamplify-core/src/test/java/com/michelin/kstreamplify/deduplication/DedupKeyProcessorTest.java @@ -51,10 +51,7 @@ void setUp() { @Test void shouldProcessNewRecord() { - - // Create a KafkaError final KafkaError kafkaError = new KafkaError(); - // Create a test record final Record record = new Record<>("key", kafkaError, 0); processor.process(record); @@ -66,10 +63,7 @@ void shouldProcessNewRecord() { @Test void shouldProcessDuplicate() { - - // Create a KafkaError final KafkaError kafkaError = new KafkaError(); - // Create a test record final Record record = new Record<>("key", kafkaError, 0L); // Simulate hasNext() returning true once and then false @@ -90,22 +84,17 @@ void shouldProcessDuplicate() { @Test void shouldThrowException() { - - // Create a KafkaError - final KafkaError kafkaError = new KafkaError(); - // Create a test record Record record = new Record<>("key", new KafkaError(), 0L); when(windowStore.backwardFetch(any(), any(), any())).thenReturn(null) - .thenThrow(new 
RuntimeException("Exception...")); + .thenThrow(new RuntimeException("Exception...")); doThrow(new RuntimeException("Exception...")).when(windowStore).put(anyString(), any(), anyLong()); - // Call the process method processor.process(record); verify(context).forward(argThat(arg -> arg.value().getError().getContextMessage() - .equals("Couldn't figure out what to do with the current payload: " - + "An unlikely error occurred during deduplication transform"))); + .equals("Could not figure out what to do with the current payload: " + + "An unlikely error occurred during deduplication transform"))); } } diff --git a/kstreamplify-core/src/test/java/com/michelin/kstreamplify/deduplication/DedupKeyValueProcessorTest.java b/kstreamplify-core/src/test/java/com/michelin/kstreamplify/deduplication/DedupKeyValueProcessorTest.java index 33f79694..be5de747 100644 --- a/kstreamplify-core/src/test/java/com/michelin/kstreamplify/deduplication/DedupKeyValueProcessorTest.java +++ b/kstreamplify-core/src/test/java/com/michelin/kstreamplify/deduplication/DedupKeyValueProcessorTest.java @@ -51,11 +51,7 @@ void setUp() { @Test void shouldProcessNewRecord() { - - // Create a KafkaError final KafkaError kafkaError = new KafkaError(); - - // Create a test record final Record record = new Record<>("key", kafkaError, 0); processor.process(record); @@ -67,11 +63,7 @@ void shouldProcessNewRecord() { @Test void shouldProcessDuplicate() { - - // Create a KafkaError final KafkaError kafkaError = new KafkaError(); - - // Create a test record final Record record = new Record<>("key", kafkaError, 0L); // Simulate hasNext() returning true once and then false @@ -92,22 +84,17 @@ void shouldProcessDuplicate() { @Test void shouldThrowException() { - - // Create a KafkaError - final KafkaError kafkaError = new KafkaError(); - - // Create a test record - final Record record = new Record<>("key", kafkaError, 0L); + final Record message = new Record<>("key", new KafkaError(), 0L); when(windowStore.backwardFetch(any(), any(), any())).thenReturn(null) - .thenThrow(new RuntimeException("Exception...")); + .thenThrow(new RuntimeException("Exception...")); doThrow(new RuntimeException("Exception...")).when(windowStore).put(anyString(), any(), anyLong()); // Call the process method - processor.process(record); + processor.process(message); verify(context).forward(argThat(arg -> arg.value().getError().getContextMessage() - .equals("Couldn't figure out what to do with the current payload: " - + "An unlikely error occurred during deduplication transform"))); + .equals("Could not figure out what to do with the current payload: " + + "An unlikely error occurred during deduplication transform"))); } } diff --git a/kstreamplify-core/src/test/java/com/michelin/kstreamplify/deduplication/DedupWithPredicateProcessorTest.java b/kstreamplify-core/src/test/java/com/michelin/kstreamplify/deduplication/DedupWithPredicateProcessorTest.java index 4da364e3..01d98bbb 100644 --- a/kstreamplify-core/src/test/java/com/michelin/kstreamplify/deduplication/DedupWithPredicateProcessorTest.java +++ b/kstreamplify-core/src/test/java/com/michelin/kstreamplify/deduplication/DedupWithPredicateProcessorTest.java @@ -26,7 +26,6 @@ @ExtendWith(MockitoExtension.class) class DedupWithPredicateProcessorTest { - private DedupWithPredicateProcessor processor; @Mock @@ -41,7 +40,7 @@ class DedupWithPredicateProcessorTest { @BeforeEach void setUp() { // Create an instance of DedupWithPredicateProcessor for testing - processor = new DedupWithPredicateProcessor<>("testStore", 
Duration.ofHours(1), TestKeyExtractor::extract); + processor = new DedupWithPredicateProcessor<>("testStore", Duration.ofHours(1), KeyExtractorStub::extract); // Stub the context.getStateStore method to return the mock store when(context.getStateStore("testStore")).thenReturn(windowStore); @@ -51,11 +50,7 @@ void setUp() { @Test void shouldProcessNewRecord() { - - // Create a KafkaError final KafkaError kafkaError = new KafkaError(); - - // Create a test record final Record record = new Record<>("key", kafkaError, 0); processor.process(record); @@ -67,10 +62,7 @@ void shouldProcessNewRecord() { @Test void shouldProcessDuplicate() { - - // Create a KafkaError final KafkaError kafkaError = new KafkaError(); - // Create a test record final Record record = new Record<>("key", kafkaError, 0L); // Simulate hasNext() returning true once and then false @@ -91,27 +83,24 @@ void shouldProcessDuplicate() { @Test void shouldThrowException() { - - // Create a KafkaError - final KafkaError kafkaError = new KafkaError(); - // Create a test record Record record = new Record<>("key", new KafkaError(), 0L); when(windowStore.backwardFetch(any(), any(), any())).thenReturn(null) - .thenThrow(new RuntimeException("Exception...")); + .thenThrow(new RuntimeException("Exception...")); doThrow(new RuntimeException("Exception...")).when(windowStore).put(anyString(), any(), anyLong()); // Call the process method processor.process(record); verify(context).forward(argThat(arg -> arg.value().getError().getContextMessage() - .equals("Couldn't figure out what to do with the current payload: " - + "An unlikely error occurred during deduplication transform"))); + .equals("Could not figure out what to do with the current payload: " + + "An unlikely error occurred during deduplication transform"))); } - public static class TestKeyExtractor { + static class KeyExtractorStub { public static String extract(V v) { return ""; } } } + diff --git a/kstreamplify-core/src/test/java/com/michelin/kstreamplify/error/DlqDeserializationExceptionHandlerTest.java b/kstreamplify-core/src/test/java/com/michelin/kstreamplify/error/DlqDeserializationExceptionHandlerTest.java index 2d26fba0..7b56229c 100644 --- a/kstreamplify-core/src/test/java/com/michelin/kstreamplify/error/DlqDeserializationExceptionHandlerTest.java +++ b/kstreamplify-core/src/test/java/com/michelin/kstreamplify/error/DlqDeserializationExceptionHandlerTest.java @@ -62,7 +62,7 @@ void shouldReturnFailIfNoDlq() { @Test void shouldReturnFailOnExceptionDuringHandle() { handler = new DlqDeserializationExceptionHandler(producer); - KafkaStreamsExecutionContext.setDlqTopicName("DlqTopic"); + KafkaStreamsExecutionContext.setDlqTopicName("DLQ_TOPIC"); DeserializationExceptionHandler.DeserializationHandlerResponse response = handler.handle(processorContext, record, new KafkaException("Exception...")); @@ -72,7 +72,7 @@ void shouldReturnFailOnExceptionDuringHandle() { @Test void shouldReturnContinueOnKafkaException() { handler = new DlqDeserializationExceptionHandler(producer); - KafkaStreamsExecutionContext.setDlqTopicName("DlqTopic"); + KafkaStreamsExecutionContext.setDlqTopicName("DLQ_TOPIC"); when(record.key()).thenReturn("key".getBytes(StandardCharsets.UTF_8)); when(record.value()).thenReturn("value".getBytes(StandardCharsets.UTF_8)); diff --git a/kstreamplify-core/src/test/java/com/michelin/kstreamplify/error/DlqProductionExceptionHandlerTest.java b/kstreamplify-core/src/test/java/com/michelin/kstreamplify/error/DlqProductionExceptionHandlerTest.java index 113c390d..bbaf14b5 100644 
--- a/kstreamplify-core/src/test/java/com/michelin/kstreamplify/error/DlqProductionExceptionHandlerTest.java +++ b/kstreamplify-core/src/test/java/com/michelin/kstreamplify/error/DlqProductionExceptionHandlerTest.java @@ -57,7 +57,7 @@ void shouldReturnFailIfNoDlq() { @Test void shouldReturnContinueOnExceptionDuringHandle() { handler = new DlqProductionExceptionHandler(producer); - KafkaStreamsExecutionContext.setDlqTopicName("DlqTopic"); + KafkaStreamsExecutionContext.setDlqTopicName("DLQ_TOPIC"); ProductionExceptionHandler.ProductionExceptionHandlerResponse response = handler.handle(record, new KafkaException("Exception...")); @@ -67,7 +67,7 @@ void shouldReturnContinueOnExceptionDuringHandle() { @Test void shouldReturnContinueOnKafkaException() { handler = new DlqProductionExceptionHandler(producer); - KafkaStreamsExecutionContext.setDlqTopicName("DlqTopic"); + KafkaStreamsExecutionContext.setDlqTopicName("DLQ_TOPIC"); when(record.key()).thenReturn("key".getBytes(StandardCharsets.UTF_8)); when(record.value()).thenReturn("value".getBytes(StandardCharsets.UTF_8)); @@ -82,7 +82,7 @@ void shouldReturnContinueOnKafkaException() { @Test void shouldReturnFailOnRetriableException() { handler = new DlqProductionExceptionHandler(producer); - KafkaStreamsExecutionContext.setDlqTopicName("DlqTopic"); + KafkaStreamsExecutionContext.setDlqTopicName("DLQ_TOPIC"); ProductionExceptionHandler.ProductionExceptionHandlerResponse response = handler.handle(record, new RetriableCommitFailedException("Exception...")); diff --git a/kstreamplify-core/src/test/java/com/michelin/kstreamplify/error/ProcessingResultTest.java b/kstreamplify-core/src/test/java/com/michelin/kstreamplify/error/ProcessingResultTest.java index 719e2909..9e5ccfdf 100644 --- a/kstreamplify-core/src/test/java/com/michelin/kstreamplify/error/ProcessingResultTest.java +++ b/kstreamplify-core/src/test/java/com/michelin/kstreamplify/error/ProcessingResultTest.java @@ -6,7 +6,6 @@ import static org.junit.jupiter.api.Assertions.assertNull; import static org.junit.jupiter.api.Assertions.assertTrue; -import com.michelin.kstreamplify.error.ProcessingResult; import org.apache.kafka.streams.processor.api.Record; import org.junit.jupiter.api.Test; @@ -80,11 +79,9 @@ void shouldCreateWrappedFailedProcessingResult() { void shouldProcessingResultBeValid() { ProcessingResult validResult = ProcessingResult.success("Value"); ProcessingResult invalidResult1 = ProcessingResult.fail(new Exception(), 42); - ProcessingResult invalidResult2 = new ProcessingResult<>(null); assertTrue(validResult.isValid()); assertFalse(invalidResult1.isValid()); - assertFalse(invalidResult2.isValid()); } } diff --git a/kstreamplify-core/src/test/java/com/michelin/kstreamplify/initializer/KafkaStreamsInitializerTest.java b/kstreamplify-core/src/test/java/com/michelin/kstreamplify/initializer/KafkaStreamsInitializerTest.java index b1aee7c4..46038f00 100644 --- a/kstreamplify-core/src/test/java/com/michelin/kstreamplify/initializer/KafkaStreamsInitializerTest.java +++ b/kstreamplify-core/src/test/java/com/michelin/kstreamplify/initializer/KafkaStreamsInitializerTest.java @@ -1,8 +1,8 @@ package com.michelin.kstreamplify.initializer; -import static com.michelin.kstreamplify.constants.InitializerConstants.SERVER_PORT_PROPERTY; -import static com.michelin.kstreamplify.constants.PropertyConstants.KAFKA_PROPERTIES_PREFIX; -import static com.michelin.kstreamplify.constants.PropertyConstants.PROPERTY_SEPARATOR; +import static 
com.michelin.kstreamplify.initializer.KafkaStreamsInitializer.SERVER_PORT_PROPERTY_NAME; +import static com.michelin.kstreamplify.property.PropertiesUtils.KAFKA_PROPERTIES_PREFIX; +import static com.michelin.kstreamplify.property.PropertiesUtils.PROPERTY_SEPARATOR; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertTrue; @@ -10,7 +10,7 @@ import static org.mockito.Mockito.mockStatic; import com.michelin.kstreamplify.context.KafkaStreamsExecutionContext; -import com.michelin.kstreamplify.properties.PropertiesUtils; +import com.michelin.kstreamplify.property.PropertiesUtils; import java.util.Properties; import org.apache.kafka.streams.StreamsConfig; import org.apache.kafka.streams.errors.StreamsUncaughtExceptionHandler; @@ -28,7 +28,7 @@ class KafkaStreamsInitializerTest { void shouldInitProperties() { try (MockedStatic propertiesUtilsMockedStatic = mockStatic(PropertiesUtils.class)) { Properties properties = new Properties(); - properties.put(SERVER_PORT_PROPERTY, 8080); + properties.put(SERVER_PORT_PROPERTY_NAME, 8080); properties.put(KAFKA_PROPERTIES_PREFIX + PROPERTY_SEPARATOR + StreamsConfig.APPLICATION_ID_CONFIG, "appId"); properties.put(KAFKA_PROPERTIES_PREFIX + PROPERTY_SEPARATOR + "prefix.self", "abc."); @@ -52,7 +52,7 @@ void shouldInitProperties() { void shouldShutdownClientOnUncaughtException() { try (MockedStatic propertiesUtilsMockedStatic = mockStatic(PropertiesUtils.class)) { Properties properties = new Properties(); - properties.put(SERVER_PORT_PROPERTY, 8080); + properties.put(SERVER_PORT_PROPERTY_NAME, 8080); properties.put(KAFKA_PROPERTIES_PREFIX + PROPERTY_SEPARATOR + StreamsConfig.APPLICATION_ID_CONFIG, "appId"); propertiesUtilsMockedStatic.when(PropertiesUtils::loadProperties) diff --git a/kstreamplify-core/src/test/java/com/michelin/kstreamplify/initializer/KafkaStreamsStarterTest.java b/kstreamplify-core/src/test/java/com/michelin/kstreamplify/initializer/KafkaStreamsStarterTest.java index 1fda3c76..4e425298 100644 --- a/kstreamplify-core/src/test/java/com/michelin/kstreamplify/initializer/KafkaStreamsStarterTest.java +++ b/kstreamplify-core/src/test/java/com/michelin/kstreamplify/initializer/KafkaStreamsStarterTest.java @@ -10,8 +10,8 @@ import com.michelin.kstreamplify.deduplication.DeduplicationUtils; import com.michelin.kstreamplify.error.ProcessingResult; import com.michelin.kstreamplify.error.TopologyErrorHandler; -import com.michelin.kstreamplify.utils.SerdesUtils; -import com.michelin.kstreamplify.utils.TopicWithSerde; +import com.michelin.kstreamplify.serde.SerdesUtils; +import com.michelin.kstreamplify.serde.TopicWithSerde; import io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig; import java.io.IOException; import java.time.Duration; @@ -27,7 +27,6 @@ import org.junit.jupiter.api.Test; class KafkaStreamsStarterTest { - @Test void shouldInstantiateKafkaStreamsStarter() { KafkaStreamsExecutionContext.registerProperties(new Properties()); @@ -35,11 +34,11 @@ void shouldInstantiateKafkaStreamsStarter() { Map.of(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, "mock://")); StreamsBuilder builder = new StreamsBuilder(); - KafkaStreamsStarterImpl starter = new KafkaStreamsStarterImpl(); + KafkaStreamsStarterStub starter = new KafkaStreamsStarterStub(); starter.topology(builder); assertNotNull(builder.build().describe()); - assertEquals("dlqTopicUnitTests", starter.dlqTopic()); + assertEquals("DLQ_TOPIC", 
starter.dlqTopic()); starter.onStart(null); assertTrue(starter.isStarted()); @@ -49,17 +48,17 @@ void shouldInstantiateKafkaStreamsStarter() { void shouldStartWithCustomUncaughtExceptionHandler() { KafkaStreamsExecutionContext.registerProperties(new Properties()); KafkaStreamsExecutionContext.setSerdesConfig( - Map.of(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, "mock://")); + Map.of(AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG, "mock://")); StreamsBuilder builder = new StreamsBuilder(); - KafkaStreamsStarterImpl starter = new KafkaStreamsStarterImpl(); + KafkaStreamsStarterStub starter = new KafkaStreamsStarterStub(); starter.topology(builder); assertNotNull(builder.build().describe()); - assertEquals("dlqTopicUnitTests", starter.dlqTopic()); + assertEquals("DLQ_TOPIC", starter.dlqTopic()); assertEquals(starter.uncaughtExceptionHandler() - .handle(new Exception("Register a custom uncaught exception handler test.")), - REPLACE_THREAD); + .handle(new Exception("Register a custom uncaught exception handler test.")), + REPLACE_THREAD); starter.onStart(null); assertTrue(starter.isStarted()); @@ -69,32 +68,29 @@ void shouldStartWithCustomUncaughtExceptionHandler() { * Kafka Streams Starter implementation used for unit tests purpose. */ @Getter - static class KafkaStreamsStarterImpl extends KafkaStreamsStarter { + static class KafkaStreamsStarterStub extends KafkaStreamsStarter { private boolean started; @Override public void topology(StreamsBuilder streamsBuilder) { - var streams = TopicWithSerdesTestHelper.inputTopicWithSerdes().stream(streamsBuilder); + var streams = TopicWithSerdeStub.inputTopicWithSerde().stream(streamsBuilder); DeduplicationUtils.deduplicateKeys(streamsBuilder, streams, - "deduplicateKeysStoreName", "deduplicateKeysRepartitionName", - Duration.ZERO); + "deduplicateKeysStoreName", "deduplicateKeysRepartitionName", Duration.ZERO); DeduplicationUtils.deduplicateKeyValues(streamsBuilder, streams, - "deduplicateKeyValuesStoreName", - "deduplicateKeyValuesRepartitionName", Duration.ZERO); + "deduplicateKeyValuesStoreName", "deduplicateKeyValuesRepartitionName", Duration.ZERO); DeduplicationUtils.deduplicateWithPredicate(streamsBuilder, streams, Duration.ofMillis(1), null); - var enrichedStreams = streams.mapValues(KafkaStreamsStarterImpl::enrichValue); - var enrichedStreams2 = streams.mapValues(KafkaStreamsStarterImpl::enrichValue2); + var enrichedStreams = streams.mapValues(KafkaStreamsStarterStub::enrichValue); + var enrichedStreams2 = streams.mapValues(KafkaStreamsStarterStub::enrichValue2); var processingResults = TopologyErrorHandler.catchErrors(enrichedStreams); TopologyErrorHandler.catchErrors(enrichedStreams2, true); - TopicWithSerdesTestHelper.outputTopicWithSerdes().produce(processingResults); - + TopicWithSerdeStub.outputTopicWithSerde().produce(processingResults); } @Override public String dlqTopic() { - return "dlqTopicUnitTests"; + return "DLQ_TOPIC"; } @Override @@ -104,13 +100,12 @@ public void onStart(KafkaStreams kafkaStreams) { @Override public StreamsUncaughtExceptionHandler uncaughtExceptionHandler() { - return new CustomUncaughtExceptionHandler(); + return new UncaughtExceptionHandlerStub(); } private static ProcessingResult enrichValue(KafkaError input) { if (input != null) { - String output = "output field"; - return ProcessingResult.success(output); + return ProcessingResult.success("output field"); } else { return ProcessingResult.fail(new IOException("an exception occurred"), "output error"); } @@ -118,8 +113,7 @@ private 
static ProcessingResult enrichValue(KafkaError input) { private static ProcessingResult enrichValue2(KafkaError input) { if (input != null) { - String output = "output field 2"; - return ProcessingResult.success(output); + return ProcessingResult.success("output field 2"); } else { return ProcessingResult.fail(new IOException("an exception occurred"), "output error 2"); } @@ -132,25 +126,24 @@ private static ProcessingResult enrichValue2(KafkaError input) { * @param The key type * @param The value type */ - public static class TopicWithSerdesTestHelper extends TopicWithSerde { - private TopicWithSerdesTestHelper(String name, String appName, Serde keySerde, Serde valueSerde) { + static class TopicWithSerdeStub extends TopicWithSerde { + private TopicWithSerdeStub(String name, String appName, Serde keySerde, Serde valueSerde) { super(name, appName, keySerde, valueSerde); } - public static TopicWithSerdesTestHelper outputTopicWithSerdes() { - return new TopicWithSerdesTestHelper<>("OUTPUT_TOPIC", "APP_NAME", + public static TopicWithSerdeStub outputTopicWithSerde() { + return new TopicWithSerdeStub<>("OUTPUT_TOPIC", "APP_NAME", Serdes.String(), Serdes.String()); } - public static TopicWithSerdesTestHelper inputTopicWithSerdes() { - return new TopicWithSerdesTestHelper<>("INPUT_TOPIC", "APP_NAME", - Serdes.String(), SerdesUtils.getSerdesForValue()); + public static TopicWithSerdeStub inputTopicWithSerde() { + return new TopicWithSerdeStub<>("INPUT_TOPIC", "APP_NAME", + Serdes.String(), SerdesUtils.getValueSerdes()); } } - @Slf4j - static class CustomUncaughtExceptionHandler implements StreamsUncaughtExceptionHandler { + static class UncaughtExceptionHandlerStub implements StreamsUncaughtExceptionHandler { @Override public StreamThreadExceptionResponse handle(final Throwable t) { log.error("!Custom uncaught exception handler test!"); diff --git a/kstreamplify-core/src/test/java/com/michelin/kstreamplify/integration/InteractiveQueriesIntegrationTest.java b/kstreamplify-core/src/test/java/com/michelin/kstreamplify/integration/InteractiveQueriesIntegrationTest.java new file mode 100644 index 00000000..aa191ee4 --- /dev/null +++ b/kstreamplify-core/src/test/java/com/michelin/kstreamplify/integration/InteractiveQueriesIntegrationTest.java @@ -0,0 +1,512 @@ +package com.michelin.kstreamplify.integration; + +import static io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG; +import static org.apache.kafka.clients.producer.ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG; +import static org.apache.kafka.clients.producer.ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG; +import static org.apache.kafka.streams.StreamsConfig.BOOTSTRAP_SERVERS_CONFIG; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; + +import com.fasterxml.jackson.core.type.TypeReference; +import com.michelin.kstreamplify.avro.KafkaPersonStub; +import com.michelin.kstreamplify.initializer.KafkaStreamsStarter; +import com.michelin.kstreamplify.serde.SerdesUtils; +import com.michelin.kstreamplify.service.InteractiveQueriesService; +import com.michelin.kstreamplify.store.StateStoreRecord; +import com.michelin.kstreamplify.store.StreamsMetadata; +import io.confluent.kafka.serializers.KafkaAvroSerializer; +import java.io.IOException; +import java.net.URI; +import java.net.http.HttpRequest; +import java.net.http.HttpResponse; 
+import java.time.Duration; +import java.time.Instant; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.ExecutionException; +import lombok.extern.slf4j.Slf4j; +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.apache.kafka.common.TopicPartition; +import org.apache.kafka.common.serialization.Serdes; +import org.apache.kafka.common.serialization.StringSerializer; +import org.apache.kafka.streams.KafkaStreams; +import org.apache.kafka.streams.StreamsBuilder; +import org.apache.kafka.streams.kstream.Consumed; +import org.apache.kafka.streams.processor.api.Processor; +import org.apache.kafka.streams.processor.api.ProcessorContext; +import org.apache.kafka.streams.processor.api.ProcessorSupplier; +import org.apache.kafka.streams.processor.api.Record; +import org.apache.kafka.streams.state.KeyValueStore; +import org.apache.kafka.streams.state.StoreBuilder; +import org.apache.kafka.streams.state.Stores; +import org.apache.kafka.streams.state.TimestampedKeyValueStore; +import org.apache.kafka.streams.state.TimestampedWindowStore; +import org.apache.kafka.streams.state.ValueAndTimestamp; +import org.apache.kafka.streams.state.WindowStore; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.CsvSource; +import org.testcontainers.junit.jupiter.Testcontainers; + +@Slf4j +@Testcontainers +class InteractiveQueriesIntegrationTest extends KafkaIntegrationTest { + private final InteractiveQueriesService interactiveQueriesService = new InteractiveQueriesService(initializer); + + @BeforeAll + static void globalSetUp() throws ExecutionException, InterruptedException { + createTopics( + broker.getBootstrapServers(), + new TopicPartition("STRING_TOPIC", 3), + new TopicPartition("AVRO_TOPIC", 2) + ); + + try (KafkaProducer stringKafkaProducer = new KafkaProducer<>( + Map.of(BOOTSTRAP_SERVERS_CONFIG, broker.getBootstrapServers(), + KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName(), + VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()))) { + + ProducerRecord message = new ProducerRecord<>( + "STRING_TOPIC", "person", "Doe"); + + stringKafkaProducer + .send(message) + .get(); + } + + try (KafkaProducer avroKafkaProducer = new KafkaProducer<>( + Map.of(BOOTSTRAP_SERVERS_CONFIG, broker.getBootstrapServers(), + KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName(), + VALUE_SERIALIZER_CLASS_CONFIG, KafkaAvroSerializer.class.getName(), + SCHEMA_REGISTRY_URL_CONFIG, "http://" + schemaRegistry.getHost() + ":" + schemaRegistry.getFirstMappedPort()))) { + + KafkaPersonStub kafkaPersonStub = KafkaPersonStub.newBuilder() + .setId(1L) + .setFirstName("John") + .setLastName("Doe") + .setBirthDate(Instant.parse("2000-01-01T01:00:00Z")) + .build(); + + ProducerRecord message = new ProducerRecord<>( + "AVRO_TOPIC", "person", kafkaPersonStub); + + avroKafkaProducer + .send(message) + .get(); + } + + initializer = new KafkaStreamInitializerStub( + 8081, + "appInteractiveQueriesId", + broker.getBootstrapServers(), + "http://" + schemaRegistry.getHost() + ":" + schemaRegistry.getFirstMappedPort(), + "/tmp/kstreamplify/kstreamplify-core-test/interactive-queries"); + initializer.init(new KafkaStreamsStarterStub()); + } + + @BeforeEach + void setUp() throws InterruptedException { + waitingForKafkaStreamsToStart(); + 
waitingForLocalStoreToReachOffset(Map.of( + "STRING_STRING_STORE", Map.of(1, 1L), + "STRING_AVRO_STORE", Map.of(0, 1L), + "STRING_AVRO_TIMESTAMPED_STORE", Map.of(0, 1L), + "STRING_AVRO_WINDOW_STORE", Map.of(0, 1L), + "STRING_AVRO_TIMESTAMPED_WINDOW_STORE", Map.of(0, 1L) + )); + } + + @Test + void shouldGetStoresAndStoreMetadata() throws IOException, InterruptedException { + // Get stores + HttpRequest storesRequest = HttpRequest.newBuilder() + .uri(URI.create("http://localhost:8081/store")) + .GET() + .build(); + + HttpResponse storesResponse = httpClient.send(storesRequest, HttpResponse.BodyHandlers.ofString()); + List stores = objectMapper.readValue(storesResponse.body(), new TypeReference<>() {}); + + assertEquals(200, storesResponse.statusCode()); + assertTrue(stores.containsAll(List.of( + "STRING_STRING_STORE", + "STRING_AVRO_STORE", + "STRING_AVRO_TIMESTAMPED_STORE", + "STRING_AVRO_WINDOW_STORE", + "STRING_AVRO_TIMESTAMPED_WINDOW_STORE" + ))); + + // Get store metadata + HttpRequest streamsMetadataRequest = HttpRequest.newBuilder() + .uri(URI.create("http://localhost:8081/store/metadata/STRING_STRING_STORE")) + .GET() + .build(); + + HttpResponse streamsMetadataResponse = httpClient + .send(streamsMetadataRequest, HttpResponse.BodyHandlers.ofString()); + + List streamsMetadata = objectMapper + .readValue(streamsMetadataResponse.body(), new TypeReference<>() {}); + + assertEquals(200, streamsMetadataResponse.statusCode()); + assertEquals(Set.of( + "STRING_STRING_STORE", + "STRING_AVRO_STORE", + "STRING_AVRO_TIMESTAMPED_STORE", + "STRING_AVRO_WINDOW_STORE", + "STRING_AVRO_TIMESTAMPED_WINDOW_STORE"), streamsMetadata.get(0).getStateStoreNames()); + assertEquals("localhost", streamsMetadata.get(0).getHostInfo().host()); + assertEquals(8081, streamsMetadata.get(0).getHostInfo().port()); + assertEquals(Set.of( + "AVRO_TOPIC-0", + "AVRO_TOPIC-1", + "STRING_TOPIC-0", + "STRING_TOPIC-1", + "STRING_TOPIC-2"), streamsMetadata.get(0).getTopicPartitions()); + } + + @ParameterizedTest + @CsvSource({ + "http://localhost:8081/store/WRONG_STORE/person,State store WRONG_STORE not found", + "http://localhost:8081/store/STRING_STRING_STORE/wrongKey,Key wrongKey not found", + "http://localhost:8081/store/WRONG_STORE,State store WRONG_STORE not found", + }) + void shouldGetErrorWhenWrongKeyOrStore(String url, String message) throws IOException, InterruptedException { + HttpRequest request = HttpRequest.newBuilder() + .uri(URI.create(url)) + .GET() + .build(); + + HttpResponse response = httpClient.send(request, HttpResponse.BodyHandlers.ofString()); + + assertEquals(404, response.statusCode()); + assertEquals(message, response.body()); + } + + @Test + void shouldGetByKeyWrongKey() throws IOException, InterruptedException { + HttpRequest request = HttpRequest.newBuilder() + .uri(URI.create("http://localhost:8081/store/STRING_STRING_STORE/wrongKey")) + .GET() + .build(); + + HttpResponse response = httpClient.send(request, HttpResponse.BodyHandlers.ofString()); + + assertEquals(404, response.statusCode()); + assertEquals("Key wrongKey not found", response.body()); + } + + @Test + void shouldGetByKeyWrongStoreType() throws IOException, InterruptedException { + HttpRequest request = HttpRequest.newBuilder() + .uri(URI.create("http://localhost:8081/store/STRING_AVRO_WINDOW_STORE/person")) + .GET() + .build(); + + HttpResponse response = httpClient.send(request, HttpResponse.BodyHandlers.ofString()); + + assertEquals(400, response.statusCode()); + assertTrue(response.body().contains("Cannot get result for failed 
query.")); + } + + @Test + void shouldGetByKeyInStringStringKeyValueStore() throws IOException, InterruptedException { + HttpRequest request = HttpRequest.newBuilder() + .uri(URI.create("http://localhost:8081/store/STRING_STRING_STORE/person")) + .GET() + .build(); + + HttpResponse response = httpClient.send(request, HttpResponse.BodyHandlers.ofString()); + StateStoreRecord body = objectMapper.readValue(response.body(), StateStoreRecord.class); + + assertEquals(200, response.statusCode()); + assertEquals("person", body.getKey()); + assertEquals("Doe", body.getValue()); + assertNull(body.getTimestamp()); + } + + @Test + void shouldGetByKeyInStringAvroKeyValueStore() throws IOException, InterruptedException { + HttpRequest request = HttpRequest.newBuilder() + .uri(URI.create("http://localhost:8081/store/STRING_AVRO_STORE/person")) + .GET() + .build(); + + HttpResponse response = httpClient.send(request, HttpResponse.BodyHandlers.ofString()); + StateStoreRecord body = objectMapper.readValue(response.body(), StateStoreRecord.class); + + assertEquals(200, response.statusCode()); + assertEquals("person", body.getKey()); + assertEquals(1, ((Map) body.getValue()).get("id")); + assertEquals("John", ((Map) body.getValue()).get("firstName")); + assertEquals("Doe", ((Map) body.getValue()).get("lastName")); + assertEquals("2000-01-01T01:00:00Z", ((Map) body.getValue()).get("birthDate")); + assertNull(body.getTimestamp()); + } + + @Test + void shouldGetByKeyInStringAvroKeyValueStoreFromInteractiveQueriesService() { + StateStoreRecord stateStoreRecord = interactiveQueriesService.getByKey("STRING_AVRO_STORE", "person"); + + assertEquals("person", stateStoreRecord.getKey()); + assertEquals(1L, ((Map) stateStoreRecord.getValue()).get("id")); + assertEquals("John", ((Map) stateStoreRecord.getValue()).get("firstName")); + assertEquals("Doe", ((Map) stateStoreRecord.getValue()).get("lastName")); + assertEquals("2000-01-01T01:00:00Z", ((Map) stateStoreRecord.getValue()).get("birthDate")); + assertNull(stateStoreRecord.getTimestamp()); + } + + @Test + void shouldGetByKeyInStringAvroTimestampedKeyValueStore() throws IOException, InterruptedException { + HttpRequest request = HttpRequest.newBuilder() + .uri(URI.create("http://localhost:8081/store/STRING_AVRO_TIMESTAMPED_STORE/person")) + .GET() + .build(); + + HttpResponse response = httpClient.send(request, HttpResponse.BodyHandlers.ofString()); + StateStoreRecord body = objectMapper.readValue(response.body(), StateStoreRecord.class); + + assertEquals(200, response.statusCode()); + assertEquals("person", body.getKey()); + assertEquals(1, ((Map) body.getValue()).get("id")); + assertEquals("John", ((Map) body.getValue()).get("firstName")); + assertEquals("Doe", ((Map) body.getValue()).get("lastName")); + assertEquals("2000-01-01T01:00:00Z", ((Map) body.getValue()).get("birthDate")); + assertNotNull(body.getTimestamp()); + } + + @Test + void shouldGetAllWrongStore() throws IOException, InterruptedException { + HttpRequest request = HttpRequest.newBuilder() + .uri(URI.create("http://localhost:8081/store/WRONG_STORE/key")) + .GET() + .build(); + + HttpResponse response = httpClient.send(request, HttpResponse.BodyHandlers.ofString()); + + assertEquals(404, response.statusCode()); + assertEquals("State store WRONG_STORE not found", response.body()); + } + + @Test + void shouldGetAllInStringStringKeyValueStore() throws IOException, InterruptedException { + HttpRequest request = HttpRequest.newBuilder() + .uri(URI.create("http://localhost:8081/store/STRING_STRING_STORE")) 
+ .GET() + .build(); + + HttpResponse response = httpClient.send(request, HttpResponse.BodyHandlers.ofString()); + List body = objectMapper.readValue(response.body(), new TypeReference<>() {}); + + assertEquals(200, response.statusCode()); + assertEquals("person", body.get(0).getKey()); + assertEquals("Doe", body.get(0).getValue()); + assertNull(body.get(0).getTimestamp()); + } + + @Test + void shouldGetAllInStringAvroKeyValueStore() throws IOException, InterruptedException { + HttpRequest request = HttpRequest.newBuilder() + .uri(URI.create("http://localhost:8081/store/STRING_AVRO_STORE")) + .GET() + .build(); + + HttpResponse response = httpClient.send(request, HttpResponse.BodyHandlers.ofString()); + List body = objectMapper.readValue(response.body(), new TypeReference<>() {}); + + assertEquals(200, response.statusCode()); + assertEquals("person", body.get(0).getKey()); + assertEquals(1, ((Map) body.get(0).getValue()).get("id")); + assertEquals("John", ((Map) body.get(0).getValue()).get("firstName")); + assertEquals("Doe", ((Map) body.get(0).getValue()).get("lastName")); + assertEquals("2000-01-01T01:00:00Z", ((Map) body.get(0).getValue()).get("birthDate")); + assertNull(body.get(0).getTimestamp()); + } + + @Test + void shouldGetAllInStringAvroKeyValueStoreFromInteractiveQueriesService() { + List stateQueryData = interactiveQueriesService.getAll("STRING_AVRO_STORE"); + + assertEquals("person", stateQueryData.get(0).getKey()); + assertEquals(1L, ((Map) stateQueryData.get(0).getValue()).get("id")); + assertEquals("John", ((Map) stateQueryData.get(0).getValue()).get("firstName")); + assertEquals("Doe", ((Map) stateQueryData.get(0).getValue()).get("lastName")); + assertEquals("2000-01-01T01:00:00Z", ((Map) stateQueryData.get(0).getValue()).get("birthDate")); + assertNull(stateQueryData.get(0).getTimestamp()); + } + + @Test + void shouldGetAllInStringAvroTimestampedKeyValueStore() throws IOException, InterruptedException { + HttpRequest request = HttpRequest.newBuilder() + .uri(URI.create("http://localhost:8081/store/STRING_AVRO_TIMESTAMPED_STORE")) + .GET() + .build(); + + HttpResponse response = httpClient.send(request, HttpResponse.BodyHandlers.ofString()); + List body = objectMapper.readValue(response.body(), new TypeReference<>() {}); + + assertEquals(200, response.statusCode()); + assertEquals("person", body.get(0).getKey()); + assertEquals(1, ((Map) body.get(0).getValue()).get("id")); + assertEquals("John", ((Map) body.get(0).getValue()).get("firstName")); + assertEquals("Doe", ((Map) body.get(0).getValue()).get("lastName")); + assertEquals("2000-01-01T01:00:00Z", ((Map) body.get(0).getValue()).get("birthDate")); + assertNotNull(body.get(0).getTimestamp()); + } + + @Test + void shouldGetAllOnLocalhostInStringStringKeyValueStore() throws IOException, InterruptedException { + HttpRequest request = HttpRequest.newBuilder() + .uri(URI.create("http://localhost:8081/store/local/STRING_STRING_STORE")) + .GET() + .build(); + + HttpResponse response = httpClient.send(request, HttpResponse.BodyHandlers.ofString()); + List body = objectMapper.readValue(response.body(), new TypeReference<>() {}); + + assertEquals(200, response.statusCode()); + assertEquals("person", body.get(0).getKey()); + assertEquals("Doe", body.get(0).getValue()); + assertNull(body.get(0).getTimestamp()); + } + + /** + * Kafka Streams starter implementation for integration tests. + * The topology consumes events from multiple topics (string, Java, Avro) and stores them in dedicated stores + * so that they can be queried. 
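+ * Once the instance is running, the stores can be queried either directly through the
+ * InteractiveQueriesService or through the HTTP server, e.g.
+ * GET http://localhost:8081/store/STRING_STRING_STORE/person, as exercised by the tests above.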
+ */
+ @Slf4j
+ static class KafkaStreamsStarterStub extends KafkaStreamsStarter {
+ @Override
+ public void topology(StreamsBuilder streamsBuilder) {
+ streamsBuilder
+ .stream("STRING_TOPIC", Consumed.with(Serdes.String(), Serdes.String()))
+ .process(new ProcessorSupplier<String, String, String, String>() {
+ @Override
+ public Set<StoreBuilder<?>> stores() {
+ // key-value store
+ StoreBuilder<KeyValueStore<String, String>> stringStringKeyValueStoreBuilder = Stores
+ .keyValueStoreBuilder(
+ Stores.persistentKeyValueStore("STRING_STRING_STORE"),
+ Serdes.String(), Serdes.String());
+
+ return Set.of(
+ stringStringKeyValueStoreBuilder
+ );
+ }
+
+ @Override
+ public Processor<String, String, String, String> get() {
+ return new Processor<>() {
+ private KeyValueStore<String, String> stringStringKeyValueStore;
+
+ @Override
+ public void init(ProcessorContext<String, String> context) {
+ this.stringStringKeyValueStore = context.getStateStore("STRING_STRING_STORE");
+ }
+
+ @Override
+ public void process(Record<String, String> message) {
+ stringStringKeyValueStore.put(message.key(), message.value());
+ }
+ };
+ }
+ });
+
+ streamsBuilder
+ .stream("AVRO_TOPIC", Consumed.with(Serdes.String(), SerdesUtils.<KafkaPersonStub>getValueSerdes()))
+ .process(new ProcessorSupplier<String, KafkaPersonStub, String, KafkaPersonStub>() {
+ @Override
+ public Set<StoreBuilder<?>> stores() {
+ // key-value store
+ StoreBuilder<KeyValueStore<String, KafkaPersonStub>> stringAvroKeyValueStoreBuilder = Stores
+ .keyValueStoreBuilder(
+ Stores.persistentKeyValueStore("STRING_AVRO_STORE"),
+ Serdes.String(), SerdesUtils.<KafkaPersonStub>getValueSerdes());
+
+ // timestamped key-value store
+ StoreBuilder<TimestampedKeyValueStore<String, KafkaPersonStub>> stringAvroTimestampedKeyValueStoreBuilder = Stores
+ .timestampedKeyValueStoreBuilder(
+ Stores.persistentTimestampedKeyValueStore("STRING_AVRO_TIMESTAMPED_STORE"),
+ Serdes.String(), SerdesUtils.<KafkaPersonStub>getValueSerdes());
+
+ // window store
+ StoreBuilder<WindowStore<String, KafkaPersonStub>> stringAvroWindowStoreBuilder =
+ Stores.windowStoreBuilder(
+ Stores.persistentWindowStore("STRING_AVRO_WINDOW_STORE",
+ Duration.ofMinutes(5), Duration.ofMinutes(1), false),
+ Serdes.String(), SerdesUtils.<KafkaPersonStub>getValueSerdes());
+
+ // timestamped window store
+ StoreBuilder<TimestampedWindowStore<String, KafkaPersonStub>>
+ stringAvroTimestampedWindowStoreBuilder = Stores.timestampedWindowStoreBuilder(
+ Stores.persistentTimestampedWindowStore("STRING_AVRO_TIMESTAMPED_WINDOW_STORE",
+ Duration.ofMinutes(5), Duration.ofMinutes(1), false),
+ Serdes.String(), SerdesUtils.<KafkaPersonStub>getValueSerdes());
+
+ return Set.of(
+ stringAvroKeyValueStoreBuilder,
+ stringAvroTimestampedKeyValueStoreBuilder,
+ stringAvroWindowStoreBuilder,
+ stringAvroTimestampedWindowStoreBuilder
+ );
+ }
+
+ @Override
+ public Processor<String, KafkaPersonStub, String, KafkaPersonStub> get() {
+ return new Processor<>() {
+ private KeyValueStore<String, KafkaPersonStub> stringAvroKeyValueStore;
+ private TimestampedKeyValueStore<String, KafkaPersonStub>
+ stringAvroTimestampedKeyValueStore;
+ private WindowStore<String, KafkaPersonStub> stringAvroWindowStore;
+ private TimestampedWindowStore<String, KafkaPersonStub> stringAvroTimestampedWindowStore;
+
+ @Override
+ public void init(ProcessorContext<String, KafkaPersonStub> context) {
+ this.stringAvroKeyValueStore = context
+ .getStateStore("STRING_AVRO_STORE");
+
+ this.stringAvroTimestampedKeyValueStore = context
+ .getStateStore("STRING_AVRO_TIMESTAMPED_STORE");
+
+ this.stringAvroWindowStore = context
+ .getStateStore("STRING_AVRO_WINDOW_STORE");
+
+ this.stringAvroTimestampedWindowStore = context
+ .getStateStore("STRING_AVRO_TIMESTAMPED_WINDOW_STORE");
+ }
+
+ @Override
+ public void process(Record<String, KafkaPersonStub> message) {
+ stringAvroKeyValueStore.put(message.key(), message.value());
+ stringAvroTimestampedKeyValueStore.put(message.key(),
+ ValueAndTimestamp.make(message.value(), message.timestamp()));
+ stringAvroWindowStore.put(message.key(), message.value(), message.timestamp());
+ stringAvroTimestampedWindowStore.put(message.key(),
+ ValueAndTimestamp.make(message.value(), message.timestamp()),
+ message.timestamp());
+ }
+ };
+ }
+ });
+ }
+
+ @Override
+ public String dlqTopic() {
+ return "DLQ_TOPIC";
+ }
+
+ @Override
+ public void onStart(KafkaStreams kafkaStreams) {
+ kafkaStreams.cleanUp();
+ }
+ }
+}
diff --git a/kstreamplify-core/src/test/java/com/michelin/kstreamplify/integration/KafkaIntegrationTest.java b/kstreamplify-core/src/test/java/com/michelin/kstreamplify/integration/KafkaIntegrationTest.java
new file mode 100644
index 00000000..4cf24774
--- /dev/null
+++ b/kstreamplify-core/src/test/java/com/michelin/kstreamplify/integration/KafkaIntegrationTest.java
@@ -0,0 +1,137 @@
+package com.michelin.kstreamplify.integration;
+
+import static io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG;
+import static org.apache.kafka.streams.StreamsConfig.BOOTSTRAP_SERVERS_CONFIG;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.michelin.kstreamplify.context.KafkaStreamsExecutionContext;
+import com.michelin.kstreamplify.initializer.KafkaStreamsInitializer;
+import java.net.http.HttpClient;
+import java.util.Arrays;
+import java.util.Map;
+import lombok.AllArgsConstructor;
+import lombok.extern.slf4j.Slf4j;
+import org.apache.kafka.clients.admin.AdminClient;
+import org.apache.kafka.clients.admin.NewTopic;
+import org.apache.kafka.common.TopicPartition;
+import org.apache.kafka.streams.KafkaStreams;
+import org.apache.kafka.streams.LagInfo;
+import org.testcontainers.containers.GenericContainer;
+import org.testcontainers.containers.KafkaContainer;
+import org.testcontainers.containers.Network;
+import org.testcontainers.containers.wait.strategy.Wait;
+import org.testcontainers.junit.jupiter.Container;
+import org.testcontainers.utility.DockerImageName;
+
+@Slf4j
+abstract class KafkaIntegrationTest {
+ protected static final String CONFLUENT_PLATFORM_VERSION = "7.6.1";
+ protected static final Network NETWORK = Network.newNetwork();
+ protected final HttpClient httpClient = HttpClient.newBuilder().build();
+ protected final ObjectMapper objectMapper = new ObjectMapper();
+ protected static KafkaStreamsInitializer initializer;
+
+ @Container
+ static KafkaContainer broker = new KafkaContainer(DockerImageName
+ .parse("confluentinc/cp-kafka:" + CONFLUENT_PLATFORM_VERSION))
+ .withNetwork(NETWORK)
+ .withNetworkAliases("broker")
+ .withKraft();
+
+ @Container
+ static GenericContainer<?> schemaRegistry = new GenericContainer<>(DockerImageName
+ .parse("confluentinc/cp-schema-registry:" + CONFLUENT_PLATFORM_VERSION))
+ .dependsOn(broker)
+ .withNetwork(NETWORK)
+ .withNetworkAliases("schema-registry")
+ .withExposedPorts(8081)
+ .withEnv("SCHEMA_REGISTRY_HOST_NAME", "schema-registry")
+ .withEnv("SCHEMA_REGISTRY_LISTENERS", "http://0.0.0.0:8081")
+ .withEnv("SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS", "PLAINTEXT://broker:9092")
+ .waitingFor(Wait.forHttp("/subjects").forStatusCode(200));
+
+ protected static void createTopics(String bootstrapServers, TopicPartition...
topicPartitions) {
+ var newTopics = Arrays.stream(topicPartitions)
+ .map(topicPartition -> new NewTopic(topicPartition.topic(), topicPartition.partition(), (short) 1))
+ .toList();
+ try (var admin = AdminClient.create(Map.of(BOOTSTRAP_SERVERS_CONFIG, bootstrapServers))) {
+ admin.createTopics(newTopics);
+ }
+ }
+
+ protected void waitingForKafkaStreamsToStart() throws InterruptedException {
+ while (!initializer.getKafkaStreams().state().equals(KafkaStreams.State.RUNNING)) {
+ log.info("Waiting for Kafka Streams to start...");
+ Thread.sleep(2000); // NOSONAR
+ }
+ }
+
+ protected void waitingForLocalStoreToReachOffset(Map<String, Map<Integer, Long>> topicPartitionOffset)
+ throws InterruptedException {
+
+ while (hasLag(topicPartitionOffset)) {
+ log.info("Waiting for local stores {} to reach offsets", topicPartitionOffset.keySet().stream().toList());
+ Thread.sleep(5000); // NOSONAR
+ }
+ }
+
+ private boolean hasLag(Map<String, Map<Integer, Long>> topicPartitionOffset) {
+ Map<String, Map<Integer, LagInfo>> currentLag = initializer.getKafkaStreams().allLocalStorePartitionLags();
+
+ return !topicPartitionOffset.entrySet()
+ .stream()
+ .allMatch(topicPartitionOffsetEntry -> topicPartitionOffsetEntry.getValue().entrySet()
+ .stream()
+ .anyMatch(partitionOffsetEntry -> currentLag.get(topicPartitionOffsetEntry.getKey())
+ .get(partitionOffsetEntry.getKey())
+ .currentOffsetPosition() == partitionOffsetEntry.getValue()));
+ }
+
+ /**
+ * Define a KafkaStreamsInitializer stub for testing.
+ * This stub allows overriding properties of the application configuration file
+ * or setting properties dynamically from Testcontainers.
+ */
+ @AllArgsConstructor
+ static class KafkaStreamInitializerStub extends KafkaStreamsInitializer {
+ private Integer newServerPort;
+ private String applicationId;
+ private String bootstrapServers;
+ private String schemaRegistryUrl;
+ private String stateDir;
+
+ public KafkaStreamInitializerStub(String bootstrapServers) {
+ this.bootstrapServers = bootstrapServers;
+ }
+
+ /**
+ * Override properties of the application configuration file.
+ * Some properties are set dynamically from Testcontainers.
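+ * In this stub, the bootstrap servers, the HTTP server port, the application ID,
+ * the schema registry URL and the state directory can all be overridden before the
+ * Kafka Streams instance starts.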
+ */ + @Override + protected void initProperties() { + super.initProperties(); + KafkaStreamsExecutionContext.getProperties() + .setProperty(BOOTSTRAP_SERVERS_CONFIG, bootstrapServers); + + if (newServerPort != null) { + serverPort = newServerPort; + } + + if (applicationId != null) { + KafkaStreamsExecutionContext.getProperties() + .setProperty("application.id", applicationId); + } + + if (schemaRegistryUrl != null) { + KafkaStreamsExecutionContext.getProperties() + .setProperty(SCHEMA_REGISTRY_URL_CONFIG, schemaRegistryUrl); + } + + if (stateDir != null) { + KafkaStreamsExecutionContext.getProperties() + .setProperty("state.dir", stateDir); + } + } + } +} diff --git a/kstreamplify-core/src/test/java/com/michelin/kstreamplify/integrations/KafkaStreamsInitializerIntegrationTest.java b/kstreamplify-core/src/test/java/com/michelin/kstreamplify/integration/KafkaStreamsInitializerIntegrationTest.java similarity index 59% rename from kstreamplify-core/src/test/java/com/michelin/kstreamplify/integrations/KafkaStreamsInitializerIntegrationTest.java rename to kstreamplify-core/src/test/java/com/michelin/kstreamplify/integration/KafkaStreamsInitializerIntegrationTest.java index b4cabb1e..3d4d1425 100644 --- a/kstreamplify-core/src/test/java/com/michelin/kstreamplify/integrations/KafkaStreamsInitializerIntegrationTest.java +++ b/kstreamplify-core/src/test/java/com/michelin/kstreamplify/integration/KafkaStreamsInitializerIntegrationTest.java @@ -1,29 +1,24 @@ -package com.michelin.kstreamplify.integrations; +package com.michelin.kstreamplify.integration; -import static org.apache.kafka.streams.StreamsConfig.BOOTSTRAP_SERVERS_CONFIG; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertTrue; import com.michelin.kstreamplify.context.KafkaStreamsExecutionContext; -import com.michelin.kstreamplify.initializer.KafkaStreamsInitializer; import com.michelin.kstreamplify.initializer.KafkaStreamsStarter; import java.io.IOException; import java.net.URI; -import java.net.http.HttpClient; import java.net.http.HttpRequest; import java.net.http.HttpResponse; import java.util.ArrayList; -import java.util.Arrays; import java.util.List; -import java.util.Map; +import java.util.Set; import lombok.extern.slf4j.Slf4j; -import org.apache.kafka.clients.admin.AdminClient; -import org.apache.kafka.clients.admin.NewTopic; import org.apache.kafka.common.TopicPartition; import org.apache.kafka.streams.KafkaStreams; import org.apache.kafka.streams.StreamsBuilder; import org.apache.kafka.streams.StreamsMetadata; import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.testcontainers.containers.KafkaContainer; import org.testcontainers.junit.jupiter.Container; @@ -32,27 +27,26 @@ @Slf4j @Testcontainers -class KafkaStreamsInitializerIntegrationTest { - private final KafkaStreamsInitializer initializer = new KafkaStreamInitializerImpl(); - - private final HttpClient httpClient = HttpClient.newBuilder().build(); - - @Container - static KafkaContainer kafka = new KafkaContainer(DockerImageName - .parse("confluentinc/cp-kafka:7.4.0")) - .withKraft(); - +class KafkaStreamsInitializerIntegrationTest extends KafkaIntegrationTest { @BeforeAll - static void setUp() { - createTopics("inputTopic", "outputTopic"); + static void globalSetUp() { + createTopics( + broker.getBootstrapServers(), + new TopicPartition("INPUT_TOPIC", 2), + new TopicPartition("OUTPUT_TOPIC", 2) + ); + + initializer = new 
KafkaStreamInitializerStub(broker.getBootstrapServers()); + initializer.init(new KafkaStreamsStarterStub()); + } + + @BeforeEach + void setUp() throws InterruptedException { + waitingForKafkaStreamsToStart(); } @Test void shouldInitAndRun() throws InterruptedException, IOException { - initializer.init(new KafkaStreamsStarterImpl()); - - waitingForKafkaStreamsToRun(); - assertEquals(KafkaStreams.State.RUNNING, initializer.getKafkaStreams().state()); List streamsMetadata = @@ -63,12 +57,14 @@ void shouldInitAndRun() throws InterruptedException, IOException { assertEquals(8080, streamsMetadata.get(0).hostInfo().port()); assertTrue(streamsMetadata.get(0).stateStoreNames().isEmpty()); - List topicPartitions = streamsMetadata.get(0).topicPartitions().stream().toList(); + Set topicPartitions = streamsMetadata.get(0).topicPartitions(); - assertEquals("inputTopic", topicPartitions.get(0).topic()); - assertEquals(0, topicPartitions.get(0).partition()); + assertTrue(Set.of( + new TopicPartition("INPUT_TOPIC", 0), + new TopicPartition("INPUT_TOPIC", 1) + ).containsAll(topicPartitions)); - assertEquals("dlqTopic", KafkaStreamsExecutionContext.getDlqTopicName()); + assertEquals("DLQ_TOPIC", KafkaStreamsExecutionContext.getDlqTopicName()); assertEquals("org.apache.kafka.common.serialization.Serdes$StringSerde", KafkaStreamsExecutionContext.getSerdesConfig().get("default.key.serde")); assertEquals("org.apache.kafka.common.serialization.Serdes$StringSerde", @@ -107,51 +103,26 @@ void shouldInitAndRun() throws InterruptedException, IOException { assertEquals(""" Topologies: Sub-topology: 0 - Source: KSTREAM-SOURCE-0000000000 (topics: [inputTopic]) + Source: KSTREAM-SOURCE-0000000000 (topics: [INPUT_TOPIC]) --> KSTREAM-SINK-0000000001 - Sink: KSTREAM-SINK-0000000001 (topic: outputTopic) + Sink: KSTREAM-SINK-0000000001 (topic: OUTPUT_TOPIC) <-- KSTREAM-SOURCE-0000000000 """, responseTopology.body()); } - private void waitingForKafkaStreamsToRun() throws InterruptedException { - while (!initializer.getKafkaStreams().state().equals(KafkaStreams.State.RUNNING)) { - log.info("Waiting for Kafka Streams to start..."); - Thread.sleep(2000); - } - } - - private static void createTopics(String... 
topics) { - var newTopics = Arrays.stream(topics) - .map(topic -> new NewTopic(topic, 1, (short) 1)) - .toList(); - try (var admin = AdminClient.create(Map.of(BOOTSTRAP_SERVERS_CONFIG, kafka.getBootstrapServers()))) { - admin.createTopics(newTopics); - } - } - - static class KafkaStreamInitializerImpl extends KafkaStreamsInitializer { - @Override - protected void initProperties() { - super.initProperties(); - KafkaStreamsExecutionContext.getProperties() - .setProperty(BOOTSTRAP_SERVERS_CONFIG, kafka.getBootstrapServers()); - } - } - @Slf4j - static class KafkaStreamsStarterImpl extends KafkaStreamsStarter { + static class KafkaStreamsStarterStub extends KafkaStreamsStarter { @Override public void topology(StreamsBuilder streamsBuilder) { streamsBuilder - .stream("inputTopic") - .to("outputTopic"); + .stream("INPUT_TOPIC") + .to("OUTPUT_TOPIC"); } @Override public String dlqTopic() { - return "dlqTopic"; + return "DLQ_TOPIC"; } @Override diff --git a/kstreamplify-core/src/test/java/com/michelin/kstreamplify/properties/PropertiesUtilsTest.java b/kstreamplify-core/src/test/java/com/michelin/kstreamplify/property/PropertiesUtilsTest.java similarity index 94% rename from kstreamplify-core/src/test/java/com/michelin/kstreamplify/properties/PropertiesUtilsTest.java rename to kstreamplify-core/src/test/java/com/michelin/kstreamplify/property/PropertiesUtilsTest.java index 7e61a16d..aa86a470 100644 --- a/kstreamplify-core/src/test/java/com/michelin/kstreamplify/properties/PropertiesUtilsTest.java +++ b/kstreamplify-core/src/test/java/com/michelin/kstreamplify/property/PropertiesUtilsTest.java @@ -1,4 +1,4 @@ -package com.michelin.kstreamplify.properties; +package com.michelin.kstreamplify.property; import static org.junit.jupiter.api.Assertions.assertTrue; diff --git a/kstreamplify-core/src/test/java/com/michelin/kstreamplify/properties/RocksDbConfigTest.java b/kstreamplify-core/src/test/java/com/michelin/kstreamplify/property/RocksDbConfigTest.java similarity index 69% rename from kstreamplify-core/src/test/java/com/michelin/kstreamplify/properties/RocksDbConfigTest.java rename to kstreamplify-core/src/test/java/com/michelin/kstreamplify/property/RocksDbConfigTest.java index 7c908973..43c86ad1 100644 --- a/kstreamplify-core/src/test/java/com/michelin/kstreamplify/properties/RocksDbConfigTest.java +++ b/kstreamplify-core/src/test/java/com/michelin/kstreamplify/property/RocksDbConfigTest.java @@ -1,11 +1,11 @@ -package com.michelin.kstreamplify.properties; +package com.michelin.kstreamplify.property; import static org.mockito.Mockito.any; -import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; import com.michelin.kstreamplify.context.KafkaStreamsExecutionContext; +import com.michelin.kstreamplify.store.RocksDbConfig; import java.util.HashMap; import java.util.Map; import java.util.Properties; @@ -13,7 +13,6 @@ import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.ExtendWith; import org.mockito.Mock; -import org.mockito.MockitoAnnotations; import org.mockito.junit.jupiter.MockitoExtension; import org.rocksdb.BlockBasedTableConfig; import org.rocksdb.CompressionType; @@ -21,7 +20,6 @@ @ExtendWith(MockitoExtension.class) class RocksDbConfigTest { - @Mock private Options options; @@ -32,25 +30,21 @@ void setUp() { @Test void testSetConfigWithDefaultValues() { - // Arrange Map configs = new HashMap<>(); RocksDbConfig rocksDbConfig = new RocksDbConfig(); KafkaStreamsExecutionContext.registerProperties(new 
Properties()); - // Act rocksDbConfig.setConfig("storeName", options, configs); - // Assert - verify(options, times(1)).tableFormatConfig(); - verify(options, times(1)).setTableFormatConfig(any()); - verify(options, times(1)).setMaxWriteBufferNumber(RocksDbConfig.ROCKSDB_MAX_WRITE_BUFFER_DEFAULT); - verify(options, times(1)).setWriteBufferSize(RocksDbConfig.ROCKSDB_WRITE_BUFFER_SIZE_DEFAULT); - verify(options, times(1)).setCompressionType(CompressionType.NO_COMPRESSION); + verify(options).tableFormatConfig(); + verify(options).setTableFormatConfig(any()); + verify(options).setMaxWriteBufferNumber(RocksDbConfig.ROCKSDB_MAX_WRITE_BUFFER_DEFAULT); + verify(options).setWriteBufferSize(RocksDbConfig.ROCKSDB_WRITE_BUFFER_SIZE_DEFAULT); + verify(options).setCompressionType(CompressionType.NO_COMPRESSION); } @Test void testSetConfigWithCustomValues() { - // Arrange long cacheSize = 64 * 1024L * 1024L; long writeBufferSize = 8 * 1024L * 1024L; long blockSize = 8 * 1024L; @@ -71,14 +65,12 @@ void testSetConfigWithCustomValues() { RocksDbConfig rocksDbConfig = new RocksDbConfig(); - // Act rocksDbConfig.setConfig("storeName", options, configs); - // Assert - verify(options, times(1)).tableFormatConfig(); - verify(options, times(1)).setTableFormatConfig(any()); - verify(options, times(1)).setMaxWriteBufferNumber(maxWriteBuffer); - verify(options, times(1)).setWriteBufferSize(writeBufferSize); - verify(options, times(1)).setCompressionType(CompressionType.getCompressionType(compressionType)); + verify(options).tableFormatConfig(); + verify(options).setTableFormatConfig(any()); + verify(options).setMaxWriteBufferNumber(maxWriteBuffer); + verify(options).setWriteBufferSize(writeBufferSize); + verify(options).setCompressionType(CompressionType.getCompressionType(compressionType)); } } diff --git a/kstreamplify-core/src/test/java/com/michelin/kstreamplify/rest/DefaultProbeControllerTest.java b/kstreamplify-core/src/test/java/com/michelin/kstreamplify/rest/DefaultProbeControllerTest.java deleted file mode 100644 index 78ab4bc5..00000000 --- a/kstreamplify-core/src/test/java/com/michelin/kstreamplify/rest/DefaultProbeControllerTest.java +++ /dev/null @@ -1,17 +0,0 @@ -package com.michelin.kstreamplify.rest; - -import static org.junit.jupiter.api.Assertions.assertNotEquals; -import static org.junit.jupiter.api.Assertions.assertNotNull; - -import com.michelin.kstreamplify.initializer.KafkaStreamsInitializer; -import org.junit.jupiter.api.Test; - -class DefaultProbeControllerTest { - @Test - void shouldCreateServerWithDefaultHostAndPort() { - DefaultProbeController controller = new DefaultProbeController(new KafkaStreamsInitializer()); - - assertNotNull(controller.server.getAddress().getAddress().getHostName()); - assertNotEquals(0, controller.server.getAddress().getPort()); - } -} diff --git a/kstreamplify-core/src/test/java/com/michelin/kstreamplify/utils/TopicWithSerdesTest.java b/kstreamplify-core/src/test/java/com/michelin/kstreamplify/serde/TopicWithSerdeTest.java similarity index 95% rename from kstreamplify-core/src/test/java/com/michelin/kstreamplify/utils/TopicWithSerdesTest.java rename to kstreamplify-core/src/test/java/com/michelin/kstreamplify/serde/TopicWithSerdeTest.java index e068e03c..db4cf32b 100644 --- a/kstreamplify-core/src/test/java/com/michelin/kstreamplify/utils/TopicWithSerdesTest.java +++ b/kstreamplify-core/src/test/java/com/michelin/kstreamplify/serde/TopicWithSerdeTest.java @@ -1,16 +1,14 @@ -package com.michelin.kstreamplify.utils; +package com.michelin.kstreamplify.serde; import 
static org.junit.jupiter.api.Assertions.assertEquals; -import static org.junit.jupiter.api.Assertions.assertNotNull; import com.michelin.kstreamplify.context.KafkaStreamsExecutionContext; import java.util.Properties; import org.apache.kafka.common.serialization.Serdes; import org.apache.kafka.streams.StreamsBuilder; -import org.apache.kafka.streams.kstream.KStream; import org.junit.jupiter.api.Test; -class TopicWithSerdesTest { +class TopicWithSerdeTest { @Test void shouldCreateTopicWithSerde() { diff --git a/kstreamplify-core/src/test/java/com/michelin/kstreamplify/server/KafkaStreamsHttpServerTest.java b/kstreamplify-core/src/test/java/com/michelin/kstreamplify/server/KafkaStreamsHttpServerTest.java new file mode 100644 index 00000000..612044b1 --- /dev/null +++ b/kstreamplify-core/src/test/java/com/michelin/kstreamplify/server/KafkaStreamsHttpServerTest.java @@ -0,0 +1,18 @@ +package com.michelin.kstreamplify.server; + +import static org.junit.jupiter.api.Assertions.assertNotEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; + +import com.michelin.kstreamplify.initializer.KafkaStreamsInitializer; +import org.junit.jupiter.api.Test; + +class KafkaStreamsHttpServerTest { + @Test + void shouldCreateServerWithDefaultHostAndPort() { + KafkaStreamsHttpServer server = new KafkaStreamsHttpServer(new KafkaStreamsInitializer()); + server.start(); + + assertNotNull(server.server.getAddress().getHostName()); + assertNotEquals(0, server.server.getAddress().getPort()); + } +} diff --git a/kstreamplify-core/src/test/java/com/michelin/kstreamplify/service/InteractiveQueriesServiceTest.java b/kstreamplify-core/src/test/java/com/michelin/kstreamplify/service/InteractiveQueriesServiceTest.java new file mode 100644 index 00000000..f967f75a --- /dev/null +++ b/kstreamplify-core/src/test/java/com/michelin/kstreamplify/service/InteractiveQueriesServiceTest.java @@ -0,0 +1,451 @@ +package com.michelin.kstreamplify.service; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertIterableEquals; +import static org.junit.jupiter.api.Assertions.assertThrows; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.doCallRealMethod; +import static org.mockito.Mockito.when; + +import com.michelin.kstreamplify.exception.OtherInstanceResponseException; +import com.michelin.kstreamplify.exception.UnknownKeyException; +import com.michelin.kstreamplify.initializer.KafkaStreamsInitializer; +import com.michelin.kstreamplify.store.StateStoreRecord; +import java.net.http.HttpClient; +import java.net.http.HttpResponse; +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.CompletableFuture; +import org.apache.kafka.common.serialization.Serializer; +import org.apache.kafka.streams.KafkaStreams; +import org.apache.kafka.streams.KeyQueryMetadata; +import org.apache.kafka.streams.KeyValue; +import org.apache.kafka.streams.StreamsMetadata; +import org.apache.kafka.streams.errors.StreamsNotStartedException; +import org.apache.kafka.streams.errors.UnknownStateStoreException; +import org.apache.kafka.streams.query.QueryResult; +import org.apache.kafka.streams.query.StateQueryRequest; +import org.apache.kafka.streams.query.StateQueryResult; +import 
org.apache.kafka.streams.state.HostInfo;
+import org.apache.kafka.streams.state.KeyValueIterator;
+import org.apache.kafka.streams.state.ValueAndTimestamp;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
+import org.mockito.ArgumentMatchers;
+import org.mockito.InjectMocks;
+import org.mockito.Mock;
+import org.mockito.junit.jupiter.MockitoExtension;
+
+@ExtendWith(MockitoExtension.class)
+class InteractiveQueriesServiceTest {
+ private static final String STREAMS_NOT_STARTED = "Cannot process request while instance is in REBALANCING state";
+
+ @Mock
+ private KafkaStreamsInitializer kafkaStreamsInitializer;
+
+ @Mock
+ private HttpClient httpClient;
+
+ @Mock
+ private StreamsMetadata streamsMetadata;
+
+ @Mock
+ private KafkaStreams kafkaStreams;
+
+ @Mock
+ private StateQueryResult<KeyValueIterator<String, ValueAndTimestamp<Object>>> stateRangeQueryResult;
+
+ @Mock
+ private KeyValueIterator<String, ValueAndTimestamp<Object>> iterator;
+
+ @Mock
+ private StateQueryResult<ValueAndTimestamp<Object>> stateKeyQueryResult;
+
+ @Mock
+ private HttpResponse<String> httpResponse;
+
+ @InjectMocks
+ private InteractiveQueriesService interactiveQueriesService;
+
+ @Test
+ void shouldConstructInteractiveQueriesService() {
+ InteractiveQueriesService service = new InteractiveQueriesService(kafkaStreamsInitializer);
+ assertEquals(kafkaStreamsInitializer, service.getKafkaStreamsInitializer());
+ }
+
+ @Test
+ void shouldNotGetStoresWhenStreamsIsNotStarted() {
+ when(kafkaStreamsInitializer.isNotRunning())
+ .thenReturn(true);
+
+ when(kafkaStreamsInitializer.getKafkaStreams())
+ .thenReturn(kafkaStreams);
+
+ when(kafkaStreams.state())
+ .thenReturn(KafkaStreams.State.REBALANCING);
+
+ StreamsNotStartedException exception = assertThrows(StreamsNotStartedException.class,
+ () -> interactiveQueriesService.getStateStores());
+
+ assertEquals(STREAMS_NOT_STARTED, exception.getMessage());
+ }
+
+ @Test
+ void shouldGetStores() {
+ when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
+ when(kafkaStreams.metadataForAllStreamsClients()).thenReturn(List.of(streamsMetadata));
+ when(streamsMetadata.stateStoreNames()).thenReturn(Set.of("store1", "store2"));
+
+ Set<String> stores = interactiveQueriesService.getStateStores();
+
+ assertTrue(stores.contains("store1"));
+ assertTrue(stores.contains("store2"));
+ }
+
+ @Test
+ void shouldGetStoresWhenNull() {
+ when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
+ when(kafkaStreams.metadataForAllStreamsClients()).thenReturn(null);
+
+ Set<String> stores = interactiveQueriesService.getStateStores();
+
+ assertTrue(stores.isEmpty());
+ }
+
+ @Test
+ void shouldGetStoresWhenEmpty() {
+ when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
+ when(kafkaStreams.metadataForAllStreamsClients()).thenReturn(Collections.emptyList());
+
+ Set<String> stores = interactiveQueriesService.getStateStores();
+
+ assertTrue(stores.isEmpty());
+ }
+
+ @Test
+ void shouldNotGetStreamsMetadataForStoreWhenStreamsIsNotStarted() {
+ when(kafkaStreamsInitializer.isNotRunning())
+ .thenReturn(true);
+
+ when(kafkaStreamsInitializer.getKafkaStreams())
+ .thenReturn(kafkaStreams);
+
+ when(kafkaStreams.state())
+ .thenReturn(KafkaStreams.State.REBALANCING);
+
+ StreamsNotStartedException exception = assertThrows(StreamsNotStartedException.class,
+ () -> interactiveQueriesService.getStreamsMetadataForStore("store"));
+
+ assertEquals(STREAMS_NOT_STARTED, exception.getMessage());
+ }
+
+ @Test
+ void shouldGetStreamsMetadataForStore() {
+ when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
+
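+ // The service is expected to simply delegate to KafkaStreams#streamsMetadataForStore here.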
when(kafkaStreams.streamsMetadataForStore(any())).thenReturn(List.of(streamsMetadata));
+
+ Collection<StreamsMetadata> streamsMetadataResponse = interactiveQueriesService
+ .getStreamsMetadataForStore("store");
+
+ assertIterableEquals(List.of(streamsMetadata), streamsMetadataResponse);
+ }
+
+ @Test
+ void shouldNotGetAllWhenStreamsIsNotStarted() {
+ when(kafkaStreamsInitializer.isNotRunning())
+ .thenReturn(true);
+
+ when(kafkaStreamsInitializer.getKafkaStreams())
+ .thenReturn(kafkaStreams);
+
+ when(kafkaStreams.state())
+ .thenReturn(KafkaStreams.State.REBALANCING);
+
+ StreamsNotStartedException exception = assertThrows(StreamsNotStartedException.class,
+ () -> interactiveQueriesService.getAll("store"));
+
+ assertEquals(STREAMS_NOT_STARTED, exception.getMessage());
+ }
+
+ @Test
+ void shouldGetAllThrowsUnknownStoreExceptionWhenMetadataNull() {
+ when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
+ when(kafkaStreams.streamsMetadataForStore(any())).thenReturn(null);
+
+ assertThrows(UnknownStateStoreException.class, () -> interactiveQueriesService.getAll("store"));
+ }
+
+ @Test
+ void shouldGetAllThrowsUnknownStoreExceptionWhenMetadataEmpty() {
+ when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
+ when(kafkaStreams.streamsMetadataForStore(any())).thenReturn(Collections.emptyList());
+
+ assertThrows(UnknownStateStoreException.class, () -> interactiveQueriesService.getAll("store"));
+ }
+
+ @Test
+ void shouldGetAllCurrentInstance() {
+ when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
+ when(kafkaStreams.streamsMetadataForStore(any())).thenReturn(List.of(streamsMetadata));
+
+ HostInfo hostInfo = new HostInfo("localhost", 8080);
+ when(streamsMetadata.hostInfo()).thenReturn(hostInfo);
+ when(kafkaStreamsInitializer.getHostInfo()).thenReturn(hostInfo);
+
+ when(kafkaStreams.query(ArgumentMatchers.<StateQueryRequest<KeyValueIterator<String, ValueAndTimestamp<Object>>>>any())).thenReturn(stateRangeQueryResult);
+
+ QueryResult<KeyValueIterator<String, ValueAndTimestamp<Object>>> queryResult = QueryResult.forResult(iterator);
+ when(stateRangeQueryResult.getPartitionResults()).thenReturn(Map.of(0, queryResult));
+ doCallRealMethod().when(iterator).forEachRemaining(any());
+ when(iterator.hasNext())
+ .thenReturn(true)
+ .thenReturn(false);
+
+ PersonStub personStub = new PersonStub("John", "Doe");
+ when(iterator.next())
+ .thenReturn(KeyValue.pair("key", ValueAndTimestamp.make(personStub, 150L)));
+
+ List<StateStoreRecord> responses = interactiveQueriesService.getAll("store");
+
+ assertEquals("key", responses.get(0).getKey());
+ assertEquals("John", ((Map) responses.get(0).getValue()).get("firstName"));
+ assertEquals("Doe", ((Map) responses.get(0).getValue()).get("lastName"));
+ assertEquals(150L, responses.get(0).getTimestamp());
+ }
+
+ @Test
+ void shouldGetAllOtherInstance() {
+ when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
+ when(kafkaStreams.streamsMetadataForStore(any())).thenReturn(List.of(streamsMetadata));
+
+ HostInfo hostInfo = new HostInfo("localhost", 8080);
+ when(streamsMetadata.hostInfo()).thenReturn(hostInfo);
+
+ HostInfo anotherHostInfo = new HostInfo("anotherHost", 8080);
+ when(kafkaStreamsInitializer.getHostInfo()).thenReturn(anotherHostInfo);
+
+ when(httpClient.sendAsync(any(), eq(HttpResponse.BodyHandlers.ofString())))
+ .thenReturn(CompletableFuture.completedFuture(httpResponse));
+ when(httpResponse.body()).thenReturn("""
+ [
+ {
+ "key": "key",
+ "value": {
+ "firstName": "John",
+ "lastName": "Doe"
+ },
+ "timestamp": 150
+ }
+ ]""");
+
+ List<StateStoreRecord> responses = interactiveQueriesService.getAll("store");
+
+ assertEquals("key", responses.get(0).getKey());
+ assertEquals("John", ((Map) responses.get(0).getValue()).get("firstName"));
+ assertEquals("Doe", ((Map) responses.get(0).getValue()).get("lastName"));
+ assertEquals(150L, responses.get(0).getTimestamp());
+ }
+
+ @Test
+ void shouldGetAllOnLocalhostThrowsUnknownStoreExceptionWhenMetadataNull() {
+ when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
+ when(kafkaStreams.streamsMetadataForStore(any())).thenReturn(null);
+
+ assertThrows(UnknownStateStoreException.class, () -> interactiveQueriesService.getAllOnLocalhost("store"));
+ }
+
+ @Test
+ void shouldGetAllOnLocalhostThrowsUnknownStoreExceptionWhenMetadataEmpty() {
+ when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
+ when(kafkaStreams.streamsMetadataForStore(any())).thenReturn(Collections.emptyList());
+
+ assertThrows(UnknownStateStoreException.class, () -> interactiveQueriesService.getAllOnLocalhost("store"));
+ }
+
+ @Test
+ void shouldGetAllOnLocalhost() {
+ when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
+ when(kafkaStreams.streamsMetadataForStore(any())).thenReturn(List.of(streamsMetadata));
+
+ when(kafkaStreams.query(ArgumentMatchers.<StateQueryRequest<KeyValueIterator<String, ValueAndTimestamp<Object>>>>any())).thenReturn(stateRangeQueryResult);
+
+ QueryResult<KeyValueIterator<String, ValueAndTimestamp<Object>>> queryResult = QueryResult.forResult(iterator);
+ when(stateRangeQueryResult.getPartitionResults()).thenReturn(Map.of(0, queryResult));
+ doCallRealMethod().when(iterator).forEachRemaining(any());
+ when(iterator.hasNext())
+ .thenReturn(true)
+ .thenReturn(false);
+
+ PersonStub personStub = new PersonStub("John", "Doe");
+ when(iterator.next())
+ .thenReturn(KeyValue.pair("key", ValueAndTimestamp.make(personStub, 150L)));
+
+ List<StateStoreRecord> responses = interactiveQueriesService.getAllOnLocalhost("store");
+
+ assertEquals("key", responses.get(0).getKey());
+ assertEquals("John", ((Map) responses.get(0).getValue()).get("firstName"));
+ assertEquals("Doe", ((Map) responses.get(0).getValue()).get("lastName"));
+ assertEquals(150L, responses.get(0).getTimestamp());
+ }
+
+ @Test
+ void shouldHandleRuntimeExceptionWhenGettingAllOtherInstance() {
+ when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
+ when(kafkaStreams.streamsMetadataForStore(any())).thenReturn(List.of(streamsMetadata));
+
+ HostInfo hostInfo = new HostInfo("localhost", 8080);
+ when(streamsMetadata.hostInfo()).thenReturn(hostInfo);
+
+ HostInfo anotherHostInfo = new HostInfo("anotherHost", 8080);
+ when(kafkaStreamsInitializer.getHostInfo()).thenReturn(anotherHostInfo);
+
+ when(httpClient.sendAsync(any(), eq(HttpResponse.BodyHandlers.ofString())))
+ .thenThrow(new RuntimeException("Error"));
+
+ OtherInstanceResponseException exception = assertThrows(OtherInstanceResponseException.class,
+ () -> interactiveQueriesService.getAll("store"));
+
+ assertEquals("Fail to read other instance response", exception.getMessage());
+ }
+
+ @Test
+ void shouldNotGetByKeyWhenStreamsIsNotStarted() {
+ when(kafkaStreamsInitializer.isNotRunning())
+ .thenReturn(true);
+
+ when(kafkaStreamsInitializer.getKafkaStreams())
+ .thenReturn(kafkaStreams);
+
+ when(kafkaStreams.state())
+ .thenReturn(KafkaStreams.State.REBALANCING);
+
+ StreamsNotStartedException exception = assertThrows(StreamsNotStartedException.class,
+ () -> interactiveQueriesService.getByKey("store", "key"));
+
+ assertEquals(STREAMS_NOT_STARTED, exception.getMessage());
+ }
+
+ @Test
+ void shouldGetByKeyThrowsUnknownStoreExceptionWhenMetadataNull() {
+
when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
+ when(kafkaStreams.queryMetadataForKey(anyString(), any(), ArgumentMatchers.<Serializer<String>>any()))
+ .thenReturn(null);
+
+ assertThrows(UnknownStateStoreException.class, () ->
+ interactiveQueriesService.getByKey("store", "key"));
+ }
+
+ @Test
+ void shouldGetByKeyCurrentInstance() {
+ when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
+
+ when(kafkaStreams.queryMetadataForKey(anyString(), any(), ArgumentMatchers.<Serializer<String>>any()))
+ .thenReturn(new KeyQueryMetadata(
+ new HostInfo("localhost", 8080),
+ Collections.emptySet(),
+ 0)
+ );
+
+ HostInfo hostInfo = new HostInfo("localhost", 8080);
+ when(kafkaStreamsInitializer.getHostInfo()).thenReturn(hostInfo);
+
+ when(kafkaStreams.query(ArgumentMatchers.<StateQueryRequest<ValueAndTimestamp<Object>>>any()))
+ .thenReturn(stateKeyQueryResult);
+
+ QueryResult<ValueAndTimestamp<Object>> queryResult = QueryResult
+ .forResult(ValueAndTimestamp.make(new PersonStub("John", "Doe"), 150L));
+
+ when(stateKeyQueryResult.getOnlyPartitionResult())
+ .thenReturn(queryResult);
+
+ StateStoreRecord response = interactiveQueriesService.getByKey("store", "key");
+
+ assertEquals("key", response.getKey());
+ assertEquals("John", ((Map) response.getValue()).get("firstName"));
+ assertEquals("Doe", ((Map) response.getValue()).get("lastName"));
+ assertEquals(150L, response.getTimestamp());
+ }
+
+ @Test
+ void shouldGetByKeyOtherInstance() {
+ when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
+ when(kafkaStreams.queryMetadataForKey(anyString(), any(), ArgumentMatchers.<Serializer<String>>any()))
+ .thenReturn(new KeyQueryMetadata(
+ new HostInfo("localhost", 8085),
+ Collections.emptySet(),
+ 0)
+ );
+
+ HostInfo hostInfo = new HostInfo("localhost", 8080);
+ when(kafkaStreamsInitializer.getHostInfo()).thenReturn(hostInfo);
+
+ when(httpClient.sendAsync(any(), eq(HttpResponse.BodyHandlers.ofString())))
+ .thenReturn(CompletableFuture.completedFuture(httpResponse));
+ when(httpResponse.body()).thenReturn("""
+ {
+ "key": "key",
+ "value": {
+ "firstName": "John",
+ "lastName": "Doe"
+ },
+ "timestamp": 150
+ }
+ """);
+
+ StateStoreRecord response = interactiveQueriesService.getByKey("store", "key");
+
+ assertEquals("key", response.getKey());
+ assertEquals("John", ((Map) response.getValue()).get("firstName"));
+ assertEquals("Doe", ((Map) response.getValue()).get("lastName"));
+ assertEquals(150L, response.getTimestamp());
+ }
+
+ @Test
+ void shouldGetUnknownKeyCurrentInstance() {
+ when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
+ when(kafkaStreams.queryMetadataForKey(anyString(), any(), ArgumentMatchers.<Serializer<String>>any()))
+ .thenReturn(new KeyQueryMetadata(new HostInfo("localhost", 8080), Collections.emptySet(), 0));
+
+ HostInfo hostInfo = new HostInfo("localhost", 8080);
+ when(kafkaStreamsInitializer.getHostInfo()).thenReturn(hostInfo);
+
+ when(kafkaStreams.query(ArgumentMatchers.<StateQueryRequest<ValueAndTimestamp<Object>>>any()))
+ .thenReturn(stateKeyQueryResult);
+
+ when(stateKeyQueryResult.getOnlyPartitionResult())
+ .thenReturn(null);
+
+ UnknownKeyException exception = assertThrows(UnknownKeyException.class, () ->
+ interactiveQueriesService.getByKey("store", "unknownKey"));
+
+ assertEquals("Key unknownKey not found", exception.getMessage());
+ }
+
+ @Test
+ void shouldHandleRuntimeExceptionWhenGettingByKeyOtherInstance() {
+ when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
+ when(kafkaStreams.queryMetadataForKey(anyString(), any(), ArgumentMatchers.<Serializer<String>>any()))
+ .thenReturn(new KeyQueryMetadata(new HostInfo("localhost", 8085),
Collections.emptySet(), 0)); + + HostInfo hostInfo = new HostInfo("localhost", 8080); + when(kafkaStreamsInitializer.getHostInfo()).thenReturn(hostInfo); + + when(httpClient.sendAsync(any(), eq(HttpResponse.BodyHandlers.ofString()))) + .thenThrow(new RuntimeException("Error")); + + OtherInstanceResponseException exception = assertThrows(OtherInstanceResponseException.class, + () -> interactiveQueriesService.getByKey("store", "key")); + + assertEquals("Fail to read other instance response", exception.getMessage()); + } + + record PersonStub(String firstName, String lastName) { } +} diff --git a/kstreamplify-core/src/test/java/com/michelin/kstreamplify/service/KubernetesServiceTest.java b/kstreamplify-core/src/test/java/com/michelin/kstreamplify/service/KubernetesServiceTest.java new file mode 100644 index 00000000..048f383c --- /dev/null +++ b/kstreamplify-core/src/test/java/com/michelin/kstreamplify/service/KubernetesServiceTest.java @@ -0,0 +1,157 @@ +package com.michelin.kstreamplify.service; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.mockito.Mockito.when; + +import com.michelin.kstreamplify.context.KafkaStreamsExecutionContext; +import com.michelin.kstreamplify.initializer.KafkaStreamsInitializer; +import java.net.HttpURLConnection; +import java.util.Properties; +import java.util.Set; +import org.apache.kafka.streams.KafkaStreams; +import org.apache.kafka.streams.processor.internals.StreamThread; +import org.apache.kafka.streams.processor.internals.ThreadMetadataImpl; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +@ExtendWith(MockitoExtension.class) +class KubernetesServiceTest { + @Mock + private KafkaStreamsInitializer kafkaStreamsInitializer; + + @Mock + private KafkaStreams kafkaStreams; + + @InjectMocks + private KubernetesService kubernetesService; + + @Test + void shouldGetReadinessProbeWhenRunning() { + KafkaStreamsExecutionContext.registerProperties(new Properties()); + + when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams); + when(kafkaStreams.state()).thenReturn(KafkaStreams.State.RUNNING); + + int response = kubernetesService.getReadiness(); + + assertEquals(HttpURLConnection.HTTP_OK, response); + } + + @Test + void shouldGetReadinessProbeWhenNotRunning() { + KafkaStreamsExecutionContext.registerProperties(new Properties()); + + when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams); + when(kafkaStreams.state()).thenReturn(KafkaStreams.State.NOT_RUNNING); + + int response = kubernetesService.getReadiness(); + + assertEquals(HttpURLConnection.HTTP_UNAVAILABLE, response); + } + + @Test + void shouldGetReadinessProbeWhenNull() { + when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(null); + + int response = kubernetesService.getReadiness(); + + assertEquals(HttpURLConnection.HTTP_BAD_REQUEST, response); + } + + @Test + void shouldGetReadinessProbeWhenRebalancingAndAllThreadsCreated() { + KafkaStreamsExecutionContext.registerProperties(new Properties()); + + when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams); + when(kafkaStreams.state()).thenReturn(KafkaStreams.State.REBALANCING); + when(kafkaStreams.metadataForLocalThreads()).thenReturn(Set.of( + new ThreadMetadataImpl( + "thread-1", + StreamThread.State.CREATED.name(), + "mainConsumerClientId", + "restoreConsumerClientId", + Set.of(), + "adminClientId", + Set.of(), + 
Set.of())
+ ));
+
+ int response = kubernetesService.getReadiness();
+
+ assertEquals(HttpURLConnection.HTTP_NO_CONTENT, response);
+ }
+
+ @Test
+ void shouldGetReadinessProbeWhenRebalancingAndAllThreadsNotStartingOrCreated() {
+ KafkaStreamsExecutionContext.registerProperties(new Properties());
+
+ when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
+ when(kafkaStreams.state()).thenReturn(KafkaStreams.State.REBALANCING);
+ when(kafkaStreams.metadataForLocalThreads()).thenReturn(Set.of(
+ new ThreadMetadataImpl(
+ "thread-1",
+ StreamThread.State.CREATED.name(),
+ "mainConsumerClientId",
+ "restoreConsumerClientId",
+ Set.of(),
+ "adminClientId",
+ Set.of(),
+ Set.of()),
+ new ThreadMetadataImpl(
+ "thread-2",
+ StreamThread.State.STARTING.name(),
+ "mainConsumerClientId",
+ "restoreConsumerClientId",
+ Set.of(),
+ "adminClientId",
+ Set.of(),
+ Set.of()),
+ new ThreadMetadataImpl(
+ "thread-3",
+ StreamThread.State.PARTITIONS_ASSIGNED.name(),
+ "mainConsumerClientId",
+ "restoreConsumerClientId",
+ Set.of(),
+ "adminClientId",
+ Set.of(),
+ Set.of())
+ ));
+
+ int response = kubernetesService.getReadiness();
+
+ assertEquals(HttpURLConnection.HTTP_UNAVAILABLE, response);
+ }
+
+ @Test
+ void shouldGetLivenessProbeWhenStreamsRunning() {
+ when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
+ when(kafkaStreams.state()).thenReturn(KafkaStreams.State.RUNNING);
+
+ int response = kubernetesService.getLiveness();
+
+ assertEquals(HttpURLConnection.HTTP_OK, response);
+ }
+
+ @Test
+ void shouldGetLivenessProbeWhenStreamsNotRunning() {
+ when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams);
+ when(kafkaStreams.state()).thenReturn(KafkaStreams.State.NOT_RUNNING);
+
+ int response = kubernetesService.getLiveness();
+
+ assertEquals(HttpURLConnection.HTTP_INTERNAL_ERROR, response);
+ }
+
+ @Test
+ void shouldGetLivenessProbeWhenStreamsNull() {
+ when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(null);
+
+ int response = kubernetesService.getLiveness();
+
+ assertEquals(HttpURLConnection.HTTP_NO_CONTENT, response);
+ }
+}
+
diff --git a/kstreamplify-core/src/test/java/com/michelin/kstreamplify/service/TopologyServiceTest.java b/kstreamplify-core/src/test/java/com/michelin/kstreamplify/service/TopologyServiceTest.java
new file mode 100644
index 00000000..c5b175a8
--- /dev/null
+++ b/kstreamplify-core/src/test/java/com/michelin/kstreamplify/service/TopologyServiceTest.java
@@ -0,0 +1,57 @@
+package com.michelin.kstreamplify.service;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.mockito.Mockito.when;
+
+import com.michelin.kstreamplify.initializer.KafkaStreamsInitializer;
+import com.michelin.kstreamplify.initializer.KafkaStreamsStarter;
+import org.apache.kafka.streams.StreamsBuilder;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.ExtendWith;
+import org.mockito.InjectMocks;
+import org.mockito.Mock;
+import org.mockito.junit.jupiter.MockitoExtension;
+
+@ExtendWith(MockitoExtension.class)
+class TopologyServiceTest {
+ @Mock
+ private KafkaStreamsInitializer kafkaStreamsInitializer;
+
+ @InjectMocks
+ private TopologyService topologyService;
+
+ @Test
+ void shouldExposeTopology() {
+ StreamsBuilder streamsBuilder = new StreamsBuilder();
+ KafkaStreamsStarter starter = new KafkaStreamsStarterStub();
+ starter.topology(streamsBuilder);
+
+ when(kafkaStreamsInitializer.getTopology()).thenReturn(streamsBuilder.build());
+
+ String response =
topologyService.getTopology(); + + assertEquals(""" + Topologies: + Sub-topology: 0 + Source: KSTREAM-SOURCE-0000000000 (topics: [INPUT_TOPIC]) + --> KSTREAM-SINK-0000000001 + Sink: KSTREAM-SINK-0000000001 (topic: OUTPUT_TOPIC) + <-- KSTREAM-SOURCE-0000000000 + + """, response); + } + + static class KafkaStreamsStarterStub extends KafkaStreamsStarter { + @Override + public void topology(StreamsBuilder streamsBuilder) { + streamsBuilder + .stream("INPUT_TOPIC") + .to("OUTPUT_TOPIC"); + } + + @Override + public String dlqTopic() { + return "DLQ_TOPIC"; + } + } +} diff --git a/kstreamplify-core/src/test/java/com/michelin/kstreamplify/services/ProbeServiceTest.java b/kstreamplify-core/src/test/java/com/michelin/kstreamplify/services/ProbeServiceTest.java deleted file mode 100644 index 4d6489b3..00000000 --- a/kstreamplify-core/src/test/java/com/michelin/kstreamplify/services/ProbeServiceTest.java +++ /dev/null @@ -1,134 +0,0 @@ -package com.michelin.kstreamplify.services; - -import static org.junit.jupiter.api.Assertions.assertEquals; -import static org.mockito.Mockito.when; - -import com.michelin.kstreamplify.context.KafkaStreamsExecutionContext; -import com.michelin.kstreamplify.initializer.KafkaStreamsInitializer; -import com.michelin.kstreamplify.initializer.KafkaStreamsStarter; -import com.michelin.kstreamplify.model.RestServiceResponse; -import java.net.HttpURLConnection; -import java.util.Properties; -import org.apache.kafka.streams.KafkaStreams; -import org.apache.kafka.streams.StreamsBuilder; -import org.junit.jupiter.api.Test; -import org.junit.jupiter.api.extension.ExtendWith; -import org.mockito.Mock; -import org.mockito.junit.jupiter.MockitoExtension; - -@ExtendWith(MockitoExtension.class) -class ProbeServiceTest { - @Mock - private KafkaStreamsInitializer kafkaStreamsInitializer; - - @Mock - private KafkaStreams kafkaStreams; - - @Test - void shouldGetReadinessProbeWithWhenStreamsRunning() { - KafkaStreamsExecutionContext.registerProperties(new Properties()); - - when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams); - when(kafkaStreams.state()).thenReturn(KafkaStreams.State.RUNNING); - - RestServiceResponse response = ProbeService.readinessProbe(kafkaStreamsInitializer); - - assertEquals(HttpURLConnection.HTTP_OK, response.getStatus()); - } - - @Test - void shouldGetReadinessProbeWithWhenStreamsNotRunning() { - KafkaStreamsExecutionContext.registerProperties(new Properties()); - - when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams); - when(kafkaStreams.state()).thenReturn(KafkaStreams.State.NOT_RUNNING); - - RestServiceResponse response = ProbeService.readinessProbe(kafkaStreamsInitializer); - - assertEquals(HttpURLConnection.HTTP_UNAVAILABLE, response.getStatus()); - } - - @Test - void shouldGetReadinessProbeWithWhenStreamsNull() { - when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(null); - - RestServiceResponse response = ProbeService.readinessProbe(kafkaStreamsInitializer); - - assertEquals(HttpURLConnection.HTTP_BAD_REQUEST, response.getStatus()); - } - - @Test - void shouldGetLivenessProbeWithWhenStreamsRunning() { - when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams); - when(kafkaStreams.state()).thenReturn(KafkaStreams.State.RUNNING); - - RestServiceResponse response = ProbeService.livenessProbe(kafkaStreamsInitializer); - - assertEquals(HttpURLConnection.HTTP_OK, response.getStatus()); - } - - @Test - void shouldGetLivenessProbeWithWhenStreamsNotRunning() { - 
when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(kafkaStreams); - when(kafkaStreams.state()).thenReturn(KafkaStreams.State.NOT_RUNNING); - - RestServiceResponse response = ProbeService.livenessProbe(kafkaStreamsInitializer); - - assertEquals(HttpURLConnection.HTTP_INTERNAL_ERROR, response.getStatus()); - } - - @Test - void shouldGetLivenessProbeWithWhenStreamsNull() { - when(kafkaStreamsInitializer.getKafkaStreams()).thenReturn(null); - - RestServiceResponse response = ProbeService.livenessProbe(kafkaStreamsInitializer); - - assertEquals(HttpURLConnection.HTTP_NO_CONTENT, response.getStatus()); - } - - @Test - void shouldExposeTopologyWithNonNullTopology() { - StreamsBuilder streamsBuilder = new StreamsBuilder(); - KafkaStreamsStarter starter = new KafkaStreamsStarterImpl(); - starter.topology(streamsBuilder); - - when(kafkaStreamsInitializer.getTopology()).thenReturn(streamsBuilder.build()); - - RestServiceResponse response = ProbeService.exposeTopology(kafkaStreamsInitializer); - - assertEquals(HttpURLConnection.HTTP_OK, response.getStatus()); - assertEquals(""" - Topologies: - Sub-topology: 0 - Source: KSTREAM-SOURCE-0000000000 (topics: [inputTopic]) - --> KSTREAM-SINK-0000000001 - Sink: KSTREAM-SINK-0000000001 (topic: outputTopic) - <-- KSTREAM-SOURCE-0000000000 - - """, response.getBody()); - } - - @Test - void shouldExposeTopologyWithNullTopology() { - when(kafkaStreamsInitializer.getTopology()).thenReturn(null); - - RestServiceResponse response = ProbeService.exposeTopology(kafkaStreamsInitializer); - - assertEquals(HttpURLConnection.HTTP_NO_CONTENT, response.getStatus()); - } - - static class KafkaStreamsStarterImpl extends KafkaStreamsStarter { - @Override - public void topology(StreamsBuilder streamsBuilder) { - streamsBuilder - .stream("inputTopic") - .to("outputTopic"); - } - - @Override - public String dlqTopic() { - return "dlqTopic"; - } - } -} - diff --git a/kstreamplify-core/src/test/java/com/michelin/kstreamplify/utils/WindowStateStoreUtilsTest.java b/kstreamplify-core/src/test/java/com/michelin/kstreamplify/store/WindowStateStoreUtilsTest.java similarity index 85% rename from kstreamplify-core/src/test/java/com/michelin/kstreamplify/utils/WindowStateStoreUtilsTest.java rename to kstreamplify-core/src/test/java/com/michelin/kstreamplify/store/WindowStateStoreUtilsTest.java index bc7d9bba..68c69a4f 100644 --- a/kstreamplify-core/src/test/java/com/michelin/kstreamplify/utils/WindowStateStoreUtilsTest.java +++ b/kstreamplify-core/src/test/java/com/michelin/kstreamplify/store/WindowStateStoreUtilsTest.java @@ -1,4 +1,4 @@ -package com.michelin.kstreamplify.utils; +package com.michelin.kstreamplify.store; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNull; @@ -25,6 +25,15 @@ class WindowStateStoreUtilsTest { @Mock private WindowStoreIterator iterator; + @Test + void shouldReturnNull() { + when(windowStore.backwardFetch(anyString(), any(), any())) + .thenReturn(null); + + String result = WindowStateStoreUtils.get(windowStore, "testKey", 1); + assertNull(result); + } + @Test void shouldPutAndGetFromWindowStore() { String value = "testValue"; @@ -39,13 +48,11 @@ void shouldPutAndGetFromWindowStore() { when(windowStore.backwardFetch(anyString(), any(), any())) .thenReturn(iterator); - // Call the put method String key = "testKey"; WindowStateStoreUtils.put(windowStore, key, value); String result = WindowStateStoreUtils.get(windowStore, key, 1); String nullResult = WindowStateStoreUtils.get(windowStore, 
"nothing", 1); - // Verify that the put method of the windowStore is called with the correct arguments assertEquals("testValue", result); assertNull(nullResult); verify(windowStore).put(eq(key), eq(value), anyLong()); diff --git a/kstreamplify-core/src/test/java/com/michelin/kstreamplify/topic/TopicUtilsTest.java b/kstreamplify-core/src/test/java/com/michelin/kstreamplify/topic/TopicUtilsTest.java new file mode 100644 index 00000000..f1c39622 --- /dev/null +++ b/kstreamplify-core/src/test/java/com/michelin/kstreamplify/topic/TopicUtilsTest.java @@ -0,0 +1,35 @@ +package com.michelin.kstreamplify.topic; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import com.michelin.kstreamplify.context.KafkaStreamsExecutionContext; +import java.util.Properties; +import org.junit.jupiter.api.Test; + +class TopicUtilsTest { + + @Test + void shouldRemapTopic() { + Properties properties = new Properties(); + properties.put("topic.remap.myTopic", "myRemappedTopic"); + + KafkaStreamsExecutionContext.setProperties(properties); + + String remappedTopic = TopicUtils.remapAndPrefix("myTopic", ""); + + assertEquals("myRemappedTopic", remappedTopic); + } + + @Test + void shouldRemapAndPrefixTopic() { + Properties properties = new Properties(); + properties.put("topic.remap.myTopic", "myRemappedTopic"); + properties.put("prefix.myNamespace", "myNamespacePrefix."); + + KafkaStreamsExecutionContext.setProperties(properties); + + String remappedTopic = TopicUtils.remapAndPrefix("myTopic", "myNamespace"); + + assertEquals("myNamespacePrefix.myRemappedTopic", remappedTopic); + } +} diff --git a/kstreamplify-core/src/test/resources/application.yml b/kstreamplify-core/src/test/resources/application.yml index 508dbd3f..06a8d152 100644 --- a/kstreamplify-core/src/test/resources/application.yml +++ b/kstreamplify-core/src/test/resources/application.yml @@ -4,4 +4,6 @@ kafka: properties: application.id: appId default.key.serde: org.apache.kafka.common.serialization.Serdes$StringSerde - default.value.serde: org.apache.kafka.common.serialization.Serdes$StringSerde \ No newline at end of file + default.value.serde: org.apache.kafka.common.serialization.Serdes$StringSerde + state.dir: /tmp/kstreamplify/kstreamplify-core-test/initializer + auto.offset.reset: earliest \ No newline at end of file diff --git a/kstreamplify-spring-boot/pom.xml b/kstreamplify-spring-boot/pom.xml index bce4f299..40f95bef 100644 --- a/kstreamplify-spring-boot/pom.xml +++ b/kstreamplify-spring-boot/pom.xml @@ -68,4 +68,13 @@ 1.0.3-SNAPSHOT + + + + + org.apache.avro + avro-maven-plugin + + + diff --git a/kstreamplify-spring-boot/src/main/java/com/michelin/kstreamplify/config/BeanConfig.java b/kstreamplify-spring-boot/src/main/java/com/michelin/kstreamplify/config/BeanConfig.java new file mode 100644 index 00000000..9ed27de3 --- /dev/null +++ b/kstreamplify-spring-boot/src/main/java/com/michelin/kstreamplify/config/BeanConfig.java @@ -0,0 +1,50 @@ +package com.michelin.kstreamplify.config; + +import com.michelin.kstreamplify.initializer.KafkaStreamsStarter; +import com.michelin.kstreamplify.initializer.SpringBootKafkaStreamsInitializer; +import com.michelin.kstreamplify.service.InteractiveQueriesService; +import com.michelin.kstreamplify.service.KubernetesService; +import com.michelin.kstreamplify.service.TopologyService; +import org.springframework.boot.autoconfigure.condition.ConditionalOnBean; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; + +/** + * Bean 
configuration.
+ */
+@Configuration
+@ConditionalOnBean(KafkaStreamsStarter.class)
+public class BeanConfig {
+    /**
+     * Register the Kubernetes service as a bean.
+     *
+     * @param initializer The Kafka Streams initializer
+     * @return The Kubernetes service
+     */
+    @Bean
+    KubernetesService kubernetesService(SpringBootKafkaStreamsInitializer initializer) {
+        return new KubernetesService(initializer);
+    }
+
+    /**
+     * Register the Topology service as a bean.
+     *
+     * @param initializer The Kafka Streams initializer
+     * @return The Topology service
+     */
+    @Bean
+    TopologyService topologyService(SpringBootKafkaStreamsInitializer initializer) {
+        return new TopologyService(initializer);
+    }
+
+    /**
+     * Register the interactive queries service as a bean.
+     *
+     * @param initializer The Kafka Streams initializer
+     * @return The interactive queries service
+     */
+    @Bean
+    InteractiveQueriesService interactiveQueriesService(SpringBootKafkaStreamsInitializer initializer) {
+        return new InteractiveQueriesService(initializer);
+    }
+}
diff --git a/kstreamplify-spring-boot/src/main/java/com/michelin/kstreamplify/controller/ControllerExceptionHandler.java b/kstreamplify-spring-boot/src/main/java/com/michelin/kstreamplify/controller/ControllerExceptionHandler.java
new file mode 100644
index 00000000..ebfe6867
--- /dev/null
+++ b/kstreamplify-spring-boot/src/main/java/com/michelin/kstreamplify/controller/ControllerExceptionHandler.java
@@ -0,0 +1,74 @@
+package com.michelin.kstreamplify.controller;
+
+import com.michelin.kstreamplify.exception.UnknownKeyException;
+import lombok.extern.slf4j.Slf4j;
+import org.apache.kafka.streams.errors.StreamsNotStartedException;
+import org.apache.kafka.streams.errors.UnknownStateStoreException;
+import org.springframework.http.HttpStatus;
+import org.springframework.http.ResponseEntity;
+import org.springframework.web.bind.annotation.ExceptionHandler;
+import org.springframework.web.bind.annotation.RestControllerAdvice;
+
+/**
+ * Controller exception handler.
+ */
+@Slf4j
+@RestControllerAdvice
+public class ControllerExceptionHandler {
+
+    /**
+     * Handle the unknown state store exception.
+     *
+     * @param e The exception
+     * @return The response entity
+     */
+    @ExceptionHandler(UnknownStateStoreException.class)
+    public ResponseEntity<String> handleUnknownStateStoreException(UnknownStateStoreException e) {
+        log.error(e.getMessage(), e);
+        return ResponseEntity
+            .status(HttpStatus.NOT_FOUND)
+            .body(e.getMessage());
+    }
+
+    /**
+     * Handle the streams not started exception.
+     *
+     * @param e The exception
+     * @return The response entity
+     */
+    @ExceptionHandler(StreamsNotStartedException.class)
+    public ResponseEntity<String> handleStreamsNotStartedException(StreamsNotStartedException e) {
+        log.error(e.getMessage(), e);
+        return ResponseEntity
+            .status(HttpStatus.SERVICE_UNAVAILABLE)
+            .body(e.getMessage());
+    }
+
+    /**
+     * Handle the unknown key exception.
+     *
+     * @param e The exception
+     * @return The response entity
+     */
+    @ExceptionHandler(UnknownKeyException.class)
+    public ResponseEntity<String> handleUnknownKeyException(UnknownKeyException e) {
+        log.error(e.getMessage(), e);
+        return ResponseEntity
+            .status(HttpStatus.NOT_FOUND)
+            .body(e.getMessage());
+    }
+
+    /**
+     * Handle the illegal argument exception.
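+     * <p>For example, querying a window store through the key-value endpoint raises an
+     * {@code IllegalArgumentException} ("Cannot get result for failed query."), which this
+     * handler maps to an HTTP 400 whose body is the exception message, as asserted in the
+     * integration tests.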
+     *
+     * @param e The exception
+     * @return The response entity
+     */
+    @ExceptionHandler(IllegalArgumentException.class)
+    public ResponseEntity<String> handleIllegalArgumentException(IllegalArgumentException e) {
+        log.error(e.getMessage(), e);
+        return ResponseEntity
+            .status(HttpStatus.BAD_REQUEST)
+            .body(e.getMessage());
+    }
+}
diff --git a/kstreamplify-spring-boot/src/main/java/com/michelin/kstreamplify/controller/InteractiveQueriesController.java b/kstreamplify-spring-boot/src/main/java/com/michelin/kstreamplify/controller/InteractiveQueriesController.java
new file mode 100644
index 00000000..eef1b2f7
--- /dev/null
+++ b/kstreamplify-spring-boot/src/main/java/com/michelin/kstreamplify/controller/InteractiveQueriesController.java
@@ -0,0 +1,109 @@
+package com.michelin.kstreamplify.controller;
+
+import com.michelin.kstreamplify.initializer.KafkaStreamsStarter;
+import com.michelin.kstreamplify.service.InteractiveQueriesService;
+import com.michelin.kstreamplify.store.StateStoreRecord;
+import com.michelin.kstreamplify.store.StreamsMetadata;
+import java.util.List;
+import java.util.Set;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.autoconfigure.condition.ConditionalOnBean;
+import org.springframework.http.MediaType;
+import org.springframework.http.ResponseEntity;
+import org.springframework.web.bind.annotation.GetMapping;
+import org.springframework.web.bind.annotation.PathVariable;
+import org.springframework.web.bind.annotation.RequestMapping;
+import org.springframework.web.bind.annotation.RestController;
+
+/**
+ * Kafka Streams controller for state stores.
+ */
+@RestController
+@RequestMapping("/store")
+@ConditionalOnBean(KafkaStreamsStarter.class)
+public class InteractiveQueriesController {
+
+    /**
+     * The interactive queries service.
+     */
+    @Autowired
+    private InteractiveQueriesService interactiveQueriesService;
+
+    /**
+     * Get the state stores.
+     *
+     * @return The state stores
+     */
+    @GetMapping
+    public ResponseEntity<Set<String>> getStores() {
+        return ResponseEntity
+            .ok()
+            .contentType(MediaType.APPLICATION_JSON)
+            .body(interactiveQueriesService.getStateStores());
+    }
+
+    /**
+     * Get the streams metadata of the store.
+     *
+     * @param store The store
+     * @return The streams metadata
+     */
+    @GetMapping(value = "/metadata/{store}")
+    public ResponseEntity<List<StreamsMetadata>> getStreamsMetadataForStore(@PathVariable("store") final String store) {
+        return ResponseEntity
+            .ok()
+            .contentType(MediaType.APPLICATION_JSON)
+            .body(interactiveQueriesService.getStreamsMetadataForStore(store)
+                .stream()
+                .map(streamsMetadata -> new StreamsMetadata(
+                    streamsMetadata.stateStoreNames(),
+                    streamsMetadata.hostInfo(),
+                    streamsMetadata.topicPartitions()))
+                .toList()
+            );
+    }
+
+    /**
+     * Get all records from the store.
+     *
+     * @param store The store
+     * @return The values
+     */
+    @GetMapping(value = "/{store}")
+    public ResponseEntity<List<StateStoreRecord>> getAll(@PathVariable("store") String store) {
+        return ResponseEntity
+            .ok()
+            .contentType(MediaType.APPLICATION_JSON)
+            .body(interactiveQueriesService.getAll(store));
+    }
+
+    /**
+     * Get all records from the store on the local host.
+     *
+     * @param store The store
+     * @return The values
+     */
+    @GetMapping(value = "/local/{store}")
+    public ResponseEntity<List<StateStoreRecord>> getAllOnLocalhost(@PathVariable("store") String store) {
+        return ResponseEntity
+            .ok()
+            .contentType(MediaType.APPLICATION_JSON)
+            .body(interactiveQueriesService.getAllOnLocalhost(store));
+    }
+
+    /**
+     * Get the record by key from the store.
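+     * <p>If the queried key lives on another instance, the service fetches it from that
+     * instance over HTTP; an unknown key surfaces as an HTTP 404 through the controller
+     * exception handler. A minimal client-side sketch, assuming the application serves
+     * HTTP on localhost:8085 as in the integration tests:
+     * <pre>{@code
+     * StateStoreRecord stateStoreRecord = new RestTemplate()
+     *     .getForEntity("http://localhost:8085/store/STRING_STRING_STORE/person", StateStoreRecord.class)
+     *     .getBody();
+     * }</pre>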
+     *
+     * @param store The store
+     * @param key   The key
+     * @return The value
+     */
+    @GetMapping("/{store}/{key}")
+    public ResponseEntity<StateStoreRecord> getByKey(@PathVariable("store") String store,
+                                                     @PathVariable("key") String key) {
+        return ResponseEntity
+            .ok()
+            .contentType(MediaType.APPLICATION_JSON)
+            .body(interactiveQueriesService.getByKey(store, key));
+    }
+}
diff --git a/kstreamplify-spring-boot/src/main/java/com/michelin/kstreamplify/controller/KubernetesController.java b/kstreamplify-spring-boot/src/main/java/com/michelin/kstreamplify/controller/KubernetesController.java
new file mode 100644
index 00000000..77ff9090
--- /dev/null
+++ b/kstreamplify-spring-boot/src/main/java/com/michelin/kstreamplify/controller/KubernetesController.java
@@ -0,0 +1,48 @@
+package com.michelin.kstreamplify.controller;
+
+import com.michelin.kstreamplify.initializer.KafkaStreamsStarter;
+import com.michelin.kstreamplify.service.KubernetesService;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.autoconfigure.condition.ConditionalOnBean;
+import org.springframework.http.ResponseEntity;
+import org.springframework.web.bind.annotation.GetMapping;
+import org.springframework.web.bind.annotation.RestController;
+
+/**
+ * Kafka Streams controller for Kubernetes.
+ */
+@RestController
+@ConditionalOnBean(KafkaStreamsStarter.class)
+public class KubernetesController {
+    /**
+     * The Kubernetes service.
+     */
+    @Autowired
+    private KubernetesService kubernetesService;
+
+    /**
+     * Readiness Kubernetes probe endpoint.
+     *
+     * @return An HTTP response based on the Kafka Streams state
+     */
+    @GetMapping("/${kubernetes.readiness.path:ready}")
+    public ResponseEntity<Void> readiness() {
+        int readinessStatus = kubernetesService.getReadiness();
+        return ResponseEntity
+            .status(readinessStatus)
+            .build();
+    }
+
+    /**
+     * Liveness Kubernetes probe endpoint.
+     *
+     * @return An HTTP response based on the Kafka Streams state
+     */
+    @GetMapping("/${kubernetes.liveness.path:liveness}")
+    public ResponseEntity<Void> liveness() {
+        int livenessStatus = kubernetesService.getLiveness();
+        return ResponseEntity
+            .status(livenessStatus)
+            .build();
+    }
+}
diff --git a/kstreamplify-spring-boot/src/main/java/com/michelin/kstreamplify/controller/TopologyController.java b/kstreamplify-spring-boot/src/main/java/com/michelin/kstreamplify/controller/TopologyController.java
new file mode 100644
index 00000000..479bf7d2
--- /dev/null
+++ b/kstreamplify-spring-boot/src/main/java/com/michelin/kstreamplify/controller/TopologyController.java
@@ -0,0 +1,36 @@
+package com.michelin.kstreamplify.controller;
+
+import com.michelin.kstreamplify.initializer.KafkaStreamsStarter;
+import com.michelin.kstreamplify.service.TopologyService;
+import org.springframework.beans.factory.annotation.Autowired;
+import org.springframework.boot.autoconfigure.condition.ConditionalOnBean;
+import org.springframework.http.MediaType;
+import org.springframework.http.ResponseEntity;
+import org.springframework.web.bind.annotation.GetMapping;
+import org.springframework.web.bind.annotation.RestController;
+
+/**
+ * Kafka Streams controller for topology.
+ */
+@RestController
+@ConditionalOnBean(KafkaStreamsStarter.class)
+public class TopologyController {
+    /**
+     * The topology service.
+     */
+    @Autowired
+    private TopologyService topologyService;
+
+    /**
+     * Get the Kafka Streams topology.
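+     * <p>The response is the plain-text topology description, e.g. the {@code Topologies:}
+     * output asserted in the integration tests. The path defaults to {@code /topology} and
+     * can be overridden with the {@code topology.path} property.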
+ * + * @return The Kafka Streams topology + */ + @GetMapping("/${topology.path:topology}") + public ResponseEntity topology() { + return ResponseEntity + .ok() + .contentType(MediaType.TEXT_PLAIN) + .body(topologyService.getTopology()); + } +} diff --git a/kstreamplify-spring-boot/src/main/java/com/michelin/kstreamplify/initializer/SpringKafkaStreamsInitializer.java b/kstreamplify-spring-boot/src/main/java/com/michelin/kstreamplify/initializer/SpringBootKafkaStreamsInitializer.java similarity index 94% rename from kstreamplify-spring-boot/src/main/java/com/michelin/kstreamplify/initializer/SpringKafkaStreamsInitializer.java rename to kstreamplify-spring-boot/src/main/java/com/michelin/kstreamplify/initializer/SpringBootKafkaStreamsInitializer.java index 0cf5050c..8c7d9124 100644 --- a/kstreamplify-spring-boot/src/main/java/com/michelin/kstreamplify/initializer/SpringKafkaStreamsInitializer.java +++ b/kstreamplify-spring-boot/src/main/java/com/michelin/kstreamplify/initializer/SpringBootKafkaStreamsInitializer.java @@ -1,7 +1,7 @@ package com.michelin.kstreamplify.initializer; import com.michelin.kstreamplify.context.KafkaStreamsExecutionContext; -import com.michelin.kstreamplify.properties.KafkaProperties; +import com.michelin.kstreamplify.property.KafkaProperties; import io.micrometer.core.instrument.MeterRegistry; import io.micrometer.core.instrument.binder.kafka.KafkaStreamsMetrics; import lombok.extern.slf4j.Slf4j; @@ -21,7 +21,7 @@ @Slf4j @Component @ConditionalOnBean(KafkaStreamsStarter.class) -public class SpringKafkaStreamsInitializer extends KafkaStreamsInitializer implements ApplicationRunner { +public class SpringBootKafkaStreamsInitializer extends KafkaStreamsInitializer implements ApplicationRunner { /** * The application context. */ @@ -66,7 +66,7 @@ public void run(ApplicationArguments args) { * {@inheritDoc} */ @Override - protected void initHttpServer() { + protected void startHttpServer() { // Nothing to do here as the server is already started by Spring Boot } diff --git a/kstreamplify-spring-boot/src/main/java/com/michelin/kstreamplify/properties/KafkaProperties.java b/kstreamplify-spring-boot/src/main/java/com/michelin/kstreamplify/property/KafkaProperties.java similarity index 94% rename from kstreamplify-spring-boot/src/main/java/com/michelin/kstreamplify/properties/KafkaProperties.java rename to kstreamplify-spring-boot/src/main/java/com/michelin/kstreamplify/property/KafkaProperties.java index 79323374..5e9cd24f 100644 --- a/kstreamplify-spring-boot/src/main/java/com/michelin/kstreamplify/properties/KafkaProperties.java +++ b/kstreamplify-spring-boot/src/main/java/com/michelin/kstreamplify/property/KafkaProperties.java @@ -1,4 +1,4 @@ -package com.michelin.kstreamplify.properties; +package com.michelin.kstreamplify.property; import java.util.HashMap; import java.util.Map; diff --git a/kstreamplify-spring-boot/src/main/java/com/michelin/kstreamplify/rest/SpringProbeController.java b/kstreamplify-spring-boot/src/main/java/com/michelin/kstreamplify/rest/SpringProbeController.java deleted file mode 100644 index 4bdd79d2..00000000 --- a/kstreamplify-spring-boot/src/main/java/com/michelin/kstreamplify/rest/SpringProbeController.java +++ /dev/null @@ -1,65 +0,0 @@ -package com.michelin.kstreamplify.rest; - -import com.michelin.kstreamplify.initializer.KafkaStreamsStarter; -import com.michelin.kstreamplify.initializer.SpringKafkaStreamsInitializer; -import com.michelin.kstreamplify.model.RestServiceResponse; -import com.michelin.kstreamplify.services.ProbeService; -import 
org.springframework.beans.factory.annotation.Autowired; -import org.springframework.boot.autoconfigure.condition.ConditionalOnBean; -import org.springframework.http.ResponseEntity; -import org.springframework.web.bind.annotation.GetMapping; -import org.springframework.web.bind.annotation.RestController; - -/** - * Spring Boot probe controller. - */ -@RestController -@ConditionalOnBean(KafkaStreamsStarter.class) -public class SpringProbeController { - /** - * The Kafka Streams initializer. - */ - @Autowired - private SpringKafkaStreamsInitializer kafkaStreamsInitializer; - - /** - * Readiness Kubernetes probe endpoint. - * - * @return An HTTP response based on the Kafka Streams state - */ - @GetMapping("/${readiness_path:ready}") - public ResponseEntity readinessProbe() { - return convertToResponseEntity(ProbeService.readinessProbe(kafkaStreamsInitializer)); - } - - /** - * Liveness Kubernetes probe endpoint. - * - * @return An HTTP response based on the Kafka Streams state - */ - @GetMapping("/${liveness_path:liveness}") - public ResponseEntity livenessProbe() { - return convertToResponseEntity(ProbeService.livenessProbe(kafkaStreamsInitializer)); - } - - /** - * Get the Kafka Streams topology. - * - * @return The Kafka Streams topology - */ - @GetMapping("/${expose_topology_path:topology}") - public ResponseEntity exposeTopology() { - return convertToResponseEntity(ProbeService.exposeTopology(kafkaStreamsInitializer)); - } - - /** - * Convert the probe service response into an HTTP response entity. - * - * @param serviceResponse The probe service response - * @return An HTTP response - */ - private static ResponseEntity convertToResponseEntity( - RestServiceResponse serviceResponse) { - return ResponseEntity.status(serviceResponse.getStatus()).body(serviceResponse.getBody()); - } -} diff --git a/kstreamplify-spring-boot/src/main/resources/META-INF/spring/org.springframework.boot.autoconfigure.AutoConfiguration.imports b/kstreamplify-spring-boot/src/main/resources/META-INF/spring/org.springframework.boot.autoconfigure.AutoConfiguration.imports index 0b52d9c9..258a5a2f 100644 --- a/kstreamplify-spring-boot/src/main/resources/META-INF/spring/org.springframework.boot.autoconfigure.AutoConfiguration.imports +++ b/kstreamplify-spring-boot/src/main/resources/META-INF/spring/org.springframework.boot.autoconfigure.AutoConfiguration.imports @@ -1,4 +1,8 @@ -com.michelin.kstreamplify.initializer.SpringKafkaStreamsInitializer -com.michelin.kstreamplify.rest.SpringProbeController -com.michelin.kstreamplify.properties.KafkaProperties -com.michelin.kstreamplify.opentelemetry.OpenTelemetryConfig \ No newline at end of file +com.michelin.kstreamplify.initializer.SpringBootKafkaStreamsInitializer +com.michelin.kstreamplify.controller.KubernetesController +com.michelin.kstreamplify.controller.TopologyController +com.michelin.kstreamplify.controller.InteractiveQueriesController +com.michelin.kstreamplify.controller.ControllerExceptionHandler +com.michelin.kstreamplify.property.KafkaProperties +com.michelin.kstreamplify.opentelemetry.OpenTelemetryConfig +com.michelin.kstreamplify.config.BeanConfig \ No newline at end of file diff --git a/kstreamplify-spring-boot/src/test/avro/kafka-person.avsc b/kstreamplify-spring-boot/src/test/avro/kafka-person.avsc new file mode 100644 index 00000000..72ba8866 --- /dev/null +++ b/kstreamplify-spring-boot/src/test/avro/kafka-person.avsc @@ -0,0 +1,37 @@ +{ + "namespace": "com.michelin.kstreamplify.avro", + "type": "record", + "name": "KafkaPersonStub", + "fields": [ + { + 
"name": "id", + "type": ["null", "long"], + "default": null, + "doc": "Person id" + }, + { + "name": "firstName", + "type": ["null", "string"], + "default": null, + "doc": "Person first name" + }, + { + "name": "lastName", + "type": ["null", "string"], + "default": null, + "doc": "Person last name" + }, + { + "name": "birthDate", + "type": [ + "null", + { + "type": "long", + "logicalType": "timestamp-millis" + } + ], + "default": null, + "doc": "Person date of birth" + } + ] +} \ No newline at end of file diff --git a/kstreamplify-spring-boot/src/test/java/com/michelin/kstreamplify/controller/ControllerExceptionHandlerTest.java b/kstreamplify-spring-boot/src/test/java/com/michelin/kstreamplify/controller/ControllerExceptionHandlerTest.java new file mode 100644 index 00000000..f0fe697b --- /dev/null +++ b/kstreamplify-spring-boot/src/test/java/com/michelin/kstreamplify/controller/ControllerExceptionHandlerTest.java @@ -0,0 +1,32 @@ +package com.michelin.kstreamplify.controller; + +import static org.junit.jupiter.api.Assertions.assertEquals; + +import org.apache.kafka.streams.errors.StreamsNotStartedException; +import org.apache.kafka.streams.errors.UnknownStateStoreException; +import org.junit.jupiter.api.Test; +import org.springframework.http.ResponseEntity; + +class ControllerExceptionHandlerTest { + private final ControllerExceptionHandler controllerExceptionHandler = new ControllerExceptionHandler(); + + @Test + void shouldHandleUnknownStateStoreException() { + UnknownStateStoreException e = new UnknownStateStoreException("message"); + + ResponseEntity response = controllerExceptionHandler.handleUnknownStateStoreException(e); + + assertEquals("message", response.getBody()); + assertEquals(404, response.getStatusCode().value()); + } + + @Test + void shouldHandleStreamsNotStartedException() { + StreamsNotStartedException e = new StreamsNotStartedException("message"); + + ResponseEntity response = controllerExceptionHandler.handleStreamsNotStartedException(e); + + assertEquals("message", response.getBody()); + assertEquals(503, response.getStatusCode().value()); + } +} diff --git a/kstreamplify-spring-boot/src/test/java/com/michelin/kstreamplify/controller/InteractiveQueriesControllerTest.java b/kstreamplify-spring-boot/src/test/java/com/michelin/kstreamplify/controller/InteractiveQueriesControllerTest.java new file mode 100644 index 00000000..79cd0de7 --- /dev/null +++ b/kstreamplify-spring-boot/src/test/java/com/michelin/kstreamplify/controller/InteractiveQueriesControllerTest.java @@ -0,0 +1,103 @@ +package com.michelin.kstreamplify.controller; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.mockito.Mockito.when; + +import com.michelin.kstreamplify.service.InteractiveQueriesService; +import com.michelin.kstreamplify.store.StateStoreRecord; +import java.util.List; +import java.util.Set; +import org.apache.kafka.common.TopicPartition; +import org.apache.kafka.streams.StreamsMetadata; +import org.apache.kafka.streams.state.HostInfo; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; + +@ExtendWith(MockitoExtension.class) +class InteractiveQueriesControllerTest { + @Mock + private StreamsMetadata streamsMetadata; + + @Mock + private InteractiveQueriesService interactiveQueriesService; + + 
@InjectMocks + private InteractiveQueriesController interactiveQueriesController; + + @Test + void shouldGetStores() { + when(interactiveQueriesService.getStateStores()) + .thenReturn(Set.of("store1", "store2")); + + assertEquals(Set.of("store1", "store2"), interactiveQueriesController.getStores().getBody()); + } + + @Test + void shouldGetHostsForStore() { + when(streamsMetadata.stateStoreNames()) + .thenReturn(Set.of("store")); + + when(streamsMetadata.hostInfo()) + .thenReturn(new HostInfo("host1", 1234)); + + when(streamsMetadata.topicPartitions()) + .thenReturn(Set.of(new TopicPartition("topic", 0))); + + when(interactiveQueriesService.getStreamsMetadataForStore("store")) + .thenReturn(List.of(streamsMetadata)); + + List response = + interactiveQueriesController.getStreamsMetadataForStore("store").getBody(); + + assertNotNull(response); + assertEquals(streamsMetadata.stateStoreNames(), response.get(0).getStateStoreNames()); + assertEquals(streamsMetadata.hostInfo().host(), response.get(0).getHostInfo().host()); + assertEquals(streamsMetadata.hostInfo().port(), response.get(0).getHostInfo().port()); + assertTrue(response.get(0).getTopicPartitions().contains("topic-0")); + } + + @Test + void shouldGetAll() { + when(interactiveQueriesService.getAll("store")) + .thenReturn(List.of(new StateStoreRecord("key1", "value1", 1L))); + + List responses = interactiveQueriesController.getAll("store").getBody(); + + assertNotNull(responses); + assertEquals("key1", responses.get(0).getKey()); + assertEquals("value1", responses.get(0).getValue()); + assertEquals(1L, responses.get(0).getTimestamp()); + } + + @Test + void shouldGetAllOnLocalhost() { + when(interactiveQueriesService.getAllOnLocalhost("store")) + .thenReturn(List.of(new StateStoreRecord("key1", "value1", 1L))); + + List responses = interactiveQueriesController.getAllOnLocalhost("store").getBody(); + + assertNotNull(responses); + assertEquals("key1", responses.get(0).getKey()); + assertEquals("value1", responses.get(0).getValue()); + assertEquals(1L, responses.get(0).getTimestamp()); + } + + @Test + void shouldGetByKey() { + when(interactiveQueriesService.getByKey("store", "key")) + .thenReturn(new StateStoreRecord("key1", "value1", 1L)); + + StateStoreRecord response = interactiveQueriesController + .getByKey("store", "key").getBody(); + + assertNotNull(response); + assertEquals("key1", response.getKey()); + assertEquals("value1", response.getValue()); + assertEquals(1L, response.getTimestamp()); + } +} diff --git a/kstreamplify-spring-boot/src/test/java/com/michelin/kstreamplify/controller/KubernetesControllerTest.java b/kstreamplify-spring-boot/src/test/java/com/michelin/kstreamplify/controller/KubernetesControllerTest.java new file mode 100644 index 00000000..01211e40 --- /dev/null +++ b/kstreamplify-spring-boot/src/test/java/com/michelin/kstreamplify/controller/KubernetesControllerTest.java @@ -0,0 +1,42 @@ +package com.michelin.kstreamplify.controller; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.mockito.Mockito.when; + +import com.michelin.kstreamplify.service.KubernetesService; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.springframework.http.HttpStatus; +import org.springframework.http.ResponseEntity; + +@ExtendWith(MockitoExtension.class) +class KubernetesControllerTest { + @Mock + private KubernetesService kubernetesService; + + 
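+    // Controller under test, built by Mockito with the mocked Kubernetes service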
@InjectMocks + private KubernetesController kubernetesController; + + @Test + void shouldGetReadinessProbe() { + when(kubernetesService.getReadiness()) + .thenReturn(200); + + ResponseEntity response = kubernetesController.readiness(); + + assertEquals(HttpStatus.OK, response.getStatusCode()); + } + + @Test + void shouldGetLivenessProbe() { + when(kubernetesService.getLiveness()) + .thenReturn(200); + + ResponseEntity response = kubernetesController.liveness(); + + assertEquals(HttpStatus.OK, response.getStatusCode()); + } +} diff --git a/kstreamplify-spring-boot/src/test/java/com/michelin/kstreamplify/controller/TopologyControllerTest.java b/kstreamplify-spring-boot/src/test/java/com/michelin/kstreamplify/controller/TopologyControllerTest.java new file mode 100644 index 00000000..e3683366 --- /dev/null +++ b/kstreamplify-spring-boot/src/test/java/com/michelin/kstreamplify/controller/TopologyControllerTest.java @@ -0,0 +1,33 @@ +package com.michelin.kstreamplify.controller; + +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.mockito.Mockito.when; + +import com.michelin.kstreamplify.service.TopologyService; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.InjectMocks; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.springframework.http.HttpStatus; +import org.springframework.http.ResponseEntity; + +@ExtendWith(MockitoExtension.class) +class TopologyControllerTest { + @Mock + private TopologyService topologyService; + + @InjectMocks + private TopologyController topologyController; + + @Test + void shouldGetTopology() { + when(topologyService.getTopology()) + .thenReturn("Topology"); + + ResponseEntity response = topologyController.topology(); + + assertEquals(HttpStatus.OK, response.getStatusCode()); + assertEquals("Topology", response.getBody()); + } +} diff --git a/kstreamplify-spring-boot/src/test/java/com/michelin/kstreamplify/initializer/SpringKafkaStreamsInitializerTest.java b/kstreamplify-spring-boot/src/test/java/com/michelin/kstreamplify/initializer/SpringBootKafkaStreamsInitializerTest.java similarity index 92% rename from kstreamplify-spring-boot/src/test/java/com/michelin/kstreamplify/initializer/SpringKafkaStreamsInitializerTest.java rename to kstreamplify-spring-boot/src/test/java/com/michelin/kstreamplify/initializer/SpringBootKafkaStreamsInitializerTest.java index 87f3af16..442a4259 100644 --- a/kstreamplify-spring-boot/src/test/java/com/michelin/kstreamplify/initializer/SpringKafkaStreamsInitializerTest.java +++ b/kstreamplify-spring-boot/src/test/java/com/michelin/kstreamplify/initializer/SpringBootKafkaStreamsInitializerTest.java @@ -7,7 +7,7 @@ import static org.mockito.Mockito.when; import com.michelin.kstreamplify.context.KafkaStreamsExecutionContext; -import com.michelin.kstreamplify.properties.KafkaProperties; +import com.michelin.kstreamplify.property.KafkaProperties; import java.util.Properties; import org.apache.kafka.streams.KafkaStreams; import org.apache.kafka.streams.StreamsConfig; @@ -20,7 +20,7 @@ import org.springframework.context.ConfigurableApplicationContext; @ExtendWith(MockitoExtension.class) -class SpringKafkaStreamsInitializerTest { +class SpringBootKafkaStreamsInitializerTest { @Mock private ConfigurableApplicationContext applicationContext; @@ -31,7 +31,7 @@ class SpringKafkaStreamsInitializerTest { private KafkaProperties kafkaProperties; @InjectMocks - private SpringKafkaStreamsInitializer initializer; + private 
SpringBootKafkaStreamsInitializer initializer; @Test void shouldInitProperties() { @@ -62,7 +62,7 @@ void shouldCloseSpringBootContextOnUncaughtException() { initializer.initProperties(); StreamsUncaughtExceptionHandler.StreamThreadExceptionResponse response = initializer - .onStreamsUncaughtException(new RuntimeException("Test Exception")); + .onStreamsUncaughtException(new RuntimeException("Unexpected test exception")); assertEquals(StreamsUncaughtExceptionHandler.StreamThreadExceptionResponse.SHUTDOWN_CLIENT, response); verify(applicationContext).close(); diff --git a/kstreamplify-spring-boot/src/test/java/com/michelin/kstreamplify/integration/InteractiveQueriesIntegrationTest.java b/kstreamplify-spring-boot/src/test/java/com/michelin/kstreamplify/integration/InteractiveQueriesIntegrationTest.java new file mode 100644 index 00000000..af599821 --- /dev/null +++ b/kstreamplify-spring-boot/src/test/java/com/michelin/kstreamplify/integration/InteractiveQueriesIntegrationTest.java @@ -0,0 +1,454 @@ +package com.michelin.kstreamplify.integration; + +import static io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG; +import static org.apache.kafka.clients.producer.ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG; +import static org.apache.kafka.clients.producer.ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG; +import static org.apache.kafka.streams.StreamsConfig.BOOTSTRAP_SERVERS_CONFIG; +import static org.junit.jupiter.api.Assertions.assertEquals; +import static org.junit.jupiter.api.Assertions.assertNotNull; +import static org.junit.jupiter.api.Assertions.assertNull; +import static org.junit.jupiter.api.Assertions.assertTrue; +import static org.springframework.boot.test.context.SpringBootTest.WebEnvironment.DEFINED_PORT; +import static org.springframework.http.HttpMethod.GET; + +import com.michelin.kstreamplify.avro.KafkaPersonStub; +import com.michelin.kstreamplify.initializer.KafkaStreamsStarter; +import com.michelin.kstreamplify.serde.SerdesUtils; +import com.michelin.kstreamplify.service.InteractiveQueriesService; +import com.michelin.kstreamplify.store.StateStoreRecord; +import com.michelin.kstreamplify.store.StreamsMetadata; +import io.confluent.kafka.serializers.KafkaAvroSerializer; +import java.time.Duration; +import java.time.Instant; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.ExecutionException; +import lombok.extern.slf4j.Slf4j; +import org.apache.kafka.clients.producer.KafkaProducer; +import org.apache.kafka.clients.producer.ProducerRecord; +import org.apache.kafka.common.TopicPartition; +import org.apache.kafka.common.serialization.Serdes; +import org.apache.kafka.common.serialization.StringSerializer; +import org.apache.kafka.streams.KafkaStreams; +import org.apache.kafka.streams.StreamsBuilder; +import org.apache.kafka.streams.kstream.Consumed; +import org.apache.kafka.streams.processor.api.Processor; +import org.apache.kafka.streams.processor.api.ProcessorContext; +import org.apache.kafka.streams.processor.api.ProcessorSupplier; +import org.apache.kafka.streams.processor.api.Record; +import org.apache.kafka.streams.state.KeyValueStore; +import org.apache.kafka.streams.state.StoreBuilder; +import org.apache.kafka.streams.state.Stores; +import org.apache.kafka.streams.state.TimestampedKeyValueStore; +import org.apache.kafka.streams.state.TimestampedWindowStore; +import org.apache.kafka.streams.state.ValueAndTimestamp; +import org.apache.kafka.streams.state.WindowStore; 
+import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.params.ParameterizedTest; +import org.junit.jupiter.params.provider.CsvSource; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.SpringApplication; +import org.springframework.boot.autoconfigure.SpringBootApplication; +import org.springframework.boot.test.context.SpringBootTest; +import org.springframework.core.ParameterizedTypeReference; +import org.springframework.http.ResponseEntity; +import org.springframework.test.context.ActiveProfiles; +import org.testcontainers.junit.jupiter.Testcontainers; + +@Slf4j +@Testcontainers +@ActiveProfiles("interactive-queries") +@SpringBootTest(webEnvironment = DEFINED_PORT) +class InteractiveQueriesIntegrationTest extends KafkaIntegrationTest { + @Autowired + private InteractiveQueriesService interactiveQueriesService; + + @BeforeAll + static void globalSetUp() throws ExecutionException, InterruptedException { + createTopics( + broker.getBootstrapServers(), + new TopicPartition("STRING_TOPIC", 3), + new TopicPartition("AVRO_TOPIC", 2) + ); + + try (KafkaProducer stringKafkaProducer = new KafkaProducer<>( + Map.of(BOOTSTRAP_SERVERS_CONFIG, broker.getBootstrapServers(), + KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName(), + VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName()))) { + + ProducerRecord message = new ProducerRecord<>( + "STRING_TOPIC", "person", "Doe"); + + stringKafkaProducer + .send(message) + .get(); + } + + try (KafkaProducer avroKafkaProducer = new KafkaProducer<>( + Map.of(BOOTSTRAP_SERVERS_CONFIG, broker.getBootstrapServers(), + KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName(), + VALUE_SERIALIZER_CLASS_CONFIG, KafkaAvroSerializer.class.getName(), + SCHEMA_REGISTRY_URL_CONFIG, "http://" + schemaRegistry.getHost() + ":" + schemaRegistry.getFirstMappedPort()))) { + + KafkaPersonStub kafkaPersonStub = KafkaPersonStub.newBuilder() + .setId(1L) + .setFirstName("John") + .setLastName("Doe") + .setBirthDate(Instant.parse("2000-01-01T01:00:00Z")) + .build(); + + ProducerRecord message = new ProducerRecord<>( + "AVRO_TOPIC", "person", kafkaPersonStub); + + avroKafkaProducer + .send(message) + .get(); + } + } + + @BeforeEach + void setUp() throws InterruptedException { + waitingForKafkaStreamsToStart(); + waitingForLocalStoreToReachOffset(Map.of( + "STRING_STRING_STORE", Map.of(1, 1L), + "STRING_AVRO_STORE", Map.of(0, 1L), + "STRING_AVRO_TIMESTAMPED_STORE", Map.of(0, 1L), + "STRING_AVRO_WINDOW_STORE", Map.of(0, 1L), + "STRING_AVRO_TIMESTAMPED_WINDOW_STORE", Map.of(0, 1L) + )); + } + + @Test + void shouldGetStoresAndStoreMetadata() { + // Get stores + ResponseEntity> stores = restTemplate + .exchange("http://localhost:8085/store", GET, null, new ParameterizedTypeReference<>() { + }); + + assertEquals(200, stores.getStatusCode().value()); + assertNotNull(stores.getBody()); + assertTrue(stores.getBody().containsAll(List.of( + "STRING_STRING_STORE", + "STRING_AVRO_STORE", + "STRING_AVRO_TIMESTAMPED_STORE", + "STRING_AVRO_WINDOW_STORE", + "STRING_AVRO_TIMESTAMPED_WINDOW_STORE" + ))); + + // Get hosts + ResponseEntity> streamsMetadata = restTemplate + .exchange("http://localhost:8085/store/metadata/STRING_STRING_STORE", GET, null, new ParameterizedTypeReference<>() { + }); + + assertEquals(200, streamsMetadata.getStatusCode().value()); + assertNotNull(streamsMetadata.getBody()); + assertEquals(Set.of( + "STRING_STRING_STORE", + 
"STRING_AVRO_STORE", + "STRING_AVRO_TIMESTAMPED_STORE", + "STRING_AVRO_WINDOW_STORE", + "STRING_AVRO_TIMESTAMPED_WINDOW_STORE"), streamsMetadata.getBody().get(0).getStateStoreNames()); + assertEquals("localhost", streamsMetadata.getBody().get(0).getHostInfo().host()); + assertEquals(8085, streamsMetadata.getBody().get(0).getHostInfo().port()); + assertEquals(Set.of( + "AVRO_TOPIC-0", + "AVRO_TOPIC-1", + "STRING_TOPIC-0", + "STRING_TOPIC-1", + "STRING_TOPIC-2"), + streamsMetadata.getBody().get(0).getTopicPartitions()); + } + + @ParameterizedTest + @CsvSource({ + "http://localhost:8085/store/WRONG_STORE/person,State store WRONG_STORE not found", + "http://localhost:8085/store/STRING_STRING_STORE/wrongKey,Key wrongKey not found", + "http://localhost:8085/store/WRONG_STORE,State store WRONG_STORE not found", + }) + void shouldGetErrorWhenWrongKeyOrStore(String url, String message) { + ResponseEntity response = restTemplate + .getForEntity(url, String.class); + + assertEquals(404, response.getStatusCode().value()); + assertEquals(message, response.getBody()); + } + + @Test + void shouldGetByKeyWrongStoreType() { + ResponseEntity response = restTemplate + .getForEntity("http://localhost:8085/store/STRING_AVRO_WINDOW_STORE/person", String.class); + + assertEquals(400, response.getStatusCode().value()); + assertNotNull(response.getBody()); + assertTrue(response.getBody().contains("Cannot get result for failed query.")); + } + + @Test + void shouldGetByKeyInStringStringKeyValueStore() { + ResponseEntity response = restTemplate + .getForEntity("http://localhost:8085/store/STRING_STRING_STORE/person", StateStoreRecord.class); + + assertEquals(200, response.getStatusCode().value()); + assertNotNull(response.getBody()); + assertEquals("person", response.getBody().getKey()); + assertEquals("Doe", response.getBody().getValue()); + assertNull(response.getBody().getTimestamp()); + } + + @Test + void shouldGetByKeyInStringAvroKeyValueStore() { + ResponseEntity response = restTemplate + .getForEntity("http://localhost:8085/store/STRING_AVRO_STORE/person", + StateStoreRecord.class); + + assertEquals(200, response.getStatusCode().value()); + assertNotNull(response.getBody()); + assertEquals("person", response.getBody().getKey()); + assertEquals(1, ((HashMap) response.getBody().getValue()).get("id")); + assertEquals("John", ((HashMap) response.getBody().getValue()).get("firstName")); + assertEquals("Doe", ((HashMap) response.getBody().getValue()).get("lastName")); + assertEquals("2000-01-01T01:00:00Z", ((HashMap) response.getBody().getValue()).get("birthDate")); + assertNull(response.getBody().getTimestamp()); + } + + @Test + void shouldGetByKeyInStringAvroKeyValueStoreFromInteractiveQueriesService() { + StateStoreRecord stateStoreRecord = interactiveQueriesService.getByKey("STRING_AVRO_STORE", "person"); + + assertEquals("person", stateStoreRecord.getKey()); + assertEquals(1L, ((Map) stateStoreRecord.getValue()).get("id")); + assertEquals("John", ((Map) stateStoreRecord.getValue()).get("firstName")); + assertEquals("Doe", ((Map) stateStoreRecord.getValue()).get("lastName")); + assertEquals("2000-01-01T01:00:00Z", ((Map) stateStoreRecord.getValue()).get("birthDate")); + assertNull(stateStoreRecord.getTimestamp()); + } + + @Test + void shouldGetByKeyInStringAvroTimestampedKeyValueStore() { + ResponseEntity response = restTemplate + .getForEntity("http://localhost:8085/store/STRING_AVRO_TIMESTAMPED_STORE/person", + StateStoreRecord.class); + + assertEquals(200, response.getStatusCode().value()); + 
assertNotNull(response.getBody()); + assertEquals("person", response.getBody().getKey()); + assertEquals(1, ((HashMap) response.getBody().getValue()).get("id")); + assertEquals("John", ((HashMap) response.getBody().getValue()).get("firstName")); + assertEquals("Doe", ((HashMap) response.getBody().getValue()).get("lastName")); + assertEquals("2000-01-01T01:00:00Z", ((HashMap) response.getBody().getValue()).get("birthDate")); + assertNotNull(response.getBody().getTimestamp()); + } + + @Test + void shouldGetAllInStringStringKeyValueStore() { + ResponseEntity> response = restTemplate + .exchange("http://localhost:8085/store/STRING_STRING_STORE", GET, null, new ParameterizedTypeReference<>() { + }); + + assertEquals(200, response.getStatusCode().value()); + assertNotNull(response.getBody()); + assertEquals("person", response.getBody().get(0).getKey()); + assertEquals("Doe", response.getBody().get(0).getValue()); + assertNull(response.getBody().get(0).getTimestamp()); + } + + @Test + void shouldGetAllInStringAvroKeyValueStore() { + ResponseEntity> response = restTemplate + .exchange("http://localhost:8085/store/STRING_AVRO_STORE", GET, null, new ParameterizedTypeReference<>() { + }); + + assertEquals(200, response.getStatusCode().value()); + assertNotNull(response.getBody()); + assertEquals("person", response.getBody().get(0).getKey()); + assertEquals(1, ((Map) response.getBody().get(0).getValue()).get("id")); + assertEquals("John", ((Map) response.getBody().get(0).getValue()).get("firstName")); + assertEquals("Doe", ((Map) response.getBody().get(0).getValue()).get("lastName")); + assertEquals("2000-01-01T01:00:00Z", ((Map) response.getBody().get(0).getValue()).get("birthDate")); + assertNull(response.getBody().get(0).getTimestamp()); + } + + @Test + void shouldGetAllInStringAvroKeyValueStoreFromInteractiveQueriesService() { + List stateQueryData = interactiveQueriesService.getAll("STRING_AVRO_STORE"); + + assertEquals("person", stateQueryData.get(0).getKey()); + assertEquals(1L, ((Map) stateQueryData.get(0).getValue()).get("id")); + assertEquals("John", ((Map) stateQueryData.get(0).getValue()).get("firstName")); + assertEquals("Doe", ((Map) stateQueryData.get(0).getValue()).get("lastName")); + assertEquals("2000-01-01T01:00:00Z", ((Map) stateQueryData.get(0).getValue()).get("birthDate")); + assertNull(stateQueryData.get(0).getTimestamp()); + } + + @Test + void shouldGetAllInStringAvroTimestampedKeyValueStore() { + ResponseEntity> response = restTemplate + .exchange("http://localhost:8085/store/STRING_AVRO_TIMESTAMPED_STORE", GET, null, new ParameterizedTypeReference<>() { + }); + + assertEquals(200, response.getStatusCode().value()); + assertNotNull(response.getBody()); + assertEquals("person", response.getBody().get(0).getKey()); + assertEquals(1, ((Map) response.getBody().get(0).getValue()).get("id")); + assertEquals("John", ((Map) response.getBody().get(0).getValue()).get("firstName")); + assertEquals("Doe", ((Map) response.getBody().get(0).getValue()).get("lastName")); + assertEquals("2000-01-01T01:00:00Z", ((Map) response.getBody().get(0).getValue()).get("birthDate")); + assertNotNull(response.getBody().get(0).getTimestamp()); + } + + @Test + void shouldGetAllOnLocalhostInStringStringKeyValueStore() { + ResponseEntity> response = restTemplate + .exchange("http://localhost:8085/store/local/STRING_STRING_STORE", GET, null, new ParameterizedTypeReference<>() { + }); + + assertEquals(200, response.getStatusCode().value()); + assertNotNull(response.getBody()); + assertEquals("person", 
response.getBody().get(0).getKey()); + assertEquals("Doe", response.getBody().get(0).getValue()); + assertNull(response.getBody().get(0).getTimestamp()); + } + + /** + * Kafka Streams starter implementation for integration tests. + * The topology consumes events from multiple topics and stores them in dedicated stores + * so that they can be queried. + */ + @Slf4j + @SpringBootApplication + static class KafkaStreamsStarterStub extends KafkaStreamsStarter { + public static void main(String[] args) { + SpringApplication.run(KafkaStreamsStarterStub.class, args); + } + + @Override + public void topology(StreamsBuilder streamsBuilder) { + streamsBuilder + .stream("STRING_TOPIC", Consumed.with(Serdes.String(), Serdes.String())) + .process(new ProcessorSupplier() { + @Override + public Set> stores() { + // key-value store + StoreBuilder> stringStringKeyValueStoreBuilder = Stores + .keyValueStoreBuilder( + Stores.persistentKeyValueStore("STRING_STRING_STORE"), + Serdes.String(), Serdes.String()); + + return Set.of( + stringStringKeyValueStoreBuilder + ); + } + + @Override + public Processor get() { + return new Processor<>() { + private KeyValueStore stringStringKeyValueStore; + + @Override + public void init(ProcessorContext context) { + this.stringStringKeyValueStore = context.getStateStore("STRING_STRING_STORE"); + } + + @Override + public void process(Record message) { + stringStringKeyValueStore.put(message.key(), message.value()); + } + }; + } + }); + + streamsBuilder + .stream("AVRO_TOPIC", Consumed.with(Serdes.String(), SerdesUtils.getValueSerdes())) + .process(new ProcessorSupplier() { + @Override + public Set> stores() { + // key-value store + StoreBuilder> stringAvroKeyValueStoreBuilder = Stores + .keyValueStoreBuilder( + Stores.persistentKeyValueStore("STRING_AVRO_STORE"), + Serdes.String(), SerdesUtils.getValueSerdes()); + + // timestamped key-value store + StoreBuilder> stringAvroTimestampedKeyValueStoreBuilder = Stores + .timestampedKeyValueStoreBuilder( + Stores.persistentTimestampedKeyValueStore("STRING_AVRO_TIMESTAMPED_STORE"), + Serdes.String(), SerdesUtils.getValueSerdes()); + + // window store + StoreBuilder> stringAvroWindowStoreBuilder = + Stores.windowStoreBuilder( + Stores.persistentWindowStore("STRING_AVRO_WINDOW_STORE", + Duration.ofMinutes(5), Duration.ofMinutes(1), false), + Serdes.String(), SerdesUtils.getValueSerdes()); + + // timestamped window store + StoreBuilder> + stringAvroTimestampedWindowStoreBuilder = Stores.timestampedWindowStoreBuilder( + Stores.persistentTimestampedWindowStore("STRING_AVRO_TIMESTAMPED_WINDOW_STORE", + Duration.ofMinutes(5), Duration.ofMinutes(1), false), + Serdes.String(), SerdesUtils.getValueSerdes()); + + return Set.of( + stringAvroKeyValueStoreBuilder, + stringAvroTimestampedKeyValueStoreBuilder, + stringAvroWindowStoreBuilder, + stringAvroTimestampedWindowStoreBuilder + ); + } + + @Override + public Processor get() { + return new Processor<>() { + private KeyValueStore stringAvroKeyValueStore; + private TimestampedKeyValueStore + stringAvroTimestampedKeyValueStore; + private WindowStore stringAvroWindowStore; + private TimestampedWindowStore stringAvroTimestampedWindowStore; + + @Override + public void init(ProcessorContext context) { + this.stringAvroKeyValueStore = context + .getStateStore("STRING_AVRO_STORE"); + + this.stringAvroTimestampedKeyValueStore = context + .getStateStore("STRING_AVRO_TIMESTAMPED_STORE"); + + this.stringAvroWindowStore = context + .getStateStore("STRING_AVRO_WINDOW_STORE"); + + 
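+                                // Each store declared in stores() is retrieved by name; the timestamped window store is wired last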
this.stringAvroTimestampedWindowStore = context + .getStateStore("STRING_AVRO_TIMESTAMPED_WINDOW_STORE"); + } + + @Override + public void process(Record message) { + stringAvroKeyValueStore.put(message.key(), message.value()); + stringAvroTimestampedKeyValueStore.put(message.key(), + ValueAndTimestamp.make(message.value(), message.timestamp())); + stringAvroWindowStore.put(message.key(), message.value(), message.timestamp()); + stringAvroTimestampedWindowStore.put(message.key(), + ValueAndTimestamp.make(message.value(), message.timestamp()), + message.timestamp()); + + } + }; + } + }); + } + + @Override + public String dlqTopic() { + return "DLQ_TOPIC"; + } + + @Override + public void onStart(KafkaStreams kafkaStreams) { + kafkaStreams.cleanUp(); + } + } +} diff --git a/kstreamplify-spring-boot/src/test/java/com/michelin/kstreamplify/integration/KafkaIntegrationTest.java b/kstreamplify-spring-boot/src/test/java/com/michelin/kstreamplify/integration/KafkaIntegrationTest.java new file mode 100644 index 00000000..8ced7b23 --- /dev/null +++ b/kstreamplify-spring-boot/src/test/java/com/michelin/kstreamplify/integration/KafkaIntegrationTest.java @@ -0,0 +1,99 @@ +package com.michelin.kstreamplify.integration; + +import static io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig.SCHEMA_REGISTRY_URL_CONFIG; +import static org.apache.kafka.streams.StreamsConfig.BOOTSTRAP_SERVERS_CONFIG; + +import com.michelin.kstreamplify.initializer.KafkaStreamsInitializer; +import java.util.Arrays; +import java.util.Map; +import lombok.extern.slf4j.Slf4j; +import org.apache.kafka.clients.admin.AdminClient; +import org.apache.kafka.clients.admin.NewTopic; +import org.apache.kafka.common.TopicPartition; +import org.apache.kafka.streams.KafkaStreams; +import org.apache.kafka.streams.LagInfo; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.boot.test.web.client.TestRestTemplate; +import org.springframework.test.context.DynamicPropertyRegistry; +import org.springframework.test.context.DynamicPropertySource; +import org.testcontainers.containers.GenericContainer; +import org.testcontainers.containers.KafkaContainer; +import org.testcontainers.containers.Network; +import org.testcontainers.containers.wait.strategy.Wait; +import org.testcontainers.junit.jupiter.Container; +import org.testcontainers.utility.DockerImageName; + +@Slf4j +abstract class KafkaIntegrationTest { + protected static final String CONFLUENT_PLATFORM_VERSION = "7.6.1"; + protected static final Network NETWORK = Network.newNetwork(); + + @Autowired + protected KafkaStreamsInitializer initializer; + + @Autowired + protected TestRestTemplate restTemplate; + + @Container + static KafkaContainer broker = new KafkaContainer(DockerImageName + .parse("confluentinc/cp-kafka:" + CONFLUENT_PLATFORM_VERSION)) + .withNetwork(NETWORK) + .withNetworkAliases("broker") + .withKraft(); + + @Container + static GenericContainer schemaRegistry = new GenericContainer<>(DockerImageName + .parse("confluentinc/cp-schema-registry:" + CONFLUENT_PLATFORM_VERSION)) + .dependsOn(broker) + .withNetwork(NETWORK) + .withNetworkAliases("schema-registry") + .withExposedPorts(8081) + .withEnv("SCHEMA_REGISTRY_HOST_NAME", "schema-registry") + .withEnv("SCHEMA_REGISTRY_LISTENERS", "http://0.0.0.0:8081") + .withEnv("SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS", "PLAINTEXT://broker:9092") + .waitingFor(Wait.forHttp("/subjects").forStatusCode(200)); + + @DynamicPropertySource + static void kafkaProperties(DynamicPropertyRegistry registry) 
{ + registry.add("kafka.properties." + BOOTSTRAP_SERVERS_CONFIG, broker::getBootstrapServers); + registry.add("kafka.properties." + SCHEMA_REGISTRY_URL_CONFIG, + () -> "http://" + schemaRegistry.getHost() + ":" + schemaRegistry.getFirstMappedPort()); + } + + protected static void createTopics(String bootstrapServers, TopicPartition... topicPartitions) { + var newTopics = Arrays.stream(topicPartitions) + .map(topicPartition -> new NewTopic(topicPartition.topic(), topicPartition.partition(), (short) 1)) + .toList(); + try (var admin = AdminClient.create(Map.of(BOOTSTRAP_SERVERS_CONFIG, bootstrapServers))) { + admin.createTopics(newTopics); + } + } + + protected void waitingForKafkaStreamsToStart() throws InterruptedException { + while (!initializer.getKafkaStreams().state().equals(KafkaStreams.State.RUNNING)) { + log.info("Waiting for Kafka Streams to start..."); + Thread.sleep(2000); // NOSONAR + } + } + + protected void waitingForLocalStoreToReachOffset(Map> topicPartitionOffset) + throws InterruptedException { + + while (hasLag(topicPartitionOffset)) { + log.info("Waiting for local stores {} to reach offsets", topicPartitionOffset.keySet().stream().toList()); + Thread.sleep(5000); // NOSONAR + } + } + + private boolean hasLag(Map> topicPartitionOffset) { + Map> currentLag = initializer.getKafkaStreams().allLocalStorePartitionLags(); + + return !topicPartitionOffset.entrySet() + .stream() + .allMatch(topicPartitionOffsetEntry -> topicPartitionOffsetEntry.getValue().entrySet() + .stream() + .anyMatch(partitionOffsetEntry -> currentLag.get(topicPartitionOffsetEntry.getKey()) + .get(partitionOffsetEntry.getKey()) + .currentOffsetPosition() == partitionOffsetEntry.getValue())); + } +} diff --git a/kstreamplify-spring-boot/src/test/java/com/michelin/kstreamplify/integrations/SpringKafkaStreamsInitializerIntegrationTest.java b/kstreamplify-spring-boot/src/test/java/com/michelin/kstreamplify/integration/SpringBootKafkaStreamsInitializerIntegrationTest.java similarity index 61% rename from kstreamplify-spring-boot/src/test/java/com/michelin/kstreamplify/integrations/SpringKafkaStreamsInitializerIntegrationTest.java rename to kstreamplify-spring-boot/src/test/java/com/michelin/kstreamplify/integration/SpringBootKafkaStreamsInitializerIntegrationTest.java index 0812fe40..dd1fc592 100644 --- a/kstreamplify-spring-boot/src/test/java/com/michelin/kstreamplify/integrations/SpringKafkaStreamsInitializerIntegrationTest.java +++ b/kstreamplify-spring-boot/src/test/java/com/michelin/kstreamplify/integration/SpringBootKafkaStreamsInitializerIntegrationTest.java @@ -1,4 +1,4 @@ -package com.michelin.kstreamplify.integrations; +package com.michelin.kstreamplify.integration; import static org.apache.kafka.streams.StreamsConfig.BOOTSTRAP_SERVERS_CONFIG; import static org.junit.jupiter.api.Assertions.assertEquals; @@ -7,27 +7,23 @@ import static org.springframework.boot.test.context.SpringBootTest.WebEnvironment.DEFINED_PORT; import com.michelin.kstreamplify.context.KafkaStreamsExecutionContext; -import com.michelin.kstreamplify.initializer.KafkaStreamsInitializer; import com.michelin.kstreamplify.initializer.KafkaStreamsStarter; import io.micrometer.core.instrument.MeterRegistry; import java.util.ArrayList; -import java.util.Arrays; import java.util.List; -import java.util.Map; +import java.util.Set; import lombok.extern.slf4j.Slf4j; -import org.apache.kafka.clients.admin.AdminClient; -import org.apache.kafka.clients.admin.NewTopic; import org.apache.kafka.common.TopicPartition; import 
org.apache.kafka.streams.KafkaStreams; import org.apache.kafka.streams.StreamsBuilder; import org.apache.kafka.streams.StreamsMetadata; import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.SpringApplication; import org.springframework.boot.autoconfigure.SpringBootApplication; import org.springframework.boot.test.context.SpringBootTest; -import org.springframework.boot.test.web.client.TestRestTemplate; import org.springframework.http.ResponseEntity; import org.springframework.test.context.DynamicPropertyRegistry; import org.springframework.test.context.DynamicPropertySource; @@ -39,36 +35,26 @@ @Slf4j @Testcontainers @SpringBootTest(webEnvironment = DEFINED_PORT) -class SpringKafkaStreamsInitializerIntegrationTest { - @Autowired - private KafkaStreamsInitializer initializer; - +class SpringBootKafkaStreamsInitializerIntegrationTest extends KafkaIntegrationTest { @Autowired private MeterRegistry registry; - @Autowired - private TestRestTemplate restTemplate; - - @Container - static KafkaContainer kafka = new KafkaContainer(DockerImageName - .parse("confluentinc/cp-kafka:7.4.0")) - .withKraft(); - - @DynamicPropertySource - static void kafkaProperties(DynamicPropertyRegistry registry) { - registry.add("kafka.properties." + BOOTSTRAP_SERVERS_CONFIG, - kafka::getBootstrapServers); + @BeforeAll + static void globalSetUp() { + createTopics( + broker.getBootstrapServers(), + new TopicPartition("INPUT_TOPIC", 2), + new TopicPartition("OUTPUT_TOPIC", 2) + ); } - @BeforeAll - static void setUp() { - createTopics("inputTopic", "outputTopic"); + @BeforeEach + void setUp() throws InterruptedException { + waitingForKafkaStreamsToStart(); } @Test - void shouldInitAndRun() throws InterruptedException { - waitingForKafkaStreamsToRun(); - + void shouldInitAndRun() { assertEquals(KafkaStreams.State.RUNNING, initializer.getKafkaStreams().state()); List streamsMetadata = @@ -76,53 +62,53 @@ void shouldInitAndRun() throws InterruptedException { // Assert Kafka Streams initialization assertEquals("localhost", streamsMetadata.get(0).hostInfo().host()); - assertEquals(8081, streamsMetadata.get(0).hostInfo().port()); + assertEquals(8086, streamsMetadata.get(0).hostInfo().port()); assertTrue(streamsMetadata.get(0).stateStoreNames().isEmpty()); - List topicPartitions = streamsMetadata.get(0).topicPartitions().stream().toList(); + Set topicPartitions = streamsMetadata.get(0).topicPartitions(); - assertEquals("inputTopic", topicPartitions.get(0).topic()); - assertEquals(0, topicPartitions.get(0).partition()); + assertTrue(Set.of( + new TopicPartition("INPUT_TOPIC", 0), + new TopicPartition("INPUT_TOPIC", 1) + ).containsAll(topicPartitions)); - assertEquals("dlqTopic", KafkaStreamsExecutionContext.getDlqTopicName()); + assertEquals("DLQ_TOPIC", KafkaStreamsExecutionContext.getDlqTopicName()); assertEquals("org.apache.kafka.common.serialization.Serdes$StringSerde", KafkaStreamsExecutionContext.getSerdesConfig().get("default.key.serde")); assertEquals("org.apache.kafka.common.serialization.Serdes$StringSerde", KafkaStreamsExecutionContext.getSerdesConfig().get("default.value.serde")); - assertEquals("localhost:8081", + assertEquals("localhost:8086", KafkaStreamsExecutionContext.getProperties().get("application.server")); // Assert HTTP probes ResponseEntity responseReady = restTemplate - .getForEntity("http://localhost:8081/ready", Void.class); + 
.getForEntity("http://localhost:8086/ready", Void.class); assertEquals(200, responseReady.getStatusCode().value()); ResponseEntity responseLiveness = restTemplate - .getForEntity("http://localhost:8081/liveness", Void.class); + .getForEntity("http://localhost:8086/liveness", Void.class); assertEquals(200, responseLiveness.getStatusCode().value()); ResponseEntity responseTopology = restTemplate - .getForEntity("http://localhost:8081/topology", String.class); + .getForEntity("http://localhost:8086/topology", String.class); assertEquals(200, responseTopology.getStatusCode().value()); assertEquals(""" Topologies: Sub-topology: 0 - Source: KSTREAM-SOURCE-0000000000 (topics: [inputTopic]) + Source: KSTREAM-SOURCE-0000000000 (topics: [INPUT_TOPIC]) --> KSTREAM-SINK-0000000001 - Sink: KSTREAM-SINK-0000000001 (topic: outputTopic) + Sink: KSTREAM-SINK-0000000001 (topic: OUTPUT_TOPIC) <-- KSTREAM-SOURCE-0000000000 """, responseTopology.getBody()); } @Test - void shouldRegisterKafkaMetrics() throws InterruptedException { - waitingForKafkaStreamsToRun(); - + void shouldRegisterKafkaMetrics() { // Kafka Streams metrics are registered assertFalse(registry.getMeters() .stream() @@ -145,39 +131,27 @@ void shouldRegisterKafkaMetrics() throws InterruptedException { .isEmpty()); } - private void waitingForKafkaStreamsToRun() throws InterruptedException { - while (!initializer.getKafkaStreams().state().equals(KafkaStreams.State.RUNNING)) { - log.info("Waiting for Kafka Streams to start..."); - Thread.sleep(2000); - } - } - - private static void createTopics(String... topics) { - var newTopics = Arrays.stream(topics) - .map(topic -> new NewTopic(topic, 1, (short) 1)) - .toList(); - try (var admin = AdminClient.create(Map.of(BOOTSTRAP_SERVERS_CONFIG, kafka.getBootstrapServers()))) { - admin.createTopics(newTopics); - } - } - + /** + * Kafka Streams starter implementation for integration tests. + * The topology simply forwards messages from inputTopic to outputTopic. 
+     */
     @Slf4j
     @SpringBootApplication
-    static class KafkaStreamsStarterImpl extends KafkaStreamsStarter {
+    static class KafkaStreamsStarterStub extends KafkaStreamsStarter {
         public static void main(String[] args) {
-            SpringApplication.run(KafkaStreamsStarterImpl.class, args);
+            SpringApplication.run(KafkaStreamsStarterStub.class, args);
         }

         @Override
         public void topology(StreamsBuilder streamsBuilder) {
             streamsBuilder
-                .stream("inputTopic")
-                .to("outputTopic");
+                .stream("INPUT_TOPIC")
+                .to("OUTPUT_TOPIC");
         }

         @Override
         public String dlqTopic() {
-            return "dlqTopic";
+            return "DLQ_TOPIC";
         }

         @Override
diff --git a/kstreamplify-spring-boot/src/test/java/com/michelin/kstreamplify/properties/KafkaPropertiesTest.java b/kstreamplify-spring-boot/src/test/java/com/michelin/kstreamplify/property/KafkaPropertiesTest.java
similarity index 94%
rename from kstreamplify-spring-boot/src/test/java/com/michelin/kstreamplify/properties/KafkaPropertiesTest.java
rename to kstreamplify-spring-boot/src/test/java/com/michelin/kstreamplify/property/KafkaPropertiesTest.java
index c93ef23d..99ed2bca 100644
--- a/kstreamplify-spring-boot/src/test/java/com/michelin/kstreamplify/properties/KafkaPropertiesTest.java
+++ b/kstreamplify-spring-boot/src/test/java/com/michelin/kstreamplify/property/KafkaPropertiesTest.java
@@ -1,4 +1,4 @@
-package com.michelin.kstreamplify.properties;
+package com.michelin.kstreamplify.property;

 import static org.junit.jupiter.api.Assertions.assertTrue;

@@ -7,7 +7,6 @@
 import org.junit.jupiter.api.Test;

 class KafkaPropertiesTest {
-
     private final KafkaProperties kafkaProperties = new KafkaProperties();

     @Test
diff --git a/kstreamplify-spring-boot/src/test/java/com/michelin/kstreamplify/rest/SpringProbeControllerTest.java b/kstreamplify-spring-boot/src/test/java/com/michelin/kstreamplify/rest/SpringProbeControllerTest.java
deleted file mode 100644
index bce5a66b..00000000
--- a/kstreamplify-spring-boot/src/test/java/com/michelin/kstreamplify/rest/SpringProbeControllerTest.java
+++ /dev/null
@@ -1,55 +0,0 @@
-package com.michelin.kstreamplify.rest;
-
-import static org.junit.jupiter.api.Assertions.assertEquals;
-import static org.mockito.ArgumentMatchers.any;
-import static org.mockito.Mockito.mockStatic;
-
-import com.michelin.kstreamplify.model.RestServiceResponse;
-import com.michelin.kstreamplify.services.ProbeService;
-import org.junit.jupiter.api.Test;
-import org.mockito.MockedStatic;
-import org.springframework.http.HttpStatus;
-import org.springframework.http.ResponseEntity;
-
-class SpringProbeControllerTest {
-    private final SpringProbeController controller = new SpringProbeController();
-
-    @Test
-    void shouldGetReadinessProbe() {
-        try (MockedStatic<ProbeService> probeService = mockStatic(ProbeService.class)) {
-            probeService.when(() -> ProbeService.readinessProbe(any()))
-                .thenReturn(new RestServiceResponse<>(200, "Ready"));
-
-            ResponseEntity<String> response = controller.readinessProbe();
-
-            assertEquals(HttpStatus.OK, response.getStatusCode());
-            assertEquals("Ready", response.getBody());
-        }
-    }
-
-    @Test
-    void shouldGetLivenessProbe() {
-        try (MockedStatic<ProbeService> probeService = mockStatic(ProbeService.class)) {
-            probeService.when(() -> ProbeService.livenessProbe(any()))
-                .thenReturn(new RestServiceResponse<>(200, "Alive"));
-
-            ResponseEntity<String> response = controller.livenessProbe();
-
-            assertEquals(HttpStatus.OK, response.getStatusCode());
-            assertEquals("Alive", response.getBody());
-        }
-    }
-
-    @Test
-    void shouldGetTopology() {
-        try (MockedStatic<ProbeService> probeService = mockStatic(ProbeService.class)) {
-            probeService.when(() -> ProbeService.exposeTopology(any()))
-                .thenReturn(new RestServiceResponse<>(200, "Topology"));
-
-            ResponseEntity<String> response = controller.exposeTopology();
-
-            assertEquals(HttpStatus.OK, response.getStatusCode());
-            assertEquals("Topology", response.getBody());
-        }
-    }
-}
diff --git a/kstreamplify-spring-boot/src/test/resources/application-interactive-queries.yml b/kstreamplify-spring-boot/src/test/resources/application-interactive-queries.yml
new file mode 100644
index 00000000..1b96440a
--- /dev/null
+++ b/kstreamplify-spring-boot/src/test/resources/application-interactive-queries.yml
@@ -0,0 +1,9 @@
+server:
+  port: 8085
+kafka:
+  properties:
+    application.id: appInteractiveQueriesId
+    default.key.serde: org.apache.kafka.common.serialization.Serdes$StringSerde
+    default.value.serde: org.apache.kafka.common.serialization.Serdes$StringSerde
+    state.dir: /tmp/kstreamplify/kstreamplify-spring-boot-test/interactive-queries
+    auto.offset.reset: earliest
\ No newline at end of file
diff --git a/kstreamplify-spring-boot/src/test/resources/application.yml b/kstreamplify-spring-boot/src/test/resources/application.yml
index 3ffd8327..1f3411c5 100644
--- a/kstreamplify-spring-boot/src/test/resources/application.yml
+++ b/kstreamplify-spring-boot/src/test/resources/application.yml
@@ -1,7 +1,9 @@
 server:
-  port: 8081
+  port: 8086
 kafka:
   properties:
     application.id: appId
     default.key.serde: org.apache.kafka.common.serialization.Serdes$StringSerde
-    default.value.serde: org.apache.kafka.common.serialization.Serdes$StringSerde
\ No newline at end of file
+    default.value.serde: org.apache.kafka.common.serialization.Serdes$StringSerde
+    state.dir: /tmp/kstreamplify/kstreamplify-spring-boot-test/initializer
+    auto.offset.reset: earliest
\ No newline at end of file
diff --git a/pom.xml b/pom.xml
index 50b9397f..7db3ba0c 100644
--- a/pom.xml
+++ b/pom.xml
@@ -187,7 +187,7 @@
                <artifactId>commons-lang3</artifactId>
                <version>${commons-lang3.version}</version>
            -
            +
            <dependency>
                <groupId>org.mockito</groupId>
                <artifactId>mockito-core</artifactId>
@@ -332,6 +332,7 @@
                    info
                    true
+                   check-style
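
Note (not part of the patch): a minimal sketch of how a concrete integration test is expected to compose the shared KafkaIntegrationTest helpers introduced in this diff (createTopics, waitingForKafkaStreamsToStart, waitingForLocalStoreToReachOffset, and the broker container field). The class name ExampleStoreIntegrationTest, the store name "STRING_STORE", and the target offset are hypothetical; class-level annotations and record production are omitted for brevity.

package com.michelin.kstreamplify.integration;

import java.util.Map;
import org.apache.kafka.common.TopicPartition;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;

// Illustrative sketch only — assumes a topology that materializes records
// from INPUT_TOPIC into a key-value store named "STRING_STORE" (hypothetical).
class ExampleStoreIntegrationTest extends KafkaIntegrationTest {

    @BeforeAll
    static void globalSetUp() {
        // Create the input topic (one partition) before the streams app starts
        createTopics(broker.getBootstrapServers(), new TopicPartition("INPUT_TOPIC", 1));
    }

    @BeforeEach
    void setUp() throws InterruptedException {
        // Block until the Kafka Streams instance reports RUNNING
        waitingForKafkaStreamsToStart();
    }

    @Test
    void shouldQueryStoreOnceCaughtUp() throws InterruptedException {
        // ... produce one record to INPUT_TOPIC here ...

        // Poll KafkaStreams#allLocalStorePartitionLags() until partition 0 of
        // STRING_STORE has materialized the record (current offset position 1)
        waitingForLocalStoreToReachOffset(Map.of("STRING_STORE", Map.of(0, 1L)));

        // ... query the store through the interactive queries endpoint and assert ...
    }
}

Waiting on store lag rather than sleeping a fixed amount keeps the tests deterministic: a store query issued before the changelog is caught up could otherwise return an empty result and fail intermittently.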