Kafka consumer unit test with Avro Schema registry failing - java

I'm writing a consumer which listens to a Kafka topic and consumes messages whenever one is available. I've tested the logic/code by running Kafka locally, and it works fine.
While writing the unit/component test cases, it fails with an Avro schema registry URL error. I've tried the different options available on the internet but could not find anything that works. I am not sure if my approach is even correct. Please help.
Listener Class
@KafkaListener(topics = "positionmgmt.v1", containerFactory = "genericKafkaListenerFactory")
public void receive(ConsumerRecord<String, GenericRecord> consumerRecord) {
    try {
        GenericRecord generic = consumerRecord.value();
        Object obj = generic.get("metadata");

        ObjectMapper mapper = new ObjectMapper();
        Header headerMetaData = mapper.readValue(obj.toString(), Header.class);

        System.out.println("Received payload : " + consumerRecord.value());
        // Call backend with details in GenericRecord
    } catch (Exception e) {
        System.out.println("Exception while reading message from Kafka " + e);
    }
}
Kafka config
@Bean
public ConcurrentKafkaListenerContainerFactory<String, GenericRecord> genericKafkaListenerFactory() {
    ConcurrentKafkaListenerContainerFactory<String, GenericRecord> factory = new ConcurrentKafkaListenerContainerFactory<>();
    factory.setConsumerFactory(genericConsumerFactory());
    return factory;
}

public ConsumerFactory<String, GenericRecord> genericConsumerFactory() {
    Map<String, Object> config = new HashMap<>();
    config.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "127.0.0.1:9092");
    config.put(ConsumerConfig.GROUP_ID_CONFIG, "group_id");
    config.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
    config.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, KafkaAvroDeserializer.class);
    config.put(KafkaAvroDeserializerConfig.SCHEMA_REGISTRY_URL_CONFIG, "http://localhost:8081");
    return new DefaultKafkaConsumerFactory<>(config);
}
Avro Schema
{
  "type": "record",
  "name": "KafkaEvent",
  "namespace": "com.ms.model.avro",
  "fields": [
    {
      "name": "metadata",
      "type": {
        "name": "metadata",
        "type": "record",
        "fields": [
          {
            "name": "correlationid",
            "type": "string",
            "doc": "this is correlation id for transaction"
          },
          {
            "name": "subject",
            "type": "string",
            "doc": "this is subject for transaction"
          },
          {
            "name": "version",
            "type": "string",
            "doc": "this is version for transaction"
          }
        ]
      }
    },
    {
      "name": "name",
      "type": "string"
    },
    {
      "name": "dept",
      "type": "string"
    },
    {
      "name": "empnumber",
      "type": "string"
    }
  ]
}
This is the test code I tried:
@ComponentTest
@RunWith(SpringRunner.class)
@EmbeddedKafka(partitions = 1, topics = { "positionmgmt.v1" })
@SpringBootTest(classes = { Application.class })
@DirtiesContext
public class ConsumeKafkaMessageTest {

    private static final String TEST_TOPIC = "positionmgmt.v1";

    @Autowired(required = true)
    EmbeddedKafkaBroker embeddedKafkaBroker;

    private Schema schema;
    private SchemaRegistryClient schemaRegistry;
    private KafkaAvroSerializer avroSerializer;
    private KafkaAvroDeserializer avroDeserializer;
    private MockSchemaRegistryClient mockSchemaRegistryClient = new MockSchemaRegistryClient();
    private String registryUrl = "unused";
    private String avroSchema = "..."; // string representation of the Avro schema above

    @BeforeEach
    public void setUp() throws Exception {
        Schema.Parser parser = new Schema.Parser();
        schema = parser.parse(avroSchema);
        mockSchemaRegistryClient.register("Vendors-value", schema);
    }

    @Test
    public void consumeKafkaMessage_receive_sucess() {
        Schema metadataSchema = schema.getField("metadata").schema();
        GenericRecord metadata = new GenericData.Record(metadataSchema);
        metadata.put("version", "1.0");
        metadata.put("correlationid", "correlationid");
        metadata.put("subject", "metadata");

        GenericRecord record = new GenericData.Record(schema);
        record.put("metadata", metadata);
        record.put("name", "ABC");
        record.put("dept", "XYZ");

        Consumer<String, GenericRecord> consumer = configureConsumer();
        Producer<String, GenericRecord> producer = configureProducer();

        ProducerRecord<String, GenericRecord> prodRecord = new ProducerRecord<>(TEST_TOPIC, record);
        producer.send(prodRecord);

        ConsumerRecord<String, GenericRecord> singleRecord = KafkaTestUtils.getSingleRecord(consumer, TEST_TOPIC);
        assertNotNull(singleRecord.value());

        consumer.close();
        producer.close();
    }

    private Consumer<String, GenericRecord> configureConsumer() {
        Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("groupid", "true", embeddedKafkaBroker);
        consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
        Consumer<String, GenericRecord> consumer = new DefaultKafkaConsumerFactory<String, GenericRecord>(consumerProps).createConsumer();
        consumer.subscribe(Collections.singleton(TEST_TOPIC));
        return consumer;
    }

    private Producer<String, GenericRecord> configureProducer() {
        Map<String, Object> producerProps = new HashMap<>(KafkaTestUtils.producerProps(embeddedKafkaBroker));
        producerProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        producerProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, KafkaAvroSerializer.class.getName());
        producerProps.put(KafkaAvroSerializerConfig.SCHEMA_REGISTRY_URL_CONFIG, mockSchemaRegistryClient);
        producerProps.put(KafkaAvroSerializerConfig.AUTO_REGISTER_SCHEMAS, "false");
        return new DefaultKafkaProducerFactory<String, GenericRecord>(producerProps).createProducer();
    }
}
Error
component.com.ms.listener.ConsumeKafkaMessageTest > consumeKafkaMessage_receive_sucess() FAILED
org.apache.kafka.common.KafkaException: Failed to construct kafka producer
at org.apache.kafka.clients.producer.KafkaProducer.<init>(KafkaProducer.java:457)
at org.apache.kafka.clients.producer.KafkaProducer.<init>(KafkaProducer.java:289)
at org.springframework.kafka.core.DefaultKafkaProducerFactory.createKafkaProducer(DefaultKafkaProducerFactory.java:318)
at org.springframework.kafka.core.DefaultKafkaProducerFactory.createProducer(DefaultKafkaProducerFactory.java:305)
at component.com.ms.listener.ConsumeKafkaMessageTest.configureProducer(ConsumeKafkaMessageTest.java:125)
at component.com.ms.listener.ConsumeKafkaMessageTest.consumeKafkaMessage_receive_sucess(ConsumeKafkaMessageTest.java:97)
Caused by:
io.confluent.common.config.ConfigException: Invalid value io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient#20751870 for configuration schema.registry.url: Expected a comma separated list.
at io.confluent.common.config.ConfigDef.parseType(ConfigDef.java:345)
at io.confluent.common.config.ConfigDef.parse(ConfigDef.java:249)
at io.confluent.common.config.AbstractConfig.<init>(AbstractConfig.java:78)
at io.confluent.kafka.serializers.AbstractKafkaAvroSerDeConfig.<init>(AbstractKafkaAvroSerDeConfig.java:105)
at io.confluent.kafka.serializers.KafkaAvroSerializerConfig.<init>(KafkaAvroSerializerConfig.java:32)
at io.confluent.kafka.serializers.KafkaAvroSerializer.configure(KafkaAvroSerializer.java:48)
at org.apache.kafka.common.serialization.ExtendedSerializer$Wrapper.configure(ExtendedSerializer.java:60)
at org.apache.kafka.clients.producer.KafkaProducer.<init>(KafkaProducer.java:372)
... 5 more

I investigated it a bit and found that the problem is in the CachedSchemaRegistryClient that is used by the KafkaAvroSerializer/Deserializer to fetch schema definitions from the Confluent Schema Registry.
You already have your schema definition locally, so you don't need to go to the Schema Registry for it (at least in your tests).
I had a similar problem and solved it by creating a custom KafkaAvroSerializer/KafkaAvroDeserializer.
This is a sample KafkaAvroSerializer. It is rather simple: you just need to extend the provided KafkaAvroSerializer and tell it to use MockSchemaRegistryClient.
public class CustomKafkaAvroSerializer extends KafkaAvroSerializer {
    public CustomKafkaAvroSerializer() {
        super();
        super.schemaRegistry = new MockSchemaRegistryClient();
    }

    public CustomKafkaAvroSerializer(SchemaRegistryClient client) {
        super(new MockSchemaRegistryClient());
    }

    public CustomKafkaAvroSerializer(SchemaRegistryClient client, Map<String, ?> props) {
        super(new MockSchemaRegistryClient(), props);
    }
}
This is a sample KafkaAvroDeserializer. When the deserialize method is called, you need to tell it which schema to use.
public class CustomKafkaAvroDeserializer extends KafkaAvroDeserializer {
    @Override
    public Object deserialize(String topic, byte[] bytes) {
        this.schemaRegistry = getMockClient(KafkaEvent.SCHEMA$);
        return super.deserialize(topic, bytes);
    }

    private static SchemaRegistryClient getMockClient(final Schema schema$) {
        return new MockSchemaRegistryClient() {
            @Override
            public synchronized Schema getById(int id) {
                return schema$;
            }
        };
    }
}
The last step is to tell Spring to use the created serializer/deserializer (use the fully qualified class names for the custom classes):

spring.kafka.producer.properties.schema.registry.url=not-used
spring.kafka.producer.value-serializer=CustomKafkaAvroSerializer
spring.kafka.producer.key-serializer=org.apache.kafka.common.serialization.StringSerializer
spring.kafka.producer.group-id=showcase-producer-id

spring.kafka.consumer.properties.schema.registry.url=not-used
spring.kafka.consumer.value-deserializer=CustomKafkaAvroDeserializer
spring.kafka.consumer.key-deserializer=org.apache.kafka.common.serialization.StringDeserializer
spring.kafka.consumer.group-id=showcase-consumer-id

spring.kafka.auto.offset.reset=earliest
spring.kafka.producer.auto.register.schemas=true
spring.kafka.properties.specific.avro.reader=true
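If you wire the consumer in Java config (as in the question) instead of application properties, the equivalent change is simply to point the value deserializer at the custom class. A minimal sketch, adapted from the question's genericConsumerFactory():

public ConsumerFactory<String, GenericRecord> genericConsumerFactory() {
    Map<String, Object> config = new HashMap<>();
    config.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "127.0.0.1:9092");
    config.put(ConsumerConfig.GROUP_ID_CONFIG, "group_id");
    config.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
    // swap in the custom deserializer; the URL property is still required but never used
    config.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, CustomKafkaAvroDeserializer.class);
    config.put(KafkaAvroDeserializerConfig.SCHEMA_REGISTRY_URL_CONFIG, "not-used");
    return new DefaultKafkaConsumerFactory<>(config);
}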
I wrote a short blog post about that:
https://medium.com/@igorvlahek1/no-need-for-schema-registry-in-your-spring-kafka-tests-a5b81468a0e1?source=friends_link&sk=e55f73b86504e9f577e259181c8d0e23
Link to the working sample project: https://github.com/ivlahek/kafka-avro-without-registry

The answer from @ivlahek works, but if you look at this example three years later you might want to make a slight modification to CustomKafkaAvroDeserializer:
private static SchemaRegistryClient getMockClient(final Schema schema) {
    return new MockSchemaRegistryClient() {
        @Override
        public ParsedSchema getSchemaBySubjectAndId(String subject, int id)
                throws IOException, RestClientException {
            return new AvroSchema(schema);
        }
    };
}
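On the registration side, the newer client API takes a ParsedSchema wrapper as well. A small sketch, assuming a recent io.confluent:kafka-schema-registry-client where register(String, ParsedSchema) is available, and the default TopicNameStrategy (subject = topic name plus "-value"):

// AvroSchema wraps an org.apache.avro.Schema into a ParsedSchema
MockSchemaRegistryClient mockClient = new MockSchemaRegistryClient();
mockClient.register("positionmgmt.v1-value", new AvroSchema(schema));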

As the error says, you need to provide a string for the registry URL in the producer config, not an object.
Since you're using the mock class, that string could be anything...
However, you'll need to construct the serializers given the registry instance:

Serializer<Object> serializer = new KafkaAvroSerializer(mockSchemaRegistry);
// build a config map containing ("schema.registry.url", "unused")
serializer.configure(config, false);

Otherwise, it will try to create a non-mocked client.
And put that instance into the properties:

producerProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, serializer);
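Putting the pieces together with the test's existing fields, a minimal sketch of configureProducer() (assuming spring-kafka's DefaultKafkaProducerFactory constructor that accepts pre-built serializer instances):

private Producer<String, Object> configureProducer() {
    Map<String, Object> producerProps = new HashMap<>(KafkaTestUtils.producerProps(embeddedKafkaBroker));

    // Configure the Avro serializer against the mock registry; the URL value
    // satisfies the config check but is never used by the mock client.
    KafkaAvroSerializer valueSerializer = new KafkaAvroSerializer(mockSchemaRegistryClient);
    valueSerializer.configure(Map.of("schema.registry.url", "unused"), false); // false = value serializer

    // Pass serializer instances directly instead of class names in the map
    return new DefaultKafkaProducerFactory<>(producerProps,
            new StringSerializer(), valueSerializer).createProducer();
}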

If your @KafkaListener is in the test class, you can read the record with a StringDeserializer and then convert it to the desired class manually:
@Autowired
private MyKafkaAvroDeserializer myKafkaAvroDeserializer;

@KafkaListener(topics = "test")
public void inputData(ConsumerRecord<?, ?> consumerRecord) {
    log.info("received record='{}' payload='{}'", consumerRecord.toString(), consumerRecord.value());
    GenericRecord genericRecord = (GenericRecord) myKafkaAvroDeserializer.deserialize("test",
            consumerRecord.value().toString().getBytes(StandardCharsets.UTF_8));
    Myclass myclass = (Myclass) SpecificData.get().deepCopy(Myclass.SCHEMA$, genericRecord);
}
@Component
public class MyKafkaAvroDeserializer extends KafkaAvroDeserializer {

    @Override
    public Object deserialize(String topic, byte[] bytes) {
        this.schemaRegistry = getMockClient(Myclass.SCHEMA$);
        return super.deserialize(topic, bytes);
    }

    private static SchemaRegistryClient getMockClient(final Schema schema$) {
        return new MockSchemaRegistryClient() {
            @Override
            public synchronized org.apache.avro.Schema getById(int id) {
                return schema$;
            }
        };
    }
}
Remember to add the schema registry URL and the key/value deserializers in application.yml, even though they won't be used:

consumer:
  key-deserializer: org.apache.kafka.common.serialization.StringDeserializer
  value-deserializer: org.apache.kafka.common.serialization.StringDeserializer
  properties:
    schema.registry.url: http://localhost:8080

How to test EmbeddedKafka with SpringBoot

I ran into a problem testing my Kafka producer after switching from a custom producer to KafkaTemplate.
For testing purposes I wrote the following class:
public class KafkaTestingTools {

    private static Map<String, Consumer<Long, GenericData.Record>> consumers = new HashMap<>();

    public static void sendMessage(String topic, String key, Object message, Schema schema) throws InterruptedException {
        Properties properties = new Properties();
        properties.put("schema.registry.url", "http://localhost:8081");
        properties.put("bootstrap.servers", "http://localhost:9092");
        properties.put("acks", "all");
        properties.put("retries", 0);
        properties.put("linger.ms", 1);
        properties.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        properties.put("value.serializer", "com.logistics.mock.CustomKafkaAvroSerializer");

        KafkaProducer<String, Object> producer = new KafkaProducer<>(properties);
        CustomKafkaAvroDeserializer.setTopicScheme(topic, schema);

        ProducerRecord<String, Object> record = new ProducerRecord<>(topic, key, message);
        producer.send(record);
        producer.close();
    }

    public static void registerConsumerContainer(EmbeddedKafkaBroker embeddedKafka, String topic, Schema schema) throws InterruptedException {
        Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("testGroup" + UUID.randomUUID().toString(), "true", embeddedKafka);
        consumerProps.put("schema.registry.url", "http://localhost:8081");
        consumerProps.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        consumerProps.put("value.deserializer", "com.logistics.mock.CustomKafkaAvroDeserializer");
        consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest");

        ConsumerFactory<Long, GenericData.Record> cf = new DefaultKafkaConsumerFactory<>(consumerProps);
        Consumer<Long, GenericData.Record> consumer = cf.createConsumer();
        consumers.put(topic, consumer);
        embeddedKafka.consumeFromAnEmbeddedTopic(consumer, topic);
        CustomKafkaAvroDeserializer.setTopicScheme(topic, schema);
    }

    public static Object getSingleRecordFromRegisteredContainer(EmbeddedKafkaBroker embeddedKafka, String topic) {
        return SpecificData.get().deepCopy(
                CustomKafkaAvroDeserializer.getTopicScheme(topic),
                KafkaTestUtils.getSingleRecord(consumers.get(topic), topic).value()
        );
    }
}
Producer example:
@Service
@CommonsLog
public class PointProducer {

    private final KafkaTemplate<String, ExportMessage> kafkaTemplate;
    private final String topic;

    @Autowired
    public PointProducer(@Value("${kafka.producer.points}") String topic,
                         KafkaTemplate<String, ExportMessage> kafkaTemplate) {
        this.topic = topic;
        this.kafkaTemplate = kafkaTemplate;
    }

    public void produce(Point point) {
        var message = new ExportMessage();
        message.setId(point.getId());
        log.warn("produce point: " + message.toString());
        kafkaTemplate.send(topic, point.getId().toString(), message);
        kafkaTemplate.flush();
    }
}
Kafka config:

spring:
  kafka:
    bootstrap-servers: ${spring.embedded.kafka.brokers}
    consumer:
      key-deserializer: org.apache.kafka.common.serialization.StringDeserializer
      point-deserializer: com.logistics.mock.CustomKafkaAvroDeserializer
      auto-offset-reset: latest
      group-id: credit_file_test
    producer:
      key-serializer: org.apache.kafka.common.serialization.StringSerializer
      value-serializer: com.logistics.mock.CustomKafkaAvroSerializer
    schema-registry-url: http://localhost:8081

kafka.consumer.points: points_export
kafka.producer.files: common.file
kafka.producer.orders: common.order
kafka.producer.points: common.point
And the test looks like:
@SpringBootTest
@TestMethodOrder(OrderAnnotation.class)
@EmbeddedKafka(partitions = 1, topics = { "topic1", "topic2" }, brokerProperties = { "listeners=PLAINTEXT://localhost:9092", "port=9092" })
class ApplicationLogisticOrderTest {

    @Test
    @Order(1)
    @WithMockUser(roles = "ADMIN")
    void checkSendToKafka() throws Exception {
        KafkaTestingTools.registerConsumerContainer(this.embeddedKafka, TOPIC1, Message.SCHEMA$);
        Thread.sleep(3000);

        prepareCustomizedLogisticOrder(t -> {
        });

        var mockMvc = MockMvcBuilders.webAppContextSetup(this.wac).build();
        mockMvc.perform(MockMvcRequestBuilders.put("/orders/7000000/sendToKafka"));
    }
}
And on the line with perform(...) I caught:
Caused by: org.apache.kafka.common.config.ConfigException: Missing required configuration "schema.registry.url" which has no default value.
at org.apache.kafka.common.config.ConfigDef.parseValue(ConfigDef.java:478)
at org.apache.kafka.common.config.ConfigDef.parse(ConfigDef.java:468)
at org.apache.kafka.common.config.AbstractConfig.<init>(AbstractConfig.java:108)
at org.apache.kafka.common.config.AbstractConfig.<init>(AbstractConfig.java:129)
at io.confluent.kafka.serializers.AbstractKafkaSchemaSerDeConfig.<init>(AbstractKafkaSchemaSerDeConfig.java:177)
at io.confluent.kafka.serializers.KafkaAvroSerializerConfig.<init>(KafkaAvroSerializerConfig.java:32)
at io.confluent.kafka.serializers.KafkaAvroSerializer.configure(KafkaAvroSerializer.java:50)
at org.apache.kafka.clients.producer.KafkaProducer.<init>(KafkaProducer.java:376)
I tried to put it in application.yml and in the KafkaTestingTools properties, but nothing changed; it looks like Spring reads this property from somewhere else.
Has anyone met this situation and found a solution?
Thanks in advance.
The problem is here:

spring:
  kafka:
    schema-registry-url: http://localhost:8081

There is no such property managed by Spring Boot.
Moreover, the relaxed name schema-registry-url does not map onto the serializer property schema.registry.url.
You have to change it to this:
spring:
  kafka:
    producer:
      properties:
        "schema.registry.url": http://localhost:8081
See docs for more info: https://docs.spring.io/spring-boot/docs/current/reference/html/features.html#features.messaging.kafka.additional-properties

Trigger one Kafka consumer by using values of another consumer In Spring Kafka

I have one scheduler which produces one event, and my consumer consumes this event. The payload of this event is a JSON with the fields below:

private String topic;
private String partition;
private String filterKey;
private long CustId;

Now I need to trigger one more consumer, which will take all this information from the first consumer's response.
#KafkaListener(topics = "<**topic-name-from-first-consumer-response**>", groupId = "group" containerFactory = "kafkaListenerFactory")
public void consumeJson(List<User> data, Acknowledgment acknowledgment,
#Header(KafkaHeaders.RECEIVED_PARTITION_ID) List<Integer> partitions,
#Header(KafkaHeaders.OFFSET) List<Long> offsets) {
// consumer code goes here...}
I need to create some dynamic variable which I can pass in place of the topic name.
Similarly, I am using filtering in the configuration, and I need to pass the key dynamically:
factory.setRecordFilterStrategy(new RecordFilterStrategy<String, Object>() {
    @Override
    public boolean filter(ConsumerRecord<String, Object> consumerRecord) {
        if (consumerRecord.key().equals("<key-will-go-here>")) {
            return false;
        } else {
            return true;
        }
    }
});
How can we dynamically inject these values from the response of the first consumer and trigger the second consumer? Both consumers are in the same application.
You cannot do that with an annotated listener; the configuration is only used during initialization. You would need to create a listener container yourself (using the ConcurrentKafkaListenerContainerFactory) to dynamically create a listener.
EDIT
Here's an example.
@SpringBootApplication
public class So69134055Application {

    public static void main(String[] args) {
        SpringApplication.run(So69134055Application.class, args);
    }

    @Bean
    public NewTopic topic() {
        return TopicBuilder.name("so69134055").partitions(1).replicas(1).build();
    }
}

@Component
class Listener {

    private static final Logger log = LoggerFactory.getLogger(Listener.class);

    private static final Method otherListen;

    static {
        try {
            otherListen = Listener.class.getDeclaredMethod("otherListen", List.class);
        }
        catch (NoSuchMethodException | SecurityException ex) {
            throw new IllegalStateException(ex);
        }
    }

    private final ConcurrentKafkaListenerContainerFactory<String, String> factory;
    private final MessageHandlerMethodFactory methodFactory;
    private final KafkaAdmin admin;
    private final KafkaTemplate<String, String> template;

    public Listener(ConcurrentKafkaListenerContainerFactory<String, String> factory, KafkaAdmin admin,
            KafkaTemplate<String, String> template, KafkaListenerAnnotationBeanPostProcessor<?, ?> bpp) {
        this.factory = factory;
        this.admin = admin;
        this.template = template;
        this.methodFactory = bpp.getMessageHandlerMethodFactory();
    }

    @KafkaListener(id = "so69134055", topics = "so69134055")
    public void listen(String topicName) {
        try (AdminClient client = AdminClient.create(this.admin.getConfigurationProperties())) {
            NewTopic topic = TopicBuilder.name(topicName).build();
            client.createTopics(List.of(topic)).all().get(10, TimeUnit.SECONDS);
        }
        catch (Exception e) {
            log.error("Failed to create topic", e);
        }
        ConcurrentMessageListenerContainer<String, String> container =
                this.factory.createContainer(new TopicPartitionOffset(topicName, 0));
        BatchMessagingMessageListenerAdapter<String, String> adapter =
                new BatchMessagingMessageListenerAdapter<>(this, otherListen);
        adapter.setHandlerMethod(new HandlerAdapter(
                this.methodFactory.createInvocableHandlerMethod(this, otherListen)));
        FilteringBatchMessageListenerAdapter<String, String> filtered =
                new FilteringBatchMessageListenerAdapter<>(adapter, record -> !record.key().equals("foo"));
        container.getContainerProperties().setMessageListener(filtered);
        container.getContainerProperties().setGroupId("group.for." + topicName);
        container.setBeanName(topicName + ".container");
        container.start();
        IntStream.range(0, 10).forEach(i -> this.template.send(topicName, 0, i % 2 == 0 ? "foo" : "bar", "test" + i));
    }

    void otherListen(List<String> others) {
        log.info("Others: {}", others);
    }
}
spring.kafka.consumer.auto-offset-reset=earliest
Output, showing that the filter discarded the records with bar in the key:
Others: [test0, test2, test4, test6, test8]

Spring Kafka: Close the container and read the messages from specific offset with ConcurrentKafkaListenerContainerFactory

In my Spring Kafka application, I want to trigger the consumer at run time according to input from a scheduler. The scheduler will tell the listener from which topic it can start consuming messages. There is a Spring Boot application with a custom ConcurrentKafkaListenerContainerFactory class. I need to perform three tasks:

1. Close the container after successfully reading all the messages available on the topic.
2. Store the current offset in a DB or file system.
3. The next time the consumer comes up, use the stored offset to process the records instead of the default offset managed by Kafka, so that in the future we can change the offset value in the DB and get the desired reports.

I know how to handle all of this with @KafkaListener, but I am not sure how to hook it up with ConcurrentKafkaListenerContainerFactory. The current code is listed below:
@SpringBootApplication
public class KafkaApp {

    public static void main(String[] args) {
        SpringApplication.run(KafkaApp.class, args);
    }

    @Bean
    public NewTopic topic() {
        return TopicBuilder.name("testTopic").partitions(1).replicas(1).build();
    }
}

@Component
class Listener {

    private static final Logger log = LoggerFactory.getLogger(Listener.class);

    private static final Method otherListen;

    static {
        try {
            otherListen = Listener.class.getDeclaredMethod("otherListen", List.class);
        }
        catch (NoSuchMethodException | SecurityException ex) {
            throw new IllegalStateException(ex);
        }
    }

    private final ConcurrentKafkaListenerContainerFactory<String, String> factory;
    private final MessageHandlerMethodFactory methodFactory;
    private final KafkaAdmin admin;
    private final KafkaTemplate<String, String> template;

    public Listener(ConcurrentKafkaListenerContainerFactory<String, String> factory, KafkaAdmin admin,
            KafkaTemplate<String, String> template, KafkaListenerAnnotationBeanPostProcessor<?, ?> bpp) {
        this.factory = factory;
        this.admin = admin;
        this.template = template;
        this.methodFactory = bpp.getMessageHandlerMethodFactory();
    }

    @KafkaListener(id = "myId", topics = "testTopic")
    public void listen(String topicName) {
        try (AdminClient client = AdminClient.create(this.admin.getConfigurationProperties())) {
            NewTopic topic = TopicBuilder.name(topicName).build();
            client.createTopics(List.of(topic)).all().get(10, TimeUnit.SECONDS);
        }
        catch (Exception e) {
            log.error("Failed to create topic", e);
        }
        ConcurrentMessageListenerContainer<String, String> container =
                this.factory.createContainer(new TopicPartitionOffset(topicName, 0));
        BatchMessagingMessageListenerAdapter<String, String> adapter =
                new BatchMessagingMessageListenerAdapter<>(this, otherListen);
        adapter.setHandlerMethod(new HandlerAdapter(
                this.methodFactory.createInvocableHandlerMethod(this, otherListen)));
        FilteringBatchMessageListenerAdapter<String, String> filtered =
                new FilteringBatchMessageListenerAdapter<>(adapter, record -> !record.key().equals("foo"));
        container.getContainerProperties().setMessageListener(filtered);
        container.getContainerProperties().setGroupId("group.for." + topicName);
        container.setBeanName(topicName + ".container");
        container.start();
        IntStream.range(0, 10).forEach(i -> this.template.send(topicName, 0, i % 2 == 0 ? "foo" : "bar", "test" + i));
    }

    void otherListen(List<String> others) {
        log.info("Others: {}", others);
    }
}
EDIT
@SpringBootApplication
public class KafkaApp {

    public static void main(String[] args) {
        SpringApplication.run(KafkaApp.class, args);
    }

    @Bean
    public NewTopic topic() {
        return TopicBuilder.name("testTopic").partitions(1).replicas(1).build();
    }
}

@Component
class Listener {

    private static final Logger log = LoggerFactory.getLogger(Listener.class);

    private static final Method otherListen;

    static {
        try {
            otherListen = Listener.class.getDeclaredMethod("otherListen", List.class);
        }
        catch (NoSuchMethodException | SecurityException ex) {
            throw new IllegalStateException(ex);
        }
    }

    private final ConcurrentKafkaListenerContainerFactory<String, String> factory;
    private final MessageHandlerMethodFactory methodFactory;
    private final KafkaAdmin admin;
    private final KafkaTemplate<String, String> template;

    public Listener(ConcurrentKafkaListenerContainerFactory<String, String> factory, KafkaAdmin admin,
            KafkaTemplate<String, String> template, KafkaListenerAnnotationBeanPostProcessor<?, ?> bpp) {
        this.factory = factory;
        this.admin = admin;
        this.template = template;
        this.methodFactory = bpp.getMessageHandlerMethodFactory();
    }

    @KafkaListener(id = "myId", topics = "testTopic")
    public void listen(String topicName) {
        try (AdminClient client = AdminClient.create(this.admin.getConfigurationProperties())) {
            NewTopic topic = TopicBuilder.name(topicName).build();
            client.createTopics(List.of(topic)).all().get(10, TimeUnit.SECONDS);
        }
        catch (Exception e) {
            log.error("Failed to create topic", e);
        }
        ConcurrentMessageListenerContainer<String, String> container =
                this.factory.createContainer(new TopicPartitionOffset(topicName, 0));
        BatchMessagingMessageListenerAdapter<String, String> adapter =
                new BatchMessagingMessageListenerAdapter<>(this, otherListen);
        adapter.setHandlerMethod(new HandlerAdapter(
                this.methodFactory.createInvocableHandlerMethod(this, otherListen)));
        FilteringBatchMessageListenerAdapter<String, String> filtered =
                new FilteringBatchMessageListenerAdapter<>(adapter, record -> !record.key().equals("foo"));
        container.getContainerProperties().setMessageListener(filtered);
        container.getContainerProperties().setGroupId("group.for." + topicName);
        container.setBeanName(topicName + ".container");
        container.getContainerProperties().setIdleEventInterval(3000L);
        container.start();
        IntStream.range(0, 10).forEach(i -> this.template.send(topicName, 0, i % 2 == 0 ? "foo" : "bar", "test" + i));
    }

    void otherListen(List<String> others) {
        log.info("Others: {}", others);
    }

    @EventListener
    public void eventHandler(ListenerContainerIdleEvent event) {
        log.info("No messages received for " + event.getIdleTime() + " milliseconds");
    }
}
You can receive ListenerContainerIdleEvents when there are no messages left to process; you can use this event to stop the container. You should perform the stop() on a different thread (not the one that publishes the event).
See How to check if Kafka is empty using Spring Kafka?
You can get the partition/offset in several ways:

void otherListen(List<ConsumerRecord<..., ...>> records)

or

void otherListen(List<String> others,
        @Header(KafkaHeaders.RECEIVED_PARTITION) List<Integer> partitions,
        @Header(KafkaHeaders.OFFSET) List<Long> offsets)
You can specify the starting offset in the TopicPartitionOffset, e.g. new TopicPartitionOffset(topicName, 0, startOffset), when creating the container.
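For example, a minimal sketch (loadStoredOffset() is a hypothetical helper that reads back the offset your code previously saved to the DB):

// Hypothetical DB lookup for the last processed offset of topicName, partition 0
long startOffset = loadStoredOffset(topicName, 0);

ConcurrentMessageListenerContainer<String, String> container =
        this.factory.createContainer(new TopicPartitionOffset(topicName, 0, startOffset));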
EDIT
To stop the container when it is idle, set the idleEventInterval, add an @EventListener method, and stop the container there:
TaskExecutor exec = new SimpleAsyncTaskExecutor();

@EventListener
void idle(ListenerContainerIdleEvent event) {
    // log...
    this.exec.execute(() -> event.getContainer(ConcurrentMessageListenerContainer.class).stop());
}
If you add concurrency to your containers, you will need to wait for each child container to go idle before stopping the parent container.
EDIT2
I just added it to the code I wrote for the answer to your other question and it works exactly as expected.
@KafkaListener(id = "so69134055", topics = "so69134055")
public void listen(String topicName) {
    try (AdminClient client = AdminClient.create(this.admin.getConfigurationProperties())) {
        NewTopic topic = TopicBuilder.name(topicName).build();
        client.createTopics(List.of(topic)).all().get(10, TimeUnit.SECONDS);
    }
    catch (Exception e) {
        log.error("Failed to create topic", e);
    }
    ConcurrentMessageListenerContainer<String, String> container =
            this.factory.createContainer(new TopicPartitionOffset(topicName, 0));
    BatchMessagingMessageListenerAdapter<String, String> adapter =
            new BatchMessagingMessageListenerAdapter<>(this, otherListen);
    adapter.setHandlerMethod(new HandlerAdapter(
            this.methodFactory.createInvocableHandlerMethod(this, otherListen)));
    FilteringBatchMessageListenerAdapter<String, String> filtered =
            new FilteringBatchMessageListenerAdapter<>(adapter, record -> !record.key().equals("foo"));
    container.getContainerProperties().setMessageListener(filtered);
    container.getContainerProperties().setGroupId("group.for." + topicName);
    container.getContainerProperties().setIdleEventInterval(3000L);
    container.setBeanName(topicName + ".container");
    container.start();
    IntStream.range(0, 10).forEach(i -> this.template.send(topicName, 0, i % 2 == 0 ? "foo" : "bar", "test" + i));
}

void otherListen(List<String> others) {
    log.info("Others: {}", others);
}

TaskExecutor exec = new SimpleAsyncTaskExecutor();

@EventListener
public void idle(ListenerContainerIdleEvent event) {
    log.info(event.toString());
    this.exec.execute(() -> {
        ConcurrentMessageListenerContainer container = event.getContainer(ConcurrentMessageListenerContainer.class);
        log.info("stopping container: " + container.getBeanName());
        container.stop();
    });
}
[foo.container-0-C-1] Others: [test0, test2, test4, test6, test8]
[foo.container-0-C-1] ListenerContainerIdleEvent [idleTime=5.007s, listenerId=foo.container-0, container=KafkaMessageListenerContainer [id=foo.container-0, clientIndex=-0, topicPartitions=[foo-0]], paused=false, topicPartitions=[foo-0]]
[SimpleAsyncTaskExecutor-1] stopping container: foo.container
[foo.container-0-C-1] [Consumer clientId=consumer-group.for.foo-2, groupId=group.for.foo] Unsubscribed all topics or patterns and assigned partitions
[foo.container-0-C-1] Metrics scheduler closed
[foo.container-0-C-1] Closing reporter org.apache.kafka.common.metrics.JmxReporter
[foo.container-0-C-1] Metrics reporters closed
[foo.container-0-C-1] App info kafka.consumer for consumer-group.for.foo-2 unregistered
[foo.container-0-C-1] group.for.foo: Consumer stopped

Having issue in testing with spring cloud stream with kafka binding

I have following Binding
public interface KafkaBinding {
String DATA_OUT = "dataout";
#Output(DATA_OUT)
MessageChannel dataOut();
}
Here is the Kafka utility:
@EnableBinding(KafkaBinding.class)
public class KafkaStreamUtil {

    private final MessageChannel out;

    public KafkaStreamUtil(KafkaBinding binding) {
        this.out = binding.dataOut();
    }

    public void SendToKafkaTopic(List<Data> dataList) {
        dataList
            .stream()
            .forEach(this::sendMessage);
    }

    private void sendMessage(Data data) {
        Message<Data> message = MessageBuilder
                .withPayload(data)
                .setHeader(KafkaHeaders.MESSAGE_KEY, data.getRequestId().getBytes())
                .build();
        try {
            this.out.send(message);
        } catch (Exception e) {
            log.error(e.getMessage(), e);
        }
    }
}
My test class
@RunWith(SpringRunner.class)
@ActiveProfiles("test")
@SpringBootTest(classes = { KafkaStreamUtil.class, KafkaBinding.class })
@DirtiesContext(classMode = DirtiesContext.ClassMode.AFTER_EACH_TEST_METHOD)
public class KafkaStreamUtilTest {

    private static final String TEST_TOPIC1 = "data";
    private static final String GROUP_NAME = "embeddedKafkaApplication";

    @ClassRule
    public static EmbeddedKafkaRule kafkaRule =
            new EmbeddedKafkaRule(1, true, TEST_TOPIC1);

    public static EmbeddedKafkaBroker embeddedKafka = kafkaRule.getEmbeddedKafka();

    @Autowired
    private KafkaStreamUtil kUtil;

    @Autowired
    private KafkaBinding binding;

    @BeforeClass
    public static void setupProperties() {
        System.setProperty("spring.cloud.stream.kafka.binder.brokers", embeddedKafka.getBrokersAsString());
        System.setProperty("spring.cloud.stream.kafka.streams.binder.configuration.commit.interval.ms", "1000");
        System.setProperty("spring.cloud.stream.kafka.streams.binder.configuration.default.key.serde", "org.apache.kafka.common.serialization.Serdes$StringSerde");
        System.setProperty("spring.cloud.stream.kafka.streams.binder.configuration.default.value.serde", "org.apache.kafka.common.serialization.Serdes$StringSerde");
        System.setProperty("spring.cloud.stream.bindings.dataout.destination", "data");
        System.setProperty("spring.cloud.stream.bindings.dataout.producer.header-mode", "raw");
        System.setProperty("spring.autoconfigure.exclude", "org.springframework.cloud.stream.test.binder.TestSupportBinderAutoConfiguration");
        System.setProperty("spring.kafka.consumer.value-deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        System.setProperty("spring.cloud.stream.bindings.input.consumer.headerMode", "raw");
        System.setProperty("spring.cloud.stream.bindings.input.group", "embeddedKafkaApplication");
        System.setProperty("spring.kafka.consumer.group-id", "EmbeddedKafkaIntTest");
    }

    @Before
    public void setUp() throws Exception {
        kUtil = new KafkaStreamUtil(binding);
    }

    @Test
    public void sendToKafkaTopic() {
        kUtil.SendToKafkaTopic(dataList);
        Map<String, Object> consumerProps = KafkaTestUtils.consumerProps(GROUP_NAME, "false", embeddedKafka);
        consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
        consumerProps.put("key.deserializer", StringDeserializer.class);
        consumerProps.put("value.deserializer", StringDeserializer.class);
        DefaultKafkaConsumerFactory<String, String> cf = new DefaultKafkaConsumerFactory<>(consumerProps);
        Consumer<String, String> consumer = cf.createConsumer();
        consumer.subscribe(Collections.singleton(TEST_TOPIC1));
        ConsumerRecords<String, String> records = consumer.poll(10_000);
        consumer.commitSync();
        consumer.close();
        Assert.assertNotNull(records);
    }
}
I am unable to get the binding with the Kafka util in the test; the binding is always null. Please let me know what I am missing. I am able to test with @SpringBootTest, but it loads all the beans; I just want to load the components required for this test.

Kafka consumer in flink

I am working with Kafka and Apache Flink. I am trying to consume records (which are in Avro format) from a Kafka topic in Apache Flink. Below is the piece of code I am trying, using a custom deserializer to deserialize Avro records from the topic.
The Avro schema for the data I am sending to topic "test-topic" is as below:
{
  "namespace": "com.example.flink.avro",
  "type": "record",
  "name": "UserInfo",
  "fields": [
    {"name": "name", "type": "string"}
  ]
}
The custom deserializer I am using is as below:
public class AvroDeserializationSchema<T> implements DeserializationSchema<T> {

    private static final long serialVersionUID = 1L;

    private final Class<T> avroType;
    private transient DatumReader<T> reader;
    private transient BinaryDecoder decoder;

    public AvroDeserializationSchema(Class<T> avroType) {
        this.avroType = avroType;
    }

    public T deserialize(byte[] message) {
        ensureInitialized();
        try {
            decoder = DecoderFactory.get().binaryDecoder(message, decoder);
            T t = reader.read(null, decoder);
            return t;
        } catch (Exception ex) {
            throw new RuntimeException(ex);
        }
    }

    private void ensureInitialized() {
        if (reader == null) {
            if (org.apache.avro.specific.SpecificRecordBase.class.isAssignableFrom(avroType)) {
                reader = new SpecificDatumReader<T>(avroType);
            } else {
                reader = new ReflectDatumReader<T>(avroType);
            }
        }
    }

    public boolean isEndOfStream(T nextElement) {
        return false;
    }

    public TypeInformation<T> getProducedType() {
        return TypeExtractor.getForClass(avroType);
    }
}
And this is how my Flink app is written:
public class FlinkKafkaApp {

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        Properties kafkaProperties = new Properties();
        kafkaProperties.put("bootstrap.servers", "localhost:9092");
        kafkaProperties.put("group.id", "test");

        AvroDeserializationSchema<UserInfo> schema = new AvroDeserializationSchema<UserInfo>(UserInfo.class);
        FlinkKafkaConsumer011<UserInfo> consumer = new FlinkKafkaConsumer011<UserInfo>("test-topic", schema, kafkaProperties);

        DataStreamSource<UserInfo> userStream = env.addSource(consumer);
        userStream.map(new MapFunction<UserInfo, UserInfo>() {
            @Override
            public UserInfo map(UserInfo userInfo) {
                return userInfo;
            }
        }).print();
        env.execute("Test Kafka");
    }
}
I am trying to print the record sent to the topic, which is as below:
{"name" :"sumit"}
Output:
The output I am getting is
{"name":""}
Can anyone help figure out what the issue is here and why I am not getting {"name" : "sumit"} as output?
Flink documentation says:
Flink's Kafka consumer is called FlinkKafkaConsumer08 (or 09 for Kafka 0.9.0.x versions, etc. or just FlinkKafkaConsumer for Kafka >= 1.0.0 versions). It provides access to one or more Kafka topics.
We do not have to write a custom deserializer to consume Avro messages from Kafka.
To read SpecificRecords:
DataStreamSource<UserInfo> stream = streamExecutionEnvironment.addSource(new FlinkKafkaConsumer<>("test_topic", AvroDeserializationSchema.forSpecific(UserInfo.class), properties).setStartFromEarliest());
To read GenericRecords:
Schema schema = new Schema.Parser().parse("{\"namespace\": \"com.example.flink.avro\",\"type\": \"record\",\"name\": \"UserInfo\",\"fields\": [{\"name\": \"name\", \"type\": \"string\"}]}");
DataStreamSource<GenericRecord> stream = streamExecutionEnvironment.addSource(new FlinkKafkaConsumer<>("test_topic", AvroDeserializationSchema.forGeneric(schema), properties).setStartFromEarliest());
For more details: https://ci.apache.org/projects/flink/flink-docs-stable/dev/connectors/kafka.html#kafka-consumer
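Note that Flink's built-in AvroDeserializationSchema expects plain binary Avro. If the records were produced with Confluent's KafkaAvroSerializer (which prefixes each message with a magic byte and a schema id), a registry-aware schema is needed instead. A hedged sketch, assuming the flink-avro-confluent-registry dependency and a registry at localhost:8081:

// ConfluentRegistryAvroDeserializationSchema comes from the
// org.apache.flink:flink-avro-confluent-registry artifact
DataStreamSource<UserInfo> stream = env.addSource(new FlinkKafkaConsumer<>(
        "test-topic",
        ConfluentRegistryAvroDeserializationSchema.forSpecific(UserInfo.class, "http://localhost:8081"),
        kafkaProperties));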
