Return Java Object to proper Kafka producer - java

I want to implement a Kafka topic that sends and receives serialized Java objects, based on this example.
I tried this:
Producer Config:
@Configuration
public class KafkaProducerConfig {

    @Value(value = "${kafka.bootstrapAddress}")
    private String bootstrapAddress;

    @Bean
    public ProducerFactory<String, Object> requestFactoryProducerFactory() {
        Map<String, Object> configProps = new HashMap<>();
        configProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapAddress);
        configProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        configProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, ObjectFactorySerializer.class);
        return new DefaultKafkaProducerFactory<>(configProps);
    }

    @Bean
    public KafkaTemplate<String, Object> requestFactoryKafkaTemplate() {
        return new KafkaTemplate<>(requestFactoryProducerFactory());
    }

    @Bean
    public ConsumerFactory<String, Object> consumerFactory() {
        Map<String, Object> props = new HashMap<>();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapAddress);
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "tp-sale.reply");
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ObjectFactoryDeserializer.class);
        return new DefaultKafkaConsumerFactory<>(props);
    }

    @Bean
    public ConcurrentKafkaListenerContainerFactory<String, Object> kafkaListenerContainerFactory() {
        ConcurrentKafkaListenerContainerFactory<String, Object> factory =
                new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(consumerFactory());
        return factory;
    }

    @Bean
    public ReplyingKafkaTemplate<String, Object, Object> replyKafkaTemplate(
            ProducerFactory<String, Object> producerFactory,
            ConcurrentKafkaListenerContainerFactory<String, Object> factory) {
        ConcurrentMessageListenerContainer<String, Object> kafkaMessageListenerContainer =
                factory.createContainer("tp-sale.reply");
        ReplyingKafkaTemplate<String, Object, Object> requestReplyKafkaTemplate =
                new ReplyingKafkaTemplate<>(producerFactory, kafkaMessageListenerContainer);
        requestReplyKafkaTemplate.setDefaultTopic("tp-sale.reply");
        return requestReplyKafkaTemplate;
    }
}
Producer:
@RestController
@RequestMapping("/checkout")
public class CheckoutController {

    private static final Logger LOG = LoggerFactory.getLogger(CheckoutController.class);

    private KafkaTemplate<String, Object> requestFactoryKafkaTemplate;
    private ReplyingKafkaTemplate<String, Object, Object> requestReplyKafkaTemplate;

    @Autowired
    public CheckoutController(KafkaTemplate<String, Object> requestFactoryKafkaTemplate,
                              ReplyingKafkaTemplate<String, Object, Object> requestReplyKafkaTemplate) {
        this.requestFactoryKafkaTemplate = requestFactoryKafkaTemplate;
        this.requestReplyKafkaTemplate = requestReplyKafkaTemplate;
    }

    @PostMapping("sale_test")
    public void performSaleTest() throws ExecutionException, InterruptedException, TimeoutException {
        SaleRequestFactory obj = new SaleRequestFactory();
        obj.setId(100);
        ProducerRecord<String, Object> record = new ProducerRecord<>("tp-sale.request", obj);
        RequestReplyFuture<String, Object, Object> replyFuture = requestReplyKafkaTemplate.sendAndReceive(record);
        SendResult<String, Object> sendResult = replyFuture.getSendFuture().get(10, TimeUnit.SECONDS);
        ConsumerRecord<String, Object> consumerRecord = replyFuture.get(10, TimeUnit.SECONDS);
        SaleResponseFactory value = (SaleResponseFactory) consumerRecord.value();
        System.out.println("!!!!!!!!!!!! " + value.getUnique_id());
    }

    @PostMapping("authorize_test")
    public void performAuthTest() throws ExecutionException, InterruptedException, TimeoutException {
        AuthRequestFactory obj = new AuthRequestFactory();
        obj.setId(140);
        ProducerRecord<String, Object> record = new ProducerRecord<>("tp-sale.request", obj);
        RequestReplyFuture<String, Object, Object> replyFuture = requestReplyKafkaTemplate.sendAndReceive(record);
        SendResult<String, Object> sendResult = replyFuture.getSendFuture().get(10, TimeUnit.SECONDS);
        ConsumerRecord<String, Object> consumerRecord = replyFuture.get(10, TimeUnit.SECONDS);
        AuthResponseFactory value = (AuthResponseFactory) consumerRecord.value();
        System.out.println("!!!!!!!!!!!! " + value.getUnique_id());
    }
}
ObjectFactoryDeserializer
public class ObjectFactoryDeserializer implements Deserializer<Object> {

    @Override
    public Object deserialize(String topic, byte[] data) {
        // Delegate to the Headers-aware variant instead of returning null,
        // so both entry points actually deserialize the payload.
        return deserialize(topic, null, data);
    }

    @Override
    public Object deserialize(String topic, Headers headers, byte[] data) {
        ByteArrayInputStream bais = new ByteArrayInputStream(data);
        try (ObjectInputStream ois = new ObjectInputStream(bais)) {
            return ois.readObject();
        }
        catch (IOException e) {
            throw new UncheckedIOException(e);
        }
        catch (ClassNotFoundException e) {
            throw new IllegalStateException(e);
        }
    }
}
ObjectFactorySerializer
public class ObjectFactorySerializer implements Serializer<Object> {

    @Override
    public byte[] serialize(String topic, Object data) {
        // Delegate to the Headers-aware variant instead of returning null.
        return serialize(topic, null, data);
    }

    @Override
    public byte[] serialize(String topic, Headers headers, Object data) {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        try (ObjectOutputStream oos = new ObjectOutputStream(baos)) {
            oos.writeObject(data);
            return baos.toByteArray();
        }
        catch (IOException e) {
            throw new UncheckedIOException(e);
        }
    }
}
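As a quick sanity check (my own sketch, not part of the original post, and assuming SaleRequestFactory implements java.io.Serializable), the pair round-trips outside Kafka; the null Headers argument is fine because neither method uses the headers:
ObjectFactorySerializer serializer = new ObjectFactorySerializer();
ObjectFactoryDeserializer deserializer = new ObjectFactoryDeserializer();

SaleRequestFactory original = new SaleRequestFactory();
original.setId(100);

byte[] bytes = serializer.serialize("tp-sale.request", null, original);
SaleRequestFactory copy = (SaleRequestFactory) deserializer.deserialize("tp-sale.request", null, bytes);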
Consumer configuration:
@EnableKafka
@Configuration
public class KafkaConsumerConfig {

    @Value(value = "${kafka.bootstrapAddress}")
    private String bootstrapAddress;

    @Bean
    public ConsumerFactory<String, Object> consumerFactory() {
        Map<String, Object> props = new HashMap<>();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapAddress);
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "tp-sale.request");
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, ObjectFactoryDeserializer.class);
        return new DefaultKafkaConsumerFactory<>(props);
    }

    @Bean
    public ProducerFactory<String, Object> saleResponseFactoryProducerFactory() {
        Map<String, Object> configProps = new HashMap<>();
        configProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapAddress);
        configProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        configProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, ObjectFactorySerializer.class);
        return new DefaultKafkaProducerFactory<>(configProps);
    }

    @Bean
    public ConcurrentKafkaListenerContainerFactory<String, Object> kafkaListenerContainerFactory() {
        ConcurrentKafkaListenerContainerFactory<String, Object> factory = new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(consumerFactory());
        factory.setReplyTemplate(saleResponseFactoryKafkaTemplate());
        return factory;
    }

    @Bean
    public KafkaTemplate<String, Object> saleResponseFactoryKafkaTemplate() {
        return new KafkaTemplate<>(saleResponseFactoryProducerFactory());
    }
}
Consumer
@Component
@KafkaListener(id = "tp-sale.request", topics = "tp-sale.request")
public class ConsumerListener {

    private static final Logger LOGGER = LoggerFactory.getLogger(ConsumerListener.class);

    @KafkaHandler
    @SendTo("tp-sale.reply")
    public AuthResponseFactory fooListener(AuthRequestFactory authRequestFactory) {
        System.out.println("In AuthRequestFactoryListener: " + authRequestFactory);
        AuthResponseFactory resObj = new AuthResponseFactory();
        resObj.setUnique_id("123123");
        return resObj;
    }

    @KafkaHandler
    @SendTo("tp-sale.reply")
    public SaleResponseFactory barListener(SaleRequestFactory saleRequestFactory) {
        System.out.println("In SaleRequestFactoryListener: " + saleRequestFactory);
        SaleResponseFactory resObj = new SaleResponseFactory();
        resObj.setUnique_id("123123");
        return resObj;
    }
}
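(For reference: with a class-level @KafkaListener, Spring Kafka dispatches to the @KafkaHandler method whose parameter type matches the concrete type of the deserialized payload; as far as I can tell, that is how the two handlers above share the tp-sale.request topic.)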
Full minimal working example
When I hit the authorize_test endpoint, the code works fine.
When I hit the sale_test endpoint, I get this exception:
Producer exception:
14:06:48.706 [consumer-0-C-1] DEBUG KafkaMessageListenerContainer$ListenerConsumer[debug:313] - Commit list: {}
14:06:48.706 [consumer-0-C-1] DEBUG RecoveringBatchErrorHandler[debug:200] - Expected a BatchListenerFailedException; re-seeking batch
org.apache.kafka.common.errors.SerializationException: Error deserializing key/value for partition tp-sale.reply-0 at offset 3. If needed, please seek past the record to continue consumption.
Caused by: java.lang.ClassCastException: class org.engine.plugin.transactions.factory.SaleResponseFactory cannot be cast to class org.engine.plugin.transactions.factory.AuthResponseFactory (org.engine.plugin.transactions.factory.SaleResponseFactory and org.engine.plugin.transactions.factory.AuthResponseFactory are in unnamed module of loader org.springframework.boot.loader.LaunchedURLClassLoader @6267c3bb)
at org.engine.plugin.transactions.factory.ResponseFactoryDeserializer.deserialize(ResponseFactoryDeserializer.java:20)
at org.engine.plugin.transactions.factory.ResponseFactoryDeserializer.deserialize(ResponseFactoryDeserializer.java:10)
at org.apache.kafka.common.serialization.Deserializer.deserialize(Deserializer.java:60)
at org.apache.kafka.clients.consumer.internals.Fetcher.parseRecord(Fetcher.java:1324)
at org.apache.kafka.clients.consumer.internals.Fetcher.access$3400(Fetcher.java:129)
at org.apache.kafka.clients.consumer.internals.Fetcher$CompletedFetch.fetchRecords(Fetcher.java:1555)
at org.apache.kafka.clients.consumer.internals.Fetcher$CompletedFetch.access$1700(Fetcher.java:1391)
at org.apache.kafka.clients.consumer.internals.Fetcher.fetchRecords(Fetcher.java:683)
at org.apache.kafka.clients.consumer.internals.Fetcher.fetchedRecords(Fetcher.java:634)
at org.apache.kafka.clients.consumer.KafkaConsumer.pollForFetches(KafkaConsumer.java:1289)
at org.apache.kafka.clients.consumer.KafkaConsumer.poll(KafkaConsumer.java:1243)
at org.apache.kafka.clients.consumer.KafkaConsumer.poll(KafkaConsumer.java:1216)
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.doPoll(KafkaMessageListenerContainer.java:1213)
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.pollAndInvoke(KafkaMessageListenerContainer.java:1117)
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.run(KafkaMessageListenerContainer.java:1038)
at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)
at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
at java.base/java.lang.Thread.run(Thread.java:835)
14:06:48.707 [consumer-0-C-1] ERROR KafkaMessageListenerContainer$ListenerConsumer[error:149] - Consumer exception
org.springframework.kafka.KafkaException: Seek to current after exception; nested exception is org.apache.kafka.common.errors.SerializationException: Error deserializing key/value for partition tp-sale.reply-0 at offset 3. If needed, please seek past the record to continue consumption.
at org.springframework.kafka.listener.SeekToCurrentBatchErrorHandler.handle(SeekToCurrentBatchErrorHandler.java:72)
at org.springframework.kafka.listener.RecoveringBatchErrorHandler.handle(RecoveringBatchErrorHandler.java:124)
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.handleConsumerException(KafkaMessageListenerContainer.java:1372)
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.run(KafkaMessageListenerContainer.java:1070)
at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)
at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
at java.base/java.lang.Thread.run(Thread.java:835)
Caused by: org.apache.kafka.common.errors.SerializationException: Error deserializing key/value for partition tp-sale.reply-0 at offset 3. If needed, please seek past the record to continue consumption.
Caused by: java.lang.ClassCastException: class org.engine.plugin.transactions.factory.SaleResponseFactory cannot be cast to class org.engine.plugin.transactions.factory.AuthResponseFactory (org.engine.plugin.transactions.factory.SaleResponseFactory and org.engine.plugin.transactions.factory.AuthResponseFactory are in unnamed module of loader org.springframework.boot.loader.LaunchedURLClassLoader @6267c3bb)
at org.engine.plugin.transactions.factory.ResponseFactoryDeserializer.deserialize(ResponseFactoryDeserializer.java:20)
at org.engine.plugin.transactions.factory.ResponseFactoryDeserializer.deserialize(ResponseFactoryDeserializer.java:10)
at org.apache.kafka.common.serialization.Deserializer.deserialize(Deserializer.java:60)
at org.apache.kafka.clients.consumer.internals.Fetcher.parseRecord(Fetcher.java:1324)
at org.apache.kafka.clients.consumer.internals.Fetcher.access$3400(Fetcher.java:129)
at org.apache.kafka.clients.consumer.internals.Fetcher$CompletedFetch.fetchRecords(Fetcher.java:1555)
at org.apache.kafka.clients.consumer.internals.Fetcher$CompletedFetch.access$1700(Fetcher.java:1391)
at org.apache.kafka.clients.consumer.internals.Fetcher.fetchRecords(Fetcher.java:683)
at org.apache.kafka.clients.consumer.internals.Fetcher.fetchedRecords(Fetcher.java:634)
at org.apache.kafka.clients.consumer.KafkaConsumer.pollForFetches(KafkaConsumer.java:1289)
at org.apache.kafka.clients.consumer.KafkaConsumer.poll(KafkaConsumer.java:1243)
at org.apache.kafka.clients.consumer.KafkaConsumer.poll(KafkaConsumer.java:1216)
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.doPoll(KafkaMessageListenerContainer.java:1213)
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.pollAndInvoke(KafkaMessageListenerContainer.java:1117)
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.run(KafkaMessageListenerContainer.java:1038)
at java.base/java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:515)
at java.base/java.util.concurrent.FutureTask.run(FutureTask.java:264)
at java.base/java.lang.Thread.run(Thread.java:835)
14:06:48.970 [http-nio-8090-exec-3] ERROR HandlerExecutionChain[triggerAfterCompletion:192] - HandlerInterceptor.afterCompletion threw exception
java.lang.NullPointerException: null
at org.apache.catalina.core.ApplicationFilterChain.internalDoFilter(ApplicationFilterChain.java:193)
at org.apache.catalina.core.ApplicationFilterChain.doFilter(ApplicationFilterChain.java:166)
at org.apache.catalina.core.StandardWrapperValve.invoke(StandardWrapperValve.java:202)
at org.apache.catalina.core.StandardContextValve.invoke(StandardContextValve.java:97)
at org.apache.catalina.authenticator.AuthenticatorBase.invoke(AuthenticatorBase.java:542)
14:06:49.142 [http-nio-8090-exec-3] DEBUG DispatcherServlet[logResult:1101] - Failed to complete request: java.lang.InterruptedException
14:06:49.143 [http-nio-8090-exec-3] DEBUG HstsHeaderWriter[writeHeaders:169] - Not injecting HSTS header since it did not match the requestMatcher
(The same SerializationException / ClassCastException and "Seek to current after exception" stack traces then repeat as the container re-seeks the batch; see the full log below.)
Full log: https://pastebin.com/Z5XJCNhA
Do you know where I'm going wrong? I can't find my mistake. It looks like requestReplyKafkaTemplate is not configured properly.
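One detail worth noting: the stack trace above points at ResponseFactoryDeserializer, which casts to AuthResponseFactory, rather than the cast-free ObjectFactoryDeserializer shown in the config. As a sketch (mine, modeled on the ObjectFactoryDeserializer above, not the actual deployed class), a reply-side deserializer that returns the raw Object cannot throw this ClassCastException:
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.UncheckedIOException;

import org.apache.kafka.common.serialization.Deserializer;

public class ResponseFactoryDeserializer implements Deserializer<Object> {

    @Override
    public Object deserialize(String topic, byte[] data) {
        try (ObjectInputStream ois = new ObjectInputStream(new ByteArrayInputStream(data))) {
            // return the raw object; callers decide whether it is a
            // SaleResponseFactory or an AuthResponseFactory
            return ois.readObject();
        }
        catch (IOException e) {
            throw new UncheckedIOException(e);
        }
        catch (ClassNotFoundException e) {
            throw new IllegalStateException(e);
        }
    }
}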

Related

Unable to publish messages from Flink to Kafka: Serialization error

Hi, I am using the following code to publish messages from Flink to Kafka.
final Properties props = new Properties();
props.put(ProducerConfig.ACKS_CONFIG, "all");
props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, "org.apache.kafka.common.serialization.StringSerializer");
props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,
"org.apache.kafka.common.serialization.StringSerializer");
props.put("security.protocol", "SASL_SSL");
props.put("sasl.mechanism", "PLAIN");
props.put("client.dns.lookup", "use_all_dns_ips");
props.put("group.id", "flink-consumer-1");
DataStream<String> enrichedJson = flattenedJson.map(new MapFunction<JsonNode, String>() {
    public String map(JsonNode value) throws Exception {
        Integer customerId = value.get("customerId").intValue();
        ObjectMapper mapper = new ObjectMapper();
        String json = mapper.writeValueAsString(map.get(customerId).get("name"));
        JsonNode jsonNode = mapper.readTree(json);
        JsonNode obj = ((ObjectNode) value).set("name", jsonNode);
        json = mapper.writeValueAsString(map.get(customerId).get("mobileNumber"));
        jsonNode = mapper.readTree(json);
        obj = ((ObjectNode) value).set("mobileNumber", jsonNode);
        return obj.asText();
    }
});
enrichedJson.print();
FlinkKafkaProducer010<String> producer = new FlinkKafkaProducer010<String>("ENRICHED_CUSTOMER", new SimpleStringSchema(), props);
enrichedJson.addSink(producer);
I am getting the below serialization error:
Exception in thread "main" org.apache.flink.runtime.client.JobExecutionException: org.apache.kafka.common.errors.SerializationException: Can't convert value of class [B to class org.apache.kafka.common.serialization.StringSerializer specified in value.serializer
at org.apache.flink.runtime.minicluster.MiniCluster.executeJobBlocking(MiniCluster.java:625)
at org.apache.flink.streaming.api.environment.LocalStreamEnvironment.execute(LocalStreamEnvironment.java:121)
at flink.KafkaFlink.main(KafkaFlink.java:123)
Caused by: org.apache.kafka.common.errors.SerializationException: Can't convert value of class [B to class org.apache.kafka.common.serialization.StringSerializer specified in value.serializer
at org.apache.kafka.clients.producer.KafkaProducer.doSend(KafkaProducer.java:955)
at org.apache.kafka.clients.producer.KafkaProducer.send(KafkaProducer.java:912)
at org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer010.invoke(FlinkKafkaProducer010.java:382)
at org.apache.flink.streaming.api.operators.StreamSink.processElement(StreamSink.java:56)
at org.apache.flink.streaming.runtime.tasks.OperatorChain$CopyingChainingOutput.pushToOperator(OperatorChain.java:560)
at org.apache.flink.streaming.runtime.tasks.OperatorChain$CopyingChainingOutput.collect(OperatorChain.java:535)
at org.apache.flink.streaming.runtime.tasks.OperatorChain$CopyingChainingOutput.collect(OperatorChain.java:515)
at org.apache.flink.streaming.runtime.tasks.OperatorChain$BroadcastingOutputCollector.collect(OperatorChain.java:630)
at org.apache.flink.streaming.runtime.tasks.OperatorChain$BroadcastingOutputCollector.collect(OperatorChain.java:583)
at org.apache.flink.streaming.api.operators.AbstractStreamOperator$CountingOutput.collect(AbstractStreamOperator.java:679)
at org.apache.flink.streaming.api.operators.AbstractStreamOperator$CountingOutput.collect(AbstractStreamOperator.java:657)
at org.apache.flink.streaming.api.operators.StreamMap.processElement(StreamMap.java:41)
at org.apache.flink.streaming.runtime.tasks.OperatorChain$CopyingChainingOutput.pushToOperator(OperatorChain.java:560)
at org.apache.flink.streaming.runtime.tasks.OperatorChain$CopyingChainingOutput.collect(OperatorChain.java:535)
at org.apache.flink.streaming.runtime.tasks.OperatorChain$CopyingChainingOutput.collect(OperatorChain.java:515)
at org.apache.flink.streaming.api.operators.AbstractStreamOperator$CountingOutput.collect(AbstractStreamOperator.java:679)
at org.apache.flink.streaming.api.operators.AbstractStreamOperator$CountingOutput.collect(AbstractStreamOperator.java:657)
at org.apache.flink.streaming.api.operators.TimestampedCollector.collect(TimestampedCollector.java:51)
at flink.KafkaFlink$1.flatMap(KafkaFlink.java:90)
at flink.KafkaFlink$1.flatMap(KafkaFlink.java:1)
at org.apache.flink.streaming.api.operators.StreamFlatMap.processElement(StreamFlatMap.java:50)
at org.apache.flink.streaming.runtime.tasks.OperatorChain$CopyingChainingOutput.pushToOperator(OperatorChain.java:560)
at org.apache.flink.streaming.runtime.tasks.OperatorChain$CopyingChainingOutput.collect(OperatorChain.java:535)
at org.apache.flink.streaming.runtime.tasks.OperatorChain$CopyingChainingOutput.collect(OperatorChain.java:515)
at org.apache.flink.streaming.api.operators.AbstractStreamOperator$CountingOutput.collect(AbstractStreamOperator.java:679)
at org.apache.flink.streaming.api.operators.AbstractStreamOperator$CountingOutput.collect(AbstractStreamOperator.java:657)
at org.apache.flink.streaming.api.operators.StreamMap.processElement(StreamMap.java:41)
at org.apache.flink.streaming.runtime.tasks.OperatorChain$CopyingChainingOutput.pushToOperator(OperatorChain.java:560)
at org.apache.flink.streaming.runtime.tasks.OperatorChain$CopyingChainingOutput.collect(OperatorChain.java:535)
at org.apache.flink.streaming.runtime.tasks.OperatorChain$CopyingChainingOutput.collect(OperatorChain.java:515)
at org.apache.flink.streaming.api.operators.AbstractStreamOperator$CountingOutput.collect(AbstractStreamOperator.java:679)
at org.apache.flink.streaming.api.operators.AbstractStreamOperator$CountingOutput.collect(AbstractStreamOperator.java:657)
at org.apache.flink.streaming.api.operators.StreamSourceContexts$NonTimestampContext.collect(StreamSourceContexts.java:104)
at org.apache.flink.streaming.api.operators.StreamSourceContexts$NonTimestampContext.collectWithTimestamp(StreamSourceContexts.java:111)
at org.apache.flink.streaming.connectors.kafka.internals.AbstractFetcher.emitRecordWithTimestamp(AbstractFetcher.java:398)
at org.apache.flink.streaming.connectors.kafka.internal.Kafka010Fetcher.emitRecord(Kafka010Fetcher.java:89)
at org.apache.flink.streaming.connectors.kafka.internal.Kafka09Fetcher.runFetchLoop(Kafka09Fetcher.java:154)
at org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumerBase.run(FlinkKafkaConsumerBase.java:738)
at org.apache.flink.streaming.api.operators.StreamSource.run(StreamSource.java:87)
at org.apache.flink.streaming.api.operators.StreamSource.run(StreamSource.java:56)
at org.apache.flink.streaming.runtime.tasks.SourceStreamTask.run(SourceStreamTask.java:99)
at org.apache.flink.streaming.runtime.tasks.StreamTask.invoke(StreamTask.java:306)
at org.apache.flink.runtime.taskmanager.Task.run(Task.java:703)
at java.base/java.lang.Thread.run(Thread.java:834)
Caused by: java.lang.ClassCastException: class [B cannot be cast to class java.lang.String ([B and java.lang.String are in module java.base of loader 'bootstrap')
at org.apache.kafka.common.serialization.StringSerializer.serialize(StringSerializer.java:29)
at org.apache.kafka.common.serialization.Serializer.serialize(Serializer.java:62)
at org.apache.kafka.clients.producer.KafkaProducer.doSend(KafkaProducer.java:952)
... 43 more
Flink handles serialization differently than regular Kafka producers do. Use ByteArraySerializer:
props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class);
props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class);
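For context (my reading of the Flink 1.x Kafka connector, worth verifying against your version): FlinkKafkaProducer010 applies the SerializationSchema, here SimpleStringSchema, before handing records to the underlying KafkaProducer, so the producer client receives byte[] values. A StringSerializer then fails with exactly the class [B cast error shown above, while ByteArraySerializer passes the bytes through unchanged.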

No subscriptions have been created error in Reactor Kafka

Consumer Config file:
Here I am using StringDeserializer for both keys and values, and the subscription is made on a single topic.
@Bean("errorReceiver")
public ReceiverOptions<Object, String> errorConsumerConfig() {
    Map<String, Object> errorConsumerProps = new HashMap<>();
    errorConsumerProps.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, errorBootstrapServers);
    errorConsumerProps.put(ConsumerConfig.CLIENT_ID_CONFIG, "error-consumer");
    errorConsumerProps.put(ConsumerConfig.GROUP_ID_CONFIG, "error-consumer-1");
    errorConsumerProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
    errorConsumerProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
    ReceiverOptions<Object, String> errorReceiverOptions = ReceiverOptions.create(errorConsumerProps);
    errorReceiverOptions.subscription(Collections.singleton("order_topic"))
            .addAssignListener(partitions -> log.info("onPartitionsAssigned : {}", partitions))
            .addRevokeListener(partitions -> log.info("onPartitionsRevoked : {}", partitions));
    return errorReceiverOptions;
}
Consumer Code:
The log in my consumer code prints the subscribed topics as null. AppUtility transforms the data to a string.
@Autowired
@Qualifier("errorReceiver")
private ReceiverOptions<Object, String> errorReceiverOptions;

@EventListener(ApplicationStartedEvent.class)
public Disposable getErrorsTopic() {
    Flux<ReceiverRecord<Object, String>> kafkaFlux = KafkaReceiver.create(errorReceiverOptions).receive();
    log.info("subs topics : {}", errorReceiverOptions.subscriptionTopics());
    return kafkaFlux.log()
            .doOnNext(AppUtility::toBinary)
            .doOnError(error -> log.error("error occurred", error))
            .subscribe();
}
Logs:
java.lang.IllegalStateException: No subscriptions have been created
at reactor.kafka.receiver.ReceiverOptions.subscriber(ReceiverOptions.java:385) ~[reactor-kafka-1.3.4.jar:1.3.4]
at reactor.kafka.receiver.internals.ConsumerEventLoop$SubscribeEvent.run(ConsumerEventLoop.java:187) ~[reactor-kafka-1.3.4.jar:1.3.4]
at reactor.core.scheduler.SchedulerTask.call(SchedulerTask.java:68) [reactor-core-3.4.6.jar:3.4.6]
at reactor.core.scheduler.SchedulerTask.call(SchedulerTask.java:28) [reactor-core-3.4.6.jar:3.4.6]
at java.util.concurrent.FutureTask.run(FutureTask.java:266) [na:1.8.0_261]
at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.access$201(ScheduledThreadPoolExecutor.java:180) [na:1.8.0_261]
at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:293) [na:1.8.0_261]
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) [na:1.8.0_261]
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) [na:1.8.0_261]
at java.lang.Thread.run(Thread.java:748) [na:1.8.0_261]
pom imports:
<dependency>
    <groupId>io.projectreactor.kafka</groupId>
    <artifactId>reactor-kafka</artifactId>
</dependency>
Has anyone faced this issue? I am unable to resolve it.
I forgot the prime concept of immutability in reactive programming. I solved this by assigning the result to another options object (fourth-from-last line in the config code below): subscription() returns a new ReceiverOptions instance rather than mutating the receiver.
public ReceiverOptions<Object, String> errorConsumerConfig() {
    Map<String, Object> errorConsumerProps = new HashMap<>();
    errorConsumerProps.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, errorBootstrapServers);
    errorConsumerProps.put(ConsumerConfig.CLIENT_ID_CONFIG, "error-consumer");
    errorConsumerProps.put(ConsumerConfig.GROUP_ID_CONFIG, "error-consumer-1");
    errorConsumerProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
    errorConsumerProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
    ReceiverOptions<Object, String> errorReceiverOptions = ReceiverOptions.create(errorConsumerProps);
    // setting the subscription doesn't work unless the result is assigned to an object, reason being immutability
    ReceiverOptions<Object, String> options = errorReceiverOptions.subscription(Collections.singleton("order_topic"))
            .addAssignListener(partitions -> log.debug("onPartitionsAssigned : {}", partitions))
            .addRevokeListener(partitions -> log.debug("onPartitionsRevoked : {}", partitions));
    return options;
}
Edit: try setting the subscription topic in your listener code, as sketched below.
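Roughly like this (a sketch of mine against the reactor-kafka 1.3.x API, reusing errorReceiverOptions, AppUtility, and log from the question):
@EventListener(ApplicationStartedEvent.class)
public Disposable getErrorsTopic() {
    // subscription() returns a NEW ReceiverOptions instance; use the returned value
    ReceiverOptions<Object, String> options =
            errorReceiverOptions.subscription(Collections.singleton("order_topic"));
    return KafkaReceiver.create(options)
            .receive()
            .doOnNext(AppUtility::toBinary)
            .doOnError(error -> log.error("error occurred", error))
            .subscribe();
}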
Please have a look at the examples below; according to the sample consumer, you are missing something like the code below:
return kafkaFlux.subscribe(record -> {
    ReceiverOffset offset = record.receiverOffset();
    System.out.printf("Received message: topic-partition=%s offset=%d timestamp=%s key=%d value=%s\n",
            offset.topicPartition(),
            offset.offset(),
            dateFormat.format(new Date(record.timestamp())),
            record.key(),
            record.value());
    offset.acknowledge();
    latch.countDown();
});
https://github.com/reactor/reactor-kafka/blob/main/reactor-kafka-samples/src/main/java/reactor/kafka/samples/SampleConsumer.java

Serialisation error for Kafka avro consumer using Spring boot

I have created a Kafka Avro producer and a consumer using Spring Boot, as two different projects. While consuming the data I am getting the following exception.
Caused by: org.apache.kafka.common.errors.SerializationException: Error deserializing key/value
for partition bookavro-0 at offset 3. If needed, please seek past the record to continue
consumption.
Caused by: org.apache.kafka.common.errors.SerializationException: Error deserializing Avro message
for id 1
Caused by: org.apache.kafka.common.errors.SerializationException: Could not find class
com.dailycodebuffer.kafka.apachekafkaproducerdemo.BookAvro specified in writer's schema whilst
finding reader's schema for a SpecificRecord.
2020-12-30 18:44:09.032 ERROR 22344 --- [ntainer#0-0-C-1] essageListenerContainer$ListenerConsumer
: Consumer exception
java.lang.IllegalStateException: This error handler cannot process 'SerializationException's
directly; please consider configuring an 'ErrorHandlingDeserializer' in the value and/or key deserializer
at org.springframework.kafka.listener.SeekUtils.seekOrRecover(SeekUtils.java:145) ~[spring-kafka-2.6.4.jar:2.6.4]
at org.springframework.kafka.listener.SeekToCurrentErrorHandler.handle(SeekToCurrentErrorHandler.java:113) ~[spring-kafka-2.6.4.jar:2.6.4]
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.handleConsumerException(KafkaMessageListenerContainer.java:1425) [spring-kafka-2.6.4.jar:2.6.4]
at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.run(KafkaMessageListenerContainer.java:1122) [spring-kafka-2.6.4.jar:2.6.4]
at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511) [na:1.8.0_202]
at java.util.concurrent.FutureTask.run(FutureTask.java:266) [na:1.8.0_202]
at java.lang.Thread.run(Thread.java:813) [na:1.8.0_202]
com.dailycodebuffer.kafka.apachekafkaproducerdemo.BookAvro is the package in the producer project.
Below is my consumer config:
@Bean
public ConsumerFactory<String, BookAvro> BookconsumerFactory() {
    System.out.println("hi");
    Map<String, Object> configProps = new HashMap<>();
    configProps.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
    configProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
    // configProps.put(ConsumerConfig.KEY, StringDeserializer.class);
    configProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, "io.confluent.kafka.serializers.KafkaAvroDeserializer");
    // configProps.put("value.deserializer", "org.springframework.kafka.support.serializer.JsonDeserializer");
    // configProps.put(JsonDeserializer.ADD_TYPE_INFO_HEADERS, false);
    configProps.put(ConsumerConfig.GROUP_ID_CONFIG, "group_json");
    configProps.put("auto.offset.reset", "earliest");
    configProps.put(KafkaAvroDeserializerConfig.SCHEMA_REGISTRY_URL_CONFIG, "http://localhost:8081");
    configProps.put(KafkaAvroDeserializerConfig.SPECIFIC_AVRO_READER_CONFIG, "true");
    System.out.println(configProps.toString());
    return new DefaultKafkaConsumerFactory<String, BookAvro>(configProps);
}

@Bean
public ConcurrentKafkaListenerContainerFactory<String, BookAvro> kafkaListenerContainerFactory() {
    ConcurrentKafkaListenerContainerFactory<String, BookAvro> factory = new ConcurrentKafkaListenerContainerFactory<>();
    factory.setConsumerFactory(BookconsumerFactory());
    System.out.println(factory.toString());
    // factory.getContainerProperties().setAckMode(AckMode.MANUAL_IMMEDIATE);
    return factory;
}
Following is the Producer Config:
@Bean
public ProducerFactory<String, BookAvro> producerFactory() {
    Map<String, Object> configProps = new HashMap<>();
    configProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
    configProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
    configProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, KafkaAvroSerializer.class.getName());
    configProps.put(KafkaAvroSerializerConfig.SCHEMA_REGISTRY_URL_CONFIG, "http://localhost:8081");
    // configProps.put(KafkaAvroDeserializerConfig.SPECIFIC_AVRO_READER_CONFIG, true);
    configProps.put(JsonSerializer.ADD_TYPE_INFO_HEADERS, false);
    // configProps.put(KafkaAvroSerializerConfig., "true");
    return new DefaultKafkaProducerFactory<>(configProps);
}

@Bean
public KafkaTemplate<String, BookAvro> kafkaTemplate() {
    return new KafkaTemplate<>(producerFactory());
}
Below is the Kafka listener:
@KafkaListener(groupId = "group_json", topics = "bookavro")
public void consumeBook(BookAvro book) {
    System.out.println("message3" + book.toString());
}
BookAvro is the Avro class created from the .avsc file. Could anyone please help me to resolve this exception?

Spring Kafka transaction causes producer per-message offset to increase by two

I have a consume-transform-produce workflow in a microservice using Spring (Boot) Kafka. I need to achieve the exactly-once semantics provided by Kafka transactions.
Here's the code snippet below:
Config
@Bean
public ProducerFactory<String, String> producerFactory() {
    Map<String, Object> props = new HashMap<>();
    props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
    props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
    props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
    props.put(ProducerConfig.ENABLE_IDEMPOTENCE_CONFIG, true);
    props.put(ProducerConfig.MAX_REQUEST_SIZE_CONFIG, 1024 * 1024);
    DefaultKafkaProducerFactory<String, String> defaultKafkaProducerFactory = new DefaultKafkaProducerFactory<>(props);
    defaultKafkaProducerFactory.setTransactionIdPrefix("kafka-trx-");
    return defaultKafkaProducerFactory;
}

@Bean
public ConsumerFactory<String, String> consumerFactory() {
    Map<String, Object> props = new HashMap<>();
    props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
    props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
    props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
    props.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, 5000);
    props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
    props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, false);
    props.put(ConsumerConfig.ISOLATION_LEVEL_CONFIG, "read_committed");
    return new DefaultKafkaConsumerFactory<>(props);
}

@Bean
public KafkaTemplate<String, String> kafkaTemplate() {
    return new KafkaTemplate<>(producerFactory());
}

@Bean
public KafkaTransactionManager<String, String> kafkaTransactionManager() {
    return new KafkaTransactionManager<>(producerFactory());
}

@Bean
@Qualifier("chainedKafkaTransactionManager")
public ChainedKafkaTransactionManager<String, Object> chainedKafkaTransactionManager(KafkaTransactionManager<String, String> kafkaTransactionManager) {
    return new ChainedKafkaTransactionManager<>(kafkaTransactionManager);
}

@Bean
public ConcurrentKafkaListenerContainerFactory<?, ?> concurrentKafkaListenerContainerFactory(ChainedKafkaTransactionManager<String, Object> chainedKafkaTransactionManager) {
    ConcurrentKafkaListenerContainerFactory<String, String> concurrentKafkaListenerContainerFactory = new ConcurrentKafkaListenerContainerFactory<>();
    concurrentKafkaListenerContainerFactory.setConsumerFactory(consumerFactory());
    concurrentKafkaListenerContainerFactory.setBatchListener(true);
    concurrentKafkaListenerContainerFactory.setConcurrency(nexusConsumerConcurrency);
    // concurrentKafkaListenerContainerFactory.setReplyTemplate(kafkaTemplate());
    concurrentKafkaListenerContainerFactory.getContainerProperties().setAckMode(AbstractMessageListenerContainer.AckMode.BATCH);
    concurrentKafkaListenerContainerFactory.getContainerProperties().setTransactionManager(chainedKafkaTransactionManager);
    return concurrentKafkaListenerContainerFactory;
}
Listener
@KafkaListener(topics = "${kafka.xxx.consumerTopic}", groupId = "${kafka.xxx.consumerGroup}", containerFactory = "concurrentKafkaListenerContainerFactory")
public void listen(@Payload List<String> msgs, @Header(KafkaHeaders.RECEIVED_PARTITION_ID) List<Integer> partitions, @Header(KafkaHeaders.OFFSET) List<Integer> offsets) {
    int i = -1;
    for (String msg : msgs) {
        ++i;
        LOGGER.debug("partition={}; offset={}; msg={}", partitions.get(i), offsets.get(i), msg);
        String json = transform(msg);
        kafkaTemplate.executeInTransaction(kt -> kt.send(producerTopic, json));
    }
}
However, in the production environment I encounter a weird problem: the offset is increased by two per message sent by the producer, and the consumer doesn't commit the consumed offset.
(Screenshots: consumer offsets from topic1; topic1 consumer detail; produce to topic2.)
However, the count of messages sent by the producer matches the count consumed, and the downstream consumers of the producer receive the messages from topic2 continuously. There is no error or exception in the log.
I wonder why the consume-transform-produce workflow seems OK (exactly-once semantics also guaranteed), yet the consumed offset isn't committed and the produced message offset increases by two instead of one per message.
How can I fix it? Thanks!
That's the way it's designed. Kafka logs are immutable so an extra "slot" is used at the end of the transaction to indicate whether the transaction was committed or rolled back. This allows consumers with read_committed isolation level to skip over rolled-back transactions.
If you publish 10 records in a transaction, you will see the offset increase by 11. If you only publish one, it will increase by two.
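To illustrate with made-up offsets: a transaction that publishes records at offsets 5 through 14 is followed by a commit/abort marker at offset 15, so the next record on that partition lands at offset 16.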
If you want the publish to participate in the consumer-started transaction (for exactly-once), you should not be using executeInTransaction; that will start a new transaction.
/**
 * Execute some arbitrary operation(s) on the operations and return the result.
 * The operations are invoked within a local transaction and do not participate
 * in a global transaction (if present).
 * @param callback the callback.
 * @param <T> the result type.
 * @return the result.
 * @since 1.1
 */
<T> T executeInTransaction(OperationsCallback<K, V, T> callback);
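So, as a sketch (mine, reusing kafkaTemplate, producerTopic, and transform from the question, and assuming the container is configured with the ChainedKafkaTransactionManager as above so the listener runs inside a container-started transaction the template can join):
@KafkaListener(topics = "${kafka.xxx.consumerTopic}", groupId = "${kafka.xxx.consumerGroup}",
        containerFactory = "concurrentKafkaListenerContainerFactory")
public void listen(@Payload List<String> msgs) {
    for (String msg : msgs) {
        // send() joins the transaction the listener container started,
        // so consumed offsets and produced records commit together
        kafkaTemplate.send(producerTopic, transform(msg));
    }
}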
I don't see why the consumer offset would not be still sent to the consumer-started transaction though. You should turn on DEBUG logging to see what's happening (if it still happens after you fix the template code).
EDIT
The consumed offset (+1) is sent to the transaction by the listener container when the listener exits; turn on commit logging and you will see it...
@SpringBootApplication
public class So59152915Application {

    public static void main(String[] args) {
        SpringApplication.run(So59152915Application.class, args);
    }

    @Autowired
    private KafkaTemplate<String, String> template;

    @KafkaListener(id = "foo", topics = "so59152915-1", clientIdPrefix = "so59152915")
    public void listen1(String in, @Header(KafkaHeaders.OFFSET) long offset) throws InterruptedException {
        System.out.println(in + "@" + offset);
        this.template.send("so59152915-2", in.toUpperCase());
        Thread.sleep(2000);
    }

    @KafkaListener(id = "bar", topics = "so59152915-2")
    public void listen2(String in) {
        System.out.println(in);
    }

    @Bean
    public NewTopic topic1() {
        return new NewTopic("so59152915-1", 1, (short) 1);
    }

    @Bean
    public NewTopic topic2() {
        return new NewTopic("so59152915-2", 1, (short) 1);
    }

    @Bean
    public ApplicationRunner runner(KafkaListenerEndpointRegistry registry) {
        return args -> {
            this.template.executeInTransaction(t -> {
                IntStream.range(0, 11).forEach(i -> t.send("so59152915-1", "foo" + i));
                try {
                    System.out.println("Hit enter to commit sends");
                    System.in.read();
                }
                catch (IOException e) {
                    e.printStackTrace();
                }
                return null;
            });
        };
    }
}

@Component
class Configurer {

    Configurer(ConcurrentKafkaListenerContainerFactory<?, ?> factory) {
        factory.getContainerProperties().setCommitLogLevel(Level.INFO);
    }
}
and
spring.kafka.producer.transaction-id-prefix=tx-
spring.kafka.consumer.properties.isolation.level=read_committed
spring.kafka.consumer.auto-offset-reset=earliest
and
foo0@56
2019-12-04 10:07:18.551 INFO 55430 --- [ foo-0-C-1] essageListenerContainer$ListenerConsumer : Sending offsets to transaction: {so59152915-1-0=OffsetAndMetadata{offset=57, leaderEpoch=null, metadata=''}}
foo1@57
FOO0
2019-12-04 10:07:18.558 INFO 55430 --- [ bar-0-C-1] essageListenerContainer$ListenerConsumer : Sending offsets to transaction: {so59152915-2-0=OffsetAndMetadata{offset=63, leaderEpoch=null, metadata=''}}
2019-12-04 10:07:20.562 INFO 55430 --- [ foo-0-C-1] essageListenerContainer$ListenerConsumer : Sending offsets to transaction: {so59152915-1-0=OffsetAndMetadata{offset=58, leaderEpoch=null, metadata=''}}
foo2@58
Please pay attention to your auto-commit setup. As I see, you set it to false:
props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, false);
so in this situation you need to commit "manually" or set auto-commit to true.

Kafka consumer unit test with Avro Schema registry failing

I'm writing a consumer which listens to a Kafka topic and consumes messages whenever a message is available. I've tested the logic/code by running Kafka locally and it's working fine.
While writing the unit/component test cases, it's failing with an Avro schema registry URL error. I've tried different options available on the internet but could not find anything working. I am not sure if my approach is even correct. Please help.
Listener Class
#KafkaListener(topics = "positionmgmt.v1", containerFactory = "genericKafkaListenerFactory")
public void receive(ConsumerRecord<String, GenericRecord> consumerRecord) {
try {
GenericRecord generic = consumerRecord.value();
Object obj = generic.get("metadata");
ObjectMapper mapper = new ObjectMapper();
Header headerMetaData = mapper.readValue(obj.toString(), Header.class);
System.out.println("Received payload : " + consumerRecord.value());
//Call backend with details in GenericRecord
}catch (Exception e){
System.out.println("Exception while reading message from Kafka " + e );
}
Kafka config
@Bean
public ConcurrentKafkaListenerContainerFactory<String, GenericRecord> genericKafkaListenerFactory() {
    ConcurrentKafkaListenerContainerFactory<String, GenericRecord> factory = new ConcurrentKafkaListenerContainerFactory<>();
    factory.setConsumerFactory(genericConsumerFactory());
    return factory;
}

public ConsumerFactory<String, GenericRecord> genericConsumerFactory() {
    Map<String, Object> config = new HashMap<>();
    config.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "127.0.0.1:9092");
    config.put(ConsumerConfig.GROUP_ID_CONFIG, "group_id");
    config.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
    config.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, KafkaAvroDeserializer.class);
    config.put(KafkaAvroDeserializerConfig.SCHEMA_REGISTRY_URL_CONFIG, "http://localhost:8081");
    return new DefaultKafkaConsumerFactory<>(config);
}
Avro Schema
{
"type":"record",
"name":"KafkaEvent",
"namespace":"com.ms.model.avro",
"fields":[
{
"name":"metadata",
"type":{
"name":"metadata",
"type":"record",
"fields":[
{
"name":"correlationid",
"type":"string",
"doc":"this is corrleation id for transaction"
},
{
"name":"subject",
"type":"string",
"doc":"this is subject for transaction"
},
{
"name":"version",
"type":"string",
"doc":"this is version for transaction"
}
]
}
},
{
"name":"name",
"type":"string"
},
{
"name":"dept",
"type":"string"
},
{
"name":"empnumber",
"type":"string"
}
]
}
This is my test code which I tried...
@ComponentTest
@RunWith(SpringRunner.class)
@EmbeddedKafka(partitions = 1, topics = { "positionmgmt.v1" })
@SpringBootTest(classes = { Application.class })
@DirtiesContext
public class ConsumeKafkaMessageTest {

    private static final String TEST_TOPIC = "positionmgmt.v1";

    @Autowired(required = true)
    EmbeddedKafkaBroker embeddedKafkaBroker;

    private Schema schema;
    private SchemaRegistryClient schemaRegistry;
    private KafkaAvroSerializer avroSerializer;
    private KafkaAvroDeserializer avroDeserializer;
    private MockSchemaRegistryClient mockSchemaRegistryClient = new MockSchemaRegistryClient();
    private String registryUrl = "unused";
    private String avroSchema = ...; // string representation of the Avro schema

    @BeforeEach
    public void setUp() throws Exception {
        Schema.Parser parser = new Schema.Parser();
        schema = parser.parse(avroSchema);
        mockSchemaRegistryClient.register("Vendors-value", schema);
    }

    @Test
    public void consumeKafkaMessage_receive_sucess() {
        Schema metadataSchema = schema.getField("metadata").schema();
        GenericRecord metadata = new GenericData.Record(metadataSchema);
        metadata.put("version", "1.0");
        metadata.put("correlationid", "correlationid");
        metadata.put("subject", "metadata");
        GenericRecord record = new GenericData.Record(schema);
        record.put("metadata", metadata);
        record.put("name", "ABC");
        record.put("dept", "XYZ");
        Consumer<String, GenericRecord> consumer = configureConsumer();
        Producer<String, GenericRecord> producer = configureProducer();
        ProducerRecord<String, GenericRecord> prodRecord = new ProducerRecord<String, GenericRecord>(TEST_TOPIC, record);
        producer.send(prodRecord);
        ConsumerRecord<String, GenericRecord> singleRecord = KafkaTestUtils.getSingleRecord(consumer, TEST_TOPIC);
        assertNotNull(singleRecord.value());
        consumer.close();
        producer.close();
    }

    private Consumer<String, GenericRecord> configureConsumer() {
        Map<String, Object> consumerProps = KafkaTestUtils.consumerProps("groupid", "true", embeddedKafkaBroker);
        consumerProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
        Consumer<String, GenericRecord> consumer = new DefaultKafkaConsumerFactory<String, GenericRecord>(consumerProps).createConsumer();
        consumer.subscribe(Collections.singleton(TEST_TOPIC));
        return consumer;
    }

    private Producer<String, GenericRecord> configureProducer() {
        Map<String, Object> producerProps = new HashMap<>(KafkaTestUtils.producerProps(embeddedKafkaBroker));
        producerProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
        producerProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, KafkaAvroSerializer.class.getName());
        producerProps.put(KafkaAvroSerializerConfig.SCHEMA_REGISTRY_URL_CONFIG, mockSchemaRegistryClient);
        producerProps.put(KafkaAvroSerializerConfig.AUTO_REGISTER_SCHEMAS, "false");
        return new DefaultKafkaProducerFactory<String, GenericRecord>(producerProps).createProducer();
    }
}
Error
component.com.ms.listener.ConsumeKafkaMessageTest > consumeKafkaMessage_receive_sucess() FAILED
org.apache.kafka.common.KafkaException: Failed to construct kafka producer
at org.apache.kafka.clients.producer.KafkaProducer.<init>(KafkaProducer.java:457)
at org.apache.kafka.clients.producer.KafkaProducer.<init>(KafkaProducer.java:289)
at org.springframework.kafka.core.DefaultKafkaProducerFactory.createKafkaProducer(DefaultKafkaProducerFactory.java:318)
at org.springframework.kafka.core.DefaultKafkaProducerFactory.createProducer(DefaultKafkaProducerFactory.java:305)
at component.com.ms.listener.ConsumeKafkaMessageTest.configureProducer(ConsumeKafkaMessageTest.java:125)
at component.com.ms.listener.ConsumeKafkaMessageTest.consumeKafkaMessage_receive_sucess(ConsumeKafkaMessageTest.java:97)
Caused by:
io.confluent.common.config.ConfigException: Invalid value io.confluent.kafka.schemaregistry.client.MockSchemaRegistryClient@20751870 for configuration schema.registry.url: Expected a comma separated list.
at io.confluent.common.config.ConfigDef.parseType(ConfigDef.java:345)
at io.confluent.common.config.ConfigDef.parse(ConfigDef.java:249)
at io.confluent.common.config.AbstractConfig.<init>(AbstractConfig.java:78)
at io.confluent.kafka.serializers.AbstractKafkaAvroSerDeConfig.<init>(AbstractKafkaAvroSerDeConfig.java:105)
at io.confluent.kafka.serializers.KafkaAvroSerializerConfig.<init>(KafkaAvroSerializerConfig.java:32)
at io.confluent.kafka.serializers.KafkaAvroSerializer.configure(KafkaAvroSerializer.java:48)
at org.apache.kafka.common.serialization.ExtendedSerializer$Wrapper.configure(ExtendedSerializer.java:60)
at org.apache.kafka.clients.producer.KafkaProducer.<init>(KafkaProducer.java:372)
... 5 more
I investigated it a bit and found out that the problem is in the CachedSchemaRegistryClient that is used by the KafkaAvroSerializer/KafkaAvroDeserializer. It is used to fetch the schema definitions from the Confluent Schema Registry.
You already have your schema definition locally, so you don't need to go to the Schema Registry for it (at least in your tests).
I had a similar problem and I solved it by creating a custom KafkaAvroSerializer/KafkaAvroDeserializer.
This is a sample of KafkaAvroSerializer. It is rather simple. You just need to extend the provided KafkaAvroSerializer and tell it to use MockSchemaRegistryClient.
public class CustomKafkaAvroSerializer extends KafkaAvroSerializer {

    public CustomKafkaAvroSerializer() {
        super();
        super.schemaRegistry = new MockSchemaRegistryClient();
    }

    public CustomKafkaAvroSerializer(SchemaRegistryClient client) {
        super(new MockSchemaRegistryClient());
    }

    public CustomKafkaAvroSerializer(SchemaRegistryClient client, Map<String, ?> props) {
        super(new MockSchemaRegistryClient(), props);
    }
}
This is a sample of KafkaAvroDeserializer. When the deserialize method is called, you need to tell it which schema to use.
public class CustomKafkaAvroDeserializer extends KafkaAvroDeserializer {

    @Override
    public Object deserialize(String topic, byte[] bytes) {
        this.schemaRegistry = getMockClient(KafkaEvent.SCHEMA$);
        return super.deserialize(topic, bytes);
    }

    private static SchemaRegistryClient getMockClient(final Schema schema$) {
        return new MockSchemaRegistryClient() {
            @Override
            public synchronized Schema getById(int id) {
                return schema$;
            }
        };
    }
}
The last step is to tell Spring to use the created serializer/deserializer:
spring.kafka.producer.properties.schema.registry.url= not-used
spring.kafka.producer.value-serializer = CustomKafkaAvroSerializer
spring.kafka.producer.key-serializer = org.apache.kafka.common.serialization.StringSerializer
spring.kafka.producer.group-id = showcase-producer-id
spring.kafka.consumer.properties.schema.registry.url= not-used
spring.kafka.consumer.value-deserializer = CustomKafkaAvroDeserializer
spring.kafka.consumer.key-deserializer = org.apache.kafka.common.serialization.StringDeserializer
spring.kafka.consumer.group-id = showcase-consumer-id
spring.kafka.auto.offset.reset = earliest
spring.kafka.producer.auto.register.schemas= true
spring.kafka.properties.specific.avro.reader= true
I wrote a short blog post about that:
https://medium.com/@igorvlahek1/no-need-for-schema-registry-in-your-spring-kafka-tests-a5b81468a0e1?source=friends_link&sk=e55f73b86504e9f577e259181c8d0e23
Link to the working sample project: https://github.com/ivlahek/kafka-avro-without-registry
The answer from @ivlahek is working, but if you look at this example three years later you might want to make a slight modification to CustomKafkaAvroDeserializer:
private static SchemaRegistryClient getMockClient(final Schema schema) {
    return new MockSchemaRegistryClient() {
        @Override
        public ParsedSchema getSchemaBySubjectAndId(String subject, int id)
                throws IOException, RestClientException {
            return new AvroSchema(schema);
        }
    };
}
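(If I have the packages right, ParsedSchema and AvroSchema here come from the newer Confluent schema-registry client: io.confluent.kafka.schemaregistry.ParsedSchema and io.confluent.kafka.schemaregistry.avro.AvroSchema.)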
As the error says, you need to provide a string to the registry in the producer config, not an object.
Since you're using the Mock class, that string could be anything...
However, you'll need to construct the serializers given the registry instance
Serializer serializer = new KafkaAvroSerializer(mockSchemaRegistry);
// make config map with ("schema.registry.url", "unused")
serializer.configure(config, false);
Otherwise, it will try to create a non-mocked client
And put that into the properties
producerProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, serializer);
If your @KafkaListener is in the test class, then you can read the record with a StringDeserializer and convert it to the desired class manually:
@Autowired
private MyKafkaAvroDeserializer myKafkaAvroDeserializer;

@KafkaListener(topics = "test")
public void inputData(ConsumerRecord<?, ?> consumerRecord) {
    log.info("received record='{}' payload='{}'", consumerRecord.toString(), consumerRecord.value());
    GenericRecord genericRecord = (GenericRecord) myKafkaAvroDeserializer.deserialize("test", consumerRecord.value().toString().getBytes(StandardCharsets.UTF_8));
    Myclass myclass = (Myclass) SpecificData.get().deepCopy(Myclass.SCHEMA$, genericRecord);
}

@Component
public class MyKafkaAvroDeserializer extends KafkaAvroDeserializer {

    @Override
    public Object deserialize(String topic, byte[] bytes) {
        this.schemaRegistry = getMockClient(Myclass.SCHEMA$);
        return super.deserialize(topic, bytes);
    }

    private static SchemaRegistryClient getMockClient(final Schema schema$) {
        return new MockSchemaRegistryClient() {
            @Override
            public synchronized org.apache.avro.Schema getById(int id) {
                return schema$;
            }
        };
    }
}
Remember to add the schema registry and key/value deserializers in application.yml, although they won't be used:
consumer:
  key-deserializer: org.apache.kafka.common.serialization.StringDeserializer
  value-deserializer: org.apache.kafka.common.serialization.StringDeserializer
  properties:
    schema.registry.url: http://localhost:8080
