Spring Cloud Stream Kafka Binder Test Custom Headers - Java

I'm trying to figure out how to include custom headers in the Spring Message<?> used in Spring Cloud Stream with the Kafka Binder. My goal is to add some custom header data in one producer (function) class, pass it through Kafka, and consume it (along with the custom header data) in another class in a different service.
I feel like I am missing something, as I can get it to work using the TestChannelBinder, e.g.
import lombok.extern.slf4j.Slf4j;
import org.springframework.messaging.Message;
import org.springframework.stereotype.Component;

import java.util.function.Function;

@Component
@Slf4j
public class BaseStream implements Function<Message<String>, String> {

    @Override
    public String apply(Message<String> transactionMessage) {
        log.debug("Converted Message: {} ", transactionMessage);
        return transactionMessage.getPayload();
    }
}
Test class with Test Binder:
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.cloud.stream.binder.test.InputDestination;
import org.springframework.cloud.stream.binder.test.OutputDestination;
import org.springframework.cloud.stream.binder.test.TestChannelBinderConfiguration;
import org.springframework.context.annotation.Import;
import org.springframework.integration.support.MessageBuilder;
import org.springframework.test.context.TestPropertySource;

@SpringBootTest
@TestPropertySource("classpath:testStream.properties")
@Import(TestChannelBinderConfiguration.class)
public class TestForStream {

    @Autowired
    InputDestination inputDestination;

    @Autowired
    OutputDestination outputDestination;

    @Test
    void contextLoads() {
        inputDestination.send(MessageBuilder
                .withPayload("Test Payload")
                .setHeader("customHeader", "headerSpecificData")
                .build());
    }
}
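To assert the outcome rather than just eyeballing the log, the test binder's OutputDestination can be polled directly. A minimal sketch, assuming AssertJ (org.assertj.core.api.Assertions.assertThat) is on the classpath and that the framework copies the inbound headers onto the outbound message:

@Test
void customHeaderIsPropagated() {
    inputDestination.send(MessageBuilder
            .withPayload("Test Payload")
            .setHeader("customHeader", "headerSpecificData")
            .build());
    // receive from the destination configured for baseStream-out-0
    Message<byte[]> outbound = outputDestination.receive(1000, "test-out");
    // passes if the inbound custom header is propagated to the output
    assertThat(outbound.getHeaders().get("customHeader")).isEqualTo("headerSpecificData");
    assertThat(new String(outbound.getPayload())).contains("Test Payload");
}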
testStream.properties
spring.cloud.function.definition=baseStream
spring.cloud.stream.bindings.baseStream-in-0.destination=test-in
spring.cloud.stream.bindings.baseStream-out-0.destination=test-out
spring.cloud.stream.bindings.baseStream-in-0.group=test-group-base
Log when running:
Converted Message: GenericMessage [payload=Test Payload, headers={id=5c6d1082-c084-0b25-4afc-b5d97bf537f9, customHeader=headerSpecificData, contentType=application/json, timestamp=1639398696800, target-protocol=kafka}]
Which is what I am looking to do. But when I try to test it with the Kafka binder, the whole Message<String> object seems to be included in the payload as a JSON string, whereas I thought it would be parsed into the Message<String> input of the BaseStream function.
Just wondering if someone could see where I'm going wrong with my testing, as I have tried various things to get this to work, and since it works with the test binder I assumed it would work with the Kafka binder.
Test Class for Kafka Binder Test:
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringSerializer;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.integration.support.MessageBuilder;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.core.ProducerFactory;
import org.springframework.kafka.support.serializer.JsonSerializer;
import org.springframework.kafka.test.EmbeddedKafkaBroker;
import org.springframework.kafka.test.context.EmbeddedKafka;
import org.springframework.kafka.test.utils.KafkaTestUtils;
import org.springframework.test.context.TestPropertySource;

import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;

@EmbeddedKafka(partitions = 1, brokerProperties = { "listeners=PLAINTEXT://localhost:9092", "port=9092" })
@SpringBootTest
@TestPropertySource("classpath:testStream.properties")
public class TestForStream {

    public static CountDownLatch latch = new CountDownLatch(1);

    @Autowired
    public EmbeddedKafkaBroker broker;

    @Test
    void contextLoads() {
        sleep(5); // included because the binder takes some time to initialize
        sendMessage("test-in", MessageBuilder
                .withPayload("Test Payload")
                .setHeader("customHeader", "headerSpecificData")
                .build());
    }

    public <T> ProducerFactory<String, T> createProducerFactory() {
        Map<String, Object> configs = new HashMap<>(KafkaTestUtils.producerProps(broker));
        configs.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        configs.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JsonSerializer.class);
        // Is JsonSerializer correct for a message?
        return new DefaultKafkaProducerFactory<>(configs);
    }

    public <T> void sendMessage(String topic, T listObj) {
        try {
            KafkaTemplate<String, T> kafkaTemplate = new KafkaTemplate<>(createProducerFactory());
            kafkaTemplate.send(new ProducerRecord<>(topic, listObj));
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    public void sleep(long time) {
        try {
            latch.await(time, TimeUnit.SECONDS);
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    }
}
Log of kafka binder test for message:
Converted Message: GenericMessage [payload={"payload":"Test Payload","headers":{"customHeader":"headerSpecificData","id":"d540a3ca-28db-b137-fc86-c25cc4b7eb8b","timestamp":1639399810476}}, headers={deliveryAttempt=1, kafka_timestampType=CREATE_TIME, kafka_receivedTopic=test-in, target-protocol=kafka, kafka_offset=0, scst_nativeHeadersPresent=true, kafka_consumer=org.apache.kafka.clients.consumer.KafkaConsumer#79580279, id=1cf2d382-df29-2672-4180-07da99e58244, kafka_receivedPartitionId=0, kafka_receivedTimestamp=1639399810526, contentType=application/json, __TypeId__=[B#24c79350, kafka_groupId=test-group-base, timestamp=1639399810651}]
So here the whole Spring message has been serialized into the record value (surfacing as the payload), while the Kafka record headers show up as the message headers, as expected.
I have tried spring.cloud.stream.kafka.binder.headers and headerMode to see if they would change anything, but to no avail.
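(For reference, spring.cloud.stream.kafka.binder.headers takes a comma-separated list of custom header names to transport, so the attempted setting presumably looked something like the line below; the real issue turned out to be on the producing side, as described in the edit.)

spring.cloud.stream.kafka.binder.headers=customHeader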
Edit:
Using springCloudVersion = 2020.0.3

I was using:

public <T> void sendMessage(String topic, T listObj) {
    try {
        KafkaTemplate<String, T> kafkaTemplate = new KafkaTemplate<>(createProducerFactory());
        kafkaTemplate.send(new ProducerRecord<>(topic, listObj));
    } catch (Exception e) {
        e.printStackTrace();
    }
}

to send the message, which was putting the entire Message object in as the record value.

What I should have been using:

public void sendMessage(String topic, Message<?> listObj) {
    try {
        KafkaTemplate<String, Message<?>> kafkaTemplate = new KafkaTemplate<>(createProducerFactory());
        kafkaTemplate.setDefaultTopic(topic);
        kafkaTemplate.send(listObj);
    } catch (Exception e) {
        e.printStackTrace();
    }
}
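The difference is that the send(Message<?>) overload runs the message through the template's message converter (MessagingMessageConverter by default), which maps the Spring message headers onto native Kafka record headers and serializes only the payload as the record value, so the consuming binder sees the payload and the custom header separately. A sketch of double-checking the raw record (hypothetical verification code, assuming an extra consumer against the embedded broker):

// in the test, after sendMessage(...)
Map<String, Object> props = KafkaTestUtils.consumerProps("verify-group", "true", broker);
try (Consumer<String, String> consumer = new DefaultKafkaConsumerFactory<>(props,
        new StringDeserializer(), new StringDeserializer()).createConsumer()) {
    broker.consumeFromAnEmbeddedTopic(consumer, "test-in");
    ConsumerRecord<String, String> record = KafkaTestUtils.getSingleRecord(consumer, "test-in");
    Header header = record.headers().lastHeader("customHeader");
    System.out.println(new String(header.value())); // the header travels as raw bytes on the record
}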

Related

Moving a file from a directory to a success directory or an error directory with Spring Integration

I am trying to implement a Spring Integration class that takes an .xml file, parses it and, if it's valid, moves it to an "archived" directory, or, in case of invalidity, moves it to an error directory.
import com.nagarro.studentapi.integration.queue.StudentSender;
import com.nagarro.studentapi.util.XmlParser;
import org.aopalliance.aop.Advice;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.integration.annotation.InboundChannelAdapter;
import org.springframework.integration.annotation.ServiceActivator;
import org.springframework.integration.channel.DirectChannel;
import org.springframework.integration.config.EnableIntegration;
import org.springframework.integration.core.MessageSource;
import org.springframework.integration.dsl.IntegrationFlow;
import org.springframework.integration.dsl.IntegrationFlows;
import org.springframework.integration.dsl.Pollers;
import org.springframework.integration.file.FileHeaders;
import org.springframework.integration.file.FileReadingMessageSource;
import org.springframework.integration.file.FileWritingMessageHandler;
import org.springframework.integration.file.filters.SimplePatternFileListFilter;
import org.springframework.integration.file.support.FileExistsMode;
import org.springframework.integration.handler.advice.AbstractRequestHandlerAdvice;
import org.springframework.integration.handler.advice.ExpressionEvaluatingRequestHandlerAdvice;
import org.springframework.messaging.Message;
import org.springframework.messaging.MessageChannel;
import org.springframework.messaging.MessageHandler;
import org.springframework.messaging.MessagingException;

import java.io.File;

@Configuration
@EnableIntegration
public class IntegrationConfiguration {

    private static final String XML = "*.xml";
    private static final String STUDENT = "\\student.xml";

    @Value("${student-api.xmlPath}")
    private String inputPath;

    @Value("${student-api.archivedDestination}")
    private String successPath;

    @Value("${student-api.errorDestination}")
    private String errorPath;

    @Bean
    public MessageChannel messageChannel() {
        return new DirectChannel();
    }

    @Bean
    @InboundChannelAdapter(value = "messageChannel")
    public MessageSource<File> messageProducer() {
        FileReadingMessageSource messageSource = new FileReadingMessageSource();
        messageSource.setDirectory(new File(inputPath));
        messageSource.setFilter(new SimplePatternFileListFilter(XML));
        return messageSource;
    }

    @Bean
    @ServiceActivator(inputChannel = "messageChannel")
    public MessageHandler handler() {
        FileWritingMessageHandler handler = new FileWritingMessageHandler(new File(successPath));
        handler.setFileExistsMode(FileExistsMode.REPLACE);
        handler.setExpectReply(false);
        return handler;
    }

    @Bean
    public IntegrationFlow integrationFlow(XmlParser xmlParser) {
        return IntegrationFlows.from(messageProducer(), spec -> spec.poller(Pollers.fixedDelay(1000)))
                .enrichHeaders(h -> h.headerExpression(FileHeaders.ORIGINAL_FILE, "payload"))
                .convert(String.class)
                .transform((String path) -> xmlParser.parsePath(path))
                .handle("xmlParser", "parsePath", e -> e.advice(errorAdvice()))
                .get();
    }

    @Bean
    public AbstractRequestHandlerAdvice errorAdvice() {
        return new AbstractRequestHandlerAdvice() {

            @Override
            protected Object doInvoke(ExecutionCallback callback, Object target, Message<?> message) {
                File file = message.getHeaders().get(FileHeaders.ORIGINAL_FILE, File.class);
                try {
                    Object result = callback.execute();
                    file.renameTo(new File(successPath, STUDENT));
                    System.out.println("File renamed after success");
                    return result;
                }
                catch (Exception e) {
                    file.renameTo(new File(errorPath, STUDENT));
                    System.out.println("File renamed after failure");
                    throw e;
                }
            }
        };
    }
}
However, whenever callback.execute() is called I get this error, and I don't quite understand why:
2022-09-06 18:20:07.971 ERROR 32152 --- [ scheduling-1] o.s.integration.handler.LoggingHandler : org.springframework.messaging.MessageHandlingException: error occurred during processing message in 'MethodInvokingMessageProcessor' [org.springframework.integration.handler.MethodInvokingMessageProcessor#1135e3d6]; nested exception is java.lang.IllegalArgumentException: No candidate methods found for messages., failedMessage=GenericMessage [payload=Student(firstname=John, lastname=Dose, cnp=123, birthDate=2000-12-12, address=Address(street=a, number=1, city=Craiova, country=Romania), grades=[Grade(discipline=a, date=2021-12-12, grade=10), Grade(discipline=b, date=2021-12-12, grade=9)]), headers={....
Although I have a message handler, I suspect the reason for this problem is that I do not override the handle method. But I am unsure of how to do it.
You have several problems:

@InboundChannelAdapter and IntegrationFlows.from(messageProducer()): this way you create two independent polling endpoints for the same source.
@ServiceActivator: this endpoint writes whichever file has just been read by one of those sources.
There is no connection between the @InboundChannelAdapter, your @ServiceActivator expectations, and that flow.
You have .transform((String path) -> xmlParser.parsePath(path)) and then, immediately after it, .handle("xmlParser", "parsePath", ...), which looks essentially the same but does not make sense: you call the same parsePath() twice for different payloads, where the second call receives the result of the first parsePath() as its input.

Please revise your logic carefully: right now some of your configuration is misleading and really error-prone. I believe the error you get is because your parsePath() expects a String but receives the Student we see in the payload for that handle(). A corrected flow is sketched below.
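A minimal sketch of the corrected flow, with parsePath() called exactly once (assuming the @InboundChannelAdapter and @ServiceActivator beans are removed, errorAdvice() stays as above, and the trailing nullChannel simply discards the parsed result):

@Bean
public IntegrationFlow integrationFlow(XmlParser xmlParser) {
    return IntegrationFlows
            // single polling endpoint for the directory source
            .from(messageProducer(), spec -> spec.poller(Pollers.fixedDelay(1000)))
            .enrichHeaders(h -> h.headerExpression(FileHeaders.ORIGINAL_FILE, "payload"))
            .convert(String.class)
            // parse exactly once; errorAdvice() moves the file to the success/error directory
            .handle(String.class, (path, headers) -> xmlParser.parsePath(path),
                    e -> e.advice(errorAdvice()))
            .channel("nullChannel")
            .get();
}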

AWS Lambda Java "Failed to connect to service endpoint:" despite supplying the endpoint with .withEndpointConfiguration

Please help me diagnose the error message "Failed to connect to service endpoint:". That is the complete error message. It kind of looks like it can't find the endpoint, but as you can see below, I do supply the endpoint with the .withEndpointConfiguration method.
Here is my code:
package xyz.bombchu;

import java.util.HashMap;

import com.amazonaws.ClientConfiguration;
import com.amazonaws.auth.InstanceProfileCredentialsProvider;
import com.amazonaws.client.builder.AwsClientBuilder;
import com.amazonaws.regions.Regions;
import com.amazonaws.services.dynamodbv2.AmazonDynamoDB;
import com.amazonaws.services.dynamodbv2.AmazonDynamoDBClientBuilder;
import com.amazonaws.services.dynamodbv2.document.DynamoDB;
import com.amazonaws.services.dynamodbv2.model.AttributeValue;
import com.amazonaws.services.lambda.runtime.Context;
import com.amazonaws.services.lambda.runtime.RequestHandler;

public class LambdaFunctionHandler implements RequestHandler<Object, String> {

    DynamoDB ddb;

    @Override
    public String handleRequest(Object input, Context context) {
        Regions REGION = Regions.AP_SOUTHEAST_2;
        HashMap<String, AttributeValue> item_values =
                new HashMap<String, AttributeValue>();
        String relativeTime = "02000001";
        item_values.put("dateTime", new AttributeValue().withN(relativeTime));
        item_values.put("cID", new AttributeValue("TEST"));
        AmazonDynamoDB ddb = AmazonDynamoDBClientBuilder.standard()
                .withEndpointConfiguration(new AwsClientBuilder.EndpointConfiguration(
                        "dynamodb.ap-southeast-2.amazonaws.com", "ap-southeast-2"))
                .withCredentials(new InstanceProfileCredentialsProvider())
                .withClientConfiguration(new ClientConfiguration())
                .build();
        try {
            ddb.putItem("myTableTest", item_values);
        } catch (Exception e) {
            System.err.println(e.getMessage());
            System.exit(1);
        }
        return "DONE"; // added so the String-returning handler compiles
    }
}

Quarkus + Kafka + Smallrye exception handling

How can I handle exceptions in stream processing with Quarkus + Kafka + SmallRye?
My code is very similar to the imperative producer example in the Quarkus guide (https://quarkus.io/guides/kafka#imperative-usage):
import io.smallrye.reactive.messaging.annotations.Channel;
import io.smallrye.reactive.messaging.annotations.Emitter;

import javax.inject.Inject;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.Consumes;
import javax.ws.rs.core.MediaType;

@Path("/prices")
public class PriceResource {

    @Inject
    @Channel("price-create")
    Emitter<Double> priceEmitter;

    @POST
    @Consumes(MediaType.TEXT_PLAIN)
    public void addPrice(Double price) {
        priceEmitter.send(price);
    }
}
I want something similar to the vanilla Kafka library, which gives the option to handle a callback for each record sent:
ProducerRecord<String, String> record = new ProducerRecord<>("topic-name", key, value);
producer.send(record, new Callback() {
    @Override
    public void onCompletion(RecordMetadata metadata, Exception exception) {
        logger.info(record.toString());
        if (exception != null) {
            logger.error("Producer exception", exception);
        }
    }
});
Thanks
There is a section of the docs on Acknowledgement:
#Incoming("i")
#Outgoing("j")
#Acknowledgment(Acknowledgment.Strategy.MANUAL)
public CompletionStage<Message<String>> manualAck(Message<String> input) {
return CompletableFuture.supplyAsync(input::getPayload)
.thenApply(Message::of)
.thenCompose(m -> input.ack().thenApply(x -> m));
}
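For a per-record callback similar to the vanilla producer's Callback, another option (a sketch, assuming the MicroProfile org.eclipse.microprofile.reactive.messaging.Emitter and the Message.of(payload, ack, nack) factory available in recent SmallRye versions) is to send a Message with ack/nack functions:

import java.util.concurrent.CompletableFuture;
import org.eclipse.microprofile.reactive.messaging.Message;

// with: @Inject @Channel("price-create") Emitter<Double> priceEmitter;
public void addPrice(Double price) {
    priceEmitter.send(Message.of(price,
            () -> {
                // called once the record is acknowledged (written to Kafka)
                return CompletableFuture.completedFuture(null);
            },
            failure -> {
                // called if the send fails, like the exception branch of the vanilla Callback
                return CompletableFuture.completedFuture(null);
            }));
}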

Spring TCP support High CPU utilisation

I'm using Spring Integration's TCP support to create a TCP server.
I noticed that the CPU runs at 91% after I send only one request.
This is my code:
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.integration.annotation.IntegrationComponentScan;
import org.springframework.integration.channel.DirectChannel;
import org.springframework.integration.ip.tcp.TcpInboundGateway;
import org.springframework.integration.ip.tcp.connection.AbstractServerConnectionFactory;
import org.springframework.integration.ip.tcp.connection.MessageConvertingTcpMessageMapper;
import org.springframework.integration.ip.tcp.connection.TcpNetServerConnectionFactory;
import org.springframework.integration.ip.tcp.serializer.AbstractByteArraySerializer;
import org.springframework.messaging.MessageChannel;
import org.springframework.messaging.converter.ByteArrayMessageConverter;
import org.springframework.messaging.converter.MessageConverter;

@Configuration
@IntegrationComponentScan
public class TCPServerConfig {

    @Value("${tcp.listener.port}")
    private int port;

    @Bean
    public MessageConvertingTcpMessageMapper mapper(final MessageConverter messageConverter) {
        return new MessageConvertingTcpMessageMapper(messageConverter);
    }

    @Bean
    public MessageConverter messageConverter() {
        return new ByteArrayMessageConverter();
    }

    @Bean
    public TcpInboundGateway tcpInGate(final AbstractServerConnectionFactory connectionFactory) {
        final TcpInboundGateway inGate = new TcpInboundGateway();
        inGate.setConnectionFactory(connectionFactory);
        inGate.setRequestChannel(fromTcp());
        return inGate;
    }

    @Bean
    public MessageChannel fromTcp() {
        return new DirectChannel();
    }

    @Bean
    public AbstractServerConnectionFactory serverCF(
            final AbstractByteArraySerializer byteArraySerializer) {
        final TcpNetServerConnectionFactory connectionFactory =
                new TcpNetServerConnectionFactory(this.port);
        connectionFactory.setDeserializer(byteArraySerializer);
        connectionFactory.setSerializer(byteArraySerializer);
        return connectionFactory;
    }

    @Bean
    public AbstractByteArraySerializer byteArraySerializer() {
        return new ByteArrayCustomeSerializer();
    }
}
and this is how I convert and log the messages:
import static org.apache.commons.lang3.StringUtils.EMPTY;
import static org.apache.commons.lang3.StringUtils.isBlank;

import java.nio.charset.Charset;

import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.ArrayUtils;
import org.springframework.integration.annotation.MessageEndpoint;
import org.springframework.integration.annotation.ServiceActivator;
import org.springframework.integration.annotation.Transformer;

@Slf4j
@MessageEndpoint
class TCPMessageProcessor {

    @Transformer(inputChannel = "fromTcp", outputChannel = "toProcess")
    public String convertInput(final byte[] bytes) {
        if (ArrayUtils.isEmpty(bytes)) {
            return EMPTY;
        }
        String inboundMessage = new String(bytes, Charset.forName("ASCII"));
        log.info("Converted the message to string: '{}'. Handing it to the processor", inboundMessage);
        return inboundMessage;
    }

    @ServiceActivator(inputChannel = "toProcess")
    public String process(final String message) {
        if (isBlank(message)) {
            return EMPTY;
        }
        log.info("Started processing message '{}'", message);
        return "some response";
    }
}
I added logging and found that once the application receives the first request, it serves it correctly, then calls ByteArrayCustomeSerializer#doDeserialize multiple times every second with an input stream yielding an empty byte array. Can anyone provide insight into why this is happening and how to avoid this behavior?
This is from org.springframework.integration.ip.tcp.connection.TcpNetConnection
"threadId":"pool-1-thread-3", "message":"Message received GenericMessage [payload=byte[0], headers={ip_tcp_remotePort=*****, ip_connectionId=localhost:*****:*****:d313c398-fc80-48dd-b9c1-f447c9172f09, ip_localInetAddress=/127.0.0.1, ip_address=127.0.0.1, id=1fd69791-c300-787b-c5cc-281a360ae8f4, ip_hostname=localhost, timestamp=1521623822108}]"
It's most likely a bug in your ByteArrayCustomeSerializer - edit the question to show the code.
Most likely you are not detecting the end of stream.
If the stream closes between messages, you need to throw a SoftEndOfStreamException to signal that the socket has closed in an "expected" fashion. If the stream closes during message deserialization, throw some other exception, as in the sketch below.
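A sketch of the shape such a deserializer might take (the actual framing logic of ByteArrayCustomeSerializer is unknown; newline-terminated framing is assumed here purely for illustration):

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;

import org.springframework.integration.ip.tcp.serializer.AbstractByteArraySerializer;
import org.springframework.integration.ip.tcp.serializer.SoftEndOfStreamException;

public class ByteArrayCustomeSerializer extends AbstractByteArraySerializer {

    @Override
    public byte[] deserialize(InputStream inputStream) throws IOException {
        int bite = inputStream.read();
        if (bite < 0) {
            // socket closed between messages: signal an "expected" close
            throw new SoftEndOfStreamException("Stream closed between payloads");
        }
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        while (bite >= 0 && bite != '\n') {
            buffer.write(bite);
            bite = inputStream.read();
        }
        if (bite < 0) {
            // socket closed mid-message: this one is a real error
            throw new IOException("Stream closed during message deserialization");
        }
        return buffer.toByteArray();
    }

    @Override
    public void serialize(byte[] payload, OutputStream outputStream) throws IOException {
        outputStream.write(payload);
        outputStream.write('\n');
    }
}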

How to set up AWS message queuing (SQS) in React & Java Spring

I have a React-based application in an AWS EC2 instance and a Java Spring Boot-based application in another AWS EC2 instance. I need to send POST requests using AWS' SQS from the React application. Once the messages are sent, I need to retrieve them in the Java Spring application hosting API endpoints. Guidance on how to accomplish the send and receive operations would be appreciated.
I used the below code for fetching messages from SQS.
MessagingConfiguration.java
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.jms.core.JmsTemplate;
import org.springframework.jms.listener.DefaultMessageListenerContainer;
import com.amazon.sqs.javamessaging.SQSConnectionFactory;
import com.amazonaws.auth.AWSCredentials;
import com.amazonaws.auth.AWSCredentialsProvider;
import com.amazonaws.auth.BasicAWSCredentials;
import com.es.spring.messaging.MessageReceiver;

@Configuration
public class MessagingConfiguration {

    @Value("${queue.endpoint}")
    private String endpoint;

    @Value("${queue.name}")
    private String queueName;

    @Autowired
    private MessageReceiver messageReceiver;

    @Bean
    public DefaultMessageListenerContainer jmsListenerContainer() {
        SQSConnectionFactory sqsConnectionFactory = SQSConnectionFactory.builder()
                .withEndpoint(endpoint)
                // a single provider; the original set withAWSCredentialsProvider twice
                .withAWSCredentialsProvider(awsCredentialsProvider)
                .withNumberOfMessagesToPrefetch(10).build();
        DefaultMessageListenerContainer dmlc = new DefaultMessageListenerContainer();
        dmlc.setConnectionFactory(sqsConnectionFactory);
        dmlc.setDestinationName(queueName);
        dmlc.setMessageListener(messageReceiver);
        return dmlc;
    }

    @Bean
    public JmsTemplate createJMSTemplate() {
        SQSConnectionFactory sqsConnectionFactory = SQSConnectionFactory.builder()
                .withAWSCredentialsProvider(awsCredentialsProvider)
                .withEndpoint(endpoint)
                .withNumberOfMessagesToPrefetch(10).build();
        JmsTemplate jmsTemplate = new JmsTemplate(sqsConnectionFactory);
        jmsTemplate.setDefaultDestinationName(queueName);
        jmsTemplate.setDeliveryPersistent(false);
        return jmsTemplate;
    }

    private final AWSCredentialsProvider awsCredentialsProvider = new AWSCredentialsProvider() {

        @Override
        public AWSCredentials getCredentials() {
            return new BasicAWSCredentials("accessKey", "Secretkey");
        }

        @Override
        public void refresh() {
        }
    };
}
MessageReceiver.java
import javax.jms.Message;
import javax.jms.MessageListener;
import javax.jms.TextMessage;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
import com.fasterxml.jackson.databind.ObjectMapper;

@Component
public class MessageReceiver implements MessageListener {

    Logger LOG = LoggerFactory.getLogger(MessageReceiver.class);

    @Override
    public void onMessage(Message message) {
        try {
            TextMessage textMessage = (TextMessage) message;
            System.out.println("message:" + textMessage.getText());
            CustomClass activeMq = new ObjectMapper().readValue(textMessage.getText(), CustomClass.class);
            LOG.info("Application : Active Mq : {}", activeMq);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
Send a message to SQS:
AmazonSQS awsSqs = new AwsSqsClient().getAWSSqsclient();
awsSqs.sendMessage(new SendMessageRequest().withDelaySeconds(60)
        .withQueueUrl("https://sqs-url/TestQueue").withMessageBody(input));
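AwsSqsClient here is the poster's own wrapper; a minimal sketch of building the client directly with the v1 SDK (the region is illustrative) could look like:

import com.amazonaws.regions.Regions;
import com.amazonaws.services.sqs.AmazonSQS;
import com.amazonaws.services.sqs.AmazonSQSClientBuilder;
import com.amazonaws.services.sqs.model.SendMessageRequest;

AmazonSQS awsSqs = AmazonSQSClientBuilder.standard()
        .withRegion(Regions.AP_SOUTHEAST_2) // illustrative region
        .build();
awsSqs.sendMessage(new SendMessageRequest()
        .withDelaySeconds(60)
        .withQueueUrl("https://sqs-url/TestQueue")
        .withMessageBody(input));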
Hope the above answer helps you.
