How to set up AWS message queuing (SQS) in React & Java Spring

I have a React-based application in an AWS EC2 instance and a Java Spring Boot-based application in another AWS EC2 instance. I need to send POST requests using AWS' SQS from the React application. Once the messages are sent, I need to retrieve them in the Java Spring application hosting API endpoints. Guidance on how to accomplish the send and receive operations would be appreciated.
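On the React side, the usual pattern is not to call SQS directly from the browser, since that would expose AWS credentials; instead, React sends a plain POST to a backend endpoint that forwards the body to SQS. A minimal sketch of such a bridge endpoint, assuming the AWS SDK for Java v1 and a hypothetical controller name and queue URL:

import com.amazonaws.services.sqs.AmazonSQS;
import com.amazonaws.services.sqs.model.SendMessageRequest;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RestController;

@RestController
public class QueuePublishController {

    // Hypothetical queue URL; substitute your own.
    private static final String QUEUE_URL = "https://sqs.us-east-1.amazonaws.com/123456789012/TestQueue";

    private final AmazonSQS sqs;

    public QueuePublishController(AmazonSQS sqs) {
        this.sqs = sqs;
    }

    // React POSTs its JSON here; the body is forwarded to SQS as the message body.
    @PostMapping("/messages")
    public void publish(@RequestBody String body) {
        sqs.sendMessage(new SendMessageRequest()
                .withQueueUrl(QUEUE_URL)
                .withMessageBody(body));
    }
}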

I used the code below for fetching messages from SQS.
MessagingConfiguration.java
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.jms.core.JmsTemplate;
import org.springframework.jms.listener.DefaultMessageListenerContainer;
import com.amazon.sqs.javamessaging.SQSConnectionFactory;
import com.amazonaws.auth.AWSCredentials;
import com.amazonaws.auth.AWSCredentialsProvider;
import com.amazonaws.auth.BasicAWSCredentials;
import com.es.spring.messaging.MessageReceiver;

@Configuration
public class MessagingConfiguration {

    @Value("${queue.endpoint}")
    private String endpoint;

    @Value("${queue.name}")
    private String queueName;

    @Autowired
    private MessageReceiver messageReceiver;

    @Bean
    public DefaultMessageListenerContainer jmsListenerContainer() {
        SQSConnectionFactory sqsConnectionFactory = SQSConnectionFactory.builder()
                .withAWSCredentialsProvider(awsCredentialsProvider)
                .withEndpoint(endpoint)
                .withNumberOfMessagesToPrefetch(10).build();

        DefaultMessageListenerContainer dmlc = new DefaultMessageListenerContainer();
        dmlc.setConnectionFactory(sqsConnectionFactory);
        dmlc.setDestinationName(queueName);
        dmlc.setMessageListener(messageReceiver);
        return dmlc;
    }

    @Bean
    public JmsTemplate createJMSTemplate() {
        SQSConnectionFactory sqsConnectionFactory = SQSConnectionFactory.builder()
                .withAWSCredentialsProvider(awsCredentialsProvider)
                .withEndpoint(endpoint)
                .withNumberOfMessagesToPrefetch(10).build();

        JmsTemplate jmsTemplate = new JmsTemplate(sqsConnectionFactory);
        jmsTemplate.setDefaultDestinationName(queueName);
        jmsTemplate.setDeliveryPersistent(false);
        return jmsTemplate;
    }

    private final AWSCredentialsProvider awsCredentialsProvider = new AWSCredentialsProvider() {
        @Override
        public AWSCredentials getCredentials() {
            return new BasicAWSCredentials("accessKey", "Secretkey");
        }

        @Override
        public void refresh() {
        }
    };
}
MessageReceiver.java
import javax.jms.Message;
import javax.jms.MessageListener;
import javax.jms.TextMessage;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Component;
import com.fasterxml.jackson.databind.ObjectMapper;

@Component
public class MessageReceiver implements MessageListener {

    Logger LOG = LoggerFactory.getLogger(MessageReceiver.class);

    @Override
    public void onMessage(Message message) {
        try {
            TextMessage textMessage = (TextMessage) message;
            System.out.println("message:" + textMessage.getText());
            CustomClass activeMq = new ObjectMapper().readValue(textMessage.getText(), CustomClass.class);
            LOG.info("Application : Active Mq : {}", activeMq);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
Sending a message to SQS:
AmazonSQS awsSqs = new AwsSqsClient().getAWSSqsclient();
awsSqs.sendMessage(new SendMessageRequest().withDelaySeconds(60)
        .withQueueUrl("https://sqs-url/TestQueue").withMessageBody(input));
Hope the above helps.

Related

Liferay Kafka integration

I couldn't find any reference online for implementing Kafka in Liferay.
The requirements to be implemented:
Push messages to a Kafka topic.
Poll and receive messages from a Kafka topic.
I tried the code below, but I am unable to receive messages in Liferay after pushing them from the Kafka terminal.
Dependencies :
compileInclude "org.springframework.kafka:spring-kafka:2.9.2"
compileInclude "org.apache.kafka:kafka-streams:3.3.1"
Code:
import org.apache.kafka.common.serialization.Serdes;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.annotation.EnableKafka;
import org.springframework.kafka.annotation.EnableKafkaStreams;
import org.springframework.kafka.annotation.KafkaStreamsDefaultConfiguration;
import org.springframework.kafka.config.KafkaStreamsConfiguration;
import java.util.HashMap;
import java.util.Map;
import static org.apache.kafka.streams.StreamsConfig.APPLICATION_ID_CONFIG;
import static org.apache.kafka.streams.StreamsConfig.BOOTSTRAP_SERVERS_CONFIG;
import static org.apache.kafka.streams.StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG;
import static org.apache.kafka.streams.StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG;

@Configuration
@EnableKafka
@EnableKafkaStreams
public class KafkaConfig {

    @Bean(name = KafkaStreamsDefaultConfiguration.DEFAULT_STREAMS_CONFIG_BEAN_NAME)
    KafkaStreamsConfiguration kStreamsConfig() {
        Map<String, Object> props = new HashMap<>();
        props.put(APPLICATION_ID_CONFIG, "streams-app");
        props.put(BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        props.put(DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
        props.put(DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
        return new KafkaStreamsConfiguration(props);
    }
}
import com.liferay.portal.kernel.log.Log;
import com.liferay.portal.kernel.log.LogFactoryUtil;
import org.osgi.service.component.annotations.Component;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.scheduling.annotation.Async;
import org.springframework.transaction.annotation.Transactional;

@Component(immediate = true)
public class KafkaMessageReceiver {

    public static Log _log = LogFactoryUtil.getLog(KafkaMessageReceiver.class);

    @Async
    @KafkaListener(
        topics = "liferay-topic",
        concurrency = "2"
    )
    @Transactional
    public void handleMessage(String payload) {
        _log.info(payload);
    }
}
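The code above only covers the consuming side; for the first requirement (pushing messages to a topic), a minimal producer sketch along standard Spring Kafka lines might look like the following, assuming the same localhost:9092 broker and String payloads as above:

import java.util.HashMap;
import java.util.Map;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.core.ProducerFactory;

@Configuration
public class KafkaProducerConfig {

    @Bean
    public ProducerFactory<String, String> producerFactory() {
        Map<String, Object> props = new HashMap<>();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        return new DefaultKafkaProducerFactory<>(props);
    }

    @Bean
    public KafkaTemplate<String, String> kafkaTemplate() {
        return new KafkaTemplate<>(producerFactory());
    }
}

With this in place, injecting the template and calling kafkaTemplate.send("liferay-topic", payload) publishes to the topic consumed above. Note this sketch assumes the Spring context is actually managing these beans, which is the usual sticking point when embedding Spring Kafka inside an OSGi/Liferay module.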

Required a single bean, 2 found

I am trying to build a Spring Boot application integrated with S3 and SQS. I have an AppConfig, S3Config, and SqsConfig, and I autowire the S3Config and SqsConfig inside the AppConfig.
S3Config declares a bean s3client, which I use to perform AWS S3 operations. I am getting an error where the autowiring fails because multiple bean definitions are available, but I'm not sure what is causing the duplication. Snippets attached.
Note that I am creating a different definition of the s3client bean depending on the profile (local development vs. server deployment).
AppConfig
import lombok.Getter;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.cloud.context.config.annotation.RefreshScope;
import org.springframework.context.annotation.Configuration;
import org.springframework.stereotype.Component;

@Configuration
@Slf4j
@Component
@RefreshScope
@Getter
public class AppConfig {

    @Autowired
    S3Config s3Config;

    @Autowired
    SqsConfig sqsConfig;
}
S3Config
import com.amazonaws.auth.DefaultAWSCredentialsProviderChain;
import com.amazonaws.auth.STSAssumeRoleSessionCredentialsProvider;
import com.amazonaws.regions.Regions;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import lombok.Getter;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.cloud.context.config.annotation.RefreshScope;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Profile;
import org.springframework.stereotype.Component;

@Configuration
@Slf4j
@Component
@RefreshScope
@Getter
public class S3Config {

    @Value("${aws.s3.roleArn}")
    private String s3RoleArn;

    @Value("${aws.s3.roleSessionName}")
    private String s3RoleSessionName;

    @Value("${aws.s3.region}")
    private String s3region;

    @Value("${aws.s3.inputBucketName}")
    private String inputBucketName;

    @Value("${aws.s3.outputBucketName}")
    private String outputBucketName;

    @Bean(name = "s3client")
    @Profile("!local")
    public AmazonS3 AmazonS3Client() {
        log.info(String.format("validating_config_repo_values: s3RoleArn=%s s3RoleSessionName=%s s3region=%s", s3RoleArn, s3RoleSessionName, s3region));
        AmazonS3 s3client = null;
        try {
            STSAssumeRoleSessionCredentialsProvider roleCredentialsProvider = new STSAssumeRoleSessionCredentialsProvider.Builder(
                    s3RoleArn, s3RoleSessionName).build();
            AmazonS3ClientBuilder amazonS3ClientBuilder = AmazonS3ClientBuilder.standard()
                    .withCredentials(roleCredentialsProvider);
            s3client = amazonS3ClientBuilder.withRegion(s3region).build();
        } catch (Exception e) {
            log.error(String.format("exception_while_creating_AmazonS3Client : %s", e));
        }
        return s3client;
    }

    @Bean(name = "s3client")
    @Profile("local")
    public AmazonS3 localhostAmazonS3Client() {
        AmazonS3 s3client = null;
        try {
            s3client = AmazonS3ClientBuilder.standard().withRegion(Regions.DEFAULT_REGION).withCredentials(new DefaultAWSCredentialsProviderChain()).build();
        } catch (Exception e) {
            log.error(String.format("exception_while_creating_AmazonS3Client : %s", e));
        }
        return s3client;
    }
}
UploadServiceImpl
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.model.GetObjectRequest;
import com.amazonaws.services.s3.model.S3Object;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.io.*;
import java.net.URI;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;

@Service
@Slf4j
public class S3DatasetDownloadServiceImpl implements DatasetDownloadService {

    @Autowired
    AmazonS3 s3Client;

    @Autowired
    AppConfig appConfig;

    public String downloadDataset(URI uri) throws IOException {
        String key = uri.getPath();
        String outputBucketName = appConfig.getS3Config().getOutputBucketName();
        log.info(String.format("client_downloading_file : key=%s from bucket=%s", key, outputBucketName));
        try {
            // download logic elided in the original post
        } catch (Exception e) {
        }
        return key;
    }
}
Found the error: I had the bean name as s3client (lowercase c in client), whereas for autowiring I was using AmazonS3 s3Client.
Fixing the casing worked!
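Worth noting: when a bean name and field name can drift apart like this, an explicit qualifier makes the wiring independent of the field name. A small sketch (the consuming service class here is hypothetical):

import com.amazonaws.services.s3.AmazonS3;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Service;

@Service
public class ExampleS3Consumer {

    // Matches the bean by its declared name, regardless of what the field is called.
    @Autowired
    @Qualifier("s3client")
    private AmazonS3 s3Client;
}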

Spring Cloud Stream Kafka Binder Test Custom Headers

I'm trying to figure out how to include custom headers in the Spring Message<?> used in Spring Cloud Stream with the Kafka binder. My goal is to include some custom header data that would be added in one producer (function) class, passed through Kafka, and then consumed by another class in a different service (with the custom header data intact).
I feel like I am missing something, as I can get it to work using the TestChannelBinder, e.g.
import lombok.extern.slf4j.Slf4j;
import org.springframework.messaging.Message;
import org.springframework.stereotype.Component;
import java.util.function.Function;

@Component
@Slf4j
public class BaseStream implements Function<Message<String>, String> {

    @Override
    public String apply(Message<String> transactionMessage) {
        log.debug("Converted Message: {} ", transactionMessage);
        return transactionMessage.getPayload();
    }
}
Test class with Test Binder:
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.cloud.stream.binder.test.InputDestination;
import org.springframework.cloud.stream.binder.test.OutputDestination;
import org.springframework.cloud.stream.binder.test.TestChannelBinderConfiguration;
import org.springframework.context.annotation.Import;
import org.springframework.integration.support.MessageBuilder;
import org.springframework.test.context.TestPropertySource;

@SpringBootTest
@TestPropertySource("classpath:testStream.properties")
@Import(TestChannelBinderConfiguration.class)
public class TestForStream {

    @Autowired
    InputDestination inputDestination;

    @Autowired
    OutputDestination outputDestination;

    @Test
    void contextLoads() {
        inputDestination.send(MessageBuilder
            .withPayload("Test Payload")
            .setHeader("customHeader", "headerSpecificData")
            .build());
    }
}
testStream.properties
spring.cloud.function.definition=baseStream
spring.cloud.stream.bindings.baseStream-in-0.destination=test-in
spring.cloud.stream.bindings.baseStream-out-0.destination=test-out
spring.cloud.stream.bindings.baseStream-in-0.group=test-group-base
Log when running:
Converted Message: GenericMessage [payload=Test Payload, headers={id=5c6d1082-c084-0b25-4afc-b5d97bf537f9, customHeader=headerSpecificData, contentType=application/json, timestamp=1639398696800, target-protocol=kafka}]
This is what I am looking to do. But when I try to test it with the Kafka binder, it seems to include the Message<String> object in the payload as a JSON string, which I thought would be parsed into the requested input of the function BaseStream.
Just wondering if someone could see where I'm going wrong with my testing, as I have tried various things to get this to work; seeing as it works with the test binder, I assumed it would work with the Kafka binder.
Test Class for Kafka Binder Test:
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringSerializer;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.integration.support.MessageBuilder;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.core.ProducerFactory;
import org.springframework.kafka.support.serializer.JsonSerializer;
import org.springframework.kafka.test.EmbeddedKafkaBroker;
import org.springframework.kafka.test.context.EmbeddedKafka;
import org.springframework.kafka.test.utils.KafkaTestUtils;
import org.springframework.test.context.TestPropertySource;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;

@EmbeddedKafka(partitions = 1, brokerProperties = { "listeners=PLAINTEXT://localhost:9092", "port=9092" })
@SpringBootTest
@TestPropertySource("classpath:testStream.properties")
public class TestForStream {

    public static CountDownLatch latch = new CountDownLatch(1);

    @Autowired
    public EmbeddedKafkaBroker broker;

    @Test
    void contextLoads() {
        sleep(5); // included because the broker takes some time to initialize
        sendMessage("test-in", MessageBuilder
            .withPayload("Test Payload")
            .setHeader("customHeader", "headerSpecificData")
            .build());
    }

    public <T> ProducerFactory<String, T> createProducerFactory() {
        Map<String, Object> configs = new HashMap<>(KafkaTestUtils.producerProps(broker));
        configs.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        configs.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, JsonSerializer.class);
        // Is JsonSerializer correct for a message?
        return new DefaultKafkaProducerFactory<>(configs);
    }

    public <T> void sendMessage(String topic, T listObj) {
        try {
            KafkaTemplate<String, T> kafkaTemplate = new KafkaTemplate<>(createProducerFactory());
            kafkaTemplate.send(new ProducerRecord<>(topic, listObj));
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    public void sleep(long time) {
        try {
            latch.await(time, TimeUnit.SECONDS);
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    }
}
Log of kafka binder test for message:
Converted Message: GenericMessage [payload={"payload":"Test Payload","headers":{"customHeader":"headerSpecificData","id":"d540a3ca-28db-b137-fc86-c25cc4b7eb8b","timestamp":1639399810476}}, headers={deliveryAttempt=1, kafka_timestampType=CREATE_TIME, kafka_receivedTopic=test-in, target-protocol=kafka, kafka_offset=0, scst_nativeHeadersPresent=true, kafka_consumer=org.apache.kafka.clients.consumer.KafkaConsumer#79580279, id=1cf2d382-df29-2672-4180-07da99e58244, kafka_receivedPartitionId=0, kafka_receivedTimestamp=1639399810526, contentType=application/json, __TypeId__=[B#24c79350, kafka_groupId=test-group-base, timestamp=1639399810651}]
So here the message has been included in the payload and the kafka headers included in the headers as expected.
I have tried spring.cloud.stream.kafka.binder.headers and headerMode to see if they would change anything, but to no avail.
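For reference, a sketch of what I understand that binder property to look like in testStream.properties (assuming the custom header name used above; this mainly matters when headers are embedded rather than passed natively):
spring.cloud.stream.kafka.binder.headers=customHeader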
Edit:
Using springCloudVersion = 2020.0.3
I was using:
public <T> void sendMessage(String topic, T listObj) {
    try {
        KafkaTemplate<String, T> kafkaTemplate = new KafkaTemplate<>(createProducerFactory());
        kafkaTemplate.send(new ProducerRecord<>(topic, listObj));
    } catch (Exception e) {
        e.printStackTrace();
    }
}
to send the message, which was putting the whole Message object in as the record value.
What I should've been using:
public void sendMessage(String topic, Message<?> listObj) {
    try {
        KafkaTemplate<String, Message<?>> kafkaTemplate = new KafkaTemplate<>(createProducerFactory());
        kafkaTemplate.setDefaultTopic(topic);
        kafkaTemplate.send(listObj);
    } catch (Exception e) {
        e.printStackTrace();
    }
}
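As I understand it, this works because KafkaTemplate.send(Message<?>) runs the message through its messaging converter: the payload becomes the record value and the Spring message headers are mapped onto native Kafka record headers, so the consumer binding sees the payload and customHeader separately instead of one JSON-serialized Message object.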

Spring Boot 2 Transactional Annotation Not Working

Question regarding our Spring Boot 2.3.8.RELEASE implementation of @Transactional. The requirement is to implement distributed transactions that writes to an instance of PostgreSQL and Artemis queues. If one commit fails, then so should the other. We are using Atomikos for our JTA Transaction Manager.
I think I have implemented everything I need, but clearly not. When I throw an Exception in my service code to test the rollback functionality, it clearly does not work: The message is written to Artemis even after I throw an exception in the service code.
Any help with diagnosing and fixing would be much appreciated. If any additional details are required, please let me know.
Please find the details of the implementation below:
Spring Boot Application:
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.CommandLineRunner;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration;
import org.springframework.boot.autoconfigure.jdbc.DataSourceTransactionManagerAutoConfiguration;
import org.springframework.boot.autoconfigure.jms.JmsAutoConfiguration;
import org.springframework.boot.autoconfigure.jms.activemq.ActiveMQAutoConfiguration;
import org.springframework.boot.autoconfigure.jms.artemis.ArtemisAutoConfiguration;
import org.springframework.boot.autoconfigure.orm.jpa.HibernateJpaAutoConfiguration;
import org.springframework.context.ConfigurableApplicationContext;
import xxx.xxx.Users;
import xxx.xxx.TransactionServiceImpl;

@SpringBootApplication
(
    exclude = {
        DataSourceAutoConfiguration.class,
        HibernateJpaAutoConfiguration.class,
        DataSourceTransactionManagerAutoConfiguration.class,
        JmsAutoConfiguration.class,
        ActiveMQAutoConfiguration.class,
        ArtemisAutoConfiguration.class
    }
)
public class BApplication implements CommandLineRunner
{
    public static void main(String[] args) throws Exception
    {
        ConfigurableApplicationContext ctx = SpringApplication.run(BApplication.class, args);
        System.in.read();
        ctx.close();
    }

    @Autowired
    TransactionServiceImpl tsi;

    @Override
    public void run(String... args) throws Exception
    {
        Users user = new Users();
        user.setFirstName("Moe");
        user.setGender("M");
        user.setLastName("Moe");
        tsi.save(user);
    }
}
Here is the JTA Configuration:
JTA Configuration
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.jms.ConnectionFactory;
import javax.jms.JMSException;
import javax.sql.DataSource;
import javax.transaction.SystemException;
import javax.transaction.UserTransaction;
import java.util.Properties;
import javax.annotation.PostConstruct;
import org.springframework.context.annotation.Bean;
import org.springframework.orm.jpa.JpaVendorAdapter;
import org.springframework.context.annotation.Primary;
import org.springframework.context.annotation.DependsOn;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Configuration;
import org.springframework.orm.jpa.vendor.HibernateJpaVendorAdapter;
import com.atomikos.icatch.config.UserTransactionService;
import com.atomikos.icatch.config.UserTransactionServiceImp;
import com.atomikos.icatch.jta.UserTransactionImp;
import com.atomikos.icatch.jta.UserTransactionManager;
import com.atomikos.jdbc.AtomikosDataSourceBean;
import com.atomikos.jms.AtomikosConnectionFactoryBean;
import org.apache.activemq.artemis.jms.client.ActiveMQConnectionFactory;
import org.postgresql.xa.PGXADataSource;

@Configuration("jtaConfig")
public class JtaConfig
{
    private static final Logger appLogger = LoggerFactory.getLogger(JtaConfig.class);

    @Value("${amq.broker.url}")
    private String brokerUrl;

    @Value("${amq.broker.username}")
    private String brokerUsername;

    @Value("${amq.broker.password}")
    private String brokerPassword;

    @Value("${postgresql.datasource.url}")
    String dataSourceUrl;

    @Value("${postgresql.datasource.username}")
    String dsUsername;

    @Value("${postgresql.datasource.password}")
    String dsPassword;

    @Value("${postgresql.datasource.driver.classname}")
    String dsClassName;

    @Value("${postgresql.initial.connections}")
    int initialDSConnections;

    @Value("${postgresql.max.connections}")
    int maxDSConnections;

    @Bean(initMethod = "init", destroyMethod = "shutdownForce")
    public UserTransactionService userTransactionService()
    {
        Properties atProps = new Properties();
        atProps.put("com.atomikos.icatch.service", "com.atomikos.icatch.standalone.UserTransactionServiceFactory");
        return new UserTransactionServiceImp(atProps);
    }

    @Bean(initMethod = "init", destroyMethod = "close")
    @DependsOn("userTransactionService")
    public UserTransactionManager atomikosTransactionManager()
    {
        UserTransactionManager utm = new UserTransactionManager();
        utm.setStartupTransactionService(false);
        utm.setForceShutdown(true);
        return utm;
    }

    @Bean
    @DependsOn("userTransactionService")
    public UserTransaction userTransaction()
    {
        UserTransactionImp ut = new UserTransactionImp();
        try
        {
            ut.setTransactionTimeout(1000);
        }
        catch (SystemException _e)
        {
            appLogger.error("Configuration exception.", _e);
            return null;
        }
        return ut;
    }

    @Bean
    public Properties hibernateProperties()
    {
        Properties hibernateProp = new Properties();
        hibernateProp.put("hibernate.dialect", "org.hibernate.dialect.PostgreSQLDialect");
        hibernateProp.put("hibernate.hbm2ddl.auto", "create-drop");
        hibernateProp.put("hibernate.show_sql", true);
        hibernateProp.put("hibernate.max_fetch_depth", 3);
        hibernateProp.put("hibernate.jdbc.batch_size", 10);
        hibernateProp.put("hibernate.jdbc.fetch_size", 50);
        return hibernateProp;
    }

    @Bean
    public JpaVendorAdapter jpaVendorAdapter()
    {
        return new HibernateJpaVendorAdapter();
    }

    @Primary
    @Bean(name = "pgDataSource1", initMethod = "init", destroyMethod = "close")
    public DataSource pgDataSource1()
    {
        PGXADataSource primaryXaDataSource = new PGXADataSource();
        primaryXaDataSource.setUrl(dataSourceUrl);
        primaryXaDataSource.setUser(dsUsername);
        primaryXaDataSource.setPassword(dsPassword);

        AtomikosDataSourceBean xaDataSource = new AtomikosDataSourceBean();
        xaDataSource.setXaDataSource(primaryXaDataSource);
        xaDataSource.setUniqueResourceName("primaryXaDs1");
        xaDataSource.setMinPoolSize(initialDSConnections);
        xaDataSource.setMaxPoolSize(maxDSConnections);
        return xaDataSource;
    }

    @Primary
    @Bean(name = "jmsConnectionFactory", initMethod = "init", destroyMethod = "close")
    public ConnectionFactory connectionFactory()
    {
        AtomikosConnectionFactoryBean atomikosConnectionFactoryBean = new AtomikosConnectionFactoryBean();
        ActiveMQConnectionFactory activeMqXaConnectionFactory = new ActiveMQConnectionFactory();
        try
        {
            activeMqXaConnectionFactory.setBrokerURL(brokerUrl);
            activeMqXaConnectionFactory.setUser(brokerUsername);
            activeMqXaConnectionFactory.setPassword(brokerPassword);
            atomikosConnectionFactoryBean.setUniqueResourceName("jmsXAConnectionFactory");
            atomikosConnectionFactoryBean.setLocalTransactionMode(false);
            atomikosConnectionFactoryBean.setXaConnectionFactory(activeMqXaConnectionFactory);
        }
        catch (JMSException _e)
        {
            appLogger.info("JMS Configuration Error: " + _e);
            _e.printStackTrace();
        }
        return atomikosConnectionFactoryBean;
    }

    @PostConstruct
    public void postConstructDetails()
    {
        appLogger.info("Post Construct Start: JtaConfig.");
        appLogger.info(" - JMS: Artemis URL: {}", brokerUrl);
        appLogger.info(" - Artemis Username: {}", brokerUsername);
        appLogger.info(" - Artemis Password: {}", brokerPassword);
        appLogger.info(" - DS: PostgreSQL URL: {}", dataSourceUrl);
        appLogger.info(" - DS: PostgreSQL Username: {}", dsUsername);
        appLogger.info(" - DS: PostgreSQL Password: {}", dsPassword);
        appLogger.info(" - DS: PostgreSQL Min Conn: {}", initialDSConnections);
        appLogger.info(" - DS: PostgreSQL Max Conn: {}", maxDSConnections);
        appLogger.info("Post Construct End: JtaConfig.");
        appLogger.info(" ");
    }
}
Here is the implementation for Services Configuration:
Services Configuration:
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.jms.core.JmsTemplate;
import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import org.springframework.transaction.jta.JtaTransactionManager;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.annotation.EnableTransactionManagement;
import javax.annotation.PostConstruct;
import javax.persistence.EntityManagerFactory;

@Configuration
@EnableTransactionManagement
@ComponentScan(basePackages = "xxx.xxx.service")
public class ServicesConfig
{
    private Logger appLogger = LoggerFactory.getLogger(ServicesConfig.class);

    @Autowired
    JtaConfig jtaConfig;

    @Bean(name = "xaJmsTemplate")
    public JmsTemplate jmsTemplate()
    {
        JmsTemplate jmsTemplate = new JmsTemplate();
        jmsTemplate.setConnectionFactory(jtaConfig.connectionFactory());
        jmsTemplate.setPubSubDomain(false);
        return jmsTemplate;
    }

    @Bean(name = "entityManagerFactory")
    public EntityManagerFactory entityManagerFactory()
    {
        LocalContainerEntityManagerFactoryBean factoryBean = new LocalContainerEntityManagerFactoryBean();
        factoryBean.setPackagesToScan("xxx.xxx.model");
        factoryBean.setDataSource(jtaConfig.pgDataSource1());
        factoryBean.setJpaProperties(jtaConfig.hibernateProperties());
        factoryBean.setPersistenceUnitName("entityManagerFactoryA");
        factoryBean.setJpaVendorAdapter(jtaConfig.jpaVendorAdapter());
        factoryBean.afterPropertiesSet();
        return factoryBean.getNativeEntityManagerFactory();
    }

    @Bean(name = "transactionManager")
    public PlatformTransactionManager transactionManager()
    {
        JtaTransactionManager ptm = new JtaTransactionManager();
        ptm.setTransactionManager(jtaConfig.atomikosTransactionManager());
        ptm.setUserTransaction(jtaConfig.userTransaction());
        return ptm;
    }

    @PostConstruct
    public void postConstructDetails()
    {
        appLogger.info("Post Construct Start: ServicesConfig.");
        appLogger.info(" - JMS: Artemis URL: {}", jtaConfig);
        appLogger.info(" - JMS Template: {}", jmsTemplate());
        appLogger.info("Post Construct End: ServicesConfig.");
        appLogger.info(" ");
    }
}
Here is the Service implementation:
TransactionServiceImpl
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jms.core.JmsTemplate;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import xxx.xxx.Users;

@Service("transactionService")
@Transactional
public class TransactionServiceImpl implements TransactionServiceIntf
{
    private static final Logger appLogger = LoggerFactory.getLogger(TransactionServiceImpl.class);

    @Autowired
    @Qualifier("xaJmsTemplate")
    JmsTemplate jmsTemplate;

    @Override
    public Users save(Users _user)
    {
        appLogger.info("TransactionServiceImpl: save: Entered.");
        Users user = _user;
        try
        {
            if (user == null)
            {
                appLogger.info("User: Null.");
            }
            else
            {
                if (jmsTemplate == null)
                {
                    appLogger.info("JMS Template: Null.");
                }
                else
                {
                    appLogger.info("JMS Template: Saving.");
                    jmsTemplate.convertAndSend("crequests", user);
                }
            }
            // The rollback should happen with the exception.
            throw new Exception();
        }
        catch (Exception _e)
        {
            appLogger.error("Catching exception: " + _e);
        }
        appLogger.info("TransactionServiceImpl: save: Exiting.");
        return user;
    }
}

Spring TCP support High CPU utilisation

I'm using Spring's TCP support to create a TCP server.
I noticed that the CPU runs at 91% after I send only one request.
This is my code:
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.integration.annotation.IntegrationComponentScan;
import org.springframework.integration.channel.DirectChannel;
import org.springframework.integration.ip.tcp.TcpInboundGateway;
import org.springframework.integration.ip.tcp.connection.AbstractServerConnectionFactory;
import org.springframework.integration.ip.tcp.connection.MessageConvertingTcpMessageMapper;
import org.springframework.integration.ip.tcp.connection.TcpNetServerConnectionFactory;
import org.springframework.integration.ip.tcp.serializer.AbstractByteArraySerializer;
import org.springframework.messaging.MessageChannel;
import org.springframework.messaging.converter.ByteArrayMessageConverter;
import org.springframework.messaging.converter.MessageConverter;

@Configuration
@IntegrationComponentScan
public class TCPServerConfig {

    @Value("${tcp.listener.port}")
    private int port;

    @Bean
    public MessageConvertingTcpMessageMapper mapper(final MessageConverter messageConverter) {
        return new MessageConvertingTcpMessageMapper(messageConverter);
    }

    @Bean
    public MessageConverter messageConverter() {
        return new ByteArrayMessageConverter();
    }

    @Bean
    public TcpInboundGateway tcpInGate(final AbstractServerConnectionFactory connectionFactory) {
        final TcpInboundGateway inGate = new TcpInboundGateway();
        inGate.setConnectionFactory(connectionFactory);
        inGate.setRequestChannel(fromTcp());
        return inGate;
    }

    @Bean
    public MessageChannel fromTcp() {
        return new DirectChannel();
    }

    @Bean
    public AbstractServerConnectionFactory serverCF(
            final AbstractByteArraySerializer byteArraySerializer) {
        final TcpNetServerConnectionFactory connectionFactory =
                new TcpNetServerConnectionFactory(this.port);
        connectionFactory.setDeserializer(byteArraySerializer);
        connectionFactory.setSerializer(byteArraySerializer);
        return connectionFactory;
    }

    @Bean
    public AbstractByteArraySerializer byteArraySerializer() {
        return new ByteArrayCustomeSerializer();
    }
}
and this is how I convert and log the messages:
import static org.apache.commons.lang3.StringUtils.EMPTY;
import static org.apache.commons.lang3.StringUtils.isBlank;
import java.nio.charset.Charset;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.ArrayUtils;
import org.springframework.integration.annotation.MessageEndpoint;
import org.springframework.integration.annotation.ServiceActivator;
import org.springframework.integration.annotation.Transformer;

@Slf4j
@MessageEndpoint
class TCPMessageProcessor {

    @Transformer(inputChannel = "fromTcp", outputChannel = "toProcess")
    public String convertInput(final byte[] bytes) {
        if (ArrayUtils.isEmpty(bytes)) {
            return EMPTY;
        }
        String inboundMessage = new String(bytes, Charset.forName("ASCII"));
        log.info("Converted the message to string: '{}'. Handing it to the processor", inboundMessage);
        return inboundMessage;
    }

    @ServiceActivator(inputChannel = "toProcess")
    public String process(final String message) {
        if (isBlank(message)) {
            return EMPTY;
        }
        log.info("Started processing message '{}'", message);
        return "some response";
    }
}
I added logging and found that once the application receives the first request, it serves it correctly, then calls ByteArrayCustomeSerializer#doDeserialize multiple times per second with an input stream of an empty byte array. Can anyone provide insight into why this is happening and how to avoid this behavior?
This is from org.springframework.integration.ip.tcp.connection.TcpNetConnection
"threadId":"pool-1-thread-3", "message":"Message received GenericMessage [payload=byte[0], headers={ip_tcp_remotePort=*****, ip_connectionId=localhost:*****:*****:d313c398-fc80-48dd-b9c1-f447c9172f09, ip_localInetAddress=/127.0.0.1, ip_address=127.0.0.1, id=1fd69791-c300-787b-c5cc-281a360ae8f4, ip_hostname=localhost, timestamp=1521623822108}]"
It's most likely a bug in your ByteArrayCustomeSerializer; edit the question to show the code.
Most likely you are not detecting the end of stream.
If the stream closes between messages, you need to throw a SoftEndOfStreamException to signal that the socket has closed in an "expected" fashion, as sketched below. If the stream closes during message deserialization, throw some other exception.
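A minimal sketch of that end-of-stream handling, assuming a simple newline-terminated protocol (the class name and framing here are illustrative, not the poster's actual serializer):

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import org.springframework.integration.ip.tcp.serializer.AbstractByteArraySerializer;
import org.springframework.integration.ip.tcp.serializer.SoftEndOfStreamException;

public class NewlineTerminatedSerializer extends AbstractByteArraySerializer {

    @Override
    public byte[] deserialize(InputStream inputStream) throws IOException {
        int read = inputStream.read();
        if (read < 0) {
            // Socket closed between messages: signal an "expected" close.
            throw new SoftEndOfStreamException("stream closed between payloads");
        }
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        while (read >= 0 && read != '\n') {
            buffer.write(read);
            read = inputStream.read();
        }
        if (read < 0) {
            // Socket closed mid-message: a real error.
            throw new IOException("stream closed during message");
        }
        return buffer.toByteArray();
    }

    @Override
    public void serialize(byte[] payload, OutputStream outputStream) throws IOException {
        outputStream.write(payload);
        outputStream.write('\n');
    }
}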
