I am trying to build a Spring Boot application integrated with S3 and SQS. I have an AppConfig, an S3Config, and an SqsConfig, and I autowire the S3Config and SqsConfig into the AppConfig.
S3Config declares an s3client bean that I use to perform AWS S3 operations. I am getting an error where autowiring fails because multiple bean definitions are available, but I'm not sure what is causing the duplication. Snippets attached.
Note that I create a different definition of the s3client bean depending on the profile (local development vs. server deployment).
AppConfig
import lombok.Getter;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.cloud.context.config.annotation.RefreshScope;
import org.springframework.context.annotation.Configuration;
import org.springframework.stereotype.Component;
@Configuration
@Slf4j
@Component
@RefreshScope
@Getter
public class AppConfig {
@Autowired
S3Config s3Config;
@Autowired
SqsConfig sqsConfig;
}
S3Config
import com.amazonaws.auth.DefaultAWSCredentialsProviderChain;
import com.amazonaws.auth.STSAssumeRoleSessionCredentialsProvider;
import com.amazonaws.regions.Regions;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import lombok.Getter;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.cloud.context.config.annotation.RefreshScope;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Profile;
import org.springframework.stereotype.Component;
@Configuration
@Slf4j
@Component
@RefreshScope
@Getter
public class S3Config {
@Value("${aws.s3.roleArn}")
private String s3RoleArn;
@Value("${aws.s3.roleSessionName}")
private String s3RoleSessionName;
@Value("${aws.s3.region}")
private String s3region;
@Value("${aws.s3.inputBucketName}")
private String inputBucketName;
@Value("${aws.s3.outputBucketName}")
private String outputBucketName;
@Bean(name = "s3client")
@Profile("!local")
public AmazonS3 AmazonS3Client() {
log.info(String.format("validating_config_repo_values: s3RoleArn=%s s3RoleSessionName=%s s3RoleSessionName=%s", s3RoleArn, s3RoleSessionName,s3region));
AmazonS3 s3client = null;
try {
STSAssumeRoleSessionCredentialsProvider roleCredentialsProvider = new STSAssumeRoleSessionCredentialsProvider.Builder(
s3RoleArn, s3RoleSessionName).build();
AmazonS3ClientBuilder amazonS3ClientBuilder = AmazonS3ClientBuilder.standard()
.withCredentials(roleCredentialsProvider);
s3client = amazonS3ClientBuilder.withRegion(s3region).build();
} catch (Exception e) {
log.error(String.format("exception_while_creating_AmazonS3Client : %s", e));
}
return s3client;
}
#Bean(name = "s3client")
#Profile("local")
public AmazonS3 localhostAmazonS3Client() {
AmazonS3 s3client = null;
try {
s3client = AmazonS3ClientBuilder.standard().withRegion(Regions.DEFAULT_REGION).withCredentials(new DefaultAWSCredentialsProviderChain()).build();
} catch (Exception e) {
log.error(String.format("exception_while_creating_AmazonS3Client : %s", e));
}
return s3client;
}
}
S3DatasetDownloadServiceImpl
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.model.GetObjectRequest;
import com.amazonaws.services.s3.model.S3Object;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.io.*;
import java.net.URI;
import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
@Service
@Slf4j
public class S3DatasetDownloadServiceImpl implements DatasetDownloadService {
@Autowired
AmazonS3 s3Client;
@Autowired
AppConfig appConfig;
public String downloadDataset(URI uri) throws IOException {
String key = uri.getPath();
String outputBucketName = appConfig.getS3Config().getOutputBucketName();
log.info(String.format("client_downloading_file : key=%s from bucket=%s",key,outputBucketName));
try {
} catch (Exception e) {
}
}
}
Found the error: I named the bean s3client (lowercase c), whereas on autowiring I am using AmazonS3 s3Client;
Fixing the casing worked!
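For reference, a minimal sketch of the corrected injection point, assuming the bean keeps the name s3client: either rename the field to s3client so the by-name match works, or qualify the injection explicitly with @Qualifier (the latter shown here):

// in S3DatasetDownloadServiceImpl: the qualifier ties the field to the bean
// named "s3client", so the field name's casing no longer has to match
@Autowired
@Qualifier("s3client")
private AmazonS3 s3Client;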
Related
I couldn't find any reference online for implementing Kafka in Liferay.
Below are the requirements that need to be implemented:
Push a message to a Kafka topic.
Poll and receive messages from a Kafka topic.
I tried the code below, but I am unable to receive a message from Kafka after pushing it from the Kafka terminal.
Dependencies :
compileInclude "org.springframework.kafka:spring-kafka:2.9.2"
compileInclude "org.apache.kafka:kafka-streams:3.3.1"
Code:
import org.apache.kafka.common.serialization.Serdes;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.annotation.EnableKafka;
import org.springframework.kafka.annotation.EnableKafkaStreams;
import org.springframework.kafka.annotation.KafkaStreamsDefaultConfiguration;
import org.springframework.kafka.config.KafkaStreamsConfiguration;
import java.util.HashMap;
import java.util.Map;
import static org.apache.kafka.streams.StreamsConfig.APPLICATION_ID_CONFIG;
import static org.apache.kafka.streams.StreamsConfig.BOOTSTRAP_SERVERS_CONFIG;
import static org.apache.kafka.streams.StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG;
import static org.apache.kafka.streams.StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG;
@Configuration
@EnableKafka
@EnableKafkaStreams
public class KafkaConfig {
@Bean(name = KafkaStreamsDefaultConfiguration.DEFAULT_STREAMS_CONFIG_BEAN_NAME)
KafkaStreamsConfiguration kStreamsConfig() {
Map<String, Object> props = new HashMap<>();
props.put(APPLICATION_ID_CONFIG, "streams-app");
props.put(BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
props.put(DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
props.put(DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
return new KafkaStreamsConfiguration(props);
}
}
import com.liferay.portal.kernel.log.Log;
import com.liferay.portal.kernel.log.LogFactoryUtil;
import org.osgi.service.component.annotations.Component;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.scheduling.annotation.Async;
import org.springframework.transaction.annotation.Transactional;
@Component(immediate = true)
public class KafkaMessageReceiver {
public static Log _log = LogFactoryUtil.getLog(KafkaMessageReceiver.class);
@Async
@KafkaListener(
topics = "liferay-topic",
concurrency = "2"
)
@Transactional
public void handleMessage(String payload) {
_log.info(payload);
}
}
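As a side note on why nothing is received: Spring for Apache Kafka only processes @KafkaListener on beans that live in a Spring application context, and it also needs a ConsumerFactory plus a kafkaListenerContainerFactory bean in that context; a Liferay OSGi @Component is not scanned by Spring. A minimal sketch of the missing consumer configuration, assuming a Spring-managed configuration class and the same localhost:9092 broker:

import java.util.HashMap;
import java.util.Map;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.annotation.EnableKafka;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;

@Configuration
@EnableKafka
public class KafkaConsumerConfig {

    @Bean
    public ConsumerFactory<String, String> consumerFactory() {
        // plain String consumer against the local broker
        Map<String, Object> props = new HashMap<>();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "liferay-group");
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        return new DefaultKafkaConsumerFactory<>(props);
    }

    @Bean
    public ConcurrentKafkaListenerContainerFactory<String, String> kafkaListenerContainerFactory() {
        // @KafkaListener looks up a factory bean named kafkaListenerContainerFactory by default
        ConcurrentKafkaListenerContainerFactory<String, String> factory =
                new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(consumerFactory());
        return factory;
    }
}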
The sample YAML looks like this:
"mappings":
"name": "foo"
"aliases": "abc"
I am trying to implement it using a PropertySourceFactory, but have been unsuccessful.
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.PropertySource;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@Configuration
//@ConfigurationProperties
@PropertySource(value = "classpath:order_config.yml", factory = YamlPropertySourceFactory.class)
public class ValidatorConfig {
@Value("${yaml.mappings}")
private Map<String, String> mappings = new HashMap<>();
@Value("${yaml.mappings.name}")
private String create;
public String getValidatorBean(String tenant, String requestType) {
System.out.println(mappings);
return "yes";
}
}
import org.springframework.beans.factory.config.YamlPropertiesFactoryBean;
import org.springframework.core.env.PropertiesPropertySource;
import org.springframework.core.env.PropertySource;
import org.springframework.core.io.support.EncodedResource;
import org.springframework.core.io.support.PropertySourceFactory;
import java.io.IOException;
import java.util.Properties;
public class YamlPropertySourceFactory implements PropertySourceFactory {
@Override
public PropertySource<?> createPropertySource(String name, EncodedResource encodedResource)
throws IOException {
YamlPropertiesFactoryBean factory = new YamlPropertiesFactoryBean();
factory.setResources(encodedResource.getResource());
Properties properties = factory.getObject();
return new PropertiesPropertySource(encodedResource.getResource().getFilename(), properties);
}
}
I have tried a number of approaches using @Value and @ConfigurationProperties, but without success.
Can this be implemented using YamlMapFactoryBean? I have not been able to find a working demonstration of it.
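One approach that avoids @Value for the map entirely: keep the YamlPropertySourceFactory shown above and bind the whole mappings block with @ConfigurationProperties. A sketch under the assumption that order_config.yml contains exactly the top-level mappings block shown earlier (the class name here is hypothetical):

import java.util.HashMap;
import java.util.Map;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.PropertySource;

@Configuration
@ConfigurationProperties
@PropertySource(value = "classpath:order_config.yml", factory = YamlPropertySourceFactory.class)
public class OrderConfigProperties {

    // the flattened properties mappings.name and mappings.aliases bind into this map
    private Map<String, String> mappings = new HashMap<>();

    public Map<String, String> getMappings() {
        return mappings;
    }

    public void setMappings(Map<String, String> mappings) {
        this.mappings = mappings;
    }
}

Once the property source is registered this way, individual values can still be read with @Value("${mappings.name}").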
I have a Spring Boot Kafka Streams Maven app. I use spring-boot-starter-parent 2.4.4 for my Spring Boot dependencies and kafka-streams 2.7.0.
I am stuck at running tests with
java.lang.NullPointerException
when trying to load my application configuration from either
resources/application.yml or test/resources/application-test.resources or test/resources/application.yml
I have a Config class with these annotations and getters and setters for fields, which are defined with the same names as in the application.yml:
package com.acme.rtc.configuration;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Configuration;
import org.springframework.stereotype.Component;
#ConfigurationProperties(prefix = "topics")
#Component
#Configuration
public class ConfigProps {
private String MATRIXX_ADJ_EVENT_TOPIC;
private String OUTPUT_MNVO_KGN_ADJ_EVENT_TOPIC;
private String OUTPUT_MNVO_LEB_ADJ_EVENT_TOPIC;
private String EVENTS_NO_MVNO;
public void setMATRIXX_ADJ_EVENT_TOPIC(String MATRIXX_ADJ_EVENT_TOPIC) {
this.MATRIXX_ADJ_EVENT_TOPIC = MATRIXX_ADJ_EVENT_TOPIC;
}
public void setOUTPUT_MNVO_KGN_ADJ_EVENT_TOPIC(String OUTPUT_MNVO_KGN_ADJ_EVENT_TOPIC) {
this.OUTPUT_MNVO_KGN_ADJ_EVENT_TOPIC = OUTPUT_MNVO_KGN_ADJ_EVENT_TOPIC;
}
public void setOUTPUT_MNVO_LEB_ADJ_EVENT_TOPIC(String OUTPUT_MNVO_LEB_ADJ_EVENT_TOPIC) {
this.OUTPUT_MNVO_LEB_ADJ_EVENT_TOPIC = OUTPUT_MNVO_LEB_ADJ_EVENT_TOPIC;
}
public String getEVENTS_NO_MVNO() {
return EVENTS_NO_MVNO;
}
public void setEVENTS_NO_MVNO(String EVENTS_NO_MVNO) {
this.EVENTS_NO_MVNO = EVENTS_NO_MVNO;
}
public String getMATRIXX_ADJ_EVENT_TOPIC() {
return MATRIXX_ADJ_EVENT_TOPIC;
}
public String getOUTPUT_MNVO_KGN_ADJ_EVENT_TOPIC() {
return OUTPUT_MNVO_KGN_ADJ_EVENT_TOPIC;
}
public String getOUTPUT_MNVO_LEB_ADJ_EVENT_TOPIC() {
return OUTPUT_MNVO_LEB_ADJ_EVENT_TOPIC;
}
}
I am doing @Autowired of this class in my test and app class,
@Autowired
ConfigProps cp;
and trying to access fields using cp.getBootstrapServerHost(), but this resolves to a NullPointerException in my test class while it resolves properly in my application class...
My test class looks like this
package distinct;
import com.acme.rtc.configuration.ConfigProps;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.apache.kafka.streams.*;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.kafka.config.KafkaStreamsConfiguration;
import com.acme.rtc.configuration.KafkaConfiguration;
import com.acme.rtc.configuration.TopologyConfiguration;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit.jupiter.SpringJUnitConfig;
import java.util.List;
import static java.util.Collections.singletonList;
import static java.util.stream.Collectors.toList;
import static org.junit.jupiter.api.Assertions.assertEquals;
@SpringBootTest
@ContextConfiguration(classes = TopologyConfiguration.class)
@SpringJUnitConfig
public class TestWithTopologyTestDriver {
private TestInputTopic<String, String> inputTopicWrong;
private TestOutputTopic<String, String> outputTopicWrong;
private TestInputTopic<String, String> inputTopicRight;
private TestOutputTopic<String, String> outputTopicRight;
private TopologyTestDriver topologyTestDriver;
@Autowired
ConfigProps configProps;
@BeforeEach
public void setUp() {
KafkaProperties properties = new KafkaProperties();
properties.setBootstrapServers(singletonList("localhost:9092"));
KafkaStreamsConfiguration config = new KafkaConfiguration(properties).getStreamsConfig();
StreamsBuilder sb = new StreamsBuilder();
Topology topology = new TopologyConfiguration().createTopology(sb);
topologyTestDriver = new TopologyTestDriver(topology, config.asProperties());
inputTopicWrong =
topologyTestDriver.createInputTopic(configProps.getMATRIXX_ADJ_EVENT_TOPIC(), new StringSerializer(),
new StringSerializer());
outputTopicWrong =
topologyTestDriver.createOutputTopic(configProps.getOUTPUT_MNVO_KGN_ADJ_EVENT_TOPIC(), new StringDeserializer(),
new StringDeserializer());
inputTopicRight =
topologyTestDriver.createInputTopic(configProps.getMATRIXX_ADJ_EVENT_TOPIC(), new StringSerializer(),
new StringSerializer());
outputTopicRight =
topologyTestDriver.createOutputTopic(configProps.getOUTPUT_MNVO_LEB_ADJ_EVENT_TOPIC(), new StringDeserializer(),
new StringDeserializer());
}
@AfterEach
public void tearDown() {
topologyTestDriver.close();
}
@Test
void wrongDistinctTopology() {
testTopology(inputTopicWrong, outputTopicWrong);
}}
where TopologyConfiguration is my application class, which looks like this:
package com.acme.rtc.configuration;
import com.fasterxml.jackson.databind.JsonNode;
import lombok.RequiredArgsConstructor;
import org.apache.kafka.clients.admin.NewTopic;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.common.serialization.Serde;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.common.serialization.Serializer;
import org.apache.kafka.common.serialization.Deserializer;
import org.apache.kafka.streams.Topology;
import org.apache.kafka.streams.kstream.*;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.env.Environment;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.support.Acknowledgment;
import org.springframework.kafka.support.serializer.JsonDeserializer;
import org.springframework.kafka.support.serializer.JsonSerializer;
import org.springframework.stereotype.Component;
@Configuration
@ConfigurationProperties(prefix = "topics")
@Component
@RequiredArgsConstructor
public class TopologyConfiguration {
@Autowired
Environment env;
@Autowired
ConfigProps configProps;
private void acmeStreamsTopoloy(StreamsBuilder streamsBuilder) {
Deserializer<JsonNode> jsonDeserializer = new JsonDeserializer();
Serializer<JsonNode> jsonSerializer = new JsonSerializer();
Serde<JsonNode> jsonSerde = Serdes.serdeFrom(jsonSerializer, jsonDeserializer);
System.out.println("ConfigProps.getMattrix: "+configProps.getMATRIXX_ADJ_EVENT_TOPIC());
KStream<String, String> inputStream =
streamsBuilder.stream(configProps.getMATRIXX_ADJ_EVENT_TOPIC(), Consumed.with(Serdes.String(), Serdes.String()));
KStream<String, String>[] branches = inputStream.branch(
(key, value)-> value.contains("KGN"),
(key, value)-> value.contains("LEB"),
(key, value)->true);
branches[0].to(configProps.getOUTPUT_MNVO_KGN_ADJ_EVENT_TOPIC());
branches[1].to(configProps.getOUTPUT_MNVO_LEB_ADJ_EVENT_TOPIC());
branches[2].to(configProps.getEVENTS_NO_MVNO());
}
@Bean
public Topology createTopology(StreamsBuilder streamsBuilder) {
acmeStreamsTopoloy(streamsBuilder);
return streamsBuilder.build();
}
}
My KafkaConfiguration class
package com.acme.rtc.configuration;
import lombok.RequiredArgsConstructor;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.errors.LogAndContinueExceptionHandler;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.annotation.KafkaStreamsDefaultConfiguration;
import org.springframework.kafka.config.KafkaStreamsConfiguration;
import org.springframework.kafka.support.Acknowledgment;
import org.springframework.kafka.support.serializer.JsonDeserializer;
import org.springframework.kafka.support.serializer.JsonSerializer;
import java.util.HashMap;
import java.util.Map;
@Configuration
@RequiredArgsConstructor
public class KafkaConfiguration {
public static final String APP_ID = "acme-stream-rtc";
private final KafkaProperties kafkaProperties;
@Autowired
@Bean(name = KafkaStreamsDefaultConfiguration.DEFAULT_STREAMS_CONFIG_BEAN_NAME)
public KafkaStreamsConfiguration getStreamsConfig() {
Map<String, Object> props = new HashMap<>();
props.put(StreamsConfig.APPLICATION_ID_CONFIG, APP_ID);
props.put(StreamsConfig.NUM_STREAM_THREADS_CONFIG, 2);
props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaProperties.getBootstrapServers());
props.put(JsonSerializer.ADD_TYPE_INFO_HEADERS,false);
KafkaStreamsConfiguration streamsConfig = new KafkaStreamsConfiguration(props);
return streamsConfig;
}
}
My application.yml has the right syntax etc.
spring:
kafka:
bootstrap-servers: localhost:9092
json:
value:
default:
type: true
kafka:
streams:
properties:
default:
value:
serde: org.springframework.kafka.support.serializer.JsonSerde
admin:
security:
protocol: SSL
ssl:
trust-store-location: ${TRUSTSTORE_LOCATION}
trust-store-password: ${TRUSTSTORE_PASSWORD}
key-store-location: ${KEYSTORE_LOCATION}
key-store-password: ${KEYSTORE_PASSWORD}
key-password: ${KEY_PASSWORD}
topics:
MATRIXX_ADJ_EVENT_TOPIC: input-matrixx-adj-event
OUTPUT_MNVO_KGN_ADJ_EVENT_TOPIC: output-KGN-adj-event
OUTPUT_MNVO_LEB_ADJ_EVENT_TOPIC: output-LEB-adj-event
EVENTS_NO_MVNO: events-no-mvno-spec
My main class
package com.acme.rtc;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.kafka.annotation.EnableKafkaStreams;
@SpringBootApplication
@EnableKafkaStreams
public class StreamProcessing {
public static void main(String[] args) {
SpringApplication.run(StreamProcessing.class, args);
}
}
I am not sure if I am missing any context when Autowiring my ConfigProps class or if I need further annotations on my test class.
For JUnit 4 you need @RunWith(SpringJUnit4ClassRunner.class) alongside the @ContextConfiguration.
For JUnit 5, use @SpringJUnitConfig.
For proper loading of properties, though, you need @SpringBootTest.
Boot 2.4 uses JUnit 5.
And you should not have @ConfigurationProperties on the test.
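Applied to the test in the question, a hedged sketch of the class-level setup; pointing @SpringBootTest at the main class is an assumption, needed because the test lives in a different package than the application:

@SpringBootTest(classes = StreamProcessing.class)
public class TestWithTopologyTestDriver {

    // bound from the topics: block in application.yml once the full Boot context loads
    @Autowired
    ConfigProps configProps;

    // ... setUp(), tearDown() and the tests stay as in the question
}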
EDIT
I just tested it with no problems.
@Configuration
public class Config {
@Bean
String str() {
return "str";
}
}
@ConfigurationProperties(prefix = "foo")
@Component
public class MyProps {
String bar;
public String getBar() {
return this.bar;
}
public void setBar(String bar) {
this.bar = bar;
}
@Override
public String toString() {
return "MyProps [bar=" + this.bar + "]";
}
}
@SpringBootApplication
public class So67078244Application {
public static void main(String[] args) {
SpringApplication.run(So67078244Application.class, args);
}
}
@SpringBootTest
class So67078244ApplicationTests {
@Autowired
MyProps props;
@Test
void contextLoads() {
System.out.println(this.props);
}
}
foo.bar=baz
MyProps [bar=baz]
In my Login class the @Autowired field is not null, but when I use Postman to call the Logout class the @Autowired field is null. I copied the code from the Login class to the Logout class and only changed the variable names; all the code follows the same pattern, and I don't know what is happening.
package xxx.api.login;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpEntity;
import org.springframework.http.HttpHeaders;
import org.springframework.http.MediaType;
import org.springframework.stereotype.Component;
import org.springframework.web.client.RestTemplate;
import com.fasterxml.jackson.databind.ObjectMapper;
import xxx.api.domain.logout.LoginRequest;
import xxx.api.domain.logout.LoginResponse;
import xxx.converter.FieldValidation;
import xxx.dto.log.MessageLog;
import xxx.logging.LogFactory;
@Component
public class LoginClient {
@Autowired
FieldValidation fieldValidation;
public LoginResponse login(LoginRequest loginRequest) throws Exception {
ObjectMapper mapper = new ObjectMapper();
final String url = "http://localhost:xxx/xxx/login";
LoginResponse responseObject = null;
try {
String requestData = mapper.writeValueAsString(loginRequest);
responseObject = login(url, requestData);
fieldValidation.validateResponse(responseObject, "login");
// 'fieldValidation' not null
This is my Login class. It returns the result I expect.
package xxx.api.logout;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpEntity;
import org.springframework.http.HttpHeaders;
import org.springframework.http.MediaType;
import org.springframework.stereotype.Component;
import org.springframework.web.client.RestTemplate;
import com.fasterxml.jackson.databind.ObjectMapper;
import xxx.api.domain.logout.LogoutRequest;
import xxx.api.domain.logout.LogoutResponse;
import xxx.converter.FieldValidation;
import xxx.dto.log.MessageLog;
import xxx.logging.LogFactory;
@Component
public class LogoutClient {
@Autowired
FieldValidation fieldValidation;
public LogoutResponse logout(LogoutRequest logoutRequest) throws Exception {
ObjectMapper mapper = new ObjectMapper();
final String url = "http://localhost:xxx/xxx/logout";
LogoutResponse responseObject = null;
try {
String requestData = mapper.writeValueAsString(logoutRequest);
responseObject = logout(url, requestData);
fieldValidation.validateResponse(responseObject, "logout");
// 'fieldValidation' is null then throws nullPointerException
This is my Logout class
package xxx.api.logout;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestHeader;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import xxx.api.domain.logout.LogoutRequest;
import xxx.api.domain.logout.LogoutResponse;
import xxx.dto.log.MessageLog;
import xxx.exception.common.ErrorException;
import xxx.api.logout.LogoutClient;
import xxx.converter.FieldValidation;
@RestController
@RequestMapping(path = "/xxx")
public class LogoutServer {
@Autowired
FieldValidation fieldValidation;
@Autowired
WSLogFactory wsLog;
@PostMapping(value = "/logout", consumes = MediaType.APPLICATION_JSON_UTF8_VALUE)
public ResponseEntity<?> logout(@RequestHeader(value = "requestId") String requestId,
@RequestBody(required = true) LogoutRequest logoutRequest) throws ErrorException, Exception {
writeLogRequest(logoutRequest, logoutRequest.getClientIP(), "Request", "/xxx/logout");
fieldValidation.validateRequest(logoutRequest, "logout");
try {
LogoutClient logoutClient = new LogoutClient();
LogoutResponse response = logoutClient.logout(logoutRequest);
return new ResponseEntity<LogoutResponse>(response, HttpStatus.OK);
} catch (Exception e) {
throw e;
}
}
This is my Logout controller.
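One detail worth checking, offered as an assumption rather than a confirmed diagnosis: the controller creates the client with new LogoutClient(), so Spring never injects that instance's fieldValidation. Letting the container supply the client would look roughly like this:

@RestController
@RequestMapping(path = "/xxx")
public class LogoutServer {

    @Autowired
    FieldValidation fieldValidation;

    // injected by Spring, so its own @Autowired fields are populated,
    // instead of being instantiated with new LogoutClient()
    @Autowired
    LogoutClient logoutClient;

    @PostMapping(value = "/logout", consumes = MediaType.APPLICATION_JSON_UTF8_VALUE)
    public ResponseEntity<?> logout(@RequestHeader(value = "requestId") String requestId,
            @RequestBody(required = true) LogoutRequest logoutRequest) throws ErrorException, Exception {
        fieldValidation.validateRequest(logoutRequest, "logout");
        LogoutResponse response = logoutClient.logout(logoutRequest);
        return new ResponseEntity<>(response, HttpStatus.OK);
    }
}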
@org.springframework.context.annotation.Configuration
@PropertySource("file:${layout.properties_file}")
public class FieldValidation {
// do something
}
This is my FieldValidation class.
{
"timestamp": 1550490230074,
"status": 500,
"error": "Internal Server Error",
"exception": "java.lang.NullPointerException",
"message": "No message available",
"path": "/umm/logout"
}
This is the response returned when I call my Logout endpoint.
This is my project structure
xxx.api.login <<< LoginClient.java
xxx.api.logout <<< LogoutClient.java
xxx.converter <<< FieldValidation.java
This is my main program
package xxx;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
@SpringBootApplication
public class Application {
public static void main(String[] args) {
SpringApplication.run(Application.class, args);
}
}
I tried replacing @Component with @Configuration, but it still doesn't work.
I don't want 'fieldValidation' to be null in the Logout class or in any other class.
package xxx.converter;
import java.lang.reflect.Field;
import java.math.BigDecimal;
import java.text.SimpleDateFormat;
import java.time.LocalDate;
import java.time.Year;
import java.time.YearMonth;
import java.time.format.DateTimeFormatter;
import java.time.format.DateTimeParseException;
import java.time.format.ResolverStyle;
import java.util.Date;
import java.util.List;
import java.util.Locale;
import org.springframework.context.annotation.Configuration;
import org.apache.commons.beanutils.BeanUtils;
import org.apache.commons.validator.routines.BigDecimalValidator;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.PropertySource;
import org.springframework.context.support.PropertySourcesPlaceholderConfigurer;
import com.fasterxml.jackson.databind.ObjectMapper;
import xxx.exception.common.ErrorException;
@Configuration
@PropertySource("file:${layout.properties_file}")
public class FieldValidation {
public void validateObject(Object objResponse, String config) throws Exception {
try {
List<Configuration> confs = objectMapper.readValue(config,
objectMapper.getTypeFactory().constructCollectionType(List.class, Configuration.class));
for (int i = 0; i < confs.size(); i++) {
Configuration configuration = confs.get(i);
Object objValue = getValue(configuration.getFieldName(), objResponse);
Validation validation = new Validation();
BeanUtils.copyProperties(validation, configuration);
isValid(objValue, validation);
if (configuration.getType().equalsIgnoreCase("object")) {
List<Validation> validations = configuration.getValidation();
Class<?> act = Class.forName(configuration.getClassName());
Object objectConfig = act.cast(objValue);
validateObject(objectConfig, validations);
}
}
} catch (Exception e) {
throw e;
}
}
This is the full set of imports for the FieldValidation class after replacing @Component with @Configuration, and I get these errors:
configuration.getFieldName() << error The method is undefined for type Configuration
configuration.getType() << error The method is undefined for type Configuration
configuration.getValidation() << error The method is undefined for type Configuration
configuration.getClassName() << error The method is undefined for type Configuration
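Those "method is undefined for the type Configuration" errors look like a name clash rather than a validation problem: the class now imports org.springframework.context.annotation.Configuration, so the bare name Configuration inside validateObject refers to Spring's annotation type instead of the project's own Configuration DTO. A sketch of disambiguating with the fully qualified DTO name (xxx.dto.Configuration is an assumed package, since the DTO's real location is not shown):

// inside FieldValidation.validateObject: refer to the DTO by its full name so it
// no longer collides with the imported Spring @Configuration annotation
List<xxx.dto.Configuration> confs = objectMapper.readValue(config,
        objectMapper.getTypeFactory().constructCollectionType(List.class, xxx.dto.Configuration.class));
for (xxx.dto.Configuration configuration : confs) {
    Object objValue = getValue(configuration.getFieldName(), objResponse);
    // ...rest of the loop unchanged
}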
I have a React-based application in an AWS EC2 instance and a Java Spring Boot-based application in another AWS EC2 instance. I need to send POST requests using AWS' SQS from the React application. Once the messages are sent, I need to retrieve them in the Java Spring application hosting API endpoints. Guidance on how to accomplish the send and receive operations would be appreciated.
I used the code below for fetching messages from SQS.
MessagingConfiguration.java
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.jms.core.JmsTemplate;
import org.springframework.jms.listener.DefaultMessageListenerContainer;
import com.amazon.sqs.javamessaging.SQSConnectionFactory;
import com.amazonaws.auth.AWSCredentials;
import com.amazonaws.auth.AWSCredentialsProvider;
import com.amazonaws.auth.BasicAWSCredentials;
import com.amazonaws.auth.DefaultAWSCredentialsProviderChain;
import com.es.spring.messaging.MessageReceiver;
@Configuration
public class MessagingConfiguration {
@Value("${queue.endpoint}")
private String endpoint;
@Value("${queue.name}")
private String queueName;
@Autowired
private MessageReceiver messageReceiver;
@Bean
public DefaultMessageListenerContainer jmsListenerContainer() {
SQSConnectionFactory sqsConnectionFactory = SQSConnectionFactory.builder()
.withAWSCredentialsProvider(awsCredentialsProvider)
.withEndpoint(endpoint)
.withNumberOfMessagesToPrefetch(10).build();
DefaultMessageListenerContainer dmlc = new DefaultMessageListenerContainer();
dmlc.setConnectionFactory(sqsConnectionFactory);
dmlc.setDestinationName(queueName);
dmlc.setMessageListener(messageReceiver);
return dmlc;
}
@Bean
public JmsTemplate createJMSTemplate() {
SQSConnectionFactory sqsConnectionFactory = SQSConnectionFactory.builder()
.withAWSCredentialsProvider(awsCredentialsProvider)
.withEndpoint(endpoint)
.withNumberOfMessagesToPrefetch(10).build();
JmsTemplate jmsTemplate = new JmsTemplate(sqsConnectionFactory);
jmsTemplate.setDefaultDestinationName(queueName);
jmsTemplate.setDeliveryPersistent(false);
return jmsTemplate;
}
private final AWSCredentialsProvider awsCredentialsProvider = new AWSCredentialsProvider() {
@Override
public AWSCredentials getCredentials() {
return new BasicAWSCredentials("accessKey", "Secretkey");
}
@Override
public void refresh() {
}
};
}
MessageReceiver.java
import java.io.PrintWriter;
import java.io.StringWriter;
import javax.jms.Message;
import javax.jms.MessageListener;
import javax.jms.TextMessage;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import com.fasterxml.jackson.databind.ObjectMapper;
@Component
public class MessageReceiver implements MessageListener {
Logger LOG = LoggerFactory.getLogger(MessageReceiver.class);
@Override
public void onMessage(Message message) {
try {
TextMessage textMessage = (TextMessage) message;
System.out.println("message:"+textMessage.getText());
CustomClass activeMq =new ObjectMapper().readValue(textMessage.getText(),CustomClass.class);
LOG.info("Application : Active Mq : {}",activeMq);
} catch (Exception e) {
e.printStackTrace();
}
}
}
Sending a message to SQS:
AmazonSQS awsSqs = new AwsSqsClient().getAWSSqsclient();
awsSqs.sendMessage(new SendMessageRequest().withDelaySeconds(60)
.withQueueUrl("https://sqs-url/TestQueue").withMessageBody(input));
Hope the above answer helps you.
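The AwsSqsClient helper in the send snippet above is not shown; a minimal sketch of building the client with the SDK's standard builder (the region and the credentials provider are assumptions):

import com.amazonaws.auth.DefaultAWSCredentialsProviderChain;
import com.amazonaws.regions.Regions;
import com.amazonaws.services.sqs.AmazonSQS;
import com.amazonaws.services.sqs.AmazonSQSClientBuilder;

// the default provider chain resolves credentials from environment variables,
// system properties, the shared credentials file or the EC2 instance role
AmazonSQS awsSqs = AmazonSQSClientBuilder.standard()
        .withRegion(Regions.US_EAST_1)
        .withCredentials(new DefaultAWSCredentialsProviderChain())
        .build();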