I have created a scheduler using a thread. The main purpose of this scheduler is to execute a particular task every day at 12 AM. However, it seems to execute the same task twice (I'm not sure, but it looks like two different threads executing at the same time).
ScheduledTask.java
package com.abc.advertising.scheduler;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.text.SimpleDateFormat;
import java.util.Calendar;
import java.util.Date;
import javax.annotation.PostConstruct;
import javax.annotation.Resource;
import javax.sql.DataSource;
import org.apache.log4j.Logger;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Component;
import com.heb.advertising.service.ReportEmail;
import com.heb.advertising.service.ReportService;
@Component
public class ScheduledTask {
private static final Logger LOG = Logger.getLogger(ScheduledTask.class.getName());
@Autowired
@Qualifier(value="ReportService")
private ReportService reportService;
@Autowired
@Qualifier(value="reportEmail")
private ReportEmail reportEmail;
SimpleDateFormat format = new SimpleDateFormat("MM/dd/yyyy HH:mm:ss");
public static Date primaryDate=null;
public Date existDate;
@PostConstruct
public void scheduledTask(){
//System.out.println("Hii Scheduled!!");
Calendar timeOfDay = Calendar.getInstance();
timeOfDay.set(Calendar.HOUR_OF_DAY, 00);
timeOfDay.set(Calendar.MINUTE,30);
timeOfDay.set(Calendar.SECOND, 00);
new DailyRunnerDaemon(timeOfDay, new Runnable()
{
@Override
public void run()
{
try
{
reportService.initScheduled();
}
catch(Exception e)
{
StringWriter sw = new StringWriter();
PrintWriter pw = new PrintWriter(sw);
e.printStackTrace(pw);
reportEmail.sentErrorEmail("PA",sw.toString());
LOG.error("An error occurred performing daily Scheduled Task", e);
}
}
}, "Scheduled-Task").start();
}
}
DailyRunnerDaemon.java
package com.abc.advertising.scheduler;
import java.util.Calendar;
import java.util.Date;
import java.util.Timer;
import java.util.TimerTask;
public class DailyRunnerDaemon
{
private final Runnable dailyTask;
private final int hour;
private final int minute;
private final int second;
private final String runThreadName;
public DailyRunnerDaemon(Calendar timeOfDay, Runnable dailyTask, String runThreadName)
{
this.dailyTask = dailyTask;
this.hour = timeOfDay.get(Calendar.HOUR_OF_DAY);
this.minute = timeOfDay.get(Calendar.MINUTE);
this.second = timeOfDay.get(Calendar.SECOND);
this.runThreadName = runThreadName;
}
public void start()
{
startTimer();
}
private void startTimer()
{
new Timer(runThreadName, true).schedule(new TimerTask()
{
@Override
public void run()
{
dailyTask.run();
startTimer();
}
}, getNextRunTime());
}
private Date getNextRunTime()
{
Calendar startTime = Calendar.getInstance();
Calendar now = Calendar.getInstance();
startTime.set(Calendar.HOUR_OF_DAY, hour);
startTime.set(Calendar.MINUTE, minute);
startTime.set(Calendar.SECOND, second);
startTime.set(Calendar.MILLISECOND, 0);
if(startTime.before(now) || startTime.equals(now))
{
startTime.add(Calendar.DATE, 1);
}
return startTime.getTime();
}
}
ApplicationConfiguration.java
package com.abc;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Executor;
import javax.sql.DataSource;
import org.apache.log4j.Logger;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.autoconfigure.amqp.RabbitAutoConfiguration;
import org.springframework.boot.autoconfigure.jdbc.DataSourceBuilder;
import org.springframework.boot.builder.SpringApplicationBuilder;
import org.springframework.boot.context.embedded.FilterRegistrationBean;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.boot.context.web.SpringBootServletInitializer;
import org.springframework.boot.orm.jpa.EntityScan;
import org.springframework.context.MessageSource;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Primary;
import org.springframework.context.annotation.PropertySource;
import org.springframework.context.support.PropertySourcesPlaceholderConfigurer;
import org.springframework.context.support.ReloadableResourceBundleMessageSource;
import org.springframework.data.jpa.repository.config.EnableJpaAuditing;
import org.springframework.data.jpa.repository.config.EnableJpaRepositories;
import org.springframework.instrument.classloading.InstrumentationLoadTimeWeaver;
import org.springframework.orm.jpa.JpaTransactionManager;
import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean;
import org.springframework.orm.jpa.support.PersistenceAnnotationBeanPostProcessor;
import org.springframework.orm.jpa.vendor.HibernateJpaDialect;
import org.springframework.orm.jpa.vendor.HibernateJpaVendorAdapter;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;
import org.springframework.transaction.annotation.EnableTransactionManagement;
import org.springframework.web.filter.DelegatingFilterProxy;
import org.springframework.web.multipart.support.MultipartFilter;
import org.springframework.web.servlet.View;
import org.springframework.web.servlet.view.ContentNegotiatingViewResolver;
import org.springframework.web.servlet.view.json.MappingJackson2JsonView;
import com.amazonaws.ClientConfiguration;
import com.amazonaws.auth.BasicAWSCredentials;
import com.amazonaws.services.s3.AmazonS3Client;
import com.amazonaws.services.s3.S3ClientOptions;
import com.jolbox.bonecp.BoneCPDataSource;
/**
* Application configuration in JavaConfig (replacing what would be the web.xml and spring-context.xml).
*/
@EnableTransactionManagement()
@EnableJpaRepositories(basePackages= {"com.abc"},entityManagerFactoryRef="entityManagerFactoryBean")
@ComponentScan({"com.abc"})
@EnableJpaAuditing
@EntityScan(basePackages={"com.abc.advertising.entity"})
@SpringBootApplication(exclude = {RabbitAutoConfiguration.class})
@PropertySource("classpath:application-${spring.profiles.active}.properties")
public class ApplicationConfiguration extends SpringBootServletInitializer{
private static final String DATABASE = "org.hibernate.dialect.Oracle10gDialect";
private static final String PUNIT_LOCATION = "classpath:META-INF/persistence.xml";
private static final String PUNIT = "punit";
private static final String V2_AUTH_SIGNER = "S3SignerType";
private static Logger LOG = Logger.getLogger(ApplicationConfiguration.class.getName());
private @Value("${cloud.aws.credentials.accessKey}") String accessKey;
private @Value("${cloud.aws.credentials.secretKey}") String secretKey;
private @Value("${cloud.aws.s3.endpoint}") String s3endpoint;
/**
* Spring Boot Runner.
*
* @param args Optional parameters from command-line.
*/
public static void main(String[] args) {
LOG.info("ApplicationConfiguration ....");
PropertySourcesPlaceholderConfigurer propertySourcesPlaceholderConfigurer = propertyPlaceholderConfigurer();
SpringApplication.run(ApplicationConfiguration.class, args);
}
@Bean
public static PropertySourcesPlaceholderConfigurer propertyPlaceholderConfigurer() {
PropertySourcesPlaceholderConfigurer propertySourcesPlaceholderConfigurer = new PropertySourcesPlaceholderConfigurer();
return propertySourcesPlaceholderConfigurer;
}
@Override
protected SpringApplicationBuilder configure(SpringApplicationBuilder application) {
return application.sources(ApplicationConfiguration.class);
}
@Bean
public MessageSource messageSource() {
ReloadableResourceBundleMessageSource messageSource = new ReloadableResourceBundleMessageSource();
messageSource.setBasenames("classpath:/messages");
messageSource.setDefaultEncoding("UTF-8");
messageSource.setCacheSeconds(3600);
messageSource.setConcurrentRefresh(true);
return messageSource;
}
private @Value("${spring.datasource.primary.driver-class-name}") String driverClassName;
private @Value("${spring.datasource.primary.url}") String primaryDataSourceURL;
private @Value("${spring.datasource.primary.username}") String primaryDataSourUserName;
private @Value("${spring.datasource.primary.password}") String primaryDataSourUserPassword;
@Bean(name="promoDataSource")
@Primary
public DataSource primaryDataSource() {
BoneCPDataSource dataSource = new BoneCPDataSource();
dataSource.setDriverClass(driverClassName);
dataSource.setJdbcUrl(primaryDataSourceURL);
dataSource.setUsername(primaryDataSourUserName);
dataSource.setPassword(primaryDataSourUserPassword);
dataSource.setIdleConnectionTestPeriodInMinutes(60);
dataSource.setIdleMaxAgeInSeconds(300);
dataSource.setMaxConnectionsPerPartition(100);
dataSource.setMinConnectionsPerPartition(10);
dataSource.setPartitionCount(3);
dataSource.setAcquireRetryDelayInMs(10000);
dataSource.setAcquireIncrement(5);
dataSource.setStatementsCacheSize(100);
dataSource.setReleaseHelperThreads(5);
dataSource.setConnectionTimeoutInMs(6000000);
dataSource.setServiceOrder("FIFO");
dataSource.setDefaultAutoCommit(true);
return dataSource;
}
@Bean
@ConfigurationProperties(prefix = "datasource.arbaf")
public DataSource secondaryDataSource() {
return DataSourceBuilder.create().build();
}
#Bean(name="transactionManager")
public JpaTransactionManager jpaTransactionManager() {
JpaTransactionManager transactionManager = new JpaTransactionManager();
transactionManager.setEntityManagerFactory(entityManagerFactoryBean().getObject());
transactionManager.setDataSource(primaryDataSource());
transactionManager.setJpaDialect(getHibernateJpaDialect());
return transactionManager;
}
private HibernateJpaDialect getHibernateJpaDialect(){
HibernateJpaDialect promoPlannerJpaDialect = new HibernateJpaDialect();
return promoPlannerJpaDialect;
}
private HibernateJpaVendorAdapter vendorAdaptor() {
HibernateJpaVendorAdapter vendorAdapter = new HibernateJpaVendorAdapter();
vendorAdapter.setDatabasePlatform(DATABASE);
vendorAdapter.setShowSql(false);
vendorAdapter.setGenerateDdl(false);
return vendorAdapter;
}
@Bean
public LocalContainerEntityManagerFactoryBean entityManagerFactoryBean() {
LocalContainerEntityManagerFactoryBean entityManagerFactoryBean = new LocalContainerEntityManagerFactoryBean();
entityManagerFactoryBean.setJpaVendorAdapter(vendorAdaptor());
entityManagerFactoryBean.setDataSource(primaryDataSource());
/* entityManagerFactoryBean.setPersistenceProviderClass(HibernatePersistenceProvider.class);*/
entityManagerFactoryBean.setPackagesToScan(getClass().getPackage().getName());
/* entityManagerFactoryBean.setJpaProperties(jpaHibernateProperties());*/
entityManagerFactoryBean.setPersistenceXmlLocation(PUNIT_LOCATION);
entityManagerFactoryBean.setPersistenceUnitName(PUNIT);
entityManagerFactoryBean.setLoadTimeWeaver(new InstrumentationLoadTimeWeaver());
return entityManagerFactoryBean;
}
public PersistenceAnnotationBeanPostProcessor getPersistenceAnnotationBeanPostProcessor(){
return new PersistenceAnnotationBeanPostProcessor();
}
/*
* Reorder the filters so that the multipart filter resolves before the Spring Security filter, as prescribed
* by Spring when trying to handle those types of requests.
*/
@Bean
public FilterRegistrationBean multipartFilterRegistration() {
FilterRegistrationBean registration =
new FilterRegistrationBean(new MultipartFilter());
registration.setOrder(0);
return registration;
}
@Bean
public FilterRegistrationBean springSecurityFilterRegistration() {
FilterRegistrationBean registrationBean =
new FilterRegistrationBean(new DelegatingFilterProxy("springSecurityFilterChain"));
registrationBean.setOrder(1);
return registrationBean;
}
@Bean
public ContentNegotiatingViewResolver contentNegotiatingViewResolver() {
ContentNegotiatingViewResolver contentNegotiatingViewResolver = new ContentNegotiatingViewResolver();
contentNegotiatingViewResolver.setOrder(1);
Map<String,String> m=new HashMap<String,String>();
m.put("json", "application/json");
m.put("file", "application/vnd.ms-excel");
contentNegotiatingViewResolver.setMediaTypes(m);
List<View> list=new ArrayList<>();
list.add(new MappingJackson2JsonView());
contentNegotiatingViewResolver.setDefaultViews(list);
return contentNegotiatingViewResolver;
}
}
Since you are using Spring/Spring Boot and your component runs a task periodically, you should consider using @Scheduled(cron="0 0 0 * * *") instead.
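For illustration, a minimal sketch of that approach (it assumes @EnableScheduling is declared on a @Configuration class; the Spring cron fields are second, minute, hour, day of month, month, day of week, so this fires daily at midnight):

    import org.springframework.scheduling.annotation.Scheduled;
    import org.springframework.stereotype.Component;

    @Component
    public class DailyReportTask {

        // Runs once a day at 00:00; Spring manages the scheduling thread, so no Timer is needed.
        @Scheduled(cron = "0 0 0 * * *")
        public void runDailyReport() {
            // call reportService.initScheduled() here
        }
    }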
I have a Spring Boot Kafka Streams Maven app. I use spring-boot-starter-parent 2.4.4 for my Spring Boot dependencies and kafka-streams 2.7.0.
I am stuck at running tests with
java.lang.NullPointerException
when trying to load my application configuration from either
resources/application.yml or test/resources/application-test.resources or test/resources/application.yml.
I have a Config class with these annotations and getters and setters for the fields, which are defined with the same names as in the application.yml:
package com.acme.rtc.configuration;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Configuration;
import org.springframework.stereotype.Component;
#ConfigurationProperties(prefix = "topics")
#Component
#Configuration
public class ConfigProps {
private String MATRIXX_ADJ_EVENT_TOPIC;
private String OUTPUT_MNVO_KGN_ADJ_EVENT_TOPIC;
private String OUTPUT_MNVO_LEB_ADJ_EVENT_TOPIC;
private String EVENTS_NO_MVNO;
public void setMATRIXX_ADJ_EVENT_TOPIC(String MATRIXX_ADJ_EVENT_TOPIC) {
this.MATRIXX_ADJ_EVENT_TOPIC = MATRIXX_ADJ_EVENT_TOPIC;
}
public void setOUTPUT_MNVO_KGN_ADJ_EVENT_TOPIC(String OUTPUT_MNVO_KGN_ADJ_EVENT_TOPIC) {
this.OUTPUT_MNVO_KGN_ADJ_EVENT_TOPIC = OUTPUT_MNVO_KGN_ADJ_EVENT_TOPIC;
}
public void setOUTPUT_MNVO_LEB_ADJ_EVENT_TOPIC(String OUTPUT_MNVO_LEB_ADJ_EVENT_TOPIC) {
this.OUTPUT_MNVO_LEB_ADJ_EVENT_TOPIC = OUTPUT_MNVO_LEB_ADJ_EVENT_TOPIC;
}
public String getEVENTS_NO_MVNO() {
return EVENTS_NO_MVNO;
}
public void setEVENTS_NO_MVNO(String EVENTS_NO_MVNO) {
this.EVENTS_NO_MVNO = EVENTS_NO_MVNO;
}
public String getMATRIXX_ADJ_EVENT_TOPIC() {
return MATRIXX_ADJ_EVENT_TOPIC;
}
public String getOUTPUT_MNVO_KGN_ADJ_EVENT_TOPIC() {
return OUTPUT_MNVO_KGN_ADJ_EVENT_TOPIC;
}
public String getOUTPUT_MNVO_LEB_ADJ_EVENT_TOPIC() {
return OUTPUT_MNVO_LEB_ADJ_EVENT_TOPIC;
}
}
I am doing @Autowired of this class in my test and app class,
@Autowired
ConfigProps cp;
and trying to access fields using cp.getBootstrapServerHost(), but this resolves to a NullPointerException in my test class. It resolves properly in my application class, though...
My test class looks like this
package distinct;
import com.acme.rtc.configuration.ConfigProps;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.apache.kafka.streams.*;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.kafka.config.KafkaStreamsConfiguration;
import com.acme.rtc.configuration.KafkaConfiguration;
import com.acme.rtc.configuration.TopologyConfiguration;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit.jupiter.SpringJUnitConfig;
import java.util.List;
import static java.util.Collections.singletonList;
import static java.util.stream.Collectors.toList;
import static org.junit.jupiter.api.Assertions.assertEquals;
@SpringBootTest
@ContextConfiguration(classes = TopologyConfiguration.class)
@SpringJUnitConfig
public class TestWithTopologyTestDriver {
private TestInputTopic<String, String> inputTopicWrong;
private TestOutputTopic<String, String> outputTopicWrong;
private TestInputTopic<String, String> inputTopicRight;
private TestOutputTopic<String, String> outputTopicRight;
private TopologyTestDriver topologyTestDriver;
@Autowired
ConfigProps configProps;
@BeforeEach
public void setUp() {
KafkaProperties properties = new KafkaProperties();
properties.setBootstrapServers(singletonList("localhost:9092"));
KafkaStreamsConfiguration config = new KafkaConfiguration(properties).getStreamsConfig();
StreamsBuilder sb = new StreamsBuilder();
Topology topology = new TopologyConfiguration().createTopology(sb);
topologyTestDriver = new TopologyTestDriver(topology, config.asProperties());
inputTopicWrong =
topologyTestDriver.createInputTopic(configProps.getMATRIXX_ADJ_EVENT_TOPIC(), new StringSerializer(),
new StringSerializer());
outputTopicWrong =
topologyTestDriver.createOutputTopic(configProps.getOUTPUT_MNVO_KGN_ADJ_EVENT_TOPIC(), new StringDeserializer(),
new StringDeserializer());
inputTopicRight =
topologyTestDriver.createInputTopic(configProps.getMATRIXX_ADJ_EVENT_TOPIC(), new StringSerializer(),
new StringSerializer());
outputTopicRight =
topologyTestDriver.createOutputTopic(configProps.getOUTPUT_MNVO_LEB_ADJ_EVENT_TOPIC(), new StringDeserializer(),
new StringDeserializer());
}
@AfterEach
public void tearDown() {
topologyTestDriver.close();
}
@Test
void wrongDistinctTopology() {
testTopology(inputTopicWrong, outputTopicWrong);
}}
where TopologyConfiguration is my application class, which has this signature:
package com.acme.rtc.configuration;
import com.fasterxml.jackson.databind.JsonNode;
import lombok.RequiredArgsConstructor;
import org.apache.kafka.clients.admin.NewTopic;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.common.serialization.Serde;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.common.serialization.Serializer;
import org.apache.kafka.common.serialization.Deserializer;
import org.apache.kafka.streams.Topology;
import org.apache.kafka.streams.kstream.*;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.env.Environment;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.support.Acknowledgment;
import org.springframework.kafka.support.serializer.JsonDeserializer;
import org.springframework.kafka.support.serializer.JsonSerializer;
import org.springframework.stereotype.Component;
@Configuration
@ConfigurationProperties(prefix = "topics")
@Component
@RequiredArgsConstructor
public class TopologyConfiguration {
@Autowired
Environment env;
@Autowired
ConfigProps configProps;
private void acmeStreamsTopoloy(StreamsBuilder streamsBuilder) {
Deserializer<JsonNode> jsonDeserializer = new JsonDeserializer();
Serializer<JsonNode> jsonSerializer = new JsonSerializer();
Serde<JsonNode> jsonSerde = Serdes.serdeFrom(jsonSerializer, jsonDeserializer);
System.out.println("ConfigProps.getMattrix: "+configProps.getMATRIXX_ADJ_EVENT_TOPIC());
KStream<String, String> inputStream =
streamsBuilder.stream(configProps.getMATRIXX_ADJ_EVENT_TOPIC(), Consumed.with(Serdes.String(), Serdes.String()));
KStream<String, String>[] branches = inputStream.branch(
(key, value)-> value.contains("KGN"),
(key, value)-> value.contains("LEB"),
(key, value)->true);
branches[0].to(configProps.getOUTPUT_MNVO_KGN_ADJ_EVENT_TOPIC());
branches[1].to(configProps.getOUTPUT_MNVO_LEB_ADJ_EVENT_TOPIC());
branches[2].to(configProps.getEVENTS_NO_MVNO());
}
@Bean
public Topology createTopology(StreamsBuilder streamsBuilder) {
acmeStreamsTopoloy(streamsBuilder);
return streamsBuilder.build();
}
}
My KafkaConfiguration class
package com.acme.rtc.configuration;
import lombok.RequiredArgsConstructor;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.errors.LogAndContinueExceptionHandler;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.annotation.KafkaStreamsDefaultConfiguration;
import org.springframework.kafka.config.KafkaStreamsConfiguration;
import org.springframework.kafka.support.Acknowledgment;
import org.springframework.kafka.support.serializer.JsonDeserializer;
import org.springframework.kafka.support.serializer.JsonSerializer;
import java.util.HashMap;
import java.util.Map;
@Configuration
@RequiredArgsConstructor
public class KafkaConfiguration {
public static final String APP_ID = "acme-stream-rtc";
private final KafkaProperties kafkaProperties;
@Autowired
@Bean(name = KafkaStreamsDefaultConfiguration.DEFAULT_STREAMS_CONFIG_BEAN_NAME)
public KafkaStreamsConfiguration getStreamsConfig() {
Map<String, Object> props = new HashMap<>();
props.put(StreamsConfig.APPLICATION_ID_CONFIG, APP_ID);
props.put(StreamsConfig.NUM_STREAM_THREADS_CONFIG, 2);
props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaProperties.getBootstrapServers());
props.put(JsonSerializer.ADD_TYPE_INFO_HEADERS,false);
KafkaStreamsConfiguration streamsConfig = new KafkaStreamsConfiguration(props);
return streamsConfig;
}
}
My application.yml has the right syntax etc.
spring:
  kafka:
    bootstrap-servers: localhost:9092
    json:
      value:
        default:
          type: true
  kafka:
    streams:
      properties:
        default:
          value:
            serde: org.springframework.kafka.support.serializer.JsonSerde
    admin:
      security:
        protocol: SSL
    ssl:
      trust-store-location: ${TRUSTSTORE_LOCATION}
      trust-store-password: ${TRUSTSTORE_PASSWORD}
      key-store-location: ${KEYSTORE_LOCATION}
      key-store-password: ${KEYSTORE_PASSWORD}
      key-password: ${KEY_PASSWORD}
topics:
  MATRIXX_ADJ_EVENT_TOPIC: input-matrixx-adj-event
  OUTPUT_MNVO_KGN_ADJ_EVENT_TOPIC: output-KGN-adj-event
  OUTPUT_MNVO_LEB_ADJ_EVENT_TOPIC: output-LEB-adj-event
  EVENTS_NO_MVNO: events-no-mvno-spec
My main class
package com.acme.rtc;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.kafka.annotation.EnableKafkaStreams;
@SpringBootApplication
@EnableKafkaStreams
public class StreamProcessing {
public static void main(String[] args) {
SpringApplication.run(StreamProcessing.class, args);
}
}
I am not sure if I am missing any context when Autowiring my ConfigProps class or if I need further annotations on my test class.
For JUnit 4 you need @RunWith(SpringJUnit4ClassRunner.class) alongside the @ContextConfiguration.
For JUnit 5, use @SpringJUnitConfig.
For proper loading of properties, though, you need @SpringBootTest.
Boot 2.4 uses JUnit 5.
And you should not have @ConfigurationProperties on the test.
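Applied to the test class in the question, the class-level setup could then be reduced to something like this (a sketch, assuming the full Boot context is acceptable for the test):

    import com.acme.rtc.configuration.ConfigProps;
    import org.springframework.beans.factory.annotation.Autowired;
    import org.springframework.boot.test.context.SpringBootTest;

    @SpringBootTest
    public class TestWithTopologyTestDriver {

        @Autowired
        ConfigProps configProps;

        // ... same setUp(), tearDown() and test methods as before ...
    }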
EDIT
I just tested it with no problems.
@Configuration
public class Config {
@Bean
String str() {
return "str";
}
}
#ConfigurationProperties(prefix = "foo")
#Component
public class MyProps {
String bar;
public String getBar() {
return this.bar;
}
public void setBar(String bar) {
this.bar = bar;
}
@Override
public String toString() {
return "MyProps [bar=" + this.bar + "]";
}
}
@SpringBootApplication
public class So67078244Application {
public static void main(String[] args) {
SpringApplication.run(So67078244Application.class, args);
}
}
@SpringBootTest
class So67078244ApplicationTests {
@Autowired
MyProps props;
@Test
void contextLoads() {
System.out.println(this.props);
}
}
foo.bar=baz
MyProps [bar=baz]
A question regarding our Spring Boot 2.3.8.RELEASE implementation of @Transactional. The requirement is to implement distributed transactions that write to an instance of PostgreSQL and to Artemis queues. If one commit fails, then so should the other. We are using Atomikos for our JTA transaction manager.
I think I have implemented everything I need, but clearly not: when I throw an exception in my service code to test the rollback functionality, it does not work. The message is written to Artemis even after I throw the exception in the service code.
Any help with diagnosing and fixing would be much appreciated. If any additional details are required, please let me know.
Please find the details of the implementation below:
Spring Boot Application:
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.CommandLineRunner;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration;
import org.springframework.boot.autoconfigure.jdbc.DataSourceTransactionManagerAutoConfiguration;
import org.springframework.boot.autoconfigure.jms.JmsAutoConfiguration;
import org.springframework.boot.autoconfigure.jms.activemq.ActiveMQAutoConfiguration;
import org.springframework.boot.autoconfigure.jms.artemis.ArtemisAutoConfiguration;
import org.springframework.boot.autoconfigure.orm.jpa.HibernateJpaAutoConfiguration;
import org.springframework.context.ConfigurableApplicationContext;
import xxx.xxx.Users;
import xxx.xxx.TransactionServiceImpl;
@SpringBootApplication
(
exclude = {
DataSourceAutoConfiguration.class,
HibernateJpaAutoConfiguration.class,
DataSourceTransactionManagerAutoConfiguration.class,
JmsAutoConfiguration.class,
ActiveMQAutoConfiguration.class,
ArtemisAutoConfiguration.class
}
)
public class BApplication implements CommandLineRunner
{
public static void main(String[] args) throws Exception
{
// SpringApplication.run(BoilerplateApplication.class, args);
ConfigurableApplicationContext ctx = SpringApplication.run(BApplication.class, args);
System.in.read();
ctx.close();
}
@Autowired
TransactionServiceImpl tsi;
@Override
public void run(String... args) throws Exception
{
Users user = new Users();
user.setFirstName("Moe");
user.setGender("M");
user.setLastName("Moe");
tsi.save(user);
}
}
Here is the JTA Configuration:
JTA Configuration
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.jms.ConnectionFactory;
import javax.jms.JMSException;
import javax.sql.DataSource;
import javax.transaction.SystemException;
import javax.transaction.UserTransaction;
import java.util.Properties;
import javax.annotation.PostConstruct;
import org.springframework.context.annotation.Bean;
import org.springframework.orm.jpa.JpaVendorAdapter;
import org.springframework.context.annotation.Primary;
import org.springframework.context.annotation.DependsOn;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Configuration;
import org.springframework.orm.jpa.vendor.HibernateJpaVendorAdapter;
import com.atomikos.icatch.config.UserTransactionService;
import com.atomikos.icatch.config.UserTransactionServiceImp;
import com.atomikos.icatch.jta.UserTransactionImp;
import com.atomikos.icatch.jta.UserTransactionManager;
import com.atomikos.jdbc.AtomikosDataSourceBean;
import com.atomikos.jms.AtomikosConnectionFactoryBean;
import org.apache.activemq.artemis.jms.client.ActiveMQConnectionFactory;
import org.postgresql.xa.PGXADataSource;
#Configuration("jtaConfig")
public class JtaConfig
{
private static final Logger appLogger = LoggerFactory.getLogger(JtaConfig.class);
#Value("${amq.broker.url}")
private String brokerUrl;
#Value("${amq.broker.username}")
private String brokerUsername;
#Value("${amq.broker.password}")
private String brokerPassword;
#Value("${postgresql.datasource.url}")
String dataSourceUrl;
#Value("${postgresql.datasource.username}")
String dsUsername;
#Value("${postgresql.datasource.password}")
String dsPassword;
#Value("${postgresql.datasource.driver.classname}")
String dsClassName;
#Value("${postgresql.initial.connections}")
int initialDSConnections;
#Value("${postgresql.max.connections}")
int maxDSConnections;
#Bean(initMethod = "init", destroyMethod = "shutdownForce")
public UserTransactionService userTransactionService()
{
Properties atProps = new Properties();
atProps.put("com.atomikos.icatch.service", "com.atomikos.icatch.standalone.UserTransactionServiceFactory");
return new UserTransactionServiceImp(atProps);
}
#Bean (initMethod = "init", destroyMethod = "close")
#DependsOn("userTransactionService")
public UserTransactionManager atomikosTransactionManager()
{
UserTransactionManager utm = new UserTransactionManager();
utm.setStartupTransactionService(false);
utm.setForceShutdown(true);
return utm;
}
@Bean
@DependsOn("userTransactionService")
public UserTransaction userTransaction()
{
UserTransactionImp ut = new UserTransactionImp();
try
{
ut.setTransactionTimeout(1000);
}
catch (SystemException _e)
{
appLogger.error("Configuration exception.", _e);
return null;
}
return ut;
}
@Bean
public Properties hibernateProperties()
{
Properties hibernateProp = new Properties();
hibernateProp.put("hibernate.dialect", "org.hibernate.dialect.PostgreSQLDialect");
hibernateProp.put("hibernate.hbm2ddl.auto", "create-drop");
hibernateProp.put("hibernate.show_sql", true);
hibernateProp.put("hibernate.max_fetch_depth", 3);
hibernateProp.put("hibernate.jdbc.batch_size", 10);
hibernateProp.put("hibernate.jdbc.fetch_size", 50);
return hibernateProp;
}
@Bean
public JpaVendorAdapter jpaVendorAdapter()
{
return new HibernateJpaVendorAdapter();
}
@Primary
@Bean(name = "pgDataSource1", initMethod = "init", destroyMethod = "close")
public DataSource pgDataSource1()
{
PGXADataSource primaryXaDataSource = new PGXADataSource();
primaryXaDataSource.setUrl(dataSourceUrl);
primaryXaDataSource.setUser(dsUsername);
primaryXaDataSource.setPassword(dsPassword);
AtomikosDataSourceBean xaDataSource = new AtomikosDataSourceBean();
xaDataSource.setXaDataSource(primaryXaDataSource);
xaDataSource.setUniqueResourceName("primaryXaDs1");
xaDataSource.setMinPoolSize(initialDSConnections);
xaDataSource.setMaxPoolSize(maxDSConnections);
return xaDataSource;
}
@Primary
@Bean(name = "jmsConnectionFactory", initMethod = "init", destroyMethod = "close")
public ConnectionFactory connectionFactory()
{
AtomikosConnectionFactoryBean atomikosConnectionFactoryBean = new AtomikosConnectionFactoryBean();
ActiveMQConnectionFactory activeMqXaConnectionFactory = new ActiveMQConnectionFactory();
try
{
activeMqXaConnectionFactory.setBrokerURL(brokerUrl);
activeMqXaConnectionFactory.setUser(brokerUsername);
activeMqXaConnectionFactory.setPassword(brokerPassword);
atomikosConnectionFactoryBean.setUniqueResourceName("jmsXAConnectionFactory");
atomikosConnectionFactoryBean.setLocalTransactionMode(false);
atomikosConnectionFactoryBean.setXaConnectionFactory(activeMqXaConnectionFactory);
}
catch (JMSException _e)
{
appLogger.info("JMS Configuration Error: " + _e);
_e.printStackTrace();
}
return atomikosConnectionFactoryBean;
}
@PostConstruct
public void postConstructDetails()
{
appLogger.info("Post Construct Start: JtaConfig.");
appLogger.info(" - JMS: Artemis URL: {}", brokerUrl);
appLogger.info(" - Artemis Username: {}", brokerUsername);
appLogger.info(" - Artemis Password: {}", brokerPassword);
appLogger.info(" - DS: PostgreSQL URL: {}", dataSourceUrl);
appLogger.info(" - DS: PostgreSQL Username: {}", dsUsername);
appLogger.info(" - DS: PostgreSQL Password: {}", dsPassword);
appLogger.info(" - DS: PostgreSQL Min Conn: {}", initialDSConnections);
appLogger.info(" - DS: PostgreSQL Max Conn: {}", maxDSConnections);
appLogger.info("Post Construct End: JtaConfig.");
appLogger.info(" ");
}
}
Here is the implementation for Services Configuration:
Services Configuration:
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.jms.core.JmsTemplate;
import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import org.springframework.transaction.jta.JtaTransactionManager;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.annotation.EnableTransactionManagement;
import javax.annotation.PostConstruct;
import javax.persistence.EntityManagerFactory;
@Configuration
@EnableTransactionManagement
@ComponentScan(basePackages = "xxx.xxx.service")
public class ServicesConfig
{
private Logger appLogger = LoggerFactory.getLogger(ServicesConfig.class);
@Autowired
JtaConfig jtaConfig;
#Bean(name = "xaJmsTemplate")
public JmsTemplate jmsTemplate()
{
JmsTemplate jmsTemplate = new JmsTemplate();
jmsTemplate.setConnectionFactory(jtaConfig.connectionFactory());
jmsTemplate.setPubSubDomain(false);
return jmsTemplate;
}
#Bean(name = "entityManangerFactory")
public EntityManagerFactory entityManagerFactory()
{
LocalContainerEntityManagerFactoryBean factoryBean = new LocalContainerEntityManagerFactoryBean();
factoryBean.setPackagesToScan("xxx.xxx.model");
factoryBean.setDataSource(jtaConfig.pgDataSource1());
factoryBean.setJpaProperties(jtaConfig.hibernateProperties());
factoryBean.setPersistenceUnitName("entityManagerFactoryA");
factoryBean.setJpaVendorAdapter(jtaConfig.jpaVendorAdapter());
factoryBean.afterPropertiesSet();
return factoryBean.getNativeEntityManagerFactory();
}
#Bean(name = "transactionManager")
public PlatformTransactionManager transactionManager()
{
JtaTransactionManager ptm = new JtaTransactionManager();
ptm.setTransactionManager(jtaConfig.atomikosTransactionManager());
ptm.setUserTransaction(jtaConfig.userTransaction());
return ptm;
}
@PostConstruct
public void postConstructDetails()
{
appLogger.info("Post Construct Start: ServicesConfig.");
appLogger.info(" - JMS: Artemis URL: {}", jtaConfig);
appLogger.info(" - JMS Template: {}", jmsTemplate());
appLogger.info("Post Construct End: ServicesConfig.");
appLogger.info(" ");
}
}
Here is the Service implementation:
TransactionServiceImpl
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jms.core.JmsTemplate;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import xxx.xxx.Users;
#Service("transactionService")
#Transactional
public class TransactionServiceImpl implements TransactionServiceIntf
{
private static final Logger appLogger = LoggerFactory.getLogger(TransactionServiceImpl.class);
@Autowired
@Qualifier("xaJmsTemplate")
JmsTemplate jmsTemplate;
@Override
public Users save(Users _user)
{
appLogger.info("TransactionServiceImpl: save: Entered.");
Users user = _user;
try
{
if(user == null)
{
appLogger.info("User: Null.");
}
else
{
if(jmsTemplate == null)
{
appLogger.info("JMS Template: Null.");
}
else
{
appLogger.info("JMS Template: Saving.");
jmsTemplate.convertAndSend("crequests", user);
}
}
// The rollback should happen with the exception.
throw new Exception();
}
catch(Exception _e)
{
appLogger.error("Catching exception: " + _e);
}
appLogger.info("TransactionServiceImpl: save: Exiting.");
return user;
}
}
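As a general illustration (not a verified fix for this exact setup): @Transactional only marks the transaction rollback-only when an unchecked exception escapes the transactional method, so a rollback test usually lets the exception propagate instead of catching it inside the method. A hedged sketch of what such a test path could look like:

    @Transactional(rollbackFor = Exception.class)
    public Users save(Users user) {
        appLogger.info("TransactionServiceImpl: save: Entered.");
        jmsTemplate.convertAndSend("crequests", user);
        // Letting the exception escape the method is what triggers the JTA rollback;
        // catching it inside save() means the transaction still commits.
        throw new IllegalStateException("forcing rollback for testing");
    }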
I have this class below... it's responsible for starting my application. Is there any
way to make the @ImportResource annotation dynamic?
For my test coverage I would like to load a different resource. I am trying to avoid running my unit tests with my parallel configuration, because the results are intermittent.
import java.security.cert.CertificateException;
import java.security.cert.X509Certificate;
import org.apache.http.client.HttpClient;
import org.apache.http.conn.ssl.NoopHostnameVerifier;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.ssl.SSLContextBuilder;
import org.apache.http.ssl.TrustStrategy;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ImportResource;
import org.springframework.http.client.BufferingClientHttpRequestFactory;
import org.springframework.http.client.HttpComponentsClientHttpRequestFactory;
import org.springframework.validation.beanvalidation.LocalValidatorFactoryBean;
import org.springframework.web.client.RestTemplate;
import br.com.bradesco.ciar.srv.agorainv.infrastructure.LoggingClientHttpRequestInterceptor;
@SpringBootApplication
@ImportResource("spring/*.xml")
public class Application {
#Value("${rest.timeout.read}")
private int readTimeout;
#Value("${rest.timeout.connect}")
private int connectTimeout;
public static void main(final String[] args) {
SpringApplication.run(Application.class, args);
}
@Bean
public HttpClient httpClient() {
try {
return HttpClients.custom().setSSLHostnameVerifier(NoopHostnameVerifier.INSTANCE)
.setSSLContext(new SSLContextBuilder().loadTrustMaterial(null, new TrustStrategy() {
@Override
public boolean isTrusted(final X509Certificate[] arg0, final String arg1) throws CertificateException {
return true;
}
}).build()).build();
}
catch (final Exception e) {
throw new RuntimeException(e);
}
}
@Bean
public HttpComponentsClientHttpRequestFactory httpRequestFactory() {
final HttpComponentsClientHttpRequestFactory requestFactory = new HttpComponentsClientHttpRequestFactory(httpClient());
requestFactory.setReadTimeout(readTimeout);
requestFactory.setConnectTimeout(connectTimeout);
return requestFactory;
}
@Bean
public RestTemplate restTemplate() {
final RestTemplate restTemplate = new RestTemplate(new BufferingClientHttpRequestFactory(httpRequestFactory()));
restTemplate.getInterceptors().add(new LoggingClientHttpRequestInterceptor());
return restTemplate;
}
@Bean
public LocalValidatorFactoryBean restValidator() {
return new LocalValidatorFactoryBean();
}
}
You could try moving the @ImportResource annotation to a separate configuration class annotated with a @Profile annotation, e.g.:
@Profile("!Test")
@Configuration
@ImportResource("spring/*.xml")
class ResourceConfiguration {}
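On the test side, the profile can then be activated so that this configuration (and its XML imports) is skipped. A sketch, assuming the profile name "Test" from the example above:

    import org.springframework.boot.test.context.SpringBootTest;
    import org.springframework.test.context.ActiveProfiles;

    @SpringBootTest
    @ActiveProfiles("Test")
    class ApplicationTests {
        // with the "Test" profile active, ResourceConfiguration is not loaded,
        // so spring/*.xml is not imported during the test run
    }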
I have two database structures, for example:
1. MAIN_DATABASE: USER, PASSWORD
2. CUSTOMER DATABASE: CUSTOMER_A, CUSTOMER_B, CUSTOMER_C
I want to access the main database and after validating the data, redirect to the customer database.
I currently use spring and configure it in applicationContext.xml
Example:
<bean id = "encryptionPassword" class = "utils.EncryptionPasswordSpring" />
<bean id = "dataSource" class = "com.mchange.v2.c3p0.ComboPooledDataSource" destroy-method = "close">
<property name = "driverClass" value = "com.mysql.jdbc.Driver" />
<property name = "user" value = "user" />
<property name = "password" value = "123456" />
<property name = "jdbcUrl" value = "jdbc:mysql://localhost/testdb?useSSL = false" />
</bean>
Any example, suggestion? Thanks.
Below is my code for a dynamic datasource with MyBatis. One is the main DS, the other is a read DS. Hope it is useful to you.
Use AbstractRoutingDataSource to define a DynamicDataSource:
import java.util.Map;
import javax.sql.DataSource;
import org.springframework.jdbc.datasource.lookup.AbstractRoutingDataSource;
public class DynamicDataSource extends AbstractRoutingDataSource {
@Override
protected DataSource determineTargetDataSource() {
return super.determineTargetDataSource();
}
/**
*/
@Override
protected Object determineCurrentLookupKey() {
return DynamicDataSourceContextHolder.getDataSourceKey();
}
/**
* @param defaultDataSource
*/
public void setDefaultDataSource(Object defaultDataSource) {
super.setDefaultTargetDataSource(defaultDataSource);
}
/**
* @param dataSources
*/
public void setDataSources(Map<Object, Object> dataSources) {
super.setTargetDataSources(dataSources);
DynamicDataSourceContextHolder.addDataSourceKeys(dataSources.keySet());
}
}
Use a ThreadLocal to switch the datasource in the current context:
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
public class DynamicDataSourceContextHolder {
private static final ThreadLocal<String> contextHolder = new ThreadLocal<String>() {
/**
make main as default ds
*/
@Override
protected String initialValue() {
return "main";
}
};
/**
*
*/
private static List<Object> dataSourceKeys = Collections.synchronizedList(new ArrayList<>());
/**
* switch ds
*
* @param key
*/
public static void setDataSourceKey(String key) {
contextHolder.set(key);
}
/**
* get ds
*
* @return
*/
public static String getDataSourceKey() {
return contextHolder.get();
}
/**
* reset ds
*/
public static void clearDataSourceKey() {
contextHolder.remove();
}
/**
* judge if ds existed
*
* @param key
* @return
*/
public static boolean containDataSourceKey(String key) {
return dataSourceKeys.contains(key);
}
/**
* add ds
*
* @param keys
* @return
*/
public static boolean addDataSourceKeys(Collection<? extends Object> keys) {
return dataSourceKeys.addAll(keys);
}
}
Inject the different datasources via application.yml or application.properties:
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import javax.sql.DataSource;
import org.apache.ibatis.plugin.Interceptor;
import org.apache.ibatis.session.SqlSessionFactory;
import org.mybatis.spring.SqlSessionFactoryBean;
import org.mybatis.spring.SqlSessionTemplate;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.jdbc.DataSourceBuilder;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
import org.springframework.core.io.support.PathMatchingResourcePatternResolver;
import org.springframework.jdbc.datasource.DataSourceTransactionManager;
import org.springframework.transaction.PlatformTransactionManager;
@Configuration
public class DataSourceConfig {
@Primary
@Bean
@ConfigurationProperties("spring.datasource.main")
public DataSource main() {
return DataSourceBuilder.create().build();
}
@Bean
@ConfigurationProperties("spring.datasource.read")
public DataSource read() {
return DataSourceBuilder.create().build();
}
@Bean
public DataSource dynamicDataSource(
@Qualifier("main") DataSource main,
@Qualifier("read") DataSource read
) {
Map<Object, Object> targetDataSources = new HashMap<>(2);
targetDataSources.put("main", main);
targetDataSources.put("read", read);
DynamicDataSource dynamicDataSource = new DynamicDataSource();
dynamicDataSource.setDefaultTargetDataSource(main); //default
dynamicDataSource.setDataSources(targetDataSources);
return dynamicDataSource;
}
@Bean
public SqlSessionFactory sqlSessionFactory(
@Qualifier("dynamicDataSource") DataSource dynamicDataSource)
throws Exception {
SqlSessionFactoryBean bean = new SqlSessionFactoryBean();
bean.setDataSource(dynamicDataSource);
bean.setMapperLocations(new PathMatchingResourcePatternResolver().getResources("classpath*:mappings/**/*.xml"));
return bean.getObject();
}
#Bean(name = "sqlSessionTemplate")
public SqlSessionTemplate sqlSessionTemplate(
#Qualifier("sqlSessionFactory") SqlSessionFactory sqlSessionFactory)
throws Exception {
return new SqlSessionTemplate(sqlSessionFactory);
}
@Bean
public PlatformTransactionManager transactionManager(
@Qualifier("dynamicDataSource") DataSource dynamicDataSource
) {
return new DataSourceTransactionManager(dynamicDataSource);
}
}
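For reference, the matching datasource entries in application.yml could look roughly like this (key and value names below are assumptions, not from the original post; with the default Hikari pool the URL key is jdbc-url):

    spring:
      datasource:
        main:
          jdbc-url: jdbc:mysql://localhost:3306/main_db
          username: main_user
          password: main_pass
        read:
          jdbc-url: jdbc:mysql://localhost:3306/read_db
          username: read_user
          password: read_pass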
Define an AOP aspect to control which DAO methods use which datasource. The DAO is the interface used to access the DB via MyBatis:
import org.aspectj.lang.JoinPoint;
import org.aspectj.lang.annotation.After;
import org.aspectj.lang.annotation.Aspect;
import org.aspectj.lang.annotation.Before;
import org.aspectj.lang.annotation.Pointcut;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.core.annotation.Order;
import org.springframework.stereotype.Component;
@Aspect
@Order(-1)
@Component
public class DynamicDataSourceAspect {
private final static Logger logger = LoggerFactory.getLogger(DynamicDataSourceAspect.class);
private final String[] QUERY_PREFIX = {
"select","get","find","query","quickGet"
};
#Pointcut("execution( * com.biz.dao..*.*(..))")
public void daoAspect() {
}
#Before("daoAspect()")
public void beforeDao(JoinPoint point) {
boolean isQueryMethod = isQueryMethod(point.getSignature().getName());
if (isQueryMethod) {
switchDataSource("read");
}
}
#After("daoAspect()")
public void afterDao(JoinPoint point) {
restoreDataSource();
}
//===============================private method
private void switchDataSource(String key) {
if (!DynamicDataSourceContextHolder.containDataSourceKey(key)) {
logger.debug("======>DataSource [{}] doesn't exist, use default DataSource [{}] " + key);
} else {
// switch ds
DynamicDataSourceContextHolder.setDataSourceKey(key);
logger.debug("======>Switch DataSource to " + DynamicDataSourceContextHolder.getDataSourceKey());
}
}
private void restoreDataSource() {
// reset to default ds
DynamicDataSourceContextHolder.clearDataSourceKey();
}
private boolean isQueryMethod(String methodName) {
for (String prefix : QUERY_PREFIX) {
if (methodName.startsWith(prefix)) {
return true;
}
}
return false;
}
}
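Outside the aspect, the datasource can also be switched by hand with the holder shown above, for example:

    // manual switch for a read-only block of work
    DynamicDataSourceContextHolder.setDataSourceKey("read");
    try {
        // ... run queries against the read datasource ...
    } finally {
        // reset so the thread falls back to the default "main" datasource
        DynamicDataSourceContextHolder.clearDataSourceKey();
    }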
Configure two beans with two different sets of configs in app.props.
For one configuration you can use this (beanName = dataSource1):
import javax.sql.DataSource;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
import org.springframework.jdbc.datasource.DriverManagerDataSource;
import org.springframework.retry.annotation.EnableRetry;
@Configuration
@EnableRetry
public class DataSourceConfiguration {
#Value("${datasource1.username}")
private String username;
#Value("${datasource1.password}")
private String password;
#Value("${datasource1.url}")
private String connection;
#Bean(name = "dataSource1")
#Primary
public DataSource mainDataSource() {
DriverManagerDataSource dataSource = new DriverManagerDataSource();
dataSource.setDriverClassName("com.mysql.cj.jdbc.Driver");
dataSource.setUrl(connection);
dataSource.setUsername(username);
dataSource.setPassword(password);
return dataSource;
}
}
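The corresponding entries in the properties file then follow the keys referenced by @Value above (the values here are placeholders):

    datasource1.url=jdbc:mysql://localhost:3306/main_db
    datasource1.username=main_user
    datasource1.password=secret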
I have configured all the properties, but my app still loads without Spring Security, as if it does not exist... Please help me figure out what I am doing wrong.
Here I get my rooms without auth using Postman:
Here below are my classes:
SecurityConfiguration:
package com.vidaflo.config;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import org.springframework.security.config.annotation.authentication.builders.AuthenticationManagerBuilder;
import org.springframework.security.config.annotation.web.builders.HttpSecurity;
import org.springframework.security.config.annotation.web.configuration.EnableWebSecurity;
import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter;
import org.springframework.security.config.http.SessionCreationPolicy;
@Configuration
@EnableWebSecurity
@ComponentScan("com.vidaflo")
public class SecurityConfiguration extends WebSecurityConfigurerAdapter {
@Autowired
public void configureGlobalSecurity(AuthenticationManagerBuilder auth) throws Exception {
auth.inMemoryAuthentication().withUser("bill").password("abc123").roles("ADMIN");
auth.inMemoryAuthentication().withUser("tom").password("abc123").roles("USER");
}
@Override
protected void configure(HttpSecurity http) throws Exception {
http.csrf().disable()
.authorizeRequests().antMatchers("/room/**").hasRole("ADMIN")
.and()
.httpBasic()
.and()
.sessionManagement().sessionCreationPolicy(SessionCreationPolicy.STATELESS);
}
}
SecurityInitializer:
package com.vidaflo.config;
import org.springframework.security.web.context.AbstractSecurityWebApplicationInitializer;
public class SecurityInitializer extends AbstractSecurityWebApplicationInitializer {
}
WebConfiguration:
package com.vidaflo.config;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import org.springframework.web.servlet.config.annotation.EnableWebMvc;
import org.springframework.web.servlet.config.annotation.WebMvcConfigurationSupport;
@Configuration
@EnableWebMvc
@ComponentScan(basePackages = "com.vidaflo.controllers")
public class WebConfiguration extends WebMvcConfigurationSupport {
}
Tomcat embedded:
package com.vidaflo.server;
import com.vidaflo.config.ApplicationConfiguration;
import com.vidaflo.config.DatabaseConfiguration;
import com.vidaflo.config.SecurityConfiguration;
import com.vidaflo.config.WebConfiguration;
import lombok.extern.slf4j.Slf4j;
import org.apache.catalina.LifecycleException;
import org.apache.catalina.startup.Tomcat;
import org.springframework.core.env.PropertiesPropertySource;
import org.springframework.web.context.support.AnnotationConfigWebApplicationContext;
import org.springframework.web.servlet.DispatcherServlet;
@Slf4j
public class Application {
private static final String APPLICATION_PROPERTIES = System.getProperty("app.properties");
private static final int DEFAULT_PORT = 8080;
private static final String DEFAULT_CONTEXT_PATH = "/app";
private AppProperties appProperties;
private AnnotationConfigWebApplicationContext ctx;
public static void main(String[] args) throws LifecycleException {
Application app = new Application(APPLICATION_PROPERTIES);
Server server = new TomcatServer(new Tomcat());
app.run(server);
}
public Application(String fieldName) {
loadProperties(fieldName);
}
public void run(Server server) {
initApplicationContext();
server.run(getConfig());
}
private void loadProperties(String fieldName) {
appProperties = new AppProperties();
appProperties.load(fieldName);
}
private void initApplicationContext() {
log.info("Initialize application context...");
ctx = new AnnotationConfigWebApplicationContext();
ctx.register(SecurityConfiguration.class);
ctx.register(ApplicationConfiguration.class);
ctx.register(WebConfiguration.class);
ctx.register(DatabaseConfiguration.class);
ctx.getEnvironment()
.getPropertySources()
.addLast(new PropertiesPropertySource("applicationEnvironment", appProperties.getProperties()));
}
private ServerConfig getConfig() {
ServerConfig serverConfig = new ServerConfig();
serverConfig.setPort(appProperties.getPort(DEFAULT_PORT));
serverConfig.setContextPath(appProperties.getContextPath(DEFAULT_CONTEXT_PATH));
serverConfig.setServlet(getServlet());
return serverConfig;
}
private DispatcherServlet getServlet() {
return new DispatcherServlet(ctx);
}
}
Rest controller:
package com.vidaflo.controllers;
import com.vidaflo.dto.RoomDto;
import com.vidaflo.model.location.Room;
import com.vidaflo.repositories.LocationRepository;
import com.vidaflo.services.RoomService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import java.util.List;
import java.util.stream.Collectors;
@RestController
public class RoomController {
@Autowired
private RoomService roomService;
@Autowired
private LocationRepository locationService;
#PostMapping("/room/save")
public String save(#RequestParam(name = "name") String name,
#RequestParam(name = "location_id") Long locationId) {
roomService.save(name, locationService.findOne(locationId));
return "room added";
}
#GetMapping("/room/all")
public List<RoomDto> findAll() {
return roomService.findAll().stream()
.map(this::toDto)
.collect(Collectors.toList());
}
private RoomDto toDto(Room room) {
return RoomDto.builder()
.id(room.getId())
.name(room.getName())
.build();
}
}
Please tell me if I should add additional details. I really need help and I can't understand what I'm doing wrong.
Found an answer: we should manually add a filter for Spring Security in the embedded Tomcat config, like this:
FilterDef filterDef = new FilterDef();
filterDef.setFilterName("springSecurityFilterChain");
filterDef.setFilterClass("org.springframework.web.filter.DelegatingFilterProxy");
container.addFilterDef(filterDef);
FilterMap filterMapping = new FilterMap();
filterMapping.setFilterName("springSecurityFilterChain");
filterMapping.addURLPattern("/*");
container.addFilterMap(filterMapping);
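For context, container here is the org.apache.catalina.Context that the embedded Tomcat setup creates for the webapp. A hedged sketch of where this registration could live (the class and variable names are assumptions, not taken from the original TomcatServer):

    import java.io.File;
    import org.apache.catalina.Context;
    import org.apache.catalina.startup.Tomcat;
    import org.apache.tomcat.util.descriptor.web.FilterDef;
    import org.apache.tomcat.util.descriptor.web.FilterMap;

    public class SecurityFilterRegistration {

        // Registers the Spring Security filter chain on the embedded Tomcat context.
        public static void register(Tomcat tomcat, String contextPath) {
            Context container = tomcat.addContext(contextPath, new File(".").getAbsolutePath());

            FilterDef filterDef = new FilterDef();
            filterDef.setFilterName("springSecurityFilterChain");
            filterDef.setFilterClass("org.springframework.web.filter.DelegatingFilterProxy");
            container.addFilterDef(filterDef);

            FilterMap filterMapping = new FilterMap();
            filterMapping.setFilterName("springSecurityFilterChain");
            filterMapping.addURLPattern("/*");
            container.addFilterMap(filterMapping);
        }
    }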
Try changing the roles "ADMIN" and "USER" to "ROLE_ADMIN" and "ROLE_USER" in the configureGlobalSecurity method and in the "Roles" enum, but don't change them in the configure method.