MongoDB equivalent of writer in Spring Batch?

I'm following the Spring guide for creating a batch service. It implements ItemWriter using JdbcBatchItemWriter, so could you please help me write the MongoDB equivalent of the following code using MongoItemWriter?
I found two tutorials using MongoDB, but they use XML files to define the beans and seem outdated.
@Configuration
@EnableBatchProcessing
public class BatchConfiguration {
// tag::readerwriterprocessor[]
@Bean
public ItemWriter<Person> writer(DataSource dataSource) {
JdbcBatchItemWriter<Person> writer = new JdbcBatchItemWriter<Person>();
writer.setItemSqlParameterSourceProvider(new BeanPropertyItemSqlParameterSourceProvider<Person>());
writer.setSql("INSERT INTO people (first_name, last_name) VALUES (:firstName, :lastName)");
writer.setDataSource(dataSource);
return writer;
}
// end::readerwriterprocessor[]
@Bean
public JdbcTemplate jdbcTemplate(DataSource dataSource) {
return new JdbcTemplate(dataSource);
}
}

package hello;
import com.mongodb.MongoClient;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobExecutionListener;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.core.launch.support.RunIdIncrementer;
import org.springframework.batch.item.ItemProcessor;
import org.springframework.batch.item.ItemReader;
import org.springframework.batch.item.ItemWriter;
import org.springframework.batch.item.data.MongoItemWriter;
import org.springframework.batch.item.file.FlatFileItemReader;
import org.springframework.batch.item.file.mapping.BeanWrapperFieldSetMapper;
import org.springframework.batch.item.file.mapping.DefaultLineMapper;
import org.springframework.batch.item.file.transform.DelimitedLineTokenizer;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.io.ClassPathResource;
import org.springframework.data.mongodb.MongoDbFactory;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.SimpleMongoDbFactory;
@Configuration
@EnableBatchProcessing
public class BatchConfiguration {
private static final Logger log = LoggerFactory.getLogger(BatchConfiguration.class);
// tag::readerwriterprocessor[]
@Bean
public ItemReader<Person> reader() {
FlatFileItemReader<Person> reader = new FlatFileItemReader<Person>();
reader.setResource(new ClassPathResource("sample-data.csv"));
reader.setLineMapper(new DefaultLineMapper<Person>() {{
setLineTokenizer(new DelimitedLineTokenizer() {{
setNames(new String[] { "firstName", "lastName" });
}});
setFieldSetMapper(new BeanWrapperFieldSetMapper<Person>() {{
setTargetType(Person.class);
}});
}});
return reader;
}
@Bean
public ItemProcessor<Person, Person> processor() {
return new PersonItemProcessor();
}
@Bean
public ItemWriter<Person> writer() {
MongoItemWriter<Person> writer = new MongoItemWriter<Person>();
try {
writer.setTemplate(mongoTemplate());
} catch (Exception e) {
log.error(e.toString());
}
writer.setCollection("people");
return writer;
}
// end::readerwriterprocessor[]
// tag::jobstep[]
@Bean
public Job importUserJob(JobBuilderFactory jobs, Step s1, JobExecutionListener listener) {
return jobs.get("importUserJob")
.incrementer(new RunIdIncrementer())
.listener(listener)
.flow(s1)
.end()
.build();
}
@Bean
public Step step1(StepBuilderFactory stepBuilderFactory, ItemReader<Person> reader,
ItemWriter<Person> writer, ItemProcessor<Person, Person> processor) {
return stepBuilderFactory.get("step1")
.<Person, Person> chunk(10)
.reader(reader)
.processor(processor)
.writer(writer)
.build();
}
// end::jobstep[]
@Bean
public MongoDbFactory mongoDbFactory() throws Exception {
return new SimpleMongoDbFactory(new MongoClient(), "db-name");
}
@Bean
public MongoTemplate mongoTemplate() throws Exception {
MongoTemplate mongoTemplate = new MongoTemplate(mongoDbFactory());
return mongoTemplate;
}
}
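If you are on Spring Batch 4.x or later, the same writer can also be built with MongoItemWriterBuilder (org.springframework.batch.item.data.builder). A minimal sketch, assuming a MongoTemplate bean like the one above is available:
@Bean
public MongoItemWriter<Person> mongoWriter(MongoTemplate mongoTemplate) {
    // Sketch for Spring Batch 4.x+; the builder replaces the manual setters and the try/catch.
    return new MongoItemWriterBuilder<Person>()
            .template(mongoTemplate)  // MongoOperations used for the bulk writes
            .collection("people")     // target collection
            .build();
}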

Related

Consider defining a bean of type 'io.ipl.amaresh.data.JobCompletionNotificationListener' in your configuration. The bean could not be found.

My Config class
package io.ipl.amaresh.ipldashboard.data;
import javax.sql.DataSource;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.core.launch.support.RunIdIncrementer;
import org.springframework.batch.item.database.BeanPropertyItemSqlParameterSourceProvider;
import org.springframework.batch.item.database.JdbcBatchItemWriter;
import org.springframework.batch.item.database.builder.JdbcBatchItemWriterBuilder;
import org.springframework.batch.item.file.FlatFileItemReader;
import org.springframework.batch.item.file.builder.FlatFileItemReaderBuilder;
import org.springframework.batch.item.file.mapping.BeanWrapperFieldSetMapper;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.io.ClassPathResource;
import io.ipl.amaresh.data.JobCompletionNotificationListener;
import io.ipl.amaresh.ipldashboard.model.Match;
@Configuration
@EnableBatchProcessing
public class BatchConfig {
private final String[] FIELD_NAMES = new String[] { "id", "city", "date", "player_of_match", "venue",
"neutral_venue", "team1", "team2", "toss_winner", "toss_decision", "winner", "result", "result_margin",
"eliminator", "method", "umpire1", "umpire2" };
@Autowired
public JobBuilderFactory jobBuilderFactory;
@Autowired
public StepBuilderFactory stepBuilderFactory;
@Bean
public FlatFileItemReader<MatchInput> reader() {
return new FlatFileItemReaderBuilder<MatchInput>().name("MatchItemReader")
.resource(new ClassPathResource("match-data.csv")).delimited().names(FIELD_NAMES)
.fieldSetMapper(new BeanWrapperFieldSetMapper<MatchInput>() {
{
setTargetType(MatchInput.class);
}
}).build();
}
@Bean
public MatchDataProcessor processor() {
return new MatchDataProcessor();
}
@Bean
public JdbcBatchItemWriter<Match> writer(DataSource dataSource) {
return new JdbcBatchItemWriterBuilder<Match>()
.itemSqlParameterSourceProvider(new BeanPropertyItemSqlParameterSourceProvider<>())
.sql("INSERT INTO match (id, city, date, player_of_match, venue, team1, team2, toss_winner, toss_decision, match_winner, result, result_margin, umpire1, umpire2) "
+ " VALUES (:id, :city, :date, :playerOfMatch, :venue, :team1, :team2, :tossWinner, :tossDecision, :matchWinner, :result, :resultMargin, :umpire1, :umpire2)")
.dataSource(dataSource).build();
}
@Bean
public Job importUserJob(JobCompletionNotificationListener listener, Step step1) {
return jobBuilderFactory
.get("importUserJob")
.incrementer(new RunIdIncrementer())
.listener(listener)
.flow(step1)
.end()
.build();
}
@Bean
public Step step1(JdbcBatchItemWriter<Match> writer) {
return stepBuilderFactory
.get("step1")
.<MatchInput, Match>chunk(10)
.reader(reader())
.processor(processor())
.writer(writer)
.build();
}
}
My Listener class
package io.ipl.amaresh.data;
import java.util.HashMap;
import java.util.Map;
import javax.persistence.EntityManager;
import javax.transaction.Transactional;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.core.BatchStatus;
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.listener.JobExecutionListenerSupport;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;
import io.ipl.amaresh.ipldashboard.model.Team;
@Component
public class JobCompletionNotificationListener extends JobExecutionListenerSupport {
private static final Logger log = LoggerFactory.getLogger(JobCompletionNotificationListener.class);
private final EntityManager em;
@Autowired
public JobCompletionNotificationListener(EntityManager em) {
this.em = em;
}
@Override
@Transactional
public void afterJob(JobExecution jobExecution) {
if(jobExecution.getStatus() == BatchStatus.COMPLETED) {
log.info("!!! JOB FINISHED! Time to verify the results");
Map<String, Team> teamData = new HashMap<>();
em.createQuery("select m.team1, count(*) from Match m group by m.team1", Object[].class)
.getResultList()
.stream()
.map(e -> new Team((String) e[0], (long) e[1]))
.forEach(team -> teamData.put(team.getTeamName(), team));
em.createQuery("select m.team2, count(*) from Match m group by m.team2", Object[].class)
.getResultList()
.stream()
.forEach(e -> {
Team team = teamData.get((String) e[0]);
team.setTotalMatches(team.getTotalMatches() + (long) e[1]);
});
em.createQuery("select m.matchWinner, count(*) from Match m group by m.matchWinner", Object[].class)
.getResultList()
.stream()
.forEach(e -> {
Team team = teamData.get((String) e[0]);
if (team != null) team.setTotalWins((long) e[1]);
});
teamData.values().forEach(team -> em.persist(team));
teamData.values().forEach(team -> System.out.println(team));
}
}
}
Description:
Parameter 0 of method importUserJob in io.ipl.amaresh.ipldashboard.data.BatchConfig required a bean of type 'io.ipl.amaresh.data.JobCompletionNotificationListener' that could not be found.
Action:
Consider defining a bean of type 'io.ipl.amaresh.data.JobCompletionNotificationListener' in your configuration.
This is caused by the fact that you declare in your config that you need a JobCompletionNotificationListener in the following lines:
@Bean
public Job importUserJob(JobCompletionNotificationListener listener, ...)
But you never declare how to build/retrieve such a component in your config.
Depending on your usage of Spring, you need to include JobCompletionNotificationListener in the context: an explicit @Bean, package scanning, etc.
EDIT: Note that auto-detection won't work here, because your @Component listener lives in io.ipl.amaresh.data, which is not under the package Spring Boot scans by default: component scanning only covers the package of your application class and its subpackages.
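One hedged sketch of a fix, assuming the Spring Boot application class sits under io.ipl.amaresh.ipldashboard (the class name below is hypothetical), is to widen the scan so the listener's package is covered:
// Hypothetical application class; the base package is taken from the question's package names.
@SpringBootApplication(scanBasePackages = "io.ipl.amaresh")
public class IplDashboardApplication {
    public static void main(String[] args) {
        SpringApplication.run(IplDashboardApplication.class, args);
    }
}
Alternatively, declare the listener as an explicit @Bean inside BatchConfig.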

Spring Boot 2 Transactional Annotation Not Working

Question regarding our Spring Boot 2.3.8.RELEASE implementation of @Transactional. The requirement is to implement distributed transactions that write to an instance of PostgreSQL and to Artemis queues. If one commit fails, then so should the other. We are using Atomikos as our JTA transaction manager.
I think I have implemented everything I need, but clearly not: when I throw an exception in my service code to test the rollback functionality, it does not work. The message is written to Artemis even after I throw the exception in the service code.
Any help with diagnosing and fixing this would be much appreciated. If any additional details are required, please let me know.
Please find the details of the implementation below:
Spring Boot Application:
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.CommandLineRunner;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration;
import org.springframework.boot.autoconfigure.jdbc.DataSourceTransactionManagerAutoConfiguration;
import org.springframework.boot.autoconfigure.jms.JmsAutoConfiguration;
import org.springframework.boot.autoconfigure.jms.activemq.ActiveMQAutoConfiguration;
import org.springframework.boot.autoconfigure.jms.artemis.ArtemisAutoConfiguration;
import org.springframework.boot.autoconfigure.orm.jpa.HibernateJpaAutoConfiguration;
import org.springframework.context.ConfigurableApplicationContext;
import xxx.xxx.Users;
import xxx.xxx.TransactionServiceImpl;
@SpringBootApplication
(
exclude = {
DataSourceAutoConfiguration.class,
HibernateJpaAutoConfiguration.class,
DataSourceTransactionManagerAutoConfiguration.class,
JmsAutoConfiguration.class,
ActiveMQAutoConfiguration.class,
ArtemisAutoConfiguration.class
}
)
public class BApplication implements CommandLineRunner
{
public static void main(String[] args) throws Exception
{
// SpringApplication.run(BoilerplateApplication.class, args);
ConfigurableApplicationContext ctx = SpringApplication.run(BApplication.class, args);
System.in.read();
ctx.close();
}
@Autowired
TransactionServiceImpl tsi;
@Override
public void run(String... args) throws Exception
{
Users user = new Users();
user.setFirstName("Moe");
user.setGender("M");
user.setLastName("Moe");
tsi.save(user);
}
}
Here is the JTA Configuration:
JTA Configuration
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.jms.ConnectionFactory;
import javax.jms.JMSException;
import javax.sql.DataSource;
import javax.transaction.SystemException;
import javax.transaction.UserTransaction;
import java.util.Properties;
import javax.annotation.PostConstruct;
import org.springframework.context.annotation.Bean;
import org.springframework.orm.jpa.JpaVendorAdapter;
import org.springframework.context.annotation.Primary;
import org.springframework.context.annotation.DependsOn;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Configuration;
import org.springframework.orm.jpa.vendor.HibernateJpaVendorAdapter;
import com.atomikos.icatch.config.UserTransactionService;
import com.atomikos.icatch.config.UserTransactionServiceImp;
import com.atomikos.icatch.jta.UserTransactionImp;
import com.atomikos.icatch.jta.UserTransactionManager;
import com.atomikos.jdbc.AtomikosDataSourceBean;
import com.atomikos.jms.AtomikosConnectionFactoryBean;
import org.apache.activemq.artemis.jms.client.ActiveMQConnectionFactory;
import org.postgresql.xa.PGXADataSource;
@Configuration("jtaConfig")
public class JtaConfig
{
private static final Logger appLogger = LoggerFactory.getLogger(JtaConfig.class);
@Value("${amq.broker.url}")
private String brokerUrl;
@Value("${amq.broker.username}")
private String brokerUsername;
@Value("${amq.broker.password}")
private String brokerPassword;
@Value("${postgresql.datasource.url}")
String dataSourceUrl;
@Value("${postgresql.datasource.username}")
String dsUsername;
@Value("${postgresql.datasource.password}")
String dsPassword;
@Value("${postgresql.datasource.driver.classname}")
String dsClassName;
@Value("${postgresql.initial.connections}")
int initialDSConnections;
@Value("${postgresql.max.connections}")
int maxDSConnections;
@Bean(initMethod = "init", destroyMethod = "shutdownForce")
public UserTransactionService userTransactionService()
{
Properties atProps = new Properties();
atProps.put("com.atomikos.icatch.service", "com.atomikos.icatch.standalone.UserTransactionServiceFactory");
return new UserTransactionServiceImp(atProps);
}
@Bean (initMethod = "init", destroyMethod = "close")
@DependsOn("userTransactionService")
public UserTransactionManager atomikosTransactionManager()
{
UserTransactionManager utm = new UserTransactionManager();
utm.setStartupTransactionService(false);
utm.setForceShutdown(true);
return utm;
}
@Bean
@DependsOn("userTransactionService")
public UserTransaction userTransaction()
{
UserTransactionImp ut = new UserTransactionImp();
try
{
ut.setTransactionTimeout(1000);
}
catch (SystemException _e)
{
appLogger.error("Configuration exception.", _e);
return null;
}
return ut;
}
@Bean
public Properties hibernateProperties()
{
Properties hibernateProp = new Properties();
hibernateProp.put("hibernate.dialect", "org.hibernate.dialect.PostgreSQLDialect");
hibernateProp.put("hibernate.hbm2ddl.auto", "create-drop");
hibernateProp.put("hibernate.show_sql", true);
hibernateProp.put("hibernate.max_fetch_depth", 3);
hibernateProp.put("hibernate.jdbc.batch_size", 10);
hibernateProp.put("hibernate.jdbc.fetch_size", 50);
return hibernateProp;
}
@Bean
public JpaVendorAdapter jpaVendorAdapter()
{
return new HibernateJpaVendorAdapter();
}
@Primary
@Bean(name = "pgDataSource1", initMethod = "init", destroyMethod = "close")
public DataSource pgDataSource1()
{
PGXADataSource primaryXaDataSource = new PGXADataSource();
primaryXaDataSource.setUrl(dataSourceUrl);
primaryXaDataSource.setUser(dsUsername);
primaryXaDataSource.setPassword(dsPassword);
AtomikosDataSourceBean xaDataSource = new AtomikosDataSourceBean();
xaDataSource.setXaDataSource(primaryXaDataSource);
xaDataSource.setUniqueResourceName("primaryXaDs1");
xaDataSource.setMinPoolSize(initialDSConnections);
xaDataSource.setMaxPoolSize(maxDSConnections);
return xaDataSource;
}
@Primary
@Bean(name = "jmsConnectionFactory", initMethod = "init", destroyMethod = "close")
public ConnectionFactory connectionFactory()
{
AtomikosConnectionFactoryBean atomikosConnectionFactoryBean = new AtomikosConnectionFactoryBean();
ActiveMQConnectionFactory activeMqXaConnectionFactory = new ActiveMQConnectionFactory();
try
{
activeMqXaConnectionFactory.setBrokerURL(brokerUrl);
activeMqXaConnectionFactory.setUser(brokerUsername);
activeMqXaConnectionFactory.setPassword(brokerPassword);
atomikosConnectionFactoryBean.setUniqueResourceName("jmsXAConnectionFactory");
atomikosConnectionFactoryBean.setLocalTransactionMode(false);
atomikosConnectionFactoryBean.setXaConnectionFactory(activeMqXaConnectionFactory);
}
catch (JMSException _e)
{
appLogger.info("JMS Configuration Error: " + _e);
_e.printStackTrace();
}
return atomikosConnectionFactoryBean;
}
@PostConstruct
public void postConstructDetails()
{
appLogger.info("Post Construct Start: JtaConfig.");
appLogger.info(" - JMS: Artemis URL: {}", brokerUrl);
appLogger.info(" - Artemis Username: {}", brokerUsername);
appLogger.info(" - Artemis Password: {}", brokerPassword);
appLogger.info(" - DS: PostgreSQL URL: {}", dataSourceUrl);
appLogger.info(" - DS: PostgreSQL Username: {}", dsUsername);
appLogger.info(" - DS: PostgreSQL Password: {}", dsPassword);
appLogger.info(" - DS: PostgreSQL Min Conn: {}", initialDSConnections);
appLogger.info(" - DS: PostgreSQL Max Conn: {}", maxDSConnections);
appLogger.info("Post Construct End: JtaConfig.");
appLogger.info(" ");
}
}
Here is the implementation for Services Configuration:
Services Configuration:
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.jms.core.JmsTemplate;
import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import org.springframework.transaction.jta.JtaTransactionManager;
import org.springframework.transaction.PlatformTransactionManager;
import org.springframework.transaction.annotation.EnableTransactionManagement;
import javax.annotation.PostConstruct;
import javax.persistence.EntityManagerFactory;
@Configuration
@EnableTransactionManagement
@ComponentScan(basePackages = "xxx.xxx.service")
public class ServicesConfig
{
private Logger appLogger = LoggerFactory.getLogger(ServicesConfig.class);
@Autowired
JtaConfig jtaConfig;
@Bean(name = "xaJmsTemplate")
public JmsTemplate jmsTemplate()
{
JmsTemplate jmsTemplate = new JmsTemplate();
jmsTemplate.setConnectionFactory(jtaConfig.connectionFactory());
jmsTemplate.setPubSubDomain(false);
return jmsTemplate;
}
@Bean(name = "entityManagerFactory")
public EntityManagerFactory entityManagerFactory()
{
LocalContainerEntityManagerFactoryBean factoryBean = new LocalContainerEntityManagerFactoryBean();
factoryBean.setPackagesToScan("xxx.xxx.model");
factoryBean.setDataSource(jtaConfig.pgDataSource1());
factoryBean.setJpaProperties(jtaConfig.hibernateProperties());
factoryBean.setPersistenceUnitName("entityManagerFactoryA");
factoryBean.setJpaVendorAdapter(jtaConfig.jpaVendorAdapter());
factoryBean.afterPropertiesSet();
return factoryBean.getNativeEntityManagerFactory();
}
@Bean(name = "transactionManager")
public PlatformTransactionManager transactionManager()
{
JtaTransactionManager ptm = new JtaTransactionManager();
ptm.setTransactionManager(jtaConfig.atomikosTransactionManager());
ptm.setUserTransaction(jtaConfig.userTransaction());
return ptm;
}
@PostConstruct
public void postConstructDetails()
{
appLogger.info("Post Construct Start: ServicesConfig.");
appLogger.info(" - JMS: Artemis URL: {}", jtaConfig);
appLogger.info(" - JMS Template: {}", jmsTemplate());
appLogger.info("Post Construct End: ServicesConfig.");
appLogger.info(" ");
}
}
Here is the Service implementation:
TransactionServiceImpl
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.jms.core.JmsTemplate;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import xxx.xxx.Users;
@Service("transactionService")
@Transactional
public class TransactionServiceImpl implements TransactionServiceIntf
{
private static final Logger appLogger = LoggerFactory.getLogger(TransactionServiceImpl.class);
@Autowired
@Qualifier("xaJmsTemplate")
JmsTemplate jmsTemplate;
@Override
public Users save(Users _user)
{
appLogger.info("TransactionServiceImpl: save: Entered.");
Users user = _user;
try
{
if(user == null)
{
appLogger.info("User: Null.");
}
else
{
if(jmsTemplate == null)
{
appLogger.info("JMS Template: Null.");
}
else
{
appLogger.info("JMS Template: Saving.");
jmsTemplate.convertAndSend("crequests", user);
}
}
// The rollback should happen with the exception.
throw new Exception();
}
catch(Exception _e)
{
appLogger.error("Catching exception: " + _e);
}
appLogger.info("TransactionServiceImpl: save: Exiting.");
return user;
}
}
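One thing worth noting about the code above: the Exception is caught inside save() itself, so it never propagates out of the @Transactional proxy, and Spring has nothing to trigger a rollback on. In addition, @Transactional rolls back only on unchecked exceptions by default. A minimal sketch of a variant that would mark the JTA transaction rollback-only (the exception choice is illustrative):
// Sketch: let the exception escape the @Transactional method.
// By default Spring rolls back on RuntimeException/Error only;
// rollbackFor widens that to checked exceptions.
@Transactional(rollbackFor = Exception.class)
public Users save(Users user) throws Exception {
    jmsTemplate.convertAndSend("crequests", user); // enlists the XA JMS session
    throw new Exception("forcing rollback for testing"); // propagates -> rollback
}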

Spring-Batch: Testing custom itemReader

I am trying to test my custom ItemReader:
@Bean
@StepScope
MyMultiLineItemReader itemReader(@Value("#{stepExecutionContext['fileName']}") String filename) throws MalformedURLException {
MyMultiLineItemReader itemReader = new MyMultiLineItemReader();
itemReader.setDelegate(myFlatFileItemReader(filename));
return itemReader;
}
@Bean
@StepScope
public FlatFileItemReader<String> myFlatFileItemReader(@Value("#{stepExecutionContext['fileName']}") String filename) throws MalformedURLException {
return new FlatFileItemReaderBuilder<String>()
.name("myFlatFileItemReader")
.resource(new UrlResource(filename))
.lineMapper(new PassThroughLineMapper())
.build();
}
my test class looks like
@Test
public void givenMockedStep_whenReaderCalled_thenSuccess() throws Exception {
// given
JobExecution jobExecution = new JobExecution(5L);
ExecutionContext ctx = new ExecutionContext();
ctx.put("fileName", "src/main/resources/data/input.txt");
jobExecution.setExecutionContext(ctx);
JobSynchronizationManager.register(jobExecution);
StepExecution stepExecution = MetaDataInstanceFactory.createStepExecution(ctx);
// when
StepScopeTestUtils.doInStepScope(stepExecution, () -> {
...
});
}
When I run the test case, the process fails because the fileName parameter is null.
I am looking for the right way of testing the itemReader.
Thanks
You don't need to create a JobExecution and register it in a JobSynchronizationManager to test a step-scoped component. Mocking a step execution and using it in StepScopeTestUtils.doInStepScope is enough. Here is a complete example:
import java.util.ArrayList;
import java.util.List;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.batch.core.StepExecution;
import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.batch.item.ExecutionContext;
import org.springframework.batch.item.file.FlatFileItemReader;
import org.springframework.batch.item.file.builder.FlatFileItemReaderBuilder;
import org.springframework.batch.item.file.mapping.PassThroughLineMapper;
import org.springframework.batch.test.MetaDataInstanceFactory;
import org.springframework.batch.test.StepScopeTestUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.io.ClassPathResource;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringRunner;
@RunWith(SpringRunner.class)
@ContextConfiguration(classes = StepScopedComponentTest.MyConfiguration.class)
public class StepScopedComponentTest {
@Autowired
private FlatFileItemReader<String> reader;
@Test
public void givenMockedStep_whenReaderCalled_thenSuccess() throws Exception {
// given
ExecutionContext ctx = new ExecutionContext();
ctx.put("fileName", "data/input.txt");
StepExecution stepExecution = MetaDataInstanceFactory.createStepExecution(ctx);
// when
List<String> items = StepScopeTestUtils.doInStepScope(stepExecution, () -> {
List<String> result = new ArrayList<>();
String item;
reader.open(stepExecution.getExecutionContext());
while ((item = reader.read()) != null) {
result.add(item);
}
reader.close();
return result;
});
// then
Assert.assertEquals(2, items.size());
Assert.assertEquals("foo", items.get(0));
Assert.assertEquals("bar", items.get(1));
}
@Configuration
@EnableBatchProcessing
static class MyConfiguration {
@Bean
@StepScope
public FlatFileItemReader<String> myFlatFileItemReader(@Value("#{stepExecutionContext['fileName']}") String filename) {
return new FlatFileItemReaderBuilder<String>()
.name("myFlatFileItemReader")
.resource(new ClassPathResource(filename))
.lineMapper(new PassThroughLineMapper())
.build();
}
}
}
This test passes assuming the class path resource data/input.txt contains two lines foo and bar.
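For reference, the assumed fixture (typically src/test/resources/data/input.txt) is just:
foo
bar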

Spring Batch - Pass parameter from Writer to Listener for afterJob(JobExecution jobExecution)

I have a writer that produces a dynamic value which needs to be passed to the JobExecutionListener's afterJob(JobExecution jobExecution). Would appreciate some advice. Thank you.
<beans:beans>
<job id="GoodJob">
<step id="XXX"
allow-start-if-complete="true"
parent="XXX">
<tasklet transaction-manager="XXX">
<chunk reader="READER"
writer="WRITER"
commit-interval="100"/>
</tasklet>
</step>
<listeners>
<listener>
<beans:bean class="class that implements JobExecutionListener">
<beans:constructor-arg name="value"
value="DEFAULT IS FALSE DURING INITIALIZATION/MIGHT GET CHANGED IN WRITER, GET THIS VALUE FROM WRITER"/>
</beans:bean>
</listener>
</listeners>
</job>
</beans:beans>
You can pass data between these two components through the job execution context. This is detailed in the Passing Data to Future Steps section. Here is a quick example:
import java.util.Arrays;
import java.util.List;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.JobExecutionListener;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.StepExecution;
import org.springframework.batch.core.annotation.BeforeStep;
import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.core.launch.JobLauncher;
import org.springframework.batch.item.ExecutionContext;
import org.springframework.batch.item.ItemReader;
import org.springframework.batch.item.ItemWriter;
import org.springframework.batch.item.support.ListItemReader;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import org.springframework.context.annotation.AnnotationConfigApplicationContext;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
@Configuration
@EnableBatchProcessing
public class MyJob {
@Autowired
private JobBuilderFactory jobs;
@Autowired
private StepBuilderFactory steps;
@Bean
public ItemReader<Integer> itemReader() {
return new ListItemReader<>(Arrays.asList(1, 2, 3, 4));
}
@Bean
public ItemWriter<Integer> itemWriter() {
return new ItemWriter<Integer>() {
private StepExecution stepExecution;
@Override
public void write(List<? extends Integer> items) throws Exception {
for (Integer item : items) {
System.out.println("item = " + item);
}
stepExecution.getJobExecution().getExecutionContext().put("data", "foo");
}
@BeforeStep
public void saveStepExecution(StepExecution stepExecution) {
this.stepExecution = stepExecution;
}
};
}
@Bean
public Step step() {
return steps.get("step")
.<Integer, Integer>chunk(2)
.reader(itemReader())
.writer(itemWriter())
.build();
}
@Bean
public Job job() {
return jobs.get("job")
.start(step())
.listener(new JobExecutionListener() {
@Override
public void beforeJob(JobExecution jobExecution) {
}
@Override
public void afterJob(JobExecution jobExecution) {
ExecutionContext executionContext = jobExecution.getExecutionContext();
String data = executionContext.getString("data");
System.out.println("data from writer = " + data);
}
})
.build();
}
public static void main(String[] args) throws Exception {
ApplicationContext context = new AnnotationConfigApplicationContext(MyJob.class);
JobLauncher jobLauncher = context.getBean(JobLauncher.class);
Job job = context.getBean(Job.class);
jobLauncher.run(job, new JobParameters());
}
}
In this example, the writer puts the key data with the value foo into the job execution context. The job listener then retrieves this key from the execution context.
Hope this helps.
To add to what Mahmoud said: if the writer stores the value in the step execution context (rather than directly in the job execution context as above), you need one extra step to see the data in the JobExecutionListener: promote the key from the step context to the job context.
@Bean
public ExecutionContextPromotionListener promotionListener() {
ExecutionContextPromotionListener listener = new ExecutionContextPromotionListener();
listener.setKeys(new String[] {"data"});
return listener;
}
Then register it in your step:
@Bean
public Step step() {
return steps.get("step")
.<Integer, Integer>chunk(2)
.reader(itemReader())
.writer(itemWriter())
.listener(promotionListener())
.build();
}
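With the promotion listener registered, the writer stores the key in the step execution context instead of the job execution context, and the listener copies it to the job context once the step completes. The relevant line in the writer becomes:
// Step-level context; ExecutionContextPromotionListener promotes "data" to the job context.
stepExecution.getExecutionContext().put("data", "foo");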

spring-kafka how to run consumer in background

I found a tutorial on spring-kafka where they created a producer and a consumer. However, the program is run through a test case, and as the test case ends, the consumer stops.
How do I ensure the consumer keeps running in the background so that I can send some test messages from my terminal command line?
SpringKafkaExampleApplication.java
package com.howtoprogram.kafka;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
@SpringBootApplication
public class SpringKafkaExampleApplication {
public static void main(String[] args) {
SpringApplication.run(SpringKafkaExampleApplication.class,
args);
}
}
KafkaProducerConfig.java
package com.howtoprogram.kafka;
import java.util.HashMap;
import java.util.Map;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.annotation.EnableKafka;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.core.ProducerFactory;
@Configuration
@EnableKafka
public class KafkaProducerConfig {
@Bean
public ProducerFactory<String, String> producerFactory() {
return new DefaultKafkaProducerFactory<>(producerConfigs());
}
@Bean
public Map<String, Object> producerConfigs() {
Map<String, Object> props = new HashMap<>();
props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG,
"localhost:9092");
props.put(ProducerConfig.RETRIES_CONFIG, 0);
props.put(ProducerConfig.BATCH_SIZE_CONFIG, 16384);
props.put(ProducerConfig.LINGER_MS_CONFIG, 1);
props.put(ProducerConfig.BUFFER_MEMORY_CONFIG, 33554432);
props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG,
StringSerializer.class);
props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,
StringSerializer.class);
return props;
}
@Bean
public KafkaTemplate<String, String> kafkaTemplate() {
return new KafkaTemplate<String, String>(producerFactory());
}
}
KafkaConsumerConfig.java
package com.howtoprogram.kafka;
import java.util.HashMap;
import java.util.Map;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.annotation.EnableKafka;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.config.KafkaListenerContainerFactory;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.listener.ConcurrentMessageListenerContainer;
@Configuration
@EnableKafka
public class KafkaConsumerConfig {
@Bean
KafkaListenerContainerFactory<ConcurrentMessageListenerContainer<String, String>> kafkaListenerContainerFactory() {
ConcurrentKafkaListenerContainerFactory<String, String> factory = new ConcurrentKafkaListenerContainerFactory<>();
factory.setConsumerFactory(consumerFactory());
factory.setConcurrency(3);
factory.getContainerProperties().setPollTimeout(3000);
return factory;
}
@Bean
public ConsumerFactory<String, String> consumerFactory() {
return new DefaultKafkaConsumerFactory<>(consumerConfigs());
}
@Bean
public Map<String, Object> consumerConfigs() {
Map<String, Object> propsMap = new HashMap<>();
propsMap.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
propsMap.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, false);
propsMap.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, "100");
propsMap.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, "15000");
propsMap.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
propsMap.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
propsMap.put(ConsumerConfig.GROUP_ID_CONFIG, "group1");
propsMap.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
return propsMap;
}
@Bean
public Listener listener() {
return new Listener();
}
}
Listener.java
package com.howtoprogram.kafka;
import java.util.concurrent.CountDownLatch;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.kafka.annotation.KafkaListener;
public class Listener {
public final CountDownLatch countDownLatch1 = new CountDownLatch(1);
@KafkaListener(id = "foo", topics = "topic1", group = "group1")
public void listen(ConsumerRecord<?, ?> record) {
System.out.println(record);
countDownLatch1.countDown();
}
}
SpringKafkaExampleApplicationTests.java
package com.howtoprogram.kafka;
import static org.assertj.core.api.Assertions.assertThat;
import java.util.concurrent.TimeUnit;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.support.SendResult;
import org.springframework.kafka.test.rule.KafkaEmbedded;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.util.concurrent.ListenableFuture;
import org.springframework.util.concurrent.ListenableFutureCallback;
@RunWith(SpringRunner.class)
@SpringBootTest
public class SpringKafkaExampleApplicationTests {
@Autowired
private KafkaTemplate<String, String> kafkaTemplate;
@Autowired
private Listener listener;
@Test
public void contextLoads() throws InterruptedException {
ListenableFuture<SendResult<String, String>> future = kafkaTemplate.send("topic1", "ABC");
future.addCallback(new ListenableFutureCallback<SendResult<String, String>>() {
@Override
public void onSuccess(SendResult<String, String> result) {
System.out.println("success");
}
@Override
public void onFailure(Throwable ex) {
System.out.println("failed");
}
});
System.out.println(Thread.currentThread().getId());
assertThat(this.listener.countDownLatch1.await(60, TimeUnit.SECONDS)).isTrue();
}
}
Please help!
We run our Kafka consumer in a while loop inside an @Scheduled method on a Spring bean: https://docs.spring.io/spring/docs/current/spring-framework-reference/html/scheduling.html
This way you can also delay the consumption of messages while the rest of your application is initialising.
@Scheduled(initialDelay = 5000L, fixedDelay = 10000L)
public void process() {
while (keepRunning) {
try {
ConsumerRecords<String, String> records = consumer.poll(500);
// do processing here
} catch (Exception e) {
// log the failure and keep polling
}
}
}
The fixedDelay is a bit strange: the annotation requires a value, but it is effectively ignored here because the method never returns while keepRunning is true.
It might be tempting to start the consumer in a @PostConstruct method instead, but that way Spring keeps thinking the bean is still in its init phase (so don't do this, as Artem Bilan mentions below).
Add this code to your main method after SpringApplication.run():
System.out.println("Hit 'Enter' to terminate");
System.in.read();
ctx.close();
System.exit(0);
And your program won't exit until you press Enter in the console.
