My ThreadPoolTaskExecutor's corePoolSize is 5, and in the JdbcPagingItemReader bean config I have set saveState to false (per the documentation, it should be false in a multi-threaded environment). My table has a primary key, which I use in the sortKey attribute of the queryProvider. Yet when the task executor spawns all five threads simultaneously, they all try to read the data and the reader's startAfterValues gets messed up: each thread reads duplicate rows because startAfterValues is not thread-safe.
How do I overcome this?
Here is my config:
<job id="myJob" xmlns="http://www.springframework.org/schema/batch" incrementer="jobIncrementer">
<step id="step1">
<tasklet task-executor="myTaskExecutor">
<chunk reader="myReader" writer="myWriter" commit-interval="1000" />
<transaction-attributes isolation="READ_COMMITTED" />
</tasklet>
</step>
</job>
<bean id="myReader" class="org.springframework.batch.item.database.JdbcPagingItemReader" scope="step">
<property name="dataSource" ref="myDataSource" />
<property name="queryProvider">
<bean class="org.springframework.batch.item.database.support.SqlPagingQueryProviderFactoryBean">
<property name="dataSource" ref="myDataSource"/>
<property name="selectClause" value="SELECT ID, NAME"/>
<property name="fromClause" value="FROM EMPLOYEE" />
<property name="whereClause" value="where 1=1" />
<property name="sortKey" value="ID"/>
</bean>
</property>
<property name="pageSize" value="1000"/>
<property name="fetchSize" value="1000"/>
<property name="saveState" value="false"/>
<property name="rowMapper">
<bean class="com...MyRowMapper"/>
</property>
</bean>
<bean id="myWriter" class="com...MyItemWriter" scope="step">
<constructor-arg name="jdbcTemplate" ref="jdbcTemplate" />
<constructor-arg name="namedParamJdbcTemplate" ref="namedParamJdbcTemplate" />
<constructor-arg name="endUserID" value="123" />
</bean>
<bean id="myTaskExecutor" class="org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor">
<property name="corePoolSize" value="5" />
<property name="maxPoolSize" value="10"/>
</bean>
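Since all five threads share a single reader instance (step scope gives one instance per step execution, not per thread), every read() races on startAfterValues. One workaround is to serialize access by wrapping the reader in a SynchronizedItemStreamReader; a minimal sketch, assuming your Spring Batch 3.0.x version ships that class:
<!-- sketch: serializes concurrent read() calls to the paging reader -->
<bean id="synchronizedReader" class="org.springframework.batch.item.support.SynchronizedItemStreamReader" scope="step">
<property name="delegate" ref="myReader" />
</bean>
Then point the chunk at it (reader="synchronizedReader"). If you need genuinely parallel reads, the usual approach is instead a partitioned step where each partition reads a non-overlapping key range.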
I have defined a queue, blah.queue, and defined the dead-letter-routing-key for it. However, when I have a poison message that fails with an exception, the same message is requeued and reattempted in an infinite loop.
I would expect that after 3 retries the message would be put on the exchange with the dead-letter-routing-key, but that doesn't seem to be happening.
I have the following settings:
<rabbit:queue name="blah.queue" auto-delete="false" durable="true">
<rabbit:queue-arguments>
<entry key="x-dead-letter-exchange" value="" />
<entry key="x-dead-letter-routing-key" value="blah.queue.dlq.route" />
<entry key="x-ha-policy" value="all" />
</rabbit:queue-arguments>
</rabbit:queue>
<rabbit:direct-exchange name="${rabbit.idesk.exchange}">
<rabbit:bindings>
<rabbit:binding queue="blah.queue" key="blah.route" />
</rabbit:bindings>
</rabbit:direct-exchange>
<bean id="myConsumer" class="com.ankit.CustomConsumer" />
<bean id="myConsumerMessageListenerAdapter" class="org.springframework.amqp.rabbit.listener.adapter.MessageListenerAdapter">
<constructor-arg ref="myConsumer" />
<constructor-arg ref="myMessageConverter" />
</bean>
<bean id="myConsumerMessageListenerContainer" class="org.springframework.amqp.rabbit.listener.SimpleMessageListenerContainer">
<property name="queueNames" value="blah.queue" />
<property name="connectionFactory" ref="queueConnectionFactory" />
<property name="messageListener" ref="myConsumerMessageListenerAdapter" />
<property name="errorHandler" ref="loggingErrorHandler" />
<property name="adviceChain">
<list>
<ref bean="retryAdvice" />
</list>
</property>
</bean>
<bean id="loggingErrorHandler" class="org.springframework.scheduling.support.TaskUtils.LoggingErrorHandler" />
<bean id="myMessageConverter"
class="org.springframework.amqp.support.converter.JsonMessageConverter">
<property name="classMapper">
<bean class="com.ankit.queue.mapper.NamedClassMapper">
<constructor-arg
value="com.ankit.dto.EventDTO" />
</bean>
</property>
<property name="createMessageIds" value="true" />
</bean>
<bean id="retryAdvice" class="org.springframework.amqp.rabbit.config.StatefulRetryOperationsInterceptorFactoryBean">
<property name="messageRecoverer" ref="rejectAndDontRequeueRecoverer" />
<property name="retryOperations" ref="retryTemplate" />
</bean>
<bean id="retryTemplate" class="org.springframework.retry.support.RetryTemplate">
<property name="retryPolicy" ref="simpleRetryPolicy" />
<property name="backOffPolicy">
<bean class="org.springframework.retry.backoff.FixedBackOffPolicy">
<property name="backOffPeriod" value="5000" />
</bean>
</property>
</bean>
<bean id="simpleRetryPolicy" class="org.springframework.retry.policy.SimpleRetryPolicy">
<property name="maxAttempts" value="3" />
</bean>
Do you not see the WARN message from the recoverer?
@Override
public void recover(Message message, Throwable cause) {
    if (this.logger.isWarnEnabled()) {
        this.logger.warn("Retries exhausted for message " + message, cause);
    }
    throw new ListenerExecutionFailedException("Retry Policy Exhausted",
            new AmqpRejectAndDontRequeueException(cause), message);
}
Turn on DEBUG logging to watch the retry behavior.
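For example, with a log4j 1.x backend (an assumption; adapt to your logging framework), these two categories show every delivery attempt, the backoff, and the final recovery:
<!-- hypothetical log4j.xml fragment; the category names are the actual Spring packages -->
<logger name="org.springframework.retry">
<level value="DEBUG" />
</logger>
<logger name="org.springframework.amqp">
<level value="DEBUG" />
</logger>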
I am trying to read rows from CSV files and persist them into a database. I am using a MultiResourceItemReader delegating to a FlatFileItemReader, and a JdbcBatchItemWriter for the read/persist operations. I configured a commit-interval of 50 (for example) and a skip policy.
I am using spring-batch 3.0.8 and an Oracle database.
To keep it simple: in the CSV file I have 2 rows and the commit-interval is 2.
Here, ROLLNO 201 is the record already present in the DB.
Observation:
1. If the 1st row is a duplicate of a record already in the DB and the 2nd row is a new record, the new record is inserted into the DB and the 1st row is skipped as a duplicate. [Working as expected.]
ROLLNO NAME CLASS CITY
201 JOHN 4 MADISON
202 STEPHEN 5 MADISON
2. If the 1st row is a new record and the 2nd row is a duplicate of a record already in the DB, the new record is not inserted into the DB. [Issue.]
ROLLNO NAME CLASS CITY
202 STEPHEN 5 MADISON
201 JOHN 4 MADISON
I see that the transaction rollback covers all records of the commit interval, not just the failing record: if the commit-interval is 10, then none of the 10 records may be a duplicate for the transaction to commit.
Can anyone help me on this as I'm clueless here?
<bean id="jobRepository" class="org.springframework.batch.core.repository.support.JobRepositoryFactoryBean">
<property name="transactionManager" ref="transactionManager" />
<property name="dataSource" ref="cisDataSource" />
<property name="lobHandler" ref="lobHandler" />
<property name="isolationLevelForCreate" value="ISOLATION_READ_COMMITTED" />
</bean>
<bean id="transactionManager"
class="org.springframework.jdbc.datasource.DataSourceTransactionManager">
<property name="dataSource" ref="cisDataSource" />
</bean>
<bean id="cisDataSource" class="org.apache.commons.dbcp.BasicDataSource">
<property name="initialSize" value="1" />
<property name="maxActive" value="${db.connection.pool.size}" />
<property name="driverClassName" value="oracle.jdbc.driver.OracleDriver" />
<property name="url" value="${cisdb.connection.string}" />
<property name="username" value="${cisdb.username}" />
<property name="password" value="${cisdb.password}" />
</bean>
<batch:job id="mdtJob1">
<batch:step id="mdtJob1Step1">
<batch:tasklet ref="fileTransferToProcessingFolderTasklet" />
<batch:next on="COMPLETED" to="mdtJob1Step2" />
</batch:step>
<batch:step id="mdtJob1Step2">
<batch:tasklet>
<batch:chunk reader="multiResourceReader" writer="naxAddressSqlItemWriter"
commit-interval="5">
<batch:skip-policy>
<bean class="org.springframework.batch.core.step.skip.AlwaysSkipItemSkipPolicy" scope="step"/>
</batch:skip-policy>
<batch:retry-policy>
<bean class="org.springframework.retry.policy.NeverRetryPolicy" scope="step"/>
</batch:retry-policy>
</batch:chunk>
<batch:no-rollback-exception-classes>
<batch:include class="java.sql.SQLException"/>
<batch:include class="org.springframework.dao.DuplicateKeyException"/>
<batch:include class="java.sql.SQLIntegrityConstraintViolationException"/>
</batch:no-rollback-exception-classes>
</batch:tasklet>
<batch:next on="COMPLETED" to="mdtJob1Step3" />
</batch:step>
<batch:step id="mdtJob1Step3">
<batch:tasklet ref="fileTransferToArchiveFolderTasklet" />
</batch:step>
</batch:job>
<bean id="multiResourceReader"
class="org.springframework.batch.item.file.MultiResourceItemReader"
scope="step">
<property name="delegate" ref="flatFileItemReader" />
<property name="resources" value="${batch.processing.files}" />
</bean>
<bean id="flatFileItemReader" class="org.springframework.batch.item.file.FlatFileItemReader">
<property name="lineMapper">
<bean class="org.springframework.batch.item.file.mapping.DefaultLineMapper">
<property name="lineTokenizer">
<bean
class="org.springframework.batch.item.file.transform.DelimitedLineTokenizer">
<property name="names"
value="${csv.fields.in.order}" />
</bean>
</property>
<property name="fieldSetMapper">
<bean class="org.springframework.batch.item.file.mapping.BeanWrapperFieldSetMapper">
<property name="prototypeBeanName" value="addressDto" />
</bean>
</property>
</bean>
</property>
</bean>
<bean id="naxAddressSqlItemWriter"
class="org.springframework.batch.item.database.JdbcBatchItemWriter">
<property name="dataSource" ref="cdmDataSource" />
<property name="sql" value="${nax.address.insertion.query}" />
<property name="itemSqlParameterSourceProvider">
<bean class="org.springframework.batch.item.database.BeanPropertyItemSqlParameterSourceProvider" />
</property>
</bean>
I hope you have found a solution by now. Just in case you haven't, this worked for me: set commit-interval = 1. This makes sure one row is inserted at a time, so a duplicate row never takes down a whole chunk.
For example:
chunk(1)
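chunk(1) is the Java-DSL form; in the XML style used above it is simply the commit-interval attribute:
<batch:chunk reader="multiResourceReader" writer="naxAddressSqlItemWriter" commit-interval="1" />
With one item per transaction, a duplicate only rolls back (and skips) itself, so a preceding new record stays committed. The trade-off is speed: one commit per row is much slower than batching.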
I'm trying to integrate the Atomikos transaction manager into a Spring Integration program that forwards JMS messages from ActiveMQ to a WebMethods ESB.
The Spring Integration part only retrieves JMS messages from the local ActiveMQ broker and sends them to a remote ESB broker.
When I test the nominal case, the message is sent fine, passes through the ESB, and is then dispatched to the subscribers.
When I test the case where the ESB send fails, I have an issue: the message is never rolled back to the source broker. I suppose it's a transaction issue, because the transaction should have been rolled back when the program failed to publish on the ESB broker, but apparently it isn't.
Here's my Spring config:
<bean id="transactionManager"
class="org.springframework.transaction.jta.JtaTransactionManager">
<property name="transactionManager" ref="AtomikosTransactionManager" />
<property name="userTransaction" ref="AtomikosUserTransaction" />
</bean>
<!-- Atomikos Transaction Manager Definition (JTA) -->
<bean id="AtomikosTransactionManager" class="com.atomikos.icatch.jta.UserTransactionManager"
init-method="init" destroy-method="close" depends-on="atomikosConnectionFactorySource,connectionFactoryDestination">
<property name="transactionTimeout" value="300" />
<property name="forceShutdown" value="false" />
</bean>
<bean id="AtomikosUserTransaction" class="com.atomikos.icatch.jta.UserTransactionImp">
<property name="transactionTimeout" value="300" />
</bean>
<bean id="jmsXaConnectionFactory" class="org.apache.activemq.ActiveMQXAConnectionFactory">
<property name="brokerURL" value="${source.java.naming.provider.url}" />
<property name="userName" value="${source.username}" />
<property name="password" value="${source.passwd}" />
</bean>
<bean id="atomikosConnectionFactorySource" class="com.atomikos.jms.AtomikosConnectionFactoryBean"
init-method="init" destroy-method="close">
<property name="poolSize" value="1" />
<property name="uniqueResourceName" value="activemq" />
<property name="xaConnectionFactory" ref="jmsXaConnectionFactory" />
</bean>
<bean id="connectionFactorySource"
class="org.springframework.jms.connection.SingleConnectionFactory">
<property name="targetConnectionFactory" ref="jmsXaConnectionFactory" />
<property name="clientId" value="CustomerOrderForwarderID" />
<property name="reconnectOnException" value="true" />
</bean>
<!-- Destination JNDI Context -->
<bean id="jndiTemplateDestination" class="org.springframework.jndi.JndiTemplate"
lazy-init="true">
<property name="environment">
<props>
<prop key="java.naming.factory.initial">${destination.java.naming.factory.initial}</prop>
<prop key="java.naming.provider.url">${destination.java.naming.provider.url}</prop>
<prop key="java.naming.factory.url.pkgs">${destination.java.naming.factory.url.pkgs}</prop>
</props>
</property>
</bean>
<!-- Destination Connection factory -->
<bean id="customerOrderXAConnectionFactoryDestination" class="org.springframework.jndi.JndiObjectFactoryBean"
lazy-init="true">
<property name="jndiTemplate" ref="jndiTemplateDestination" />
<property name="jndiName"
value="${destination.java.naming.factory.connection}" />
<property name="lookupOnStartup" value="false" />
<property name="proxyInterface" value="javax.jms.XAConnectionFactory" />
</bean>
<bean id="connectionFactoryDestination" class="com.atomikos.jms.AtomikosConnectionFactoryBean"
init-method="init" destroy-method="close">
<property name="poolSize" value="100" />
<property name="uniqueResourceName" value="esb" />
<property name="xaConnectionFactory" ref="customerOrderXAConnectionFactoryDestination" />
<property name="localTransactionMode" value="true" />
</bean>
<bean id="ddr" class="com.adeo.transverse.jms.forwarder.customerorder.DynamicDestinationResolver" />
<bean id="userCredentialsConnectionFactoryDestination"
class="org.springframework.jms.connection.UserCredentialsConnectionFactoryAdapter" lazy-init="true">
<property name="targetConnectionFactory">
<ref bean="connectionFactoryDestination" />
</property>
<property name="username" value="${destination.username}" />
<property name="password" value="${destination.passwd}" />
</bean>
Here's the integration part :
<!-- In bridge -->
<jms:message-driven-channel-adapter
id="StoreStockMotionSourceJmsAdapter" channel="bridgeChannelStoreStockMotionEnricher"
container="jmsContainerSourceStoreStockMotion" />
<!-- Channel -->
<si:channel id="bridgeChannelStoreStockMotionEnricher" />
<jms:outbound-channel-adapter id="StoreStockMotionDestinationJmsAdapter"
channel="bridgeChannelStoreStockMotionEnricher" jms-template="jmsTemplateStoreStockMotionDestination" />
<bean id="jmsTemplateStoreStockMotionDestination" class="org.springframework.jms.core.JmsTemplate">
<property name="transactionManager" ref ="transactionManager"/>
<property name="connectionFactory" ref="userCredentialsConnectionFactoryDestination" />
<property name="defaultDestinationName" value="${StoreStockMotion.destination.topic}" />
<property name="defaultDestination" ref="StoreStockMotionDestinationTopic" />
<property name="pubSubDomain" value="true"/>
</bean>
<!-- Topic JMS for published message -->
<bean id="StoreStockMotionDestinationTopic" class="org.springframework.jndi.JndiObjectFactoryBean" lazy-init="true">
<property name="jndiTemplate">
<ref bean="jndiTemplateDestination" />
</property>
<property name="jndiName">
<value>${StoreStockMotion.destination.topic}</value>
</property>
</bean>
<!-- Topic JMS for Subscribing Message -->
<bean id="jmsContainerSourceStoreStockMotion"
class="org.springframework.jms.listener.DefaultMessageListenerContainer"
lazy-init="true">
<property name="connectionFactory" ref="connectionFactorySource" />
<property name="destinationName" value="${StoreStockMotion.source.topic}" />
<property name="subscriptionDurable" value="true" />
<!-- 2 is client acknowledge -->
<property name="sessionAcknowledgeMode" value="2" />
<property name="durableSubscriptionName" value="${StoreStockMotion.source.subname}" />
<property name="sessionTransacted" value="false" />
<property name="pubSubDomain" value="true"/>
</bean>
Source and destination are both encapsulated in XA connection factories, and the transactionManager handles the two transactions. Any idea what's missing?
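One thing worth checking (a sketch of the usual setup, not a confirmed diagnosis): the consuming container does not participate in the JTA transaction, since sessionTransacted is false and no transaction manager is set on it, so there is no transactional receive to roll the message back into. Something like this on the DefaultMessageListenerContainer ties the receive to the XA transaction:
<property name="sessionTransacted" value="true" />
<property name="transactionManager" ref="transactionManager" />
<!-- the container should also consume through the XA-enlisted factory, e.g. atomikosConnectionFactorySource, rather than the plain connectionFactorySource -->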
I'm trying to read a CSV file and write the data into 2 output CSV files. I read lines from the input CSV and write those lines to two output files, but I want the products grouped by product type in each output file, with their prices summed.
A small example to explain:
INPUT FILE
TOSHIBA PC 2000
HP PC 1000
SUMSUNG TEL 500
Nokia TEL 300
OUTPUT FILE tel.csv
TEL 800 (500 + 300), the sum of the products that have the same type.
OUTPUT FILE pc.csv
PC 3000 (2000 + 1000), the sum of the products that have the same type.
I don't know how to achieve this; can someone help me? (See the sketch after the classifier below.)
Here is my configuration:
<batch:job id="exampleMultiWritersJob">
<batch:step id="stepMultiWriters">
<batch:tasklet transaction-manager="txManager">
<batch:chunk reader="exampleFileSourceReader" writer="exampleMultiWriters" commit-interval="10">
<batch:streams>
<batch:stream ref="telWriter" />
<batch:stream ref="pcWriter" />
</batch:streams>
</batch:chunk>
</batch:tasklet>
</batch:step>
</batch:job>
<bean id="exampleFileSourceReader" class="org.springframework.batch.item.file.FlatFileItemReader" scope="step">
<property name="resource" value="file:#{jobParameters['file']}" />
<property name="lineMapper">
<bean class="org.springframework.batch.item.file.mapping.DefaultLineMapper">
<!-- split it -->
<property name="lineTokenizer">
<bean class="org.springframework.batch.item.file.transform.DelimitedLineTokenizer">
<!-- this is missing -->
<property name="delimiter" value=";"/>
<property name="names" value="name,productType,price" />
</bean>
</property>
<property name="fieldSetMapper">
<!-- map to an object -->
<bean class="org.springframework.batch.item.file.mapping.BeanWrapperFieldSetMapper">
<property name="prototypeBeanName" value="exampleFileMapper" />
</bean>
</property>
</bean>
</property>
</bean>
<bean id="exampleFileMapper" class="ma.controle.gestion.modele.Product" scope="prototype"/>
<!-- This is to demo MultiWriters -->
<bean id="exampleMultiWriters" class="org.springframework.batch.item.support.ClassifierCompositeItemWriter" scope="step">
<property name="classifier" ref="classifier" />
</bean>
<bean id="classifier" class="org.springframework.batch.classify.BackToBackPatternClassifier">
<property name="routerDelegate">
<bean class="ma.controle.gestion.springbatch.ExampleWriterRouteImpl" />
</property>
<property name="matcherMap">
<map>
<entry key="pc" value-ref="pcWriter" />
<entry key="tel" value-ref="telWriter" />
</map>
</property>
</bean>
<bean id="pcWriter" class="org.springframework.batch.item.file.FlatFileItemWriter">
<!-- write to this csv file -->
<property name="resource" value="file:C:/output/pc.csv" />
<property name="shouldDeleteIfExists" value="true" />
<property name="shouldDeleteIfEmpty" value="true" />
<property name="appendAllowed" value="true" />
<property name="lineAggregator">
<bean class="org.springframework.batch.item.file.transform.DelimitedLineAggregator">
<property name="delimiter" value=";" />
<property name="fieldExtractor">
<bean class="org.springframework.batch.item.file.transform.BeanWrapperFieldExtractor">
<property name="names" value="productType,price" />
</bean>
</property>
</bean>
</property>
</bean>
<bean id="telWriter" class="org.springframework.batch.item.file.FlatFileItemWriter">
<!-- write to this csv file -->
<property name="resource" value="file:C:/output/tel.csv" />
<property name="shouldDeleteIfExists" value="true" />
<property name="shouldDeleteIfEmpty" value="true" />
<property name="appendAllowed" value="true" />
<property name="lineAggregator">
<bean class="org.springframework.batch.item.file.transform.DelimitedLineAggregator">
<property name="delimiter" value=";" />
<property name="fieldExtractor">
<bean class="org.springframework.batch.item.file.transform.BeanWrapperFieldExtractor">
<property name="names" value="productType,price" />
</bean>
</property>
</bean>
</property>
</bean>
<bean id="itemProcessor" class="ma.controle.gestion.springbatch.BatchItemProcessor" />
<!-- Optional JobExecutionListener to perform business logic before and after the job -->
<bean id="jobListener" class="ma.controle.gestion.springbatch.BatchJobItemListener" />
And here is the classifier
public class ExampleWriterRouteImpl {

    @Classifier
    public String classify(Product product) {
        if (product.getProductType().equals("TEL"))
            return "tel";
        else if (product.getProductType().equals("PC"))
            return "pc";
        return null;
    }
}
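Note that FlatFileItemWriter writes one line per item, so the classifier routing above will copy rows into tel.csv and pc.csv but will not sum them. One way to produce the totals is to aggregate before writing. Here is a minimal sketch (the class is hypothetical, and it assumes Product exposes getProductType() and an int getPrice()): sum per type in the writer and emit one TYPE;TOTAL line per file when the step closes.

import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.springframework.batch.item.ExecutionContext;
import org.springframework.batch.item.ItemStream;
import org.springframework.batch.item.ItemStreamException;
import org.springframework.batch.item.ItemWriter;
import ma.controle.gestion.modele.Product;

// Hypothetical writer: accumulates price totals per product type and
// writes one summary file per type (tel.csv, pc.csv, ...) on close().
public class AggregatingProductWriter implements ItemWriter<Product>, ItemStream {

    private final Map<String, Integer> totals = new HashMap<String, Integer>();
    private final String outputDir; // e.g. "C:/output"

    public AggregatingProductWriter(String outputDir) {
        this.outputDir = outputDir;
    }

    @Override
    public void write(List<? extends Product> items) {
        for (Product p : items) {
            Integer current = totals.get(p.getProductType());
            totals.put(p.getProductType(),
                    current == null ? p.getPrice() : current + p.getPrice());
        }
    }

    @Override
    public void open(ExecutionContext executionContext) { }

    @Override
    public void update(ExecutionContext executionContext) { }

    @Override
    public void close() throws ItemStreamException {
        // emit one file per type, e.g. "PC" -> pc.csv containing "PC;3000"
        for (Map.Entry<String, Integer> e : totals.entrySet()) {
            File file = new File(outputDir, e.getKey().toLowerCase() + ".csv");
            PrintWriter out = null;
            try {
                out = new PrintWriter(new FileWriter(file));
                out.println(e.getKey() + ";" + e.getValue());
            } catch (IOException ex) {
                throw new ItemStreamException("Could not write " + file, ex);
            } finally {
                if (out != null) {
                    out.close();
                }
            }
        }
    }
}

You would register it as the chunk writer in place of the classifier composite; since it implements ItemStream, it is registered as a stream automatically when referenced directly.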
Hi. The Spring Batch JobRepository actually uses JdbcTemplate for the CRUD operations on the Spring Batch metadata, and my requirement is to replace JdbcTemplate with JPA for those CRUD operations. The current configuration of the Spring Batch job repository is as follows. Can anyone kindly help with this? I have searched a lot but found nothing.
<property name="driverClassName" value="com.mysql.jdbc.Driver" />
<property name="url" value="jdbc:mysql://localhost:3307/test" />
<property name="username" value="root" />
<property name="password" value="root" />
</bean>
<!-- create job-meta tables automatically -->
<jdbc:initialize-database data-source="jobRepository-dataSource">
<jdbc:script location="classpath:/org/springframework/batch/core/schema-drop-mysql.sql" />
<jdbc:script location="classpath:/org/springframework/batch/core/schema-mysql.sql" />
</jdbc:initialize-database>
<bean id="transactionManager"
class="org.springframework.jdbc.datasource.DataSourceTransactionManager"
lazy-init="true">
<property name="dataSource" ref="jobRepository-dataSource" />
</bean>
<bean id="jobRepository-transactionManager"
class="org.springframework.jdbc.datasource.DataSourceTransactionManager"
lazy-init="true">
<property name="dataSource" ref="jobRepository-dataSource" />
</bean>
<bean id="jobRepository" class="org.springframework.batch.core.repository.support.JobRepositoryFactoryBean" >
<property name="dataSource" ref="jobRepository-dataSource" />
<property name="transactionManager" ref="jobRepository-transactionManager" />
<property name="databaseType" value="mysql" />
<property name="isolationLevelForCreate" value="ISOLATION_DEFAULT" />
<property name="tablePrefix" value="batch_" />
</bean>
<bean id="jobLauncher"
class="org.springframework.batch.core.launch.support.SimpleJobLauncher">
<property name="jobRepository" ref="jobRepository" />
</bean>
<bean id="jobExplorer"
class="org.springframework.batch.core.explore.support.JobExplorerFactoryBean"
p:dataSource-ref="jobRepository-dataSource" p:tablePrefix="batch_" />
<bean id="jobRegistryBeanPostProcessor"
class="org.springframework.batch.core.configuration.support.JobRegistryBeanPostProcessor">
<property name="jobRegistry" ref="jobRegistry" />
</bean>
<bean id="jobRegistry" class="org.springframework.batch.core.configuration.support.MapJobRegistry" />
<bean id="jobOperator" class="org.springframework.batch.core.launch.support.SimpleJobOperator">
<property name="jobRepository" ref="jobRepository" />
<property name="jobLauncher" ref="jobLauncher" />
<property name="jobRegistry" ref="jobRegistry" />
<property name="jobExplorer" ref="jobExplorer" />
</bean>
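As far as I know, Spring Batch does not ship a JPA-based JobRepository: JobRepositoryFactoryBean is JDBC-only (the only alternative provided is the in-memory MapJobRepositoryFactoryBean). The supported extension point is the four metadata DAOs behind SimpleJobRepository. A minimal sketch of the skeleton, in which every Jpa* DAO class is hypothetical and would contain your EntityManager-based CRUD code:

import org.springframework.batch.core.repository.dao.ExecutionContextDao;
import org.springframework.batch.core.repository.dao.JobExecutionDao;
import org.springframework.batch.core.repository.dao.JobInstanceDao;
import org.springframework.batch.core.repository.dao.StepExecutionDao;
import org.springframework.batch.core.repository.support.AbstractJobRepositoryFactoryBean;

// Sketch: a JPA-backed JobRepository factory. Spring Batch supplies the
// abstract factory and the DAO interfaces; the Jpa* implementations are
// hypothetical classes you would have to write yourself.
public class JpaJobRepositoryFactoryBean extends AbstractJobRepositoryFactoryBean {

    @Override
    protected JobInstanceDao createJobInstanceDao() throws Exception {
        return new JpaJobInstanceDao(); // hypothetical
    }

    @Override
    protected JobExecutionDao createJobExecutionDao() throws Exception {
        return new JpaJobExecutionDao(); // hypothetical
    }

    @Override
    protected StepExecutionDao createStepExecutionDao() throws Exception {
        return new JpaStepExecutionDao(); // hypothetical
    }

    @Override
    protected ExecutionContextDao createExecutionContextDao() throws Exception {
        return new JpaExecutionContextDao(); // hypothetical
    }
}

The DAO interfaces (JobInstanceDao, JobExecutionDao, StepExecutionDao, ExecutionContextDao) are real, so the compiler will tell you exactly which operations your JPA implementations must cover.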