Using Hibernate + Spring with 3 Transactional Databases

Greetings from Ecuador.
I have created a project that must perform operations on three different databases. For this I decided to use Hibernate ORM 5.2.7, Spring Framework 4.3.6 and a few other libraries, among them a connection-pool implementation. The Spring context is configured with annotations, as shown below:
@Configuration
@ComponentScan("fttg.*")
@EnableTransactionManagement
@EnableScheduling
@PropertySources({
    @PropertySource("classpath:application.properties"),
    @PropertySource("classpath:schedule.properties")
})
public class ApplicationConfig {

    @Autowired
    private Environment environment;

    @Bean(destroyMethod = "close")
    public BasicDataSource dataSourceBitacora() {
        BasicDataSource dataSource = new BasicDataSource();
        dataSource.setDriverClassName(environment.getRequiredProperty("postgres.jdbc.driver"));
        dataSource.setUrl(environment.getRequiredProperty("bitacora.jdbc.url"));
        dataSource.setUsername(environment.getRequiredProperty("bitacora.jdbc.username"));
        dataSource.setPassword(environment.getRequiredProperty("bitacora.jdbc.password"));
        dataSource.setPoolPreparedStatements(true);
        dataSource.setInitialSize(4);
        dataSource.setMaxTotal(4);
        dataSource.setMaxIdle(2);
        dataSource.setMinIdle(1);
        dataSource.setDefaultAutoCommit(Boolean.FALSE);
        return dataSource;
    }

    @Bean(destroyMethod = "close")
    public BasicDataSource dataSourceFacturacion() {
        BasicDataSource dataSource = new BasicDataSource();
        dataSource.setDriverClassName(environment.getRequiredProperty("postgres.jdbc.driver"));
        dataSource.setUrl(environment.getRequiredProperty("facturacion.jdbc.url"));
        dataSource.setUsername(environment.getRequiredProperty("facturacion.jdbc.username"));
        dataSource.setPassword(environment.getRequiredProperty("facturacion.jdbc.password"));
        dataSource.setPoolPreparedStatements(true);
        dataSource.setInitialSize(1);
        dataSource.setMaxTotal(4);
        dataSource.setDefaultAutoCommit(Boolean.FALSE);
        return dataSource;
    }

    @Bean(destroyMethod = "close")
    public BasicDataSource dataSourceSietab() {
        BasicDataSource dataSource = new BasicDataSource();
        dataSource.setDriverClassName(environment.getRequiredProperty("postgres.jdbc.driver"));
        dataSource.setUrl(environment.getRequiredProperty("sietab.jdbc.url"));
        dataSource.setUsername(environment.getRequiredProperty("sietab.jdbc.username"));
        dataSource.setPassword(environment.getRequiredProperty("sietab.jdbc.password"));
        dataSource.setPoolPreparedStatements(true);
        dataSource.setInitialSize(1);
        dataSource.setMaxTotal(2);
        dataSource.setDefaultAutoCommit(Boolean.FALSE);
        return dataSource;
    }

    @Bean
    public LocalSessionFactoryBean sessionFactoryBitacora() {
        LocalSessionFactoryBean sessionFactory = new LocalSessionFactoryBean();
        sessionFactory.setDataSource(dataSourceBitacora());
        sessionFactory.setPackagesToScan(environment.getRequiredProperty("bitacora.sessionFactory.packagesToScan"));
        Properties properties = new Properties();
        properties.put("hibernate.dialect", environment.getRequiredProperty("postgres.hibernate.dialect"));
        properties.put("hibernate.show_sql", environment.getRequiredProperty("hibernate.show_sql"));
        sessionFactory.setHibernateProperties(properties);
        return sessionFactory;
    }

    @Bean
    public LocalSessionFactoryBean sessionFactoryFacturacion() {
        LocalSessionFactoryBean sessionFactory = new LocalSessionFactoryBean();
        sessionFactory.setDataSource(dataSourceFacturacion());
        sessionFactory.setPackagesToScan(environment.getRequiredProperty("facturacion.sessionFactory.packagesToScan"));
        Properties properties = new Properties();
        properties.put("hibernate.dialect", environment.getRequiredProperty("postgres.hibernate.dialect"));
        properties.put("hibernate.show_sql", environment.getRequiredProperty("hibernate.show_sql"));
        sessionFactory.setHibernateProperties(properties);
        return sessionFactory;
    }

    @Bean
    public LocalSessionFactoryBean sessionFactorySietab() {
        LocalSessionFactoryBean sessionFactory = new LocalSessionFactoryBean();
        sessionFactory.setDataSource(dataSourceSietab());
        sessionFactory.setPackagesToScan(environment.getRequiredProperty("sietab.sessionFactory.packagesToScan"));
        Properties properties = new Properties();
        properties.put("hibernate.dialect", environment.getRequiredProperty("postgres.hibernate.dialect"));
        properties.put("hibernate.show_sql", environment.getRequiredProperty("hibernate.show_sql"));
        sessionFactory.setHibernateProperties(properties);
        return sessionFactory;
    }

    @Bean
    public HibernateTransactionManager transactionManagerBitacora() {
        HibernateTransactionManager txManager = new HibernateTransactionManager();
        txManager.setSessionFactory(sessionFactoryBitacora().getObject());
        return txManager;
    }

    @Bean
    public HibernateTransactionManager transactionManagerFacturacion() {
        HibernateTransactionManager txManager = new HibernateTransactionManager();
        txManager.setSessionFactory(sessionFactoryFacturacion().getObject());
        return txManager;
    }

    @Bean
    public HibernateTransactionManager transactionManagerSietab() {
        HibernateTransactionManager txManager = new HibernateTransactionManager();
        txManager.setSessionFactory(sessionFactorySietab().getObject());
        return txManager;
    }
}
The DAO configuration is the same for all of the database entities:
@Repository
public class BitacoraFacturasDetalleDao extends GenericDaoImpl<BitacoraFacturasDetalle, Integer> {

    private final static Logger LOGGER = Logger.getLogger(BitacoraFacturasDetalleDao.class);

    @Qualifier("sessionFactoryBitacora")
    @Autowired
    private SessionFactory sessionFactory;

    public BitacoraFacturasDetalleDao() {
        super(BitacoraFacturasDetalle.class);
    }

    public BitacoraFacturasDetalle findByEstablecimientoAndPuntoEmisionAndSecuencial(String establecimiento, String puntoEmision, String secuencial) {
        LOGGER.info("evento findByEstablecimientoAndPuntoEmisionAndSecuencial");
        BitacoraFacturasDetalle ret = (BitacoraFacturasDetalle) getCurrentSession()
                .createNamedQuery("BitacoraFacturasDetalle.findByEstablecimientoAndPuntoEmisionAndSecuencial")
                .setParameter("establecimiento", establecimiento)
                .setParameter("puntoEmision", puntoEmision)
                .setParameter("secuencial", secuencial)
                .uniqueResult();
        return ret;
    }

    @Override
    protected Session getCurrentSession() {
        return this.sessionFactory.getCurrentSession();
    }
}
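The GenericDaoImpl base class that the DAO extends is not shown in the question; here is a minimal sketch of what such a base class might look like, with the save/edit signatures inferred from how the services call the DAOs, so treat the bodies as assumptions rather than the author's actual code:
import java.io.Serializable;
import org.hibernate.Session;

public abstract class GenericDaoImpl<T, ID extends Serializable> {

    private final Class<T> entityClass;

    protected GenericDaoImpl(Class<T> entityClass) {
        this.entityClass = entityClass;
    }

    // Each concrete DAO supplies the Session bound to its own SessionFactory.
    protected abstract Session getCurrentSession();

    public boolean save(T entity) {
        getCurrentSession().save(entity);
        return true;
    }

    public boolean edit(T entity) {
        getCurrentSession().update(entity);
        return true;
    }

    public T find(ID id) {
        return getCurrentSession().get(entityClass, id);
    }
}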
The transactional services are implemented as follows:
#Service("facturasService")
#Transactional(value="transactionManagerFacturacion", readOnly = false)
public class FacturasServiceImpl implements FacturasService, Serializable {
private static final long serialVersionUID = 1L;
private final static Logger LOGGER = Logger.getLogger(FacturasServiceImpl.class);
#Autowired
private FacturasCabeceraDao facturasCabeceraDao;
#Override
public boolean save(FacturasCabecera factura) {
LOGGER.info("evento save");
return facturasCabeceraDao.save(factura);
}
}
#Service("bitacoraFacturasDetalleService")
#Transactional(readOnly = false, value = "transactionManagerBitacora")
public class BitacoraFacturasDetalleServiceImpl implements BitacoraFacturasDetalleService, Serializable {
private static final long serialVersionUID = 1L;
private final static Logger LOGGER = Logger.getLogger(BitacoraFacturasDetalleServiceImpl.class);
#Autowired
private BitacoraFacturasDetalleDao bitacoraFacturasDetalleDao;
#Override
public boolean save(BitacoraFacturasDetalle b) {
LOGGER.info("evento save");
return bitacoraFacturasDetalleDao.save(b);
}
#Override
public boolean edit(BitacoraFacturasDetalle b) {
LOGGER.info("evento edit");
return bitacoraFacturasDetalleDao.edit(b);
}
#Override
#Transactional(readOnly = true, value = "transactionManagerBitacora")
public BitacoraFacturasDetalle findByEstablecimientoAndPuntoEmisionAndSecuencial(String establecimiento, String puntoEmision, String secuencial) {
LOGGER.info("evento findByEstablecimientoAndPuntoEmisionAndSecuencial");
return bitacoraFacturasDetalleDao.findByEstablecimientoAndPuntoEmisionAndSecuencial(establecimiento, puntoEmision, secuencial);
}
}
Finally, in a service scheduled with Quartz I invoke the three kinds of services:
I retrieve the information from the first database and generate some XML files; I insert log (bitácora) records into the second database and, if that succeeds, I update the state of the records retrieved from the first database; I then digitally sign the generated XMLs and, if that succeeds, I change the state of the records in the second database and insert into a master table and a detail table in the third database.
This is the code that performs the invocation:
@Service
public class ScheduleService implements Serializable {

    @Autowired
    private LocalidadService localidadService;
    @Autowired
    private CooperativaService cooperativaService;
    @Autowired
    private BoletoTasaService boletoTasaService;
    @Autowired
    private BitacoraFacturasDetalleService bitacoraFacturasDetalleService;
    @Autowired
    private InformacionTributariaService informacionTributariaService;
    @Autowired
    private ClientesService clientesService;
    @Autowired
    private FacturasService facturasService;

    @Scheduled(cron = "${schedule.cronExpresion}")
    public void start() {
        if (XMLUtil.generarXML(factura, XML_GENERADO)) {
            LOGGER.info("XML para la factura " + SECUENCIAL_DOCUMENTO + " generado correctamente");
            // code that fills a javaBean
            // execution of the service that inserts into database #2
            if (bitacoraFacturasDetalleService.save(bitacoraFacturaDetalle)) {
                LOGGER.info("Factura " + SECUENCIAL_DOCUMENTO + " registrada en bitacora correctamente");
                // the record retrieved from database #1 has its status changed so it is not picked up again
                tasa.setStatusFacturacionElectronica("P");
                if (boletoTasaService.update(tasa)) {
                    // other post-update operations
                }
            }
        }
    }
}
The problem is that this code works up to a certain number of records (roughly 700 or 800 from database 1); after that, the next insert or update against any of the databases seems to go to "sleep", and only a long time later does it run again.
For the tests carried out before moving to production I used copies of the three databases, so in that scenario there were no concurrent connections from the other systems and/or interfaces that normally interact with them.
I do not know whether the cause of the problem is the application code, the strategy used to define the transactional services (I have read about and been advised to use JTA, but from what I have read that mechanism funnels everything through a single transactional unit, i.e. one service that controls the operations on all the databases), or whether the issue is caused by other applications accessing the tables of the different databases concurrently.
Please point out anything that is wrong with the Spring configuration or the definition of the transactional services, or tell me whether I definitely need to use JTA for this purpose.
I should add that I have used this scheme before with one or several databases that I only read from and a single database that I write to, and had no problems; here, however, all three databases are written to under certain circumstances.

As for the problem described, it's hard to tell exactly what might be wrong. Nevertheless, I can give you some tips:
You are using Commons DBCP's BasicDataSource with very small pools (maxTotal of 2 to 4). Try replacing it with a HikariCP connection pool.
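For illustration, here is a minimal sketch of one of the data source beans rewritten for HikariCP, reusing the property keys from the question; the pool sizes are illustrative and the other two data sources would follow the same pattern (requires com.zaxxer.hikari.HikariDataSource from the HikariCP dependency):
@Bean(destroyMethod = "close")
public HikariDataSource dataSourceBitacora() {
    HikariDataSource dataSource = new HikariDataSource();
    dataSource.setDriverClassName(environment.getRequiredProperty("postgres.jdbc.driver"));
    dataSource.setJdbcUrl(environment.getRequiredProperty("bitacora.jdbc.url"));
    dataSource.setUsername(environment.getRequiredProperty("bitacora.jdbc.username"));
    dataSource.setPassword(environment.getRequiredProperty("bitacora.jdbc.password"));
    dataSource.setMaximumPoolSize(4); // illustrative sizes
    dataSource.setMinimumIdle(1);
    dataSource.setAutoCommit(false);  // matches setDefaultAutoCommit(Boolean.FALSE) above
    return dataSource;
}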
Instead of one long-running transaction, split the work into chunks: each database transaction should process only a small subset of the data at a time. Spring Integration is one way to build such a pipeline. With short transactions, PostgreSQL's VACUUM also has a much better chance to run than with a single very long-running transaction.
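If you do not want to bring in Spring Integration, a simpler variant of the same idea is to keep the scheduled method non-transactional and push each small batch through a separate transactional bean, so that every chunk commits on its own. Below is a rough sketch only; the names findPendingIds, processBatch, FacturaChunkProcessor and BATCH_SIZE are hypothetical and not part of the question's services:
import java.util.List;

@Service
public class ChunkedScheduleService {

    private static final int BATCH_SIZE = 50; // illustrative chunk size

    @Autowired
    private BoletoTasaService boletoTasaService;

    @Autowired
    private FacturaChunkProcessor chunkProcessor;

    // The scheduler only orchestrates; it holds no transaction itself.
    @Scheduled(cron = "${schedule.cronExpresion}")
    public void start() {
        List<Integer> pendingIds = boletoTasaService.findPendingIds(); // hypothetical finder
        for (int from = 0; from < pendingIds.size(); from += BATCH_SIZE) {
            int to = Math.min(from + BATCH_SIZE, pendingIds.size());
            chunkProcessor.processBatch(pendingIds.subList(from, to));
        }
    }
}

@Service
public class FacturaChunkProcessor {

    // Each chunk runs in its own short transaction; the call crosses bean boundaries,
    // so Spring's transaction proxy is applied. Writes to the other databases would be
    // delegated to their own transactional services, exactly as in the question.
    @Transactional("transactionManagerFacturacion")
    public void processBatch(List<Integer> ids) {
        for (Integer id : ids) {
            // generate the XML, register it in the bitacora, update the state, sign it, etc.
        }
    }
}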


@Sql Failed SQL scripts: The configured DataSource [*] (named 'fooDS') is not the one associated with transaction manager [*] (named 'fooTM')

Update 1 (see further down)
The setup is as follows:
Our application database is constructed and used by two separate users:
SCHEMA - a user that has authority to create tables and grant permissions on them, and
APP - a user that is granted permissions (INSERT, UPDATE, DELETE, SELECT) on the above tables by SCHEMA.
This lets us lock down schema changes until they are needed, so no profound changes can happen through the app user.
I am running integration tests against a live Oracle database that contains both of these users. On the test class itself I use @SqlConfig(dataSource = "schemaDataSource", transactionManager = "transactionManagerSchema").
On the test method I place two @Sql annotations, and they fail because, inside the SqlScriptsTestExecutionListener class, the transaction manager is not managing the same DataSource (hence the error message further below).
I've tried setting the DataSource on the transaction manager manually, as shown in my config class below, but some unknown process seems to override it every time. (My best guess is that it happens through the @DataJpaTest annotation, but I don't know which of the 11 auto-configurations does it; as you can see, I've already disabled a couple with no effect.)
Test Class:
@RunWith(SpringRunner.class)
@DataJpaTest(excludeAutoConfiguration = {TestDatabaseAutoConfiguration.class, DataSourceAutoConfiguration.class})
@FlywayTest
@SqlConfig(dataSource = TestDataSourceConfig.SCHEMA_DATA_SOURCE, transactionManager = "transactionManagerSchema")
@SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.NONE, classes = {TestDataSourceConfig.class, TestFlywayConfig.class})
@EntityScan(basePackageClasses = BaseEnum.class)
public class NotificationTypeEnumTest {

    @Autowired
    private EntityManager em;

    @Test
    @Sql(statements = {"INSERT INTO MYAPP_ENUM (ENUM_ID, \"TYPE\", \"VALUE\") VALUES (MYAPP_ENUM_ID_SEQ.nextval, '" + NotificationTypeEnum.DTYPE + "', 'foo')"}, executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD)
    @Sql(statements = {"DELETE FROM MYAPP_ENUM"}, executionPhase = Sql.ExecutionPhase.AFTER_TEST_METHOD)
    public void canFetchNotificationTypeEnum() throws Exception {
        TypedQuery<NotificationTypeEnum> query = em.createQuery("select a from NotificationTypeEnum a", NotificationTypeEnum.class);
        NotificationTypeEnum result = query.getSingleResult();
        assertEquals("foo", result.getValue());
        assertEquals(NotificationTypeEnum.DTYPE, result.getConfigType());
    }
}
DataSource and TM config:
@Slf4j
@Configuration
@EnableTransactionManagement
public class TestDataSourceConfig {

    public static final String SCHEMA_DATA_SOURCE = "schemaDataSource";
    public static final String SCHEMA_TRANSACTION_MANAGER = "schemaTransactionManager";

    /* Main DataSource and supporting beans */
    @Bean
    @Primary
    @ConfigurationProperties(prefix = "spring.datasource")
    public DataSource dataSource() {
        return new DriverManagerDataSource();
    }

    @Bean
    @Primary
    @Autowired
    public PlatformTransactionManager transactionManager(EntityManagerFactory emf) {
        return new JpaTransactionManager(emf);
    }

    @Bean(name = SCHEMA_DATA_SOURCE)
    @ConfigurationProperties(prefix = "myapp.datasource.test_schema")
    public DataSource schemaDataSource() {
        return new DriverManagerDataSource();
    }

    @Bean(name = SCHEMA_TRANSACTION_MANAGER)
    @Autowired
    public PlatformTransactionManager transactionManagerSchema(@Qualifier(SCHEMA_DATA_SOURCE) DataSource dataSource) {
        JpaTransactionManager jpaTransactionManager = new JpaTransactionManager();
        jpaTransactionManager.setDataSource(dataSource);
        return jpaTransactionManager;
    }
}
The full error that I couldn't fit in the title is:
java.lang.IllegalStateException: Failed to execute SQL scripts for test context
...
SOME LONG STACK TRACE
...
the configured DataSource [org.springframework.jdbc.datasource.DriverManagerDataSource] (named 'schemaDataSource') is not the one associated with transaction manager [org.springframework.orm.jpa.JpaTransactionManager] (named 'transactionManagerSchema').
When there is a single DataSource, the Spring auto-configuration model appears to work fine; however, as soon as there are two or more, the assumptions break down and the programmer suddenly has to fill in the (plentiful) gaps in configuration manually.
Am I missing some fundamental understanding surrounding DataSources and TransactionManagers?
Update 1
After some debugging, I discovered that the afterPropertiesSet() method is called on the bean I created when the transaction manager is retrieved for use with the @Sql script annotation. This causes whatever EntityManagerFactory it owns (i.e. JpaTransactionManager.entityManagerFactory) to set the DataSource according to its configured EntityManagerFactoryInfo.getDataSource(). The EntityManagerFactory itself is set as a result of the JpaTransactionManager.setBeanFactory method being called (since JpaTransactionManager implements BeanFactoryAware).
Here is the Spring code:
// JpaTransactionManager.java
@Override
public void setBeanFactory(BeanFactory beanFactory) throws BeansException {
    if (getEntityManagerFactory() == null) {
        if (!(beanFactory instanceof ListableBeanFactory)) {
            throw new IllegalStateException("Cannot retrieve EntityManagerFactory by persistence unit name " +
                    "in a non-listable BeanFactory: " + beanFactory);
        }
        ListableBeanFactory lbf = (ListableBeanFactory) beanFactory;
        setEntityManagerFactory(EntityManagerFactoryUtils.findEntityManagerFactory(lbf, getPersistenceUnitName()));
    }
}
I then tried creating my own EntityManagerFactory bean so I could inject it into the transaction manager I had created, but that seemed to drag in Hibernate-specific classes (and looked difficult to configure at first glance), and I want to stay abstracted at the JPA level.
Finally, a JPA-only solution!
The solution was to control the creation of the EntityManagerFactory beans using the EntityManagerFactoryBuilder component provided by Spring, and to inject the EntityManager into the test using the @PersistenceContext annotation.
@SqlConfig(dataSource = TestDataSourceConfig.SCHEMA_DATA_SOURCE, transactionManager = SCHEMA_TRANSACTION_MANAGER, transactionMode = SqlConfig.TransactionMode.ISOLATED)
...
public class MyJUnitTest {

    @PersistenceContext(unitName = "pu")
    private EntityManager em;

    ...

    @Test
    @Sql(statements = {"SOME SQL USING THE PRIVILEGED SCHEMA CONNECTION"}, ...)
    public void myTest() {
        em.createQuery("...").getResultList(); // uses the APP database user.
    }
}
Below is the configuration for both data sources. The application-related DataSource beans all carry @Primary in their definition to disambiguate any @Autowired dependencies. There are no Hibernate-specific classes needed beyond the automatic Hibernate configuration done through @DataJpaTest.
@Configuration
@EnableTransactionManagement
@EnableConfigurationProperties(JpaProperties.class)
public class TestDataSourceConfig {

    public static final String SCHEMA_DATA_SOURCE = "schemaDS";
    public static final String SCHEMA_TRANSACTION_MANAGER = "schemaTM";
    public static final String SCHEMA_EMF = "schemaEMF";

    /* Main DataSource and supporting beans */
    @Bean
    @Primary
    @ConfigurationProperties(prefix = "spring.datasource")
    public DataSource dataSource() {
        return new DriverManagerDataSource();
    }

    @Bean
    @Primary
    @Autowired
    public PlatformTransactionManager transactionManager(EntityManagerFactory emf) {
        return new JpaTransactionManager(emf);
    }

    @Bean
    @Primary
    public LocalContainerEntityManagerFactoryBean emfBean(
            EntityManagerFactoryBuilder entityManagerFactoryBuilder,
            DataSource datasource,
            JpaProperties jpaProperties) {
        return entityManagerFactoryBuilder
                .dataSource(datasource)
                .jta(false)
                .packages(CourseOffering.class)
                .persistenceUnit("pu")
                .properties(jpaProperties.getProperties())
                .build();
    }

    @Bean(name = SCHEMA_EMF)
    public LocalContainerEntityManagerFactoryBean emfSchemaBean(
            EntityManagerFactoryBuilder entityManagerFactoryBuilder,
            @Qualifier(SCHEMA_DATA_SOURCE) DataSource schemaDataSource,
            JpaProperties jpaProperties) {
        return entityManagerFactoryBuilder
                .dataSource(schemaDataSource)
                .jta(false)
                .packages(CourseOffering.class)
                .persistenceUnit("spu")
                .properties(jpaProperties.getProperties())
                .build();
    }

    @Bean(name = SCHEMA_DATA_SOURCE)
    @ConfigurationProperties(prefix = "myapp.datasource.test_schema")
    public DataSource schemaDataSource() {
        return new DriverManagerDataSource();
    }

    @Bean(name = SCHEMA_TRANSACTION_MANAGER)
    public PlatformTransactionManager transactionManagerSchema(
            @Qualifier(SCHEMA_EMF) EntityManagerFactory emfSchemaBean) {
        JpaTransactionManager jpaTransactionManager = new JpaTransactionManager();
        jpaTransactionManager.setEntityManagerFactory(emfSchemaBean);
        return jpaTransactionManager;
    }
}
Actual Test Class:
@RunWith(SpringRunner.class) // required for all Spring tests
@DataJpaTest(excludeAutoConfiguration = {TestDatabaseAutoConfiguration.class, DataSourceAutoConfiguration.class}) // this stops the default data source and database being configured.
@SqlConfig(dataSource = TestDataSourceConfig.SCHEMA_DATA_SOURCE, transactionManager = SCHEMA_TRANSACTION_MANAGER, transactionMode = SqlConfig.TransactionMode.ISOLATED) // make sure the @Sql statements are run using the SCHEMA datasource and txManager in an isolated way so as not to cause problems when running test methods requiring these statements to be run.
@SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.NONE, classes = {TestDataSourceConfig.class})
@TestExecutionListeners({
        SqlScriptsTestExecutionListener.class, // enables the @Sql script annotations to work.
        SpringBootDependencyInjectionTestExecutionListener.class, // injects spring components into the test (i.e. the EntityManager)
        TransactionalTestExecutionListener.class}) // I have this here even though the @Transactional annotations don't exist yet as I plan on using them in further tests.
public class NotificationTypeEnumTest {

    @PersistenceContext(unitName = "pu") // required to inject the correct EntityManager
    private EntityManager em;

    // these statements are
    @Test
    @Sql(statements = {"INSERT INTO MYAPP_ENUM (ENUM_ID, \"TYPE\", \"VALUE\") VALUES (MYAPP_ENUM_ID_SEQ.nextval, '" + NotificationTypeEnum.DTYPE + "', 'foo')"}, executionPhase = Sql.ExecutionPhase.BEFORE_TEST_METHOD)
    @Sql(statements = {"DELETE FROM MYAPP_ENUM"}, executionPhase = Sql.ExecutionPhase.AFTER_TEST_METHOD)
    public void canFetchNotificationTypeEnum() throws Exception {
        TypedQuery<NotificationTypeEnum> query = em.createQuery("select a from NotificationTypeEnum a", NotificationTypeEnum.class); // notification type is just a subclass of the BaseEnum type
        NotificationTypeEnum result = query.getSingleResult();
        assertEquals("foo", result.getValue());
        assertEquals(NotificationTypeEnum.DTYPE, result.getConfigType());
    }
}
Noteworthy classes:
EntityManagerFactoryBuilder - I don't like factory factories, but this one served me well in creating the correct implementation of EntityManagerFactory without depending on any Hibernate-specific classes. It can be injected with @Autowired. The builder bean itself is configured through the HibernateJpaAutoConfiguration class (which extends JpaBaseConfiguration), imported by @DataJpaTest.
JpaProperties - useful for carrying the application.properties configuration into the resulting EntityManagerFactories. Enabled through the @EnableConfigurationProperties(JpaProperties.class) annotation on the config class.
@PersistenceContext(unitName = "...") - lets me inject the correct EntityManager into my test class.

Spring and Hibernate with multiple databases

Good evening,
what is the correct and common approach to handling two or more databases?
Consider this HibernateConfiguration class configuring only one datasource:
@Configuration
@EnableTransactionManagement
@PropertySource(value = { "classpath:hibernate.properties" })
public class HibernateConfiguration {

    @Autowired
    private Environment env;

    @Bean
    public DataSource getDataSource() {
        DriverManagerDataSource dataSource = new DriverManagerDataSource();
        // ... setting data source
        return dataSource;
    }

    private Properties getHibernateProperties() {
        Properties properties = new Properties();
        // ... setting Hibernate properties
        return properties;
    }

    @Bean
    public LocalSessionFactoryBean getSessionFactory() {
        LocalSessionFactoryBean sessionFactory = new LocalSessionFactoryBean();
        sessionFactory.setDataSource(getDataSource());
        sessionFactory.setPackagesToScan(new String[] { "POJOs'" });
        sessionFactory.setHibernateProperties(getHibernateProperties());
        return sessionFactory;
    }

    @Bean
    public HibernateTransactionManager transactionManager(SessionFactory sf) {
        HibernateTransactionManager htm = new HibernateTransactionManager();
        htm.setSessionFactory(sf);
        return htm;
    }
}
Is it recommended to have one class configure one datasource, or is it enough to configure them all at once? How do I specify in a DAO class which SessionFactory should be used, and what is the recommended approach for switching between two identical databases hosted on two different servers?
Example DAOs. In the first one I need to switch between Foo and Bar.
@Repository
public class RepositoryImpl implements RepositoryDao {

    @Autowired // Here I need to switch between databases "foo" and "bar"
    private SessionFactory sessionFactory;
    ...
The second one needs to stay fixed on the example database Foo.
@Repository
public class FooImpl implements FooDao {

    @Autowired // Here I need fixed on "Foo"
    private SessionFactory sessionFactory;
One approach
@Bean
@Primary
@ConfigurationProperties("app.datasource.foo")
public DataSourceProperties fooDataSourceProperties() {
    return new DataSourceProperties();
}

@Bean
@Primary
@ConfigurationProperties("app.datasource.foo")
public DataSource fooDataSource() {
    return fooDataSourceProperties().initializeDataSourceBuilder().build();
}

@Bean
@ConfigurationProperties("app.datasource.bar")
public BasicDataSource barDataSource() {
    return (BasicDataSource) DataSourceBuilder.create()
            .type(BasicDataSource.class).build();
}
Spring multiple datasources config
Another approach could be loading different mappings (orm.xml) from persistence.xml, or referring to different schemas in the entity classes.
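To the "how do I specify in a DAO class which SessionFactory will be used" part: with plain Spring (no Boot auto-configuration), a common option is to declare one LocalSessionFactoryBean per database and select the right one in each DAO with @Qualifier. A minimal sketch follows; the bean names, packages and data source qualifiers are illustrative, not from the question:
@Configuration
@EnableTransactionManagement
public class MultiDbHibernateConfig {

    @Bean
    public LocalSessionFactoryBean sessionFactoryFoo(@Qualifier("fooDataSource") DataSource dataSource) {
        LocalSessionFactoryBean sessionFactory = new LocalSessionFactoryBean();
        sessionFactory.setDataSource(dataSource);
        sessionFactory.setPackagesToScan("com.example.foo.model");
        return sessionFactory;
    }

    @Bean
    public LocalSessionFactoryBean sessionFactoryBar(@Qualifier("barDataSource") DataSource dataSource) {
        LocalSessionFactoryBean sessionFactory = new LocalSessionFactoryBean();
        sessionFactory.setDataSource(dataSource);
        sessionFactory.setPackagesToScan("com.example.bar.model");
        return sessionFactory;
    }
}

@Repository
public class FooImpl implements FooDao {

    @Autowired
    @Qualifier("sessionFactoryFoo") // picks the SessionFactory produced by the bean of that name
    private SessionFactory sessionFactory;
    // ...
}
For switching between two identical databases on different hosting servers, Spring's AbstractRoutingDataSource is another option: it sits in front of both data sources and chooses one per call based on a lookup key, so the rest of the configuration sees a single DataSource.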

Spring & Hibernate without jpa

For my new project I plan to use Hibernate 5 and Spring 4 and, as always, I like to separate the code into different layers / projects.
Gradle dependencies:
"org.springframework:spring-webmvc:4.2.1.RELEASE",
"org.springframework:spring-orm:4.2.1.RELEASE",
'org.hibernate:hibernate-core:5.0.2.Final',
'mysql:mysql-connector-java:5.1.36'
There is an API project that contains a User class. In my opinion this user class must not use any annotations for the database layer; it must not specify @Table(name = "users") or anything similar. It should be a simple object with getters and setters.
The database layer should decide how to store the data and this depends strongly on the database (e.g. MongoDB or MySQL).
I followed some tutorials for Hibernate and ended up with the following @Configuration class:
@Configuration
@ComponentScan("de.pentos.proto")
@EnableWebMvc
@EnableTransactionManagement
public class AppConfig {

    private static final Logger log = LoggerFactory.getLogger(AppConfig.class);

    private static Properties getHibernateProperties() {
        final Properties properties = new Properties();
        properties.put("hibernate.show_sql", "true");
        // properties.put("hibernate.dialect", "org.hibernate.dialect.MySQLDialect");
        properties.put("hibernate.dialect", "org.hibernate.dialect.MySQL5Dialect");
        properties.put("hbm2ddl.auto", "create");
        return properties;
    }

    {
        log.debug("Here am I: {}");
    }

    @Bean(name = "dataSource")
    public DataSource getDataSource() {
        final DriverManagerDataSource dataSource = new DriverManagerDataSource();
        dataSource.setDriverClassName("com.mysql.jdbc.Driver");
        dataSource.setUrl("jdbc:mysql://localhost:3306/myschema");
        dataSource.setUsername("user");
        dataSource.setPassword("password");
        return dataSource;
    }

    @Inject
    @Bean(name = "sessionFactory")
    public SessionFactory getSessionFactory(final DataSource dataSource) {
        final LocalSessionFactoryBuilder sessionBuilder = new LocalSessionFactoryBuilder(dataSource);
        sessionBuilder.addAnnotatedClasses(User.class);
        sessionBuilder.addProperties(getHibernateProperties());
        return sessionBuilder.buildSessionFactory();
    }

    @Inject
    @Bean(name = "transactionManager")
    public HibernateTransactionManager getTransactionManager(final SessionFactory sessionFactory) {
        final HibernateTransactionManager transactionManager = new HibernateTransactionManager(
                sessionFactory);
        return transactionManager;
    }
}
It works very well, except it uses an annotated class.
How can I add my hbm/user.xml to the sessionBuilder?
I tried the Hibernate Configuration class that I found in some examples, but its buildSessionFactory() method is deprecated.
I also tried the ServiceRegistry described here, but then I lost my DataSource approach, and without the DataSource the system was not able to set up the HibernateTransactionManager.
Without the HibernateTransactionManager I was not able to use @Transactional, and I don't want to open and close my transactions manually.
Currently I'm spinning in circles and really need help to get this to work. I already thought about throwing Hibernate away and using my good old MyBatis approach, but you know, I like to learn something new...
Add the XML mapping files as resources to the SessionFactory, as follows:
@Inject
@Bean(name = "sessionFactory")
public SessionFactory getSessionFactory(final DataSource dataSource) {
    final LocalSessionFactoryBuilder sessionBuilder = new LocalSessionFactoryBuilder(dataSource);
    sessionBuilder.addResource("/path-to-/hbm/user.xml");
    sessionBuilder.addAnnotatedClasses(User.class);
    sessionBuilder.addProperties(getHibernateProperties());
    return sessionBuilder.buildSessionFactory();
}

Spring Batch Multiple Threads

I am writing a Spring Batch job with the idea of scaling it when required.
My application context looks like this:
@Configuration
@EnableBatchProcessing
@EnableTransactionManagement
@ComponentScan(basePackages = "in.springbatch")
@PropertySource(value = {"classpath:springbatch.properties"})
public class ApplicationConfig {

    @Autowired
    Environment environment;

    @Autowired
    private JobBuilderFactory jobs;

    @Autowired
    private StepBuilderFactory stepBuilderFactory;

    @Bean
    public Job job() throws Exception {
        return jobs.get("spring_batch")
                .flow(step()).end()
                .build();
    }

    @Bean(name = "dataSource", destroyMethod = "close")
    public DataSource dataSource() {
        BasicDataSource basicDataSource = new BasicDataSource();
        return basicDataSource;
    }

    @Bean
    public JobRepository jobRepository() throws Exception {
        JobRepositoryFactoryBean jobRepositoryFactoryBean = new JobRepositoryFactoryBean();
        jobRepositoryFactoryBean.setTransactionManager(transactionManager());
        jobRepositoryFactoryBean.setDataSource(dataSource());
        return jobRepositoryFactoryBean.getObject();
    }

    @Bean(name = "batchstep")
    public Step step() throws Exception {
        return stepBuilderFactory.get("batchstep").allowStartIfComplete(true).
                transactionManager(transactionManager()).
                chunk(2).reader(batchReader()).processor(processor()).writer(writer()).build();
    }

    @Bean
    ItemReader batchReader() throws Exception {
        System.out.println(Thread.currentThread().getName() + "reader");
        HibernateCursorItemReader<Source> hibernateCursorItemReader = new HibernateCursorItemReader<>();
        hibernateCursorItemReader.setQueryString("from Source");
        hibernateCursorItemReader.setFetchSize(2);
        hibernateCursorItemReader.setSessionFactory(sessionFactory().getObject());
        hibernateCursorItemReader.close();
        return hibernateCursorItemReader;
    }

    @Bean
    public ItemProcessor processor() {
        return new BatchProcessor();
    }

    @Bean
    public ItemWriter writer() {
        return new BatchWriter();
    }

    public TaskExecutor taskExecutor() {
        SimpleAsyncTaskExecutor asyncTaskExecutor = new SimpleAsyncTaskExecutor("spring_batch");
        asyncTaskExecutor.setConcurrencyLimit(5);
        return asyncTaskExecutor;
    }

    @Bean
    public LocalSessionFactoryBean sessionFactory() {
        LocalSessionFactoryBean sessionFactory = new LocalSessionFactoryBean();
        sessionFactory.setDataSource(dataSource());
        sessionFactory.setPackagesToScan(new String[]{"in.springbatch.entity"});
        sessionFactory.setHibernateProperties(hibernateProperties());
        return sessionFactory;
    }

    @Bean
    public PersistenceExceptionTranslationPostProcessor exceptionTranslation() {
        return new PersistenceExceptionTranslationPostProcessor();
    }

    @Bean
    @Autowired
    public HibernateTransactionManager transactionManager() {
        HibernateTransactionManager txManager = new HibernateTransactionManager();
        txManager.setSessionFactory(sessionFactory().getObject());
        return txManager;
    }

    Properties hibernateProperties() {
        return new Properties() {
            {
                setProperty("hibernate.hbm2ddl.auto", environment.getProperty("hibernate.hbm2ddl.auto"));
                setProperty("hibernate.dialect", environment.getProperty("hibernate.dialect"));
                setProperty("hibernate.globally_quoted_identifiers", "false");
            }
        };
    }
}
With the above configuration I am able to read from the DB, process the data, and write to the DB.
I am using a chunk size of 2 and reading 2 records from the cursor with HibernateCursorItemReader; my query reads from the DB based on date, so it picks up the current day's records.
So far I am able to achieve the desired behavior, as well as restartability, with the job only picking up records that were not processed due to a failure in the previous run.
Now my requirement is to make the batch use multiple threads to process the data and write to the DB.
My processor and writer look like this:
@Component
public class BatchProcessor implements ItemProcessor<Source, DestinationDto> {

    @Override
    public DestinationDto process(Source source) throws Exception {
        System.out.println(Thread.currentThread().getName() + ":" + source);
        DestinationDto destination = new DestinationDto();
        destination.setName(source.getName());
        destination.setValue(source.getValue());
        destination.setSourceId(source.getSourceId().toString());
        return destination;
    }
}

@Component
public class BatchWriter implements ItemWriter<DestinationDto> {

    @Autowired
    IBatchDao batchDao;

    @Override
    public void write(List<? extends DestinationDto> list) throws Exception {
        System.out.println(Thread.currentThread().getName() + ":" + list);
        batchDao.saveToDestination((List<DestinationDto>) list);
    }
}
I updated my step and added the task executor (the SimpleAsyncTaskExecutor shown above) as follows:
@Bean(name = "batchstep")
public Step step() throws Exception {
    return stepBuilderFactory.get("batchstep").allowStartIfComplete(true).
            transactionManager(transactionManager()).chunk(1).reader(batchReader()).
            processor(processor()).writer(writer()).taskExecutor(taskExecutor()).build();
}
After this, my processor is called by multiple threads, but with the same source data.
Is there anything extra I need to do?
This is a big question.
Your best bet at getting a good answer would be to look through the Scaling and Parallel Processing chapter in the Spring Batch documentation (here).
There might also be some multi-threading samples in the Spring Batch examples (here).
An easy way to thread the Spring Batch job is to create a "future processor": you put all your processing logic in a Future object, your processor class only submits objects to the future, and your writer class then waits on the futures to finish before performing the write. Sorry I don't have a sample to point you to for this, but if you have specific questions I can try and answer! (A rough sketch of the idea follows.)
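For illustration, a rough sketch of that future-based idea using the Source, DestinationDto and IBatchDao types from the question and a plain ExecutorService; the class names and pool size are hypothetical. Spring Batch's spring-batch-integration module also ships AsyncItemProcessor and AsyncItemWriter, which implement the same pattern out of the box.
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

public class FutureBatchProcessor implements ItemProcessor<Source, Future<DestinationDto>> {

    private final ExecutorService executor = Executors.newFixedThreadPool(5); // illustrative pool size

    @Override
    public Future<DestinationDto> process(Source source) {
        // Only submit the work here; the mapping runs on the pool threads.
        return executor.submit(() -> {
            DestinationDto destination = new DestinationDto();
            destination.setName(source.getName());
            destination.setValue(source.getValue());
            destination.setSourceId(source.getSourceId().toString());
            return destination;
        });
    }
}

public class FutureBatchWriter implements ItemWriter<Future<DestinationDto>> {

    @Autowired
    private IBatchDao batchDao;

    @Override
    public void write(List<? extends Future<DestinationDto>> futures) throws Exception {
        List<DestinationDto> resolved = new ArrayList<>();
        for (Future<DestinationDto> future : futures) {
            resolved.add(future.get()); // wait for each item's processing to finish
        }
        batchDao.saveToDestination(resolved);
    }
}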

Spring Data CrudRepository on java based configuration - EntityManager - no transaction is in progress

I've read and, I believe, tried all of the posts on this, but no luck finding the right answer.
I am using Java-based configuration in my Spring MVC project and wanted to try Spring's CrudRepository to get away from DAOs, and that is when all hell broke loose.
It started with "no transaction is in progress" on flush after persist:
- I tried adding @Transactional to the method; none of the variations found here worked.
- I tried changing the configuration, but since mine is Java-based and most of the answers are XML-based, no luck either.
So finally I have to ask: how do I configure my project so that CrudRepository can persist, or how do I create a Spring EntityManager using Java configuration?
This is the latest version of my configuration file:
@Configuration
@ComponentScan(basePackages = { "ba.fit.vms" })
@ImportResource(value = "classpath:spring-security-context.xml")
@EnableTransactionManagement
@EnableJpaRepositories
public class AppConfig {

    @Bean
    public static PropertyPlaceholderConfigurer propertyPlaceholderConfigurer() {
        PropertyPlaceholderConfigurer ppc = new PropertyPlaceholderConfigurer();
        ppc.setLocation(new ClassPathResource("/persistence.properties"));
        return ppc;
    }

    // Security configuration
    @Bean
    public KorisnickiServis korisnickiServis() {
        return new KorisnickiServis();
    }

    @Bean
    public TokenBasedRememberMeServices rememberMeServices() {
        return new TokenBasedRememberMeServices("remember-me-key", korisnickiServis());
    }

    @Bean
    public PasswordEncoder passwordEncoder() {
        return new StandardPasswordEncoder();
    }

    // JPA configuration
    @Value("${dataSource.driverClassName}")
    private String driver;
    @Value("${dataSource.url}")
    private String url;
    @Value("${dataSource.username}")
    private String username;
    @Value("${dataSource.password}")
    private String password;
    @Value("${hibernate.dialect}")
    private String dialect;
    @Value("${hibernate.hbm2ddl.auto}")
    private String hbm2ddlAuto;

    @Bean
    public DataSource configureDataSource() {
        DriverManagerDataSource dataSource = new DriverManagerDataSource();
        dataSource.setDriverClassName(driver);
        dataSource.setUrl(url);
        dataSource.setUsername(username);
        dataSource.setPassword(password);
        return dataSource;
    }

    @Bean
    public LocalContainerEntityManagerFactoryBean configureEntityManagerFactory() {
        LocalContainerEntityManagerFactoryBean entityManagerFactoryBean = new LocalContainerEntityManagerFactoryBean();
        entityManagerFactoryBean.setDataSource(configureDataSource());
        entityManagerFactoryBean.setPackagesToScan("ba.fit.vms");
        entityManagerFactoryBean.setJpaVendorAdapter(new HibernateJpaVendorAdapter());
        Properties jpaProperties = new Properties();
        jpaProperties.put(org.hibernate.cfg.Environment.DIALECT, dialect);
        jpaProperties.put(org.hibernate.cfg.Environment.HBM2DDL_AUTO, hbm2ddlAuto);
        // jpaProperties.put(org.hibernate.cfg.Environment.SHOW_SQL, true);
        entityManagerFactoryBean.setJpaProperties(jpaProperties);
        return entityManagerFactoryBean;
    }

    @Bean
    public PlatformTransactionManager transactionManager() {
        final JpaTransactionManager transactionManager = new JpaTransactionManager();
        transactionManager.setEntityManagerFactory(configureEntityManagerFactory().getObject());
        return transactionManager;
    }
}
I've tried a number of variations, but I always receive the same "no transaction is in progress" error.
Also, just a glimpse at the repos:
LokacijaRepository:
@Transactional
public interface LokacijaRepository extends CrudRepository<Lokacija, Long> {
}
And LokacijaRepositoryImpl:
@Repository
public class LokacijaRepositoryImpl implements LokacijaRepository {

    protected static Logger logger = Logger.getLogger("repo");

    @PersistenceContext // tried this as well (type = PersistenceContextType.EXTENDED)
    private EntityManager entityManager;

    @Override
    @Transactional // tried a number of variations here as well, like REQUIRED...
    public <S extends Lokacija> S save(S entity) {
        logger.debug("trying to save!");
        try {
            entityManager.persist(entity);
            entityManager.flush();
            return entity;
        } catch (Exception e) {
            logger.debug("error: " + e.toString());
            return null;
        }
    }
}
If you need anything else to help me figure this one out, let me know.
The problem is that you are attempting to create an implementation of LokacijaRepository (in LokacijaRepositoryImpl) while Spring Data JPA (which you have configured) is trying to do the same.
What you need to do is:
Totally remove LokacijaRepositoryImpl (Spring Data JPA generates the implementation for you).
Either rename the configureEntityManagerFactory bean to entityManagerFactory (the default bean name Spring Data JPA looks for) or add entityManagerFactoryRef = "configureEntityManagerFactory" to @EnableJpaRepositories, for example as sketched below.
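A minimal sketch of the second option, keeping the bean method names from the question; transactionManagerRef is shown explicitly even though "transactionManager" is already the default name Spring Data looks up:
@Configuration
@ComponentScan(basePackages = { "ba.fit.vms" })
@ImportResource(value = "classpath:spring-security-context.xml")
@EnableTransactionManagement
@EnableJpaRepositories(
        basePackages = "ba.fit.vms",
        entityManagerFactoryRef = "configureEntityManagerFactory",
        transactionManagerRef = "transactionManager")
public class AppConfig {
    // ... the same DataSource, LocalContainerEntityManagerFactoryBean and
    // JpaTransactionManager beans as in the question ...
}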
