Enabling mongo exception translation in spring-data-rest - java

Following this tutorial, working with complete code, how do I enable exception translation for mongo?
When my mongo db is down, I'm getting a 500 error from com.mongodb.MongoServerSelectionException. Shouldn't this be translated into DataAccessResourceFailureException with MongoExceptionTranslator? Am I supposed to register this bean somehow? I've tried with:
#Bean
public MongoExceptionTranslator mongoExceptionTranslator() {
return new MongoExceptionTranslator();
}
but still no change
EDIT:
I've created a demo with the suggestions from Stackee007 but still can't get this to work

MongoExceptionTranslator is already registered if your configuration registers MongoFactoryBean or SimpleMongoDbFactory. You could configure mongo something like below which registers SimpleMongoDbFactory.
#Configuration
#EnableMongoRepositories
public class ApplicationConfig extends AbstractMongoConfiguration {
#Override
protected String getDatabaseName() {
return "yyy";
}
#Override
protected UserCredentials getUserCredentials() {
return new UserCredentials("abc", "***");
}
#Override
#Bean
public Mongo mongo() throws Exception {
List<ServerAddress> seeds = new ArrayList<ServerAddress>();
seeds.add(new ServerAddress("xxxx"));
seeds.add(new ServerAddress("xxx"));
seeds.add(new ServerAddress("xx"));
MongoClient mongo = new MongoClient(seeds);
mongo.setReadPreference(ReadPreference.secondaryPreferred());
mongo.setWriteConcern(WriteConcern.ACKNOWLEDGED);
return mongo;
}
#Bean
public GridFsTemplate gridFsTemplate() throws Exception {
return new GridFsTemplate(mongoDbFactory(), mappingMongoConverter());
}
}

Related

(FIXED) Multi-tenant application: Can't set the desired dataSource (Separated Schema, Shared Database)

I have an application where I want to use different DataSources. All the requests coming from the front-end will use the primary DataSource (this works so far), but I also have to perform operations every certain amount of minutes on another database with different schemas.
By looking in here, I found this approach:
Application.yml
# Two datasource definitions, read later via Environment as
# spring.datasource.primary.* / spring.datasource.secondary.* — so this
# block presumably sits under a top-level "spring:" key (TODO confirm; the
# paste lost all indentation, which this restores).
datasource:
  primary:
    url: jdbc:mysql://SERVER_IP:3306/DATABASE?useSSL=false&useUnicode=true&useLegacyDatetimeCode=false&serverTimezone=UTC
    username: MyUser
    password: MyPassword
    driver-class-name: com.mysql.cj.jdbc.Driver
  secondary:
    url: jdbc:mysql://localhost:3306/
    # "urlEnd" is appended after the tenant schema name chosen at runtime
    urlEnd: ?useSSL=false&useUnicode=true&useLegacyDatetimeCode=false&serverTimezone=UTC
    username: root
    password: root
    driver-class-name: com.mysql.cj.jdbc.Driver
Here I separate "url" and "urlEnd" because in the middle I will paste the name of the schema to use in each case, as shown later.
ContextHolder
/**
 * Thread-scoped holder for the datasource lookup key (the tenant/schema
 * identifier). The routing datasource consults this via getClient().
 */
public abstract class ContextHolder {

    private static final Logger logger = LoggerFactory.getLogger(ContextHolder.class);

    // Each request thread carries its own datasource key.
    private static final ThreadLocal<String> contextHolder = new ThreadLocal<>();

    /** Returns the key bound to the current thread, or null if none was set. */
    public static String getClient() {
        return contextHolder.get();
    }

    /** Binds the given datasource key to the current thread. */
    public static void setClient(String context) {
        contextHolder.set(context);
    }
}
CustomRoutingDataSource
#Component
public class CustomRoutingDataSource extends AbstractRoutingDataSource {
private org.slf4j.Logger logger = LoggerFactory.getLogger(CustomRoutingDataSource.class);
#Autowired
DataSourceMap dataSources;
#Autowired
private Environment env;
public void setCurrentLookupKey() {
determineCurrentLookupKey();
}
#Override
protected Object determineCurrentLookupKey() {
String key = ContextHolder.getClient();
if(key == null || key.equals("primary")) {
DriverManagerDataSource ds = new DriverManagerDataSource();
ds.setDriverClassName(env.getProperty("spring.datasource.primary.driver-class-name"));
ds.setPassword(env.getProperty("spring.datasource.primary.password"));
ds.setUsername(env.getProperty("spring.datasource.primary.username"));
ds.setUrl(env.getProperty("spring.datasource.primary.url"));
dataSources.addDataSource("primary", ds);
setDataSources(dataSources);
afterPropertiesSet();
return "primary";
}
else {
DriverManagerDataSource ds = new DriverManagerDataSource();
ds.setDriverClassName(env.getProperty("spring.datasource.secondary.driver-class-name"));
ds.setPassword(env.getProperty("spring.datasource.secondary.password"));
ds.setUsername(env.getProperty("spring.datasource.secondary.username"));
ds.setUrl(env.getProperty("spring.datasource.secondary.url") + key + env.getProperty("spring.datasource.secondary.urlEnd"));
dataSources.addDataSource(key, ds);
setDataSources(dataSources);
afterPropertiesSet();
}
return key;
}
#Autowired
public void setDataSources(DataSourceMap dataSources) {
setTargetDataSources(dataSources.getDataSourceMap());
}
}
DatabaseSwitchInterceptor (Not used so far AFAIK)
#Component
public class DatabaseSwitchInterceptor implements HandlerInterceptor {
#Autowired
private CustomRoutingDataSource customRoutingDataSource;
private static final Logger logger = LoggerFactory
.getLogger(DatabaseSwitchInterceptor.class);
#Override
public boolean preHandle(HttpServletRequest request,
HttpServletResponse response, Object handler) throws Exception {
String hostname = request.getServerName();
ContextHolder.setClient(hostname);
return true;
}
}
DataSourceMap
#Component
public class DataSourceMap {
private static final Logger logger = LoggerFactory
.getLogger(DataSourceMap.class);
private Map<Object, Object> dataSourceMap = new ConcurrentHashMap<>();
public void addDataSource(String session, DataSource dataSource) {
this.dataSourceMap.put(session, dataSource);
}
public Map<Object, Object> getDataSourceMap() {
return dataSourceMap;
}
}
And last but not least, the controller where I am doing my test
#RestController
#RequestMapping("/open/company")
public class CompanyOpenController extends GenericCoreController<Company, Integer> {
#Autowired
private CompanyService companyService;
#Autowired
private CompltpvRepository compltpvRepository;
#Autowired
private CustomRoutingDataSource customRoutingDataSource;
#GetMapping("/prueba/{companyId}")
public List<CompltpvDTO> getAll(#PathVariable Integer companyId) throws ServiceException{
List<CompltpvDTO> response = new ArrayList<>();
ContextHolder.setClient(companyService.getById(companyId).getSchema());
for(Compltpv e : compltpvRepository.findAll()) {
response.add(new CompltpvDTO(e));
}
return response;
}
}
What I want all this to do is that, when I call "/open/company/test/3", it searches (in the main database) for the company with ID = 3. Then it retrieves its "schema" attribute value (let's say it's "12345678") and then switches to the secondary datasource with the following url:
url = env.getProperty("spring.datasource.secondary.url") + key + env.getProperty("spring.datasource.secondary.urlEnd")
which is something like:
jdbc:mysql://localhost:3306/12345678?useSSL=false&useUnicode=true&useLegacyDatetimeCode=false&serverTimezone=UTC
When I try this and look into the DataSource pool, both exist with keys "primary" and "12345678", but it's always using the "primary" one.
How can I tell Spring to use the DataSource I need it to use?
EDIT: Found the solution
I finally got a deeper understanding of what was happening and also found the problem.
In case someone is interested on this approach, the problem I was having was this line in my application.yml:
spring:
  jpa:
    # The culprit: true (the default) registers OpenEntityManagerInViewInterceptor,
    # binding one EntityManager (and its datasource) to the entire request —
    # which defeats mid-request datasource switching. Set to false for routing.
    open-in-view: true
which does the following:
Default: true
Register OpenEntityManagerInViewInterceptor. Binds a JPA EntityManager to the thread for the entire processing of the request.
And that was the reason that, despite creating the datasource for every company (tenant), it wasn't using it. So if you are reading this and are in my situation, find that line and set it to false. If you don't find that property, notice that by default it'll be set to true.

Spring Boot-Log4J2: Is there any way to use the values in application.yml for JDBC appender?

I'm trying to create a JDBC appender to log into a DB table. I've succeeded so far by creating a ConnectionFactory and specifying it in log4j2-spring.xml. However, the DB credentials are all hardcoded and I would like to use the credentials I have in my application.yml but since logging seems to be loaded before Spring ApplicationContext, using the #Value annotation doesn't work.
I've also tried building the appender programmatically with this guide as a reference but it's requiring me to define the extended LifeCycle methods in ConnectionSource and I'm not sure what to do with that.
I've tried defining the ConnectionSource class as this but it returns a NullPointerException on this line
// Programmatic construction of a Log4j2 JDBC appender; the question reports
// a NullPointerException thrown by this builder chain (buffering disabled,
// exceptions ignored by the appender itself).
Appender jdbcAppender = JdbcAppender.newBuilder()
.setBufferSize(0)
.setColumnConfigs(columnConfigs)
.setColumnMappings()
.setConnectionSource(connectionSource)
.setTableName("LOG")
.setName("databaseAppender")
.setIgnoreExceptions(true)
.setFilter(null)
.build();
This is the ConnectionSource I made:
public class LoggerDatabaseSource implements ConnectionSource
{
private DataSource dataSource;
public LoggerDatabaseSource(String url, String userName, String password, String validationQuery)
{
Properties properties = new Properties();
properties.setProperty("user", userName);
properties.setProperty("password", password);
GenericObjectPool<PoolableConnection> pool = new GenericObjectPool<>();
DriverManagerConnectionFactory cf = new DriverManagerConnectionFactory(url, properties);
new PoolableConnectionFactory(cf, pool, null, validationQuery, 3, false, false, Connection.TRANSACTION_READ_COMMITTED);
this.dataSource = new PoolingDataSource(pool);
}
#Override
public Connection getConnection() throws SQLException
{
return dataSource.getConnection();
}
#Override
public State getState() {
return null;
}
#Override
public void initialize() {
}
#Override
public void start() {
}
#Override
public void stop() {
}
#Override
public boolean isStarted() {
return false;
}
#Override
public boolean isStopped() {
return false;
}
}
Any help would be appreciated. Thanks in advance!
Is there a reason you aren't using one of Log4j's database appenders? If you want access to properties defined in Spring's configuration, if you are using Spring Boot you can use the Log4j Spring Cloud Config Client which provides the Spring Lookup. This will let you reference properties from your application.yml file and use them in your log4j configuration.

Micronaut FunctionInitializer override application properties

#Singleton
public class TestFunction extends FunctionInitializer {
Logger log = LoggerFactory.getLogger(TestFunction.class);
public TestFunction() {
}
public String execute() {
return "Hello";
}
}
I want to override datasource properties in application.yml file programmatically, but without using bean created event listener. Is there a way to do that. Like creating a custom application context with properties.
I have used the below approach for Micronaut API gateway proxy.
// Elided example (the "......." lines are the author's omissions): builds a
// Micronaut ApplicationContext whose properties — fetched credentials — take
// precedence over application.yml before the Lambda container handler starts.
public class StreamLambdaHandler implements RequestStreamHandler {
.......
public StreamLambdaHandler() {
try {
log.info("Initializing Lambda Container");
this.dbCredentialService = new DBCredentialService();
// Get updated database credential map
Map<String, Object> props = this.dbCredentialService.getDbCredential();
// Create application context builder with updated properties
// i.e Override datasources properties in application.yml
builder = ApplicationContext.build().properties(props);
handler = new MicronautLambdaContainerHandler(builder);
}....
........
}
Can we do something similar with FunctionInitializer?
If you plan to override only datasource credentials properties it could be done this way.
#Factory
public class HikariDataSourceFactory {
#Bean
#Primary
public DataSource dataSource(DBCredentialService credentialService) throws URISyntaxException {
Map<String, Object> credentials = this.dbCredentialService.getDbCredential();
String username = "user";
String password = credentials.get("username");
HikariConfig config = new HikariConfig();
config.setJdbcUrl("jdbc:postgresql://localhost:5432/postgres");
config.setUsername(username);
config.setPassword(password);
config.setDriverClassName("org.postgresql.Driver");
return new HikariUrlDataSource(config);
}
}

Spring data solr HttpSolrClient does not use core annotation from entity

With a configuration as follows
#Configuration
#EnableSolrRepositories(basePackages={"com.foo"}, multicoreSupport=true)
public class SolrConfig {
#Value("${solr.host}") String solrHost;
#Bean
public SolrClient solrClient() {
return new HttpSolrClient(solrHost);
}
#Bean
public SolrTemplate solrTemplate() {
return new SolrTemplate(solrClient());
}
}
I have a simple entity:
#SolrDocument(solrCoreName = "core1")
public class MyEntity implements Serializable {
If using SolrTemplate to execute queries, it does not use the coreName annotation on the document:
Page results = solrTemplate.queryForPage(search, MyEntity.class);
I get exception:
org.springframework.data.solr.UncategorizedSolrException: Error from server at http://localhost:8983/solr: Expected mime type application/octet-stream but got text/html.
[..]
Problem accessing /solr/select
[...]
<title>Error 404 Not Found</title>
Changing the SolrTemplate bean to:
#Bean
public SolrTemplate solrTemplate() {
return new SolrTemplate(solrClient(), "core1");
}
works
The guys over at spring-data confirmed this is expected behaviour and the template won't read the core from the entity annotation.
So in a multicoreSupport=true environment, if you want to use both the repository and the template you'll have to create 2 beans:
For the repository the base template:
#Bean
public SolrTemplate solrTemplate() {
return new SolrTemplate(solrClient());
}
and for injecting you will have another one:
#Bean
public SolrTemplate customTemplate() {
return new SolrTemplate(solrClient(), "core1");
}
Obviously if you don't need multicoreSupport=true none is needed!

Apache Camel Default Transaction Manager Data Not Seen in Route

I have two routes in Apache Camel. I am using Spring 3 and JPA annotations through Hibernate.
I have multiple data sources and have configured Apache Camel to use one default TransactionManager through SpringTransactionPolicy.
I save the data using DAO and can see the data within the test class by actually finding data. I then use the same DAO within Apache Camel Processor to find the same data, but the data is not found. I don't see why within the test class the data can be found and as soon as I try to look in the Apache Camel custom processor the data cannot be found.
Below is the sample code:
public class Config {
#Autowired
private TransactionManagerConfig transactionManagerConfig;
#Bean
public SpringTransactionPolicy defaultTransactionPolicy() throws Exception {
SpringTransactionPolicy transactionPolicy = new SpringTransactionPolicy();
transactionPolicy.setTransactionManager((PlatformTransactionManager) transactionManagerConfig.transactionManager());
return transactionPolicy;
}
}
public class CustomRouter extends SpringRouteBuilder {
public void configure() throws Exception {
ApplicationContext applicationContext = getApplicationContext();
from("someSource")).transacted().unmarshal().jaxb("com.sample.processor.entities").convertBodyTo(Entity.class).to("direct:x")
.end();
from("direct:x").transacted().process((CustomProcessor) applicationContext.getBean("customProcessor")).to(someSource);
}
}
public class CustomProcessor implements org.apache.camel.Processor {
#Resource
private StatusDAO statusDAO;
public CustomProcessor(StatusDAO statusDAO) {
this.statusDAO = statusDAO;
}
public void process(Exchange exchange) throws Exception {
Status status = null;
Message message = exchange.getIn();
Entity entity = (Entity) message.getBody();
if(entity.getId() != null) {
// find by id
status = statusDAO.findById(id);
}
status.setName("someName");
messageStatusDAO.update(status);
message.setBody(messageStatus);
exchange.setOut(message);
}
}
Data is received within the system as XML and changed to a Entity class. I find data using the statusDAO.findById() at this point the DAO returns no data even though the Entity exists.
I setup the test as below:
#Transactional
#TransactionalConfiguration(defaultRollback = true)
public class CustomTest {
#Transactional
#Before
public void setup throws Exception {
// setup the Status entity and save it using the DAO
}
#Test
public void testSomething() {
Status status = statusDAO.findById("id"); // returns the Status that was saved
MockEndpoint mockEndpoint = (MockEndpoint) this.context.getEndpoint(someDestination);
mockEndpoint.expectedMessageCount(1);
Status status = producerTemplate.requestBody(source, someXML, Status.class);
assertNotNull(status);
mockEndpoint.assertIsSatisfied();
}
}
Using the same transaction manager and the same statusDAO within the Custom Processor does not return the status.
I don't understand why.

Categories