Using @Primary in Spring Data JPA repositories

If someone ever needs to use @Primary on Spring Data repositories:
It looks like Spring Data JPA ignores @Primary annotations on repositories.
As a workaround I created a BeanFactoryPostProcessor which checks whether a given repository interface carries the @Primary annotation and, if so, marks that bean definition as primary.
This is the code:
@Component
public class SpringDataPrimaryPostProcessor implements BeanFactoryPostProcessor {
public static final String REPOSITORY_INTERFACE_PROPERTY = "repositoryInterface";
// "log" was used but never declared in the original snippet; a plain SLF4J logger works here
private static final Logger log = LoggerFactory.getLogger(SpringDataPrimaryPostProcessor.class);
@Override
public void postProcessBeanFactory(ConfigurableListableBeanFactory beanFactory) throws BeansException {
makeRepositoriesPrimary(getRepositoryBeans(beanFactory));
}
protected List<BeanDefinition> getRepositoryBeans(ConfigurableListableBeanFactory beanFactory) {
List<BeanDefinition> springDataRepositoryDefinitions = Lists.newArrayList();
for (String beanName : beanFactory.getBeanDefinitionNames()) {
BeanDefinition beanDefinition = beanFactory.getBeanDefinition(beanName);
String beanClassName = beanDefinition.getBeanClassName();
try {
Class<?> beanClass = Class.forName(beanClassName);
if (isSpringDataJpaRepository(beanClass)) {
springDataRepositoryDefinitions.add(beanDefinition);
}
} catch (ClassNotFoundException e) {
throw new ApplicationContextException(String.format("Error when trying to create instance of %s", beanClassName), e);
}
}
return springDataRepositoryDefinitions;
}
protected void makeRepositoriesPrimary(List<BeanDefinition> repositoryBeans) {
for (BeanDefinition repositoryBeanDefinition : repositoryBeans) {
String repositoryInterface = (String) repositoryBeanDefinition.getPropertyValues().get(REPOSITORY_INTERFACE_PROPERTY);
if (isPrimary(repositoryInterface)) {
log.debug("Making site repository bean primary, class: {}", repositoryInterface);
repositoryBeanDefinition.setPrimary(true);
}
}
}
protected boolean isSpringDataJpaRepository(Class<?> beanClass) {
return RepositoryFactoryInformation.class.isAssignableFrom(beanClass);
}
private boolean isPrimary(String repositoryInterface) {
return AnnotationUtils.findAnnotation(getClassSafely(repositoryInterface), Primary.class) != null;
}
private Class<?> getClassSafely(String repositoryInterface) {
try {
return Class.forName(repositoryInterface);
} catch (ClassNotFoundException e) {
throw new ApplicationContextException(String.format("Error when trying to create instance of %s", repositoryInterface), e);
}
}
}
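With this post processor registered, putting @Primary directly on one of two competing repository interfaces works as you would expect. A minimal illustration (User and these two repository interfaces are hypothetical, purely to show the wiring):

public interface UserRepository extends JpaRepository<User, Long> {
}

@Primary
public interface DefaultUserRepository extends UserRepository {
    // with the post processor above, this bean wins whenever a plain UserRepository is injected
}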

I tried applying this solution to a Spring Boot application that has two Mongo repositories,
but the post processor could not find the repositoryInterface entry in the bean definition's property values.
Further investigation revealed that there is a bean definition attribute, factoryBeanObjectType, that identifies the repository interface.
So changing the code in the makeRepositoriesPrimary() method from:
String repositoryInterface = (String) repositoryBeanDefinition.getPropertyValues().get(REPOSITORY_INTERFACE_PROPERTY);
To:
String repositoryInterface = (String) repositoryBeanDefinition.getAttribute("factoryBeanObjectType");
worked as expected.
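Putting both cases together, a version of makeRepositoriesPrimary() that tries the property first and falls back to the attribute could look like this (just a sketch based on the two snippets above, not code from the original post):

protected void makeRepositoriesPrimary(List<BeanDefinition> repositoryBeans) {
    for (BeanDefinition beanDefinition : repositoryBeans) {
        // Some Spring Data versions expose the repository interface as a property value,
        // others (e.g. the Mongo case described above) as the factoryBeanObjectType attribute.
        Object value = beanDefinition.getPropertyValues().get(REPOSITORY_INTERFACE_PROPERTY);
        if (value == null) {
            value = beanDefinition.getAttribute("factoryBeanObjectType");
        }
        if (value == null) {
            continue;
        }
        // Depending on the version, the attribute may hold either a Class or a fully qualified class name
        String repositoryInterface = (value instanceof Class) ? ((Class<?>) value).getName() : value.toString();
        if (isPrimary(repositoryInterface)) {
            beanDefinition.setPrimary(true);
        }
    }
}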
Hope this helps.

Related

Custom Spring validator not working properly

I'm trying to produce an artificial constraint violation via Spring instead of throwing an exception from the DB (an expert said DB-produced errors have a high performance cost):
import javax.validation.ConstraintViolation;
import javax.validation.Validator;
@Component
public class AccountValidator implements org.springframework.validation.Validator {
@Autowired
private Validator validator;
private final AccountService accountService;
public AccountValidator(@Qualifier("accountServiceAlias") AccountService accountService) {
this.accountService = accountService;
}
@Override
public boolean supports(Class<?> clazz) {
return AccountRequestDTO.class.equals(clazz);
}
@Override
public void validate(Object target, Errors errors) {
Set<ConstraintViolation<Object>> validates = validator.validate(target);
for (ConstraintViolation<Object> constraintViolation : validates) {
String propertyPath = constraintViolation.getPropertyPath().toString();
String message = constraintViolation.getMessage();
errors.rejectValue(propertyPath, "", message);
}
AccountRequestDTO account = (AccountRequestDTO) target;
if(accountService.getPhone(account.getPhone()) != null){
errors.rejectValue("phone", "", "Validator in action! This number is already in use.");
}
}
}
However, the second part of the validate() method never works for reasons I can't understand, and the call from the controller always ends up being handled in the try-catch block, throwing the exception from the DB:
public void saveAccount(AccountRequestDTO accountRequestDTO) throws Exception {
LocalDate birthday = LocalDate.parse(accountRequestDTO.getBirthday());
if (LocalDate.from(birthday).until(LocalDate.now(), ChronoUnit.YEARS) < 18) {
throw new RegistrationException("You must be 18+ to register");
}
Account account = new Account(accountRequestDTO.getName(), accountRequestDTO.getSurname(),
accountRequestDTO.getPhone(), birthday, BCrypt.hashpw
(accountRequestDTO.getPassword(), BCrypt.gensalt(4)));
account.addRole(Role.CLIENT);
try {
accountRepository.save(account);
}
catch (RuntimeException exc) {
throw new PersistenceException("Database exception: this number is already in use.");
}
}
Here's a controller method:
@PostMapping("/confirm")
public String signIn(@ModelAttribute("account") @Valid AccountRequestDTO accountRequestDTO,
BindingResult result, Model model) {
accountValidator.validate(accountRequestDTO, result);
if(result.hasErrors()) {
return "/auth/register";
}
try {
accountService.saveAccount(accountRequestDTO);
}
catch (Exception exc) {
model.addAttribute("message", exc.getMessage());
return "/auth/register";
}
return "/auth/login";
}
At service:
@Transactional(readOnly = true)
public String getPhone(String phone){
return accountRepository.getPhone(phone);
}
JpaRepository query:
@Query("SELECT phone FROM Account accounts WHERE phone=:check")
String getPhone(String check);
Tests are green:
@BeforeAll
static void prepare() {
search = new String("0000000000");
}
@BeforeEach
void set_up() {
account = new Account
("Admin", "Adminov", "0000000000", LocalDate.of(2001, 01, 01), "superadmin");
accountRepository.save(account);
}
@Test
void check_if_phone_presents() {
assertThat(accountRepository.getPhone(search).equals(account.getPhone())).isTrue();
}
@Test
void check_if_phone_not_presents() {
String newPhone = "9999999999";
assertThat(accountRepository.getPhone(newPhone)).isNull();
}
@AfterEach
void tear_down() {
accountRepository.deleteAll();
account = null;
}
@AfterAll
static void clear() {
search = null;
}
You need to register your validator.
After we've defined the validator, we need to map it to a specific event which is generated after the request is accepted.
This can be done in three ways:
1. Add a @Component annotation with the name "beforeCreateAccountValidator". Spring Boot will recognize the beforeCreate prefix, which determines the event we want to catch, and it will also recognize the Account class from the Component name.
@Component("beforeCreateAccountValidator")
public class AccountValidator implements Validator {
...
}
2. Create the bean in the application context with the @Bean annotation:
@Bean
public AccountValidator beforeCreateAccountValidator () {
return new AccountValidator ();
}
3. Manual registration:
@SpringBootApplication
public class SpringDataRestApplication implements RepositoryRestConfigurer {
public static void main(String[] args) {
SpringApplication.run(SpringDataRestApplication.class, args);
}
@Override
public void configureValidatingRepositoryEventListener(
ValidatingRepositoryEventListener v) {
v.addValidator("beforeCreate", new AccountValidator ());
}
}

Spring Data JDBC: can't add custom converter for enum

I want to have an enum as a field of my entity.
My application looks like this:
Spring Boot version:
plugins {
id 'org.springframework.boot' version '2.6.2' apply false
}
repository:
@Repository
public interface MyEntityRepository extends PagingAndSortingRepository<MyEntity, UUID> {
...
entity:
@Table("my_entity")
public class MyEntity{
...
private FileType fileType;
// get + set
}
enum declaration:
public enum FileType {
TYPE_1(1),
TYPE_2(2);
int databaseId;
public static FileType byDatabaseId(Integer databaseId){
return Arrays.stream(values()).findFirst().orElse(null);
}
FileType(int databaseId) {
this.databaseId = databaseId;
}
public int getDatabaseId() {
return databaseId;
}
}
My attempt:
I found the following answer and tried to follow it: https://stackoverflow.com/a/53296199/2674303
So I added this bean:
@Bean
public JdbcCustomConversions jdbcCustomConversions() {
return new JdbcCustomConversions(asList(new DatabaseIdToFileTypeConverter(), new FileTypeToDatabaseIdConverter()));
}
converters:
@WritingConverter
public class FileTypeToDatabaseIdConverter implements Converter<FileType, Integer> {
@Override
public Integer convert(FileType source) {
return source.getDatabaseId();
}
}
@ReadingConverter
public class DatabaseIdToFileTypeConverter implements Converter<Integer, FileType> {
@Override
public FileType convert(Integer databaseId) {
return FileType.byDatabaseId(databaseId);
}
}
But I see this error:
The bean 'jdbcCustomConversions', defined in class path resource
[org/springframework/boot/autoconfigure/data/jdbc/JdbcRepositoriesAutoConfiguration$SpringBootJdbcConfiguration.class],
could not be registered. A bean with that name has already been
defined in my.pack.Main and overriding is disabled.
I tried renaming the method jdbcCustomConversions() to myJdbcCustomConversions(). That avoided the error above, but the converter is not invoked during entity persistence, and I see another error: the application tries to save a String while the database column type is bigint.
20:39:10.689 DEBUG [main] o.s.jdbc.core.StatementCreatorUtils: JDBC getParameterType call failed - using fallback method instead: org.postgresql.util.PSQLException: ERROR: column "file_type" is of type bigint but expression is of type character varying
Hint: You will need to rewrite or cast the expression.
Position: 174
I also tried to use the latest (at the time of writing) version of Spring Boot:
id 'org.springframework.boot' version '2.6.2' apply false
But it didn't help.
What have I missed?
How can I map an enum to an integer column properly?
P.S.
I use the following code for testing:
@SpringBootApplication
@EnableJdbcAuditing
@EnableScheduling
public class Main {
public static void main(String[] args) {
ConfigurableApplicationContext applicationContext = SpringApplication.run(Main.class, args);
MyEntityRepository repository = applicationContext.getBean(MyEntityRepository.class);
MyEntity entity = new MyEntity();
...
entity.setFileType(FileType.TYPE_2);
repository.save(entity);
}
@Bean
public ModelMapper modelMapper() {
ModelMapper mapper = new ModelMapper();
mapper.getConfiguration()
.setMatchingStrategy(MatchingStrategies.STRICT)
.setFieldMatchingEnabled(true)
.setSkipNullEnabled(true)
.setFieldAccessLevel(PRIVATE);
return mapper;
}
@Bean
public AbstractJdbcConfiguration jdbcConfiguration() {
return new MySpringBootJdbcConfiguration();
}
@Configuration
static class MySpringBootJdbcConfiguration extends AbstractJdbcConfiguration {
@Override
protected List<?> userConverters() {
return asList(new DatabaseIdToFileTypeConverter(), new FileTypeToDatabaseIdConverter());
}
}
}
UPDATE
My code is:
@SpringBootApplication
@EnableJdbcAuditing
@EnableScheduling
public class Main {
public static void main(String[] args) {
ConfigurableApplicationContext applicationContext = SpringApplication.run(Main.class, args);
MyEntityRepository repository = applicationContext.getBean(MyEntityRepository.class);
MyEntity entity = new MyEntity();
...
entity.setFileType(FileType.TYPE_2);
repository.save(entity);
}
@Bean
public ModelMapper modelMapper() {
ModelMapper mapper = new ModelMapper();
mapper.getConfiguration()
.setMatchingStrategy(MatchingStrategies.STRICT)
.setFieldMatchingEnabled(true)
.setSkipNullEnabled(true)
.setFieldAccessLevel(PRIVATE);
return mapper;
}
@Bean
public AbstractJdbcConfiguration jdbcConfiguration() {
return new MySpringBootJdbcConfiguration();
}
@Configuration
static class MySpringBootJdbcConfiguration extends AbstractJdbcConfiguration {
@Override
protected List<?> userConverters() {
return asList(new DatabaseIdToFileTypeConverter(), new FileTypeToDatabaseIdConverter());
}
@Bean
public JdbcConverter jdbcConverter(JdbcMappingContext mappingContext,
NamedParameterJdbcOperations operations,
@Lazy RelationResolver relationResolver,
JdbcCustomConversions conversions,
Dialect dialect) {
JdbcArrayColumns arrayColumns = dialect instanceof JdbcDialect ? ((JdbcDialect) dialect).getArraySupport()
: JdbcArrayColumns.DefaultSupport.INSTANCE;
DefaultJdbcTypeFactory jdbcTypeFactory = new DefaultJdbcTypeFactory(operations.getJdbcOperations(),
arrayColumns);
return new MyJdbcConverter(
mappingContext,
relationResolver,
conversions,
jdbcTypeFactory,
dialect.getIdentifierProcessing()
);
}
}
static class MyJdbcConverter extends BasicJdbcConverter {
MyJdbcConverter(
MappingContext<? extends RelationalPersistentEntity<?>, ? extends RelationalPersistentProperty> context,
RelationResolver relationResolver,
CustomConversions conversions,
JdbcTypeFactory typeFactory,
IdentifierProcessing identifierProcessing) {
super(context, relationResolver, conversions, typeFactory, identifierProcessing);
}
@Override
public int getSqlType(RelationalPersistentProperty property) {
if (FileType.class.equals(property.getActualType())) {
return Types.BIGINT;
} else {
return super.getSqlType(property);
}
}
@Override
public Class<?> getColumnType(RelationalPersistentProperty property) {
if (FileType.class.equals(property.getActualType())) {
return Long.class;
} else {
return super.getColumnType(property);
}
}
}
}
But I get the following error:
Caused by: org.postgresql.util.PSQLException: Cannot convert an instance of java.lang.String to type long
at org.postgresql.jdbc.PgPreparedStatement.cannotCastException(PgPreparedStatement.java:925)
at org.postgresql.jdbc.PgPreparedStatement.castToLong(PgPreparedStatement.java:810)
at org.postgresql.jdbc.PgPreparedStatement.setObject(PgPreparedStatement.java:561)
at org.postgresql.jdbc.PgPreparedStatement.setObject(PgPreparedStatement.java:931)
at com.zaxxer.hikari.pool.HikariProxyPreparedStatement.setObject(HikariProxyPreparedStatement.java)
at org.springframework.jdbc.core.StatementCreatorUtils.setValue(StatementCreatorUtils.java:414)
at org.springframework.jdbc.core.StatementCreatorUtils.setParameterValueInternal(StatementCreatorUtils.java:231)
at org.springframework.jdbc.core.StatementCreatorUtils.setParameterValue(StatementCreatorUtils.java:146)
at org.springframework.jdbc.core.PreparedStatementCreatorFactory$PreparedStatementCreatorImpl.setValues(PreparedStatementCreatorFactory.java:283)
at org.springframework.jdbc.core.PreparedStatementCreatorFactory$PreparedStatementCreatorImpl.createPreparedStatement(PreparedStatementCreatorFactory.java:241)
at org.springframework.jdbc.core.JdbcTemplate.execute(JdbcTemplate.java:649)
... 50 more
Caused by: java.lang.NumberFormatException: For input string: "TYPE_2"
at java.lang.NumberFormatException.forInputString(NumberFormatException.java:65)
at java.lang.Long.parseLong(Long.java:589)
at java.lang.Long.parseLong(Long.java:631)
at org.postgresql.jdbc.PgPreparedStatement.castToLong(PgPreparedStatement.java:792)
... 59 more
Try the following instead:
@Bean
public AbstractJdbcConfiguration jdbcConfiguration() {
return new MySpringBootJdbcConfiguration();
}
@Configuration
static class MySpringBootJdbcConfiguration extends AbstractJdbcConfiguration {
@Override
protected List<?> userConverters() {
return List.of(new DatabaseIdToFileTypeConverter(), new FileTypeToDatabaseIdConverter());
}
}
Explanation:
Spring complains that jdbcCustomConversions, which the auto-configuration class also defines, is already defined by your bean, and you don't have bean overriding enabled.
JdbcRepositoriesAutoConfiguration has changed a few times; in Spring Boot 2.6.2 it has:
@Configuration(proxyBeanMethods = false)
@ConditionalOnMissingBean(AbstractJdbcConfiguration.class)
static class SpringBootJdbcConfiguration extends AbstractJdbcConfiguration {
}
In turn, AbstractJdbcConfiguration has:
@Bean
public JdbcCustomConversions jdbcCustomConversions() {
try {
Dialect dialect = applicationContext.getBean(Dialect.class);
SimpleTypeHolder simpleTypeHolder = dialect.simpleTypes().isEmpty() ? JdbcSimpleTypes.HOLDER
: new SimpleTypeHolder(dialect.simpleTypes(), JdbcSimpleTypes.HOLDER);
return new JdbcCustomConversions(
CustomConversions.StoreConversions.of(simpleTypeHolder, storeConverters(dialect)), userConverters());
} catch (NoSuchBeanDefinitionException exception) {
LOG.warn("No dialect found. CustomConversions will be configured without dialect specific conversions.");
return new JdbcCustomConversions();
}
}
As you can see, JdbcCustomConversions is not conditional in any way, so defining your own caused a conflict. Fortunately, it provides an extension point, userConverters(), which can be overridden to provide your own converters.
Update
As discussed in the comments:
- FileType.byDatabaseId is broken: it ignores its input parameter.
- As the column type in the DB is BIGINT, your converters must convert from Long, not from Integer; this addresses read queries (a sketch of both fixes follows right after this list).
- For writes, there is an open bug: https://github.com/spring-projects/spring-data-jdbc/issues/629. There is a hardcoded assumption that enums are converted to Strings, and only Enum -> String converters are checked.
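For the first two points, a possible fix (sketched here, not part of the original answer; note the byDatabaseId signature is changed to Long accordingly) would be:

public static FileType byDatabaseId(Long databaseId) {
    // filter on the argument instead of always returning the first constant
    return Arrays.stream(values())
            .filter(type -> databaseId != null && type.databaseId == databaseId.intValue())
            .findFirst()
            .orElse(null);
}

@ReadingConverter
public class DatabaseIdToFileTypeConverter implements Converter<Long, FileType> {
    @Override
    public FileType convert(Long databaseId) {
        return FileType.byDatabaseId(databaseId);
    }
}

@WritingConverter
public class FileTypeToDatabaseIdConverter implements Converter<FileType, Long> {
    @Override
    public Long convert(FileType source) {
        return (long) source.getDatabaseId();
    }
}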
As we want to convert to Long, we need to make amendments to BasicJdbcConverter by subclassing it and registering the subclassed converter as a @Bean.
You need to override two methods:
public int getSqlType(RelationalPersistentProperty property)
public Class<?> getColumnType(RelationalPersistentProperty property)
I hardcoded the Enum type and corresponding column types, but you may want to get more fancy with that.
@Bean
public AbstractJdbcConfiguration jdbcConfiguration() {
return new MySpringBootJdbcConfiguration();
}
@Configuration
static class MySpringBootJdbcConfiguration extends AbstractJdbcConfiguration {
@Override
protected List<?> userConverters() {
return List.of(new DatabaseIdToFileTypeConverter(), new FileTypeToDatabaseIdConverter());
}
@Bean
public JdbcConverter jdbcConverter(JdbcMappingContext mappingContext,
NamedParameterJdbcOperations operations,
@Lazy RelationResolver relationResolver,
JdbcCustomConversions conversions,
Dialect dialect) {
JdbcArrayColumns arrayColumns = dialect instanceof JdbcDialect ? ((JdbcDialect) dialect).getArraySupport()
: JdbcArrayColumns.DefaultSupport.INSTANCE;
DefaultJdbcTypeFactory jdbcTypeFactory = new DefaultJdbcTypeFactory(operations.getJdbcOperations(),
arrayColumns);
return new MyJdbcConverter(
mappingContext,
relationResolver,
conversions,
jdbcTypeFactory,
dialect.getIdentifierProcessing()
);
}
}
static class MyJdbcConverter extends BasicJdbcConverter {
MyJdbcConverter(
MappingContext<? extends RelationalPersistentEntity<?>, ? extends RelationalPersistentProperty> context,
RelationResolver relationResolver,
CustomConversions conversions,
JdbcTypeFactory typeFactory,
IdentifierProcessing identifierProcessing) {
super(context, relationResolver, conversions, typeFactory, identifierProcessing);
}
@Override
public int getSqlType(RelationalPersistentProperty property) {
if (FileType.class.equals(property.getActualType())) {
return Types.BIGINT;
} else {
return super.getSqlType(property);
}
}
@Override
public Class<?> getColumnType(RelationalPersistentProperty property) {
if (FileType.class.equals(property.getActualType())) {
return Long.class;
} else {
return super.getColumnType(property);
}
}
}

Spring AOP advice fails to wrap the proxy when the advice bean is in creation

The issue is the same as in this question.
I am working with Spring AOP annotation-based aspects; one of my service beans cannot get the annotation advice installed, but others can.
Then I debugged and found the reason: the code in BeanFactoryAdvisorRetrievalHelper shows that if an advisor bean is currently in creation, the service bean will fail to get the advisor installed:
if (this.beanFactory.isCurrentlyInCreation(name))
public List<Advisor> findAdvisorBeans() {
// Determine list of advisor bean names, if not cached already.
String[] advisorNames = this.cachedAdvisorBeanNames;
if (advisorNames == null) {
// Do not initialize FactoryBeans here: We need to leave all regular beans
// uninitialized to let the auto-proxy creator apply to them!
advisorNames = BeanFactoryUtils.beanNamesForTypeIncludingAncestors(
this.beanFactory, Advisor.class, true, false);
this.cachedAdvisorBeanNames = advisorNames;
}
if (advisorNames.length == 0) {
return new ArrayList<>();
}
List<Advisor> advisors = new ArrayList<>();
for (String name : advisorNames) {
if (isEligibleBean(name)) {
if (this.beanFactory.isCurrentlyInCreation(name)) {
if (logger.isTraceEnabled()) {
logger.trace("Skipping currently created advisor '" + name + "'");
}
}
else {
try {
advisors.add(this.beanFactory.getBean(name, Advisor.class));
}
catch (BeanCreationException ex) {
Throwable rootCause = ex.getMostSpecificCause();
if (rootCause instanceof BeanCurrentlyInCreationException) {
BeanCreationException bce = (BeanCreationException) rootCause;
String bceBeanName = bce.getBeanName();
if (bceBeanName != null && this.beanFactory.isCurrentlyInCreation(bceBeanName)) {
if (logger.isTraceEnabled()) {
logger.trace("Skipping advisor '" + name +
"' with dependency on currently created bean: " + ex.getMessage());
}
// Ignore: indicates a reference back to the bean we're trying to advise.
// We want to find advisors other than the currently created bean itself.
continue;
}
}
throw ex;
}
}
}
}
return advisors;
}
The interceptor is
public class DemoInterceptor implements MethodInterceptor {
@Override
public Object invoke(MethodInvocation invocation) throws Throwable {
//do something
return new Object();
}
}
The advisor config is:
Config way 1:
public class DemoAdvisor implements PointcutAdvisor {
private MethodInterceptor methodInterceptor;
private Pointcut pointcut;
public DemoAdvisor(MethodInterceptor methodInterceptor) {
this.methodInterceptor = methodInterceptor;
this.pointcut = new AnnotationMatchingPointcut(null, DemoAnno.class);
}
@Override
public Advice getAdvice() {
return this.methodInterceptor;
}
@Override
public boolean isPerInstance() {
return true;
}
@Override
public Pointcut getPointcut() {
return this.pointcut;
}
}
@Configuration
public class AnnoAspectConfig {
@Bean
public DemoAdvisor DemoAdvisor() {
DemoAdvisor advisor = new DemoAdvisor(demoInterceptor());
return advisor;
}
@Bean
public DemoInterceptor demoInterceptor() {
DemoInterceptor demoInterceptor = new DemoInterceptor();
return demoInterceptor;
}
}
Then I thought it might be a problem in the advice config, so I changed the advice config to the one below.
Config way 2:
@Configuration
public class ValidationAdvisor {
@Bean
public DefaultPointcutAdvisor demoAdvisor() {
DemoInterceptor interceptor = new DemoInterceptor();
AnnotationMatchingPointcut annotationMatchingPointcut = AnnotationMatchingPointcut.forMethodAnnotation(DemoAnno.class);
DefaultPointcutAdvisor advisor = new DefaultPointcutAdvisor();
advisor.setPointcut(annotationMatchingPointcut);
advisor.setAdvice(interceptor);
return advisor;
}
}
Then everything works fine, so the question is: why does config way 1 cause the advisor installation to fail while config way 2 does not?

Register a BeanFactory in Spring Boot

First, I'm not sure if it's a good idea to do all this.
The goal is to create some interfaces with annotations to hide legacy position-based string access to a configuration database, without implementing each interface.
Declaratively configured interface:
public interface LegacyConfigItem extends ConfigDbAccess{
@Subfield(length=3)
String BWHG();
@Subfield(start = 3, length=1)
int BNKST();
@Subfield(start = 4, length=1)
int BEINH();
:
}
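The Subfield annotation itself is not shown in the post; judging from the way it is used above, it would look roughly like this (a sketch, with start defaulting to 0):

@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.METHOD)
public @interface Subfield {
    int start() default 0;
    int length();
}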
Base interface for runtime identification
public interface ConfigDbAccess{
}
Dummy implementation without functionality, may change.
public class EmptyImpl {
}
BeanFactory and a MethodInvocation interceptor to handle the unimplemented methods:
@Component
public class InterfaceBeanFactory extends DefaultListableBeanFactory {
protected static final int TEXT_MAX = 400;
@Autowired
private EntityRepo entityRepo;
public <T> T getInstance(Class<T> legacyInterface, String key) {
ProxyFactory factory = new ProxyFactory(new EmptyImpl());
factory.setInterfaces(legacyInterface);
factory.setExposeProxy(true);
factory.addAdvice(new MethodInterceptor() {
@Override
public Object invoke(MethodInvocation invocation) throws Throwable {
Key keyAnnotation = invocation.getThis().getClass().getAnnotation(Key.class);
String key = keyAnnotation.key().toUpperCase();
String ptart = invocation.getMethod().getDeclaringClass().getSimpleName();
Vpt result = entityRepo.getOne(new EntityId(ptart.toUpperCase(), key.toUpperCase()));
Subfield sub = invocation.getMethod().getAnnotation(Subfield.class);
//TODO: Raise missing Subfield annotation
int start = sub.start();
int length = sub.length();
if (start + length > TEXT_MAX) {
//TODO: Raise invalid Subfield config
}
String value = result.getTextField().substring(start,start+length);
return value;
}
});
return (T) factory.getProxy();
}
@Override
protected Map<String, Object> findAutowireCandidates(String beanName, Class<?> requiredType, DependencyDescriptor descriptor) {
Map<String, Object> map = super.findAutowireCandidates(beanName, requiredType, descriptor);
if (ConfigDbAccess.class.isAssignableFrom(requiredType )) {
:
@SpringBootApplication
public class JpaDemoApplication {
@Autowired
private ApplicationContext context;
public static void main(String[] args) {
SpringApplication app = new SpringApplication(JpaDemoApplication.class);
// app.setApplicationContextClass(InterfaceInjectionContext .class);
app.run(args);
}
public class InterfaceInjectionContext extends AnnotationConfigApplicationContext {
public InterfaceInjectionContext() {
super (new InterfaceBeanFactory ());
}
}
}
So far I have got all this stuff working, except that when I try to set the application's context class to my DefaultListableBeanFactory, I kill the Spring Boot web starter. The application starts, injects the Autowired fields with my intercepted pseudo implementation, and then ends.
I think I'm doing something wrong with registering the DefaultListableBeanFactory, but I've no idea how to do it right.
To get this answered:
M. Deinum pointed me to a much simpler solution: instead of creating a BeanFactory, I installed a BeanPostProcessor with this functionality.
@RestController
public class DemoRestController {
@Autowired
VptService vptService;
@ConfigItem(key="KS001")
private PrgmParm prgmKs001;
@ConfigItem(key="KS002")
private PrgmParm prgmKs002;
public DemoRestController() {
super();
}
Here the ConfigItem annotation defines the injection point.
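The ConfigItem annotation is not shown in the post; based on how it is used on the fields above, a minimal sketch would be:

@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.FIELD)
public @interface ConfigItem {
    String key();
}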
Next I created a CustomBeanPostProcessor which scans all incoming beans for
fields having a ConfigItem annotation
@Component
public class CustomBeanPostProcessor implements BeanPostProcessor {
public Object postProcessBeforeInitialization(Object bean, String beanName) throws BeansException {
for (Field field : bean.getClass().getDeclaredFields()) {
ConfigItem cfgDef = field.getAnnotation(ConfigItem.class);
if (cfgDef != null) {
Object instance = getInstance(field.getType(), cfgDef.key());
boolean accessible = field.isAccessible();
field.setAccessible(true);
try {
field.set(bean, instance);
} catch (IllegalArgumentException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (IllegalAccessException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
field.setAccessible(accessible);
}
}
return bean;
}
}
The getInstance(field.getType(), cfgDef.key()) call creates a proxy with the MethodInterceptor, which does the work.
There are a lot of things to finalize, but all in all it looks good to me.

How to code a Spring custom annotation which works like the iBatis Select annotation?

I am trying to make a custom annotation like iBatis' @Select.
Anyway, in conclusion, the goal is to append some data to the parameter Map of any method that carries the custom annotation.
First take a look at the end point - ArtistNodeRepository.java:
@Repository
public interface ArtistNodeRepository {
@CreateNode(tid = "artist")
public Node create(Map data) throws Exception;
}
What I want to do with the CreateNode annotation is to put data.put("type", "artist") into the parameter Map.
Here is the annotation - CreateNode.java:
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.METHOD)
@Documented
public @interface CreateNode {
String[] values() default "";
String tid();
}
To handle the annotation, I prepared this BeanPostProcessor - NodeAnnotationProcessor.java:
@Component
public class NodeAnnotationProcessor implements BeanPostProcessor {
private ConfigurableListableBeanFactory configurableListableBeanFactory;
@Autowired
public NodeAnnotationProcessor(ConfigurableListableBeanFactory configurableListableBeanFactory) {
super();
this.configurableListableBeanFactory = configurableListableBeanFactory;
}
@Override
public Object postProcessBeforeInitialization(Object bean, String beanName) throws BeansException {
this.scanNodeAnnotation(bean, beanName);
return bean;
}
@Override
public Object postProcessAfterInitialization(Object bean, String beanName) throws BeansException {
// this.scanNodeAnnotation(bean, beanName);
return bean;
}
protected void scanNodeAnnotation(Object bean, String beanName){
this.configureMethodAction(bean);
}
private void configureMethodAction(Object bean){
Class<?> managedBeanClass = bean.getClass();
ReflectionUtils.MethodCallback methodCallback = new NodeMethodCallback(configurableListableBeanFactory, bean);
ReflectionUtils.doWithMethods(managedBeanClass, methodCallback);
}
}
I am not clear whether to put the MethodCallback into postProcessBeforeInitialization or postProcessAfterInitialization. My thought is that it should be in the "after" method, since I am trying to manipulate the parameters of the method.
Finally, this is the MethodCallback - NodeMethodCallback.java
public class NodeMethodCallback implements ReflectionUtils.MethodCallback {
private Logger logger = LoggerFactory.getLogger(NodeMethodCallback.class);
private ConfigurableListableBeanFactory beanFactory;
private Object bean;
private static int AUTOWIRE_MODE = AutowireCapableBeanFactory.AUTOWIRE_BY_NAME;
public NodeMethodCallback(ConfigurableListableBeanFactory beanFactory, Object bean) {
this.beanFactory = beanFactory;
this.bean = bean;
}
@Override
public void doWith(Method method) throws IllegalArgumentException, IllegalAccessException {
logger.info("doWith method info :: " + String.valueOf(bean) + "." + bean.getClass().getName());
/*
What I expected is Printing ArtistNodeRepository Class with create Method
But It prints something like ...
SessionFlashMapManager
DefaultRequestToViewNameTranslator
...
*/
try {
logger.info("When I call you :: " + method.getName()); // I expect method which contains #CreateNode annotation, but it is not ...
Annotation[] methodAnnotations = method.getDeclaredAnnotations();
boolean isTarget = false;
String tid = "";
for(Annotation anno : methodAnnotations) {
logger.info("annotation Class :: " + anno.getClass().getName());
if(isTarget) break;
if(anno instanceof CreateNode) {
logger.info("CreateNode annotation found");
CreateNode createNode = method.getDeclaredAnnotation(CreateNode.class);
tid = createNode.tid();
isTarget = true;
}
}
if(!isTarget) return;
ReflectionUtils.makeAccessible(method);
/*
Do something with the parameter ...
*/
} catch (Exception e ){
logger.error("ERROR", e);
}
}
}
The problem is that in doWith I could not find the ArtistNodeRepository instance.
What should I do with the MethodCallback and BeanPostProcessor to achieve the goal?
Good sample code would be nice, as well as good answers.
I think you misunderstood the usage of ReflectionUtils.doWithMethods: it iterates over the methods of the given class and invokes the callback on each matching method; it is not a hook that runs when the method itself is invoked.
public static void doWithMethods(Class<?> clazz, MethodCallback mc, MethodFilter mf) {
// Keep backing up the inheritance hierarchy.
Method[] methods = getDeclaredMethods(clazz);
for (Method method : methods) {
if (mf != null && !mf.matches(method)) {
continue;
}
try {
mc.doWith(method);
}
catch (IllegalAccessException ex) {
throw new IllegalStateException("Not allowed to access method '" + method.getName() + "': " + ex);
}
}
if (clazz.getSuperclass() != null) {
doWithMethods(clazz.getSuperclass(), mc, mf);
}
else if (clazz.isInterface()) {
for (Class<?> superIfc : clazz.getInterfaces()) {
doWithMethods(superIfc, mc, mf);
}
}
}
I think you can use an aspect instead, like this:
#Around("execution(public * org.springframework.data.jpa.repository.JpaRepository+.*(..))")
// #Around("#annotation(Repository)")
public Object around(ProceedingJoinPoint joinPoint) throws Throwable {
MethodSignature signature = (MethodSignature) joinPoint.getSignature();
Method method = signature.getMethod();
CreateNode createNode = method.getAnnotation(CreateNode.class);
if(createNode != null) {
Object[] args = joinPoint.getArgs();
// do your business
}
return joinPoint.proceed();
}
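To make that concrete for the CreateNode case, a fuller sketch of such an aspect could look like the following. The pointcut, the Map handling and the class itself are assumptions based on the question (and it only works if the annotated method ends up on a Spring-managed bean that Spring AOP can proxy), so treat it as a starting point rather than tested code:

@Aspect
@Component
public class CreateNodeAspect {

    @Around("@annotation(createNode)")
    public Object around(ProceedingJoinPoint joinPoint, CreateNode createNode) throws Throwable {
        // Put the tid from the annotation (e.g. "artist") into the Map parameter before proceeding
        for (Object arg : joinPoint.getArgs()) {
            if (arg instanceof Map) {
                @SuppressWarnings("unchecked")
                Map<String, Object> data = (Map<String, Object>) arg;
                data.put("type", createNode.tid());
            }
        }
        return joinPoint.proceed();
    }
}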
Hope this can help you.
