JPA / Hibernate - persist enums as table of constants - java

Currently, I'm using the standard way to map enums with Hibernate, e.g.
@Entity
public class Job {
@Enumerated(EnumType.STRING)
protected State state;
}
public enum State{
NEW,OLD;
}
Now the requirements changed, and I have to create a table State, which contains all the valid values of my enum as String constants. Thus Job must refer to the State table. I don't have to migrate legacy data.
What options do I have to map this with JPA / Hibernate?
Is it possible to let Hibernate create the State table with the values (1 -> "new", 2 -> "old") during DDL generation?

I ended up with a solution which generates the DDL including the enum constants and foreign key constraints.
e.g.
@Entity
public enum MyEnum {
    NEW, OLD; // the enum constants to be persisted as rows

    @EnumValue
    private String name;

    @Id
    private int id;
}

@Entity
public class MyEntity {
    @EnumReference
    protected MyEnum myEnum;
}
This is sufficient together with the following MetadataContributor (registered in /src/main/resources/META-INF/services/org.hibernate.boot.spi.MetadataContributor):
public class EnumConstantsMetadataContributor implements MetadataContributor {
private final static Logger LOG = LoggerFactory.getLogger(EnumConstantsMetadataContributor.class);
private final static List<String> ENABLED_ON = Arrays.asList("validate", "update", "create", "create-drop");
private final static Integer DEFAULT_VARCHAR_SIZE = 255;
private final static Identifier DEFAULT_COLUMN_NAME = Identifier.toIdentifier("enum_constant", false);
@Override
public void contribute(InFlightMetadataCollector metadataCollector, IndexView jandexIndex) {
if (shouldRun(metadataCollector)) {
addEnumsAsTableConstantsAndFkConstraint(metadataCollector);
}
}
private boolean shouldRun(InFlightMetadataCollector metadataCollector) {
StandardServiceRegistry serviceRegistry = metadataCollector.getMetadataBuildingOptions().getServiceRegistry();
ConfigurationService config = serviceRegistry.getService(ConfigurationService.class);
String setting = config.getSetting(AvailableSettings.HBM2DDL_AUTO, String.class, null);
return setting != null && ENABLED_ON.contains(setting);
}
private void addEnumsAsTableConstantsAndFkConstraint(InFlightMetadataCollector metadataCollector) {
for (PersistentClass persistentClass : metadataCollector.getEntityBindings()) {
Class<?> plainJavaClass = persistentClass.getMappedClass();
if (Enum.class.isAssignableFrom((plainJavaClass))) {
createEnumInsertsAndDbColumns(persistentClass, plainJavaClass, metadataCollector);
}
tryAddFkConstraint(persistentClass, metadataCollector);
}
}
private void tryAddFkConstraint(PersistentClass persistentClass, InFlightMetadataCollector metadataCollector) {
Consumer<Field> createEnumFkConstraintForField = field -> {
String fieldName = field.getName();
PersistentClass targetPersistentClass = metadataCollector.getEntityBinding(field.getType().getCanonicalName());
if (targetPersistentClass == null) {
LOG.error("Target (enum) class must be an @Entity: {}", field.getType().getCanonicalName());
System.exit(1);
}
Property enumReferenceAnnotatedProperty = persistentClass.getProperty(fieldName);
persistentClass.getTable().createForeignKey(null,
Arrays.asList(enumReferenceAnnotatedProperty.getColumnIterator().next()),
targetPersistentClass.getEntityName());
};
Field[] declaredFields = persistentClass.getMappedClass().getDeclaredFields();
of(declaredFields).filter(field -> field.isAnnotationPresent(EnumReference.class)).forEach(
createEnumFkConstraintForField);
}
private void createEnumInsertsAndDbColumns(PersistentClass persistentClass, Class<?> clazz,
InFlightMetadataCollector metadata) {
String tableName = persistentClass.getTable().getName();
Enum<?>[] enumJavaConstants = clazz.asSubclass(Enum.class).getEnumConstants();
ArrayList<String> insertCommandAccumulator = new ArrayList<String>(enumJavaConstants.length);
Optional<Field> enumValueAnnotatedField = of(enumJavaConstants.getClass().getComponentType().getDeclaredFields())
.filter(field -> field.isAnnotationPresent(EnumValue.class)).map(fieldWithEnumValue -> {
fieldWithEnumValue.setAccessible(true);
return fieldWithEnumValue;
}).findAny(); // just none or one is supported
if (enumValueAnnotatedField.isPresent()) {
setMinimalFieldLengthOfExistingColumn(enumValueAnnotatedField.get(), enumJavaConstants, persistentClass);
}
for (int i = 0; i < enumJavaConstants.length; i++) {
Enum<?> it = enumJavaConstants[i];
String constantEnumValue = enumValueAnnotatedField.map(v -> getInstanceValueOfEnumValueAnnotation(it, v))
.orElse(it.name());
if (!enumValueAnnotatedField.isPresent()) {
insertAdditionalColumn(persistentClass, metadata.getDatabase(), enumJavaConstants);
}
insertCommandAccumulator.add(createInsert(tableName, i, constantEnumValue));
}
InitCommand initCommand = new InitCommand(insertCommandAccumulator.toArray(new String[0]));
persistentClass.getTable().addInitCommand(initCommand);
}
private void setMinimalFieldLengthOfExistingColumn(Field field, Enum<?>[] enumJavaConstants,
PersistentClass persistentClass) {
Property property = persistentClass.getProperty(field.getName());
Column column = persistentClass.getTable().getColumn(Identifier.toIdentifier(property.getName()));
Integer maxLengthOfEnums = maxLengthOfEnums(enumJavaConstants,
e -> getInstanceValueOfEnumValueAnnotation(e, field));
column.setLength(maxLengthOfEnums);
}
private String getInstanceValueOfEnumValueAnnotation(Enum<?> myEnum, Field enumValueAnnotatedField) {
try {
return enumValueAnnotatedField.get(myEnum).toString();
} catch (Exception e) {
e.printStackTrace();
System.exit(1);
return null;
}
}
private static Integer maxLengthOfEnums(Enum<?>[] enums, Function<Enum<?>, String> enumConstantValueExtractor) {
return of(enums).map(it -> enumConstantValueExtractor.apply(it).length()).reduce(Math::max)
.orElse(DEFAULT_VARCHAR_SIZE);
};
private void insertAdditionalColumn(PersistentClass persistentClass, Database database, Enum<?>[] enumJavaConstants) {
Integer maxEnumStringLength = maxLengthOfEnums(enumJavaConstants, c -> c.name());
Column column = new Column(DEFAULT_COLUMN_NAME.render(database.getDialect()));
String typeName = database.getDialect().getTypeName(Types.VARCHAR, maxEnumStringLength, 0, 0);
column.setSqlType(typeName);
persistentClass.getTable().addColumn(column);
}
private String createInsert(String tableName, int position, String dbEnumValue) {
return ("insert into " + tableName + " values(" + position + ",\'" + dbEnumValue + "\')");
}
}
Works for MySQL 5.7 and Hibernate 5.
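The two custom annotations are never shown in the answer; a minimal sketch of definitions the contributor above could work with (an assumption, not part of the original answer; each would live in its own file) might be:
// Hypothetical field-level marker annotations, retained at runtime so the
// MetadataContributor can find them via reflection.
@Target(ElementType.FIELD)
@Retention(RetentionPolicy.RUNTIME)
public @interface EnumValue {
}

@Target(ElementType.FIELD)
@Retention(RetentionPolicy.RUNTIME)
public @interface EnumReference {
}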
It is not possible to query MyEnum via JPA, and consistency between @Enumerated(EnumType.ORDINAL) and the getEnumConstants() order is implicitly assumed.
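To illustrate that assumption (the mapping of the referencing field is not shown in the answer, so the @Enumerated usage here is a guess): the generated inserts use the loop index, i.e. the ordinal, as the id, so an ordinal-mapped column lines up with the generated rows.
// Presumed mapping of the referencing field: the stored ordinal matches the
// ids produced by createInsert(tableName, i, ...) above.
@Entity
public class MyEntity {
    @Id
    @GeneratedValue
    private Long id;

    @Enumerated(EnumType.ORDINAL) // stores MyEnum.ordinal(), matching the generated ids
    @EnumReference                // adds the FK constraint during DDL generation
    protected MyEnum myEnum;
}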

IMHO that does not make much sense. The values of an enum are static and constant, while the values in a SQL table are dynamic. What if the DB does not contain exactly the values (no more, no less) of the enum?
EDIT: if you are forced to implement it, something like this may work:
public enum State {
    NEW(0), OLD(1);

    private final int primaryKey;

    State(int pk) {
        primaryKey = pk;
    }

    public int getPrimaryKey() {
        return primaryKey;
    }
}
And then join by primary key....
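One way to read that suggestion, sketched under assumptions the answer does not spell out (a separate State table whose rows match these keys, and JPA 2.1 being available), is to persist the key through an AttributeConverter so the column can carry a foreign key to the State table; the converter and column semantics here are illustrative only.
// Hypothetical converter: writes State.getPrimaryKey() into the referencing
// column, which the State table's primary key can then back with an FK.
@Converter(autoApply = true)
public class StateConverter implements AttributeConverter<State, Integer> {

    @Override
    public Integer convertToDatabaseColumn(State state) {
        return state == null ? null : state.getPrimaryKey();
    }

    @Override
    public State convertToEntityAttribute(Integer pk) {
        if (pk == null) {
            return null;
        }
        for (State s : State.values()) {
            if (s.getPrimaryKey() == pk) {
                return s;
            }
        }
        throw new IllegalArgumentException("Unknown State key: " + pk);
    }
}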

Related

How to fix missing descriptor for class POJO after update server? [duplicate]

I'm using EclipseLink to run some Native SQL. I need to return the data into a POJO. I followed the instructions at EclipseLink Docs, but I receive the error Missing descriptor for [Class]
The query columns have been named to match the member variables of the POJO. Do I need to do some additional mapping?
POJO:
public class AnnouncementRecipientsFlattenedDTO {
private BigDecimal announcementId;
private String recipientAddress;
private String type;
public AnnouncementRecipientsFlattenedDTO() {
super();
}
public AnnouncementRecipientsFlattenedDTO(BigDecimal announcementId, String recipientAddress, String type) {
super();
this.announcementId = announcementId;
this.recipientAddress = recipientAddress;
this.type = type;
}
... Getters/Setters
}
Entity Manager call:
public List<AnnouncementRecipientsFlattenedDTO> getNormalizedRecipientsForAnnouncement(int announcementId) {
Query query = em.createNamedQuery(AnnouncementDeliveryLog.FIND_NORMALIZED_RECIPIENTS_FOR_ANNOUNCEMENT, AnnouncementRecipientsFlattenedDTO.class);
query.setParameter(1, announcementId);
return query.getResultList();
}
I found out you can put the results of a native query execution into a List of Object arrays. Then one can iterate over the list and array elements and build the desired entity objects.
List<Object[]> rawResultList;
Query query =
em.createNamedQuery(AnnouncementDeliveryLog.FIND_NORMALIZED_RECIPIENTS_FOR_ANNOUNCEMENT);
rawResultList = query.getResultList();
for (Object[] resultElement : rawResultList) {
AnnouncementDeliveryLog adl = new AnnouncementDeliveryLog(getAnnouncementById(announcementId), (String)resultElement[1], (String)resultElement[2], "TO_SEND");
persistAnnouncementDeliveryLog(adl);
}
You can only use native SQL queries with a class if the class is mapped. You need to define the AnnouncementRecipientsFlattenedDTO class as an @Entity.
Otherwise just create the native query with only the SQL and get an array of the data back and construct your DTO yourself using the data.
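A short sketch of that second option, assuming the query returns the three columns in the order the DTO constructor expects (the column order is an assumption here):
// Build the DTOs by hand from the Object[] rows of the unmapped native query.
Query q = em.createNamedQuery(AnnouncementDeliveryLog.FIND_NORMALIZED_RECIPIENTS_FOR_ANNOUNCEMENT);
q.setParameter(1, announcementId);

@SuppressWarnings("unchecked")
List<Object[]> rows = q.getResultList();

List<AnnouncementRecipientsFlattenedDTO> dtos = new ArrayList<>();
for (Object[] row : rows) {
    dtos.add(new AnnouncementRecipientsFlattenedDTO(
            (BigDecimal) row[0], (String) row[1], (String) row[2]));
}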
Old question, but maybe the following solution will help someone else.
Suppose you want to return a list of columns, data types and data lengths for a given table in Oracle. I have written a sample native query for this below:
private static final String TABLE_COLUMNS = "select utc.COLUMN_NAME, utc.DATA_TYPE, utc.DATA_LENGTH "
+ "from user_tab_columns utc "
+ "where utc.table_name = ? "
+ "order by utc.column_name asc";
Now the requirement is to construct a list of POJOs from the result of the above query.
Define the TableColumn entity class as below:
@Entity
public class TableColumn implements Serializable {
@Id
@Column(name = "COLUMN_NAME")
private String columnName;
@Column(name = "DATA_TYPE")
private String dataType;
@Column(name = "DATA_LENGTH")
private int dataLength;
public String getColumnName() {
return columnName;
}
public void setColumnName(String columnName) {
this.columnName = columnName;
}
public String getDataType() {
return dataType;
}
public void setDataType(String dataType) {
this.dataType = dataType;
}
public int getDataLength() {
return dataLength;
}
public void setDataLength(int dataLength) {
this.dataLength = dataLength;
}
public TableColumn(String columnName, String dataType, int dataLength) {
this.columnName = columnName;
this.dataType = dataType;
this.dataLength = dataLength;
}
public TableColumn(String columnName) {
this.columnName = columnName;
}
public TableColumn() {
}
@Override
public int hashCode() {
int hash = 0;
hash += (columnName != null ? columnName.hashCode() : 0);
return hash;
}
@Override
public boolean equals(Object object) {
if (!(object instanceof TableColumn)) {
return false;
}
TableColumn other = (TableColumn) object;
if ((this.columnName == null && other.columnName != null) || (this.columnName != null && !this.columnName.equals(other.columnName))) {
return false;
}
return true;
}
@Override
public String toString() {
return getColumnName();
}
}
Now we are ready to construct a list of POJOs. Use the sample code below to get your result as a List of POJOs.
public List<TableColumn> findTableColumns(String table) {
List<TableColumn> listTables = new ArrayList<>();
EntityManager em = emf.createEntityManager();
Query q = em.createNativeQuery(TABLE_COLUMNS, TableColumn.class).setParameter(1, table);
listTables = q.getResultList();
em.close();
return listTables;
}
Also, don't forget to add your POJO class to persistence.xml! It can be easy to overlook if you are used to your IDE managing that file for you.
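For example, the relevant entry in a classic persistence.xml would look something like this (the unit name and package are illustrative, not from the answer):
<persistence-unit name="my-unit">
    <!-- the mapped POJO must be listed so the provider builds a descriptor for it -->
    <class>com.example.TableColumn</class>
</persistence-unit>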
I had the same kind of problem where I wanted to return a List of POJOs, and really just POJOs (call them DTOs if you want) and not @Entity-annotated objects.
class PojoExample {
String name;
@Enumerated(EnumType.STRING)
SomeEnum type;
public PojoExample(String name, SomeEnum type) {
this.name = name;
this.type = type;
}
}
With the following Query:
String sql = "SELECT b.name, a.newtype as type FROM tablea a, tableb b where a.tableb_id = b_id";
Query query = getEntityManager().createNativeQuery(sql, "PojoExample");
@SuppressWarnings("unchecked")
List<PojoExample> data = query.getResultList();
Creates the PojoExample from the database without the need for an Entity annotation on PojoExample. You can find the method call in the Oracle Docs here.
edit:
As it turns out you have to use @SqlResultSetMapping for this to work, otherwise your query.getResultList() returns a List of Object.
@SqlResultSetMapping(name = "PojoExample",
classes = @ConstructorResult(columns = {
@ColumnResult(name = "name", type = String.class),
@ColumnResult(name = "type", type = String.class)
},
targetClass = PojoExample.class)
)
Just put this anywhere under your @Entity annotation (so in this example either in tablea or tableb, because PojoExample has no @Entity annotation).

JPA Stored Procedure result set mappings and NonUniqueResultException

I'm fairly new to JPA and am trying to use a stored procedure to run a query and map its results to my java classes.
Here are the tables
CREATE TABLE dbo.Branding
(
Branding_ID INT IDENTITY NOT NULL
CONSTRAINT PK_Branding PRIMARY KEY CLUSTERED,
BrandingType_ID INT,
Reseller_ID INT NULL,
Host VARCHAR(MAX) NULL
)
CREATE TABLE dbo.BrandingResource
(
BrandingResource_ID INT IDENTITY NOT NULL
CONSTRAINT PK_BrandingResource PRIMARY KEY CLUSTERED,
Branding_ID INT NOT NULL,
Name VARCHAR(255) NOT NULL,
[Value] VARCHAR(MAX) NOT NULL
)
CREATE TABLE dbo.BrandingType
(
BrandingType_ID INT IDENTITY NOT NULL
CONSTRAINT PK_BrandingType PRIMARY KEY CLUSTERED,
Description VARCHAR(255)
)
Here are the entities:
@Table(name = "[Branding]")
@Entity
public class Branding extends CommonDomainBase
{
    @Id
    @Column(name = "branding_id")
    private int id;

    @OneToOne(optional = false)
    @JoinColumn(name = "brandingtype_id", nullable = false)
    private BrandingType type;

    @OneToMany(fetch = FetchType.EAGER)
    @JoinColumn(name = "branding_id", referencedColumnName = "branding_id")
    private Set<BrandingResource> brandingResources;

    @Column(name = "reseller_id", nullable = true)
    private Integer resellerId;

    @Column(name = "host", nullable = true)
    private String host;
}

@Table(name = "[BrandingResource]")
@Entity
public class BrandingResource extends CommonDomainBase
{
    @Id
    @Column(name = "BrandingResource_Id")
    private int id;

    @Column(name = "Name")
    private String name;

    @Column(name = "Value")
    private String value;
}

@Table(name = "[BrandingType]")
@Entity
public class BrandingType extends CommonDomainBase
{
    @Id
    @Column(name = "brandingtype_id")
    private int id;

    @Column(name = "description")
    private String description;
}
I already know that the annotations on the entities are working correctly. When I use Spring Data JPA repositories to query the 3 tables to find one or find all of Branding, I get a generated query which retrieves all 3 tables in a single query.
I am now trying to extend this to allow me to do the same sort of result set mapping using a named stored procedure, which I've configured in the following way:
@NamedStoredProcedureQuery(name = "Branding.getBrandingByHost", procedureName = "spGetBrandingByHost", parameters =
{ @StoredProcedureParameter(mode = ParameterMode.IN, name = "host", type = String.class) }, resultSetMappings =
{ "BrandingResults" })
@SqlResultSetMapping(name = "BrandingResults", entities =
{ @EntityResult(entityClass = Branding.class) })
The stored procedure returns duplicate rows for each row in the Branding table, due to the one-to-many relationship to BrandingResource.
The generated query used by the Spring Data JPA repository produces duplicate rows in the same way as my procedure, yet it handles this perfectly when mapping to the objects. When using the named stored procedure, however, I get the following exception:
javax.persistence.NonUniqueResultException: Call to stored procedure [spGetBrandingByHost] returned multiple results
I understand that I will probably need to include more result set mappings for this to work, but cannot find an example which demonstrates anything similar. Is what I'm after even possible?
Thanks in advance
In answer to my own question: no, you can't, which does make sense. When automatically generating queries, Hibernate knows what column names to expect in the result set, including any that are duplicated from a one-to-many/many-to-one relationship. A stored procedure can return columns that Hibernate does not know to expect, so setting them explicitly is required.
After much digging I did find the class org.hibernate.cfg.annotations.ResultsetMappingSecondPass, which is called to map the JPA annotations to a native Hibernate org.hibernate.engine.ResultSetMappingDefinition, and, reading the source code, I can see it completely ignores most of the annotations for columns and joins.
It would be great if @NamedStoredProcedureQuery could support one-to-many/many-to-one joins. For now I've created my own solution:
public class EntityResultSetSecondPass implements QuerySecondPass
{
private static final String ALIAS = EntityResultSetSecondPass.class.getName() + "_alias";
private final InFlightMetadataCollector metadataCollector;
private int entityAliasIndex;
private final Map<Class<?>, String> aliasMap = new ConcurrentHashMap<>();
public EntityResultSetSecondPass(final InFlightMetadataCollector metadataCollector)
{
this.metadataCollector = metadataCollector;
}
@Override
public void doSecondPass(final Map persistentClasses) throws MappingException
{
for (final Object key : persistentClasses.keySet())
{
final String className = key.toString();
try
{
final Class<?> clazz = Class.forName(className);
final EntityResultSet entityResultSet = clazz.getDeclaredAnnotation(EntityResultSet.class);
if (entityResultSet == null)
{
continue;
}
else
{
createEntityResultDefinition(entityResultSet, clazz);
}
}
catch (final ClassNotFoundException e)
{
throw new HibernateException(e);
}
}
}
private void createEntityResultDefinition(final EntityResultSet entityResultSet, final Class<?> entityClass)
throws ClassNotFoundException
{
final List<NativeSQLQueryReturn> mappedReturns = new ArrayList<>();
final ResultSetMappingDefinition definition = new ResultSetMappingDefinition(entityResultSet.name());
final Map<Class<?>, FieldResult[]> returnedEntities = new ConcurrentHashMap<>();
returnedEntities.put(entityClass, entityResultSet.fields());
for (final EntityResult entityResult : entityResultSet.relatedEntities())
{
returnedEntities.put(entityResult.entityClass(), entityResultSet.fields());
}
definition.addQueryReturn(new NativeSQLQueryRootReturn(getOrCreateAlias(entityClass), entityClass.getName(),
getPropertyResults(entityClass, entityResultSet.fields(), returnedEntities, mappedReturns, ""),
LockMode.READ));
for (final EntityResult entityResult : entityResultSet.relatedEntities())
{
definition
.addQueryReturn(
new NativeSQLQueryRootReturn(getOrCreateAlias(entityResult.entityClass()),
entityResult.entityClass().getName(), getPropertyResults(entityResult.entityClass(),
entityResult.fields(), returnedEntities, mappedReturns, ""),
LockMode.READ));
}
for (final NativeSQLQueryReturn mappedReturn : mappedReturns)
{
definition.addQueryReturn(mappedReturn);
}
metadataCollector.addResultSetMapping(definition);
}
private Map<String, String[]> getPropertyResults(final Class<?> entityClass, final FieldResult[] fields,
final Map<Class<?>, FieldResult[]> returnedEntities, final List<NativeSQLQueryReturn> mappedReturns,
final String prefix) throws ClassNotFoundException
{
final Map<String, String[]> properties = new ConcurrentHashMap<>();
for (final Field field : entityClass.getDeclaredFields())
{
final Column column = field.getAnnotation(Column.class);
if (column != null)
{
properties.put(prefix + field.getName(), new String[]
{ column.name() });
}
final JoinColumn joinColumn = field.getAnnotation(JoinColumn.class);
if (joinColumn != null)
{
properties.putAll(handleJoinColumn(entityClass, field, joinColumn, returnedEntities, mappedReturns));
}
}
if (entityClass.getSuperclass() != null)
{
properties.putAll(
getPropertyResults(entityClass.getSuperclass(), fields, returnedEntities, mappedReturns, prefix));
}
return properties;
}
private Map<String, String[]> handleJoinColumn(final Class<?> sourceEntity, final Field field,
final JoinColumn joinColumn, final Map<Class<?>, FieldResult[]> returnedEntities,
final List<NativeSQLQueryReturn> mappedReturns) throws ClassNotFoundException
{
final Map<String, String[]> properties = new ConcurrentHashMap<>();
final OneToOne oneToOne = field.getAnnotation(OneToOne.class);
if (oneToOne != null)
{
properties.put(field.getName(), new String[]
{ joinColumn.name() });
}
final OneToMany oneToMany = field.getAnnotation(OneToMany.class);
if (oneToMany != null)
{
Class<?> fieldType;
if (field.getType().isArray())
{
fieldType = field.getType();
}
else if (Collection.class.isAssignableFrom(field.getType()))
{
fieldType = Class.forName(
ParameterizedType.class.cast(field.getGenericType()).getActualTypeArguments()[0].getTypeName());
}
else
{
throw new UnsupportedOperationException("One to many only supports collection and array types");
}
if (returnedEntities.keySet().contains(fieldType))
{
properties.put(field.getName(), new String[]
{ joinColumn.name() });
final Map<String, String[]> resolvedProperties = getPropertyResults(fieldType,
returnedEntities.get(fieldType), returnedEntities, mappedReturns, "element.");
resolvedProperties.put("key", new String[]
{ joinColumn.referencedColumnName() });
resolvedProperties.put("element", new String[]
{ joinColumn.name() });
mappedReturns.add(new NativeSQLQueryCollectionReturn(getOrCreateAlias(fieldType),
sourceEntity.getName(), field.getName(), resolvedProperties, LockMode.READ));
mappedReturns
.add(new NativeSQLQueryJoinReturn(getOrCreateAlias(fieldType),
getOrCreateAlias(sourceEntity), field.getName(), getPropertyResults(fieldType,
returnedEntities.get(fieldType), returnedEntities, mappedReturns, ""),
LockMode.READ));
}
}
return properties;
}
private String getOrCreateAlias(final Class<?> entityClass)
{
if (!aliasMap.containsKey(entityClass))
{
aliasMap.put(entityClass, ALIAS + entityAliasIndex++);
}
return aliasMap.get(entityClass);
}
}
and the accompanying annotation:
@Target(ElementType.TYPE)
@Retention(RetentionPolicy.RUNTIME)
public @interface EntityResultSet
{
    /**
     * The name of the result set
     *
     * @return
     */
    String name();

    /**
     * The {@link FieldResult} to override those of the {@link Column}s on the
     * current {@link Entity}
     *
     * @return
     */
    FieldResult[] fields() default {};

    /**
     * The {@link EntityResult} that define related {@link Entity}s that are
     * included in this result set.
     *
     * <p>Note: discriminatorColumn has no impact in this usage
     *
     * @return
     */
    EntityResult[] relatedEntities() default {};
}
This is all registered with Hibernate via a MetadataContributor.
The code is a bit of a mess, but it is actually working. It basically looks for @EntityResultSet, in which the entities for a particular result set are defined. The EntityResultSetSecondPass looks at these given entities and generates a ResultSetMappingDefinition which includes all the joining metadata for collection mapping. It works from the standard column annotations, but these can be overridden with FieldResult defined in @EntityResultSet.
It seems a bit nasty, but it's working nicely.
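For reference, a minimal sketch of that registration (the class name here is hypothetical; the answer does not show its own wiring): implement org.hibernate.boot.spi.MetadataContributor, queue the second pass, and list the implementation in /src/main/resources/META-INF/services/org.hibernate.boot.spi.MetadataContributor, as in the first answer above.
// Hypothetical contributor that queues the custom second pass while the
// entity bindings are being collected.
public class EntityResultSetMetadataContributor implements MetadataContributor {

    @Override
    public void contribute(InFlightMetadataCollector metadataCollector, IndexView jandexIndex) {
        metadataCollector.addSecondPass(new EntityResultSetSecondPass(metadataCollector));
    }
}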

java cassandra object mapping annotations

Can you provide an example of mapping collections with the DataStax API annotations, in order to use a Map?
class pojo {
    @PartitionKey(value = 0)
    @Column(name = "user_id")
    String userId;

    @Column(name = "attributes")
    // How to map it?
    Map<String, String> attributes;
}
error log :
2015-08-03 16:33:34,568 INFO com.jpma.jpmc.slot.persistance.DAOFactory main - Cassandra Cluster Details: ConnectionCfg [userName=test, password=test, port=9042, seeds=[Ljava.lang.String;#1a85bd75, keySpace=test]
java.lang.Class
2015-08-03 16:33:34,646 DEBUG com.datastax.driver.mapping.EntityMapper main - Preparing query INSERT INTO "test"."user_event_date"("user_id","entry_date","entry_time","app","attributes","user_ip","user_authschemes") VALUES (?,?,?,?,?,?,?);
com.datastax.driver.core.exceptions.InvalidQueryException: Unknown identifier attributes
Based on the error message you are seeing, I'm guessing that attributes is not defined in your table definition. Would you mind editing your post with that?
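If the column really is missing, something along these lines should add it (assuming the keyspace and table from the log above, and a map<text, text> to match the Java field):
ALTER TABLE test.user_event_date ADD attributes map<text, text>;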
But when I build my CQL table like this (note the compound partition key of itemid and version):
CREATE TABLE products.itemmaster (
itemid text,
version int,
productid uuid,
supplierskumap map<uuid, text>,
PRIMARY KEY ((itemid,version), productid)
);
...insert this row:
INSERT INTO products.itemmaster (itemid,version,productid,supplierskumap)
VALUES ('item1',1,26893749-dcfc-42c7-892c-bee8c9cff630,
{1351f82f-5dc5-4328-82f4-962429c92a2b:'86CCG123'});
...and I build my POJO like this:
@Table(keyspace = "products", name = "itemmaster")
public class Product {
@PartitionKey(0)
private String itemid;
@PartitionKey(1)
private int version;
@ClusteringColumn
private UUID productid;
@Column(name="supplierskumap")
private Map<UUID,String> suppliersku;
public UUID getProductid() {
return productid;
}
public void setProductid(UUID _productid) {
this.productid = _productid;
}
public int getVersion() {
return this.version;
}
public void setVersion(int _version)
{
this.version = _version;
}
public String getItemid() {
return itemid;
}
public void setItemid(String _itemid) {
this.itemid = _itemid;
}
public Map<UUID, String> getSuppliersku() {
return suppliersku;
}
public void setSuppliersku(Map<UUID, String> _suppliersku) {
this.suppliersku = _suppliersku;
}
}
...with this constructor and getProd method on my data access object (dao):
public ProductsDAO()
{
session = connect(CASSANDRA_NODES, USERNAME, PASSWORD);
prodMapper = new MappingManager(session).mapper(Product.class);
}
public Product getProd(String itemid, int version, UUID productid) {
return prodMapper.get(itemid, version, productid);
}
...then this main class successfully queries my table and maps my Map:
public static void main(String[] args) {
ProductsDAO dao = new ProductsDAO();
Product prod = dao.getProd("item1", 1, UUID.fromString("26893749-dcfc-42c7-892c-bee8c9cff630"));
System.out.println(
prod.getProductid() + " - " +
prod.getItemid() + " - " +
prod.getSuppliersku().get(UUID.fromString("1351f82f-5dc5-4328-82f4-962429c92a2b")));
dao.closeCassandra();
}
...and produces this output:
26893749-dcfc-42c7-892c-bee8c9cff630 - item1 - 86CCG123
NOTE: Edit made to the above example to support a compound partition key.

Hibernate 4.0: javax.persistence.criteria.Path.get fails when a field of a composite key is specified

Calling javax.persistence.criteria.Path.get(String name) fails for the simple class hierarchy detailed below. The call succeeds if @IdClass and the second id field (i.e. id2) are removed. Does anyone know why this is? Does this mean it is not possible to query on a single id field where that field forms part of a composite key?
The failing call is: Path<Object> path = entity.get(name);
private static final EntityManager em;
private final CriteriaBuilder cb = em.getCriteriaBuilder();
private final CriteriaQuery<Y> query = cb.createQuery(Y.class);
private final Root<Y> entity = query.from(Y.class);
static {
Map<String, Object> properties = new HashMap<String, Object>();
// initialise properties appropriately
EntityManagerFactory emf =
Persistence.createEntityManagerFactory("test", properties);
em = emf.createEntityManager();
}
interface PK {
Object getPK();
}
public static class YPK implements Serializable {
int id;
int id2;
YPK(int id, int id2) { }
// Override hashCode() and equals() appropriately
}
@IdClass(YPK.class)
@Entity
public static class Y implements Serializable, PK {
    @Id
    int id;

    @Id
    int id2;

    protected Y() { }

    public Y(int id) {
        this.id = id;
    }

    @Override
    public Object getPK() {
        return id;
    }
}
@Test
public void simpleTest() {
List<Y> yy = new ArrayList<Y>();
Y yX1 = new Y(5);
yy.add(yX1);
Y yX2 = new Y(6);
yy.add(yX2);
saveItems(yy);
String name = "id";
Path<Object> path = entity.get(name);
Predicate restriction = cb.conjunction();
restriction = cb.and(restriction, cb.and(new Predicate[]{cb.equal(path, 5)}));
TypedQuery<Y> tq = em.createQuery(this.query);
Y result = null;
try {
result = tq.getSingleResult();
} catch (NoResultException e) {
}
assertNotNull(result);
}
To access fields that are members of the IdClass you need to use the metamodel.
I suggest going with the static metamodel, because it is cleaner and type-safe. You can generate it with tooling or write it yourself. For class Y it will be something like:
import javax.persistence.metamodel.MapAttribute;
import javax.persistence.metamodel.SingularAttribute;
import javax.persistence.metamodel.StaticMetamodel;
@StaticMetamodel(Y.class)
public abstract class Y_ {
public static volatile SingularAttribute<Y, Integer> id;
public static volatile SingularAttribute<Y, Integer> id2;
// + similar definitions for other fields:
// <TYPE_OF_ENTITY, TYPE_OF_ATTRIBUTE> NAME_OF_FIELD_IN_ENTITY
}
Then you can use IdClass fields in criteria query:
Path<Integer> pathToId = entity.get(Y_.id);
Path<Integer> pathToId2 = entity.get(Y_.id2);
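Wired into the original test, that would look roughly like this (a sketch using the metamodel class defined above):
CriteriaBuilder cb = em.getCriteriaBuilder();
CriteriaQuery<Y> query = cb.createQuery(Y.class);
Root<Y> entity = query.from(Y.class);
// type-safe path to one field of the composite id
query.where(cb.equal(entity.get(Y_.id), 5));
Y result = em.createQuery(query).getSingleResult();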
If you don't want to generate the static metamodel, there is still the following (rather ugly) way to access the attributes of the id:
//find set of all attribute that form id
Metamodel mm = em.getMetamodel();
EntityType et = mm.entity(Y.class);
Set<SingularAttribute> idAttributes = et.getIdClassAttributes();
SingularAttribute idAttribute = null;
//and pick out right one from [id, id2]
for (SingularAttribute candidate : idAttributes) {
if (candidate.getName().equals("id")) {
idAttribute = candidate;
break;
}
}
Path<Integer> path = entity.get(idAttribute);

Mapping array with Hibernate

Can you please help me to map this class using Hibernate?
public class MyClass{
private Long id;
private String name;
private int[] values;
...
}
I'm using PostgreSQL and the column type in the table is integer[]
How should my array be mapped?
Hibernate (and JPA) can't directly map the PostgreSQL array type. See this question for how to proceed if you really need to retain your database structure as it is. This thread has an example of the required custom type.
If you can change your schema, you can let Hibernate create an additional table to handle the collection as a List<Integer>. Then, depending on the version of Hibernate you are using:
JPA 2.0 compliant - use @ElementCollection (see the sketch below)
JPA 1.0 compliant - use @CollectionOfElements
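A minimal sketch of the JPA 2.0 variant, assuming the schema change is acceptable (the table and column names below are only illustrative):
@Entity
public class MyClass {

    @Id
    @GeneratedValue
    private Long id;

    private String name;

    // Hibernate stores the elements in a separate collection table
    @ElementCollection
    @CollectionTable(name = "myclass_values", joinColumns = @JoinColumn(name = "myclass_id"))
    @Column(name = "value")
    private List<Integer> values;

    // getters and setters omitted
}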
Out of the box, Hibernate only maps basic types; check under the org.hibernate.type package of the Hibernate jar. An int array is not one of them, so you have to write a custom type implementing the UserType interface.
public class MyClass{
private Long id;
private String name;
private Integer[] values;
@Type(type = "com.usertype.IntArrayUserType")
public Integer[] getValues(){
return values;
}
public void setValues(Integer[] values){
this.values = values;
}
}
IntArrayUserType.class
package com.usertype;
public class IntArrayUserType implements UserType {
protected static final int[] SQL_TYPES = { Types.ARRAY };
@Override
public Object assemble(Serializable cached, Object owner) throws HibernateException {
return this.deepCopy(cached);
}
@Override
public Object deepCopy(Object value) throws HibernateException {
return value;
}
@Override
public Serializable disassemble(Object value) throws HibernateException {
return (Integer[]) this.deepCopy(value);
}
@Override
public boolean equals(Object x, Object y) throws HibernateException {
if (x == null) {
return y == null;
}
return x.equals(y);
}
@Override
public int hashCode(Object x) throws HibernateException {
return x.hashCode();
}
@Override
public boolean isMutable() {
return true;
}
@Override
public Object nullSafeGet(ResultSet resultSet, String[] names, SessionImplementor session, Object owner)
throws HibernateException, SQLException {
if (resultSet.wasNull()) {
return null;
}
if(resultSet.getArray(names[0]) == null){
return new Integer[0];
}
Array array = resultSet.getArray(names[0]);
Integer[] javaArray = (Integer[]) array.getArray();
return javaArray;
}
@Override
public void nullSafeSet(PreparedStatement statement, Object value, int index, SessionImplementor session)
throws HibernateException, SQLException {
Connection connection = statement.getConnection();
if (value == null) {
statement.setNull(index, SQL_TYPES[0]);
} else {
Integer[] castObject = (Integer[]) value;
Array array = connection.createArrayOf("integer", castObject);
statement.setArray(index, array);
}
}
@Override
public Object replace(Object original, Object target, Object owner) throws HibernateException {
return original;
}
@Override
public Class<Integer[]> returnedClass() {
return Integer[].class;
}
@Override
public int[] sqlTypes() {
return new int[] { Types.ARRAY };
}
}
When you query for the MyClass entity you can add something like this:
Type intArrayType = new TypeLocatorImpl(new TypeResolver()).custom(IntArrayUserType.class);
Query query = getSession().createSQLQuery("select values from MyClass")
.addScalar("values", intArrayType);
List<Integer[]> results = (List<Integer[]>) query.list();
Maven dependency
The first thing you need to do is to set up the following Hibernate Types Maven dependency in your project pom.xml configuration file:
<dependency>
<groupId>com.vladmihalcea</groupId>
<artifactId>hibernate-types-52</artifactId>
<version>${hibernate-types.version}</version>
</dependency>
Mapping ARRAY columns
Assuming you have this table in your database:
create table event (
id int8 not null,
version int4,
sensor_names text[],
sensor_values integer[],
primary key (id)
)
And you want to map it like this:
@Entity(name = "Event")
@Table(name = "event")
@TypeDefs({
@TypeDef(
name = "string-array",
typeClass = StringArrayType.class
),
@TypeDef(
name = "int-array",
typeClass = IntArrayType.class
)
})
public static class Event extends BaseEntity {
@Type( type = "string-array" )
@Column(
name = "sensor_names",
columnDefinition = "text[]"
)
private String[] sensorNames;
@Type( type = "int-array" )
@Column(
name = "sensor_values",
columnDefinition = "integer[]"
)
private int[] sensorValues;
//Getters and setters omitted for brevity
}
The string-array and int-array are custom types which can be defined in the BaseEntity superclass:
@TypeDefs({
@TypeDef(
name = "string-array",
typeClass = StringArrayType.class
),
@TypeDef(
name = "int-array",
typeClass = IntArrayType.class
)
})
@MappedSuperclass
public class BaseEntity {
@Id
private Long id;
@Version
private Integer version;
//Getters and setters omitted for brevity
}
The StringArrayType and IntArrayType are classes offered by the Hibernate Types project.
Testing time
Now, when you insert a couple of entities:
Event nullEvent = new Event();
nullEvent.setId(0L);
entityManager.persist(nullEvent);
Event event = new Event();
event.setId(1L);
event.setSensorNames(
new String[] {
"Temperature",
"Pressure"
}
);
event.setSensorValues(
new int[] {
12,
756
}
);
entityManager.persist(event);
Hibernate is going to generate the following SQL statements:
INSERT INTO event (
version,
sensor_names,
sensor_values,
id
)
VALUES (
0,
NULL(ARRAY),
NULL(ARRAY),
0
)
INSERT INTO event (
version,
sensor_names,
sensor_values,
id
)
VALUES (
0,
{"Temperature","Pressure"},
{"12","756"},
1
)
I have never mapped arrays with Hibernate; I always use collections. So, I have slightly changed your class:
public class MyClass{
private Long id;
private String name;
private List<Integer> values;
@Id
// this is only if your id is really auto generated
@GeneratedValue(strategy=GenerationType.AUTO)
public Long getId() {
return id;
}
@OneToMany(cascade=CascadeType.ALL, fetch=FetchType.LAZY)
public List<Integer> getValues() {
return values;
}
...
As of Hibernate 6.1 Final, basic arrays and collections can be mapped to database ARRAY types where possible, or alternatively to JSON/XML types.
https://in.relation.to/2022/06/14/orm-61-final/
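With that version, a plain array field can be mapped without a custom UserType; a minimal sketch (assuming Hibernate 6.1+ on PostgreSQL):
@Entity
public class MyClass {

    @Id
    @GeneratedValue
    private Long id;

    private String name;

    // mapped to a database ARRAY type (integer[] on PostgreSQL) out of the box
    private int[] values;
}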
