javax.persistence.PersistenceException: org.hibernate.TransactionException: Already have an associated managed connection
Caused by: org.hibernate.TransactionException: Already have an associated managed connection
I am using parallelStream().forEach(), which runs on multiple threads:
/**
 * Loads HCP rows from the source JDBC ResultSet into Cassandra.
 * Converts the ResultSet to a list of column-name -> value maps and pushes
 * each row through prepareHcpAndLoad().
 *
 * The Cassandra Session is thread-safe, so sharing it across the parallel
 * stream is fine; the reported "Already have an associated managed
 * connection" comes from the shared JPA EntityManager used inside
 * prepareHcpAndLoad()/getMdmId(), not from this method.
 *
 * @param srcHcpData source rows to migrate
 * @throws SQLException if reading the ResultSet fails
 */
private void loadHcpDataIntoCassandra(ResultSet srcHcpData) throws SQLException {
    List<Map<String, Object>> hcpDataList = resultSetToList(srcHcpData);
    Cluster cluster = HcpDao.getCassandraConnection();
    try {
        List<String> tblSpecs = HcpDao.getTableSpecs();
        Session session = cluster.connect(tblSpecs.get(0));
        // Call prepareHcpAndLoad() for each row, in parallel.
        hcpDataList.parallelStream().forEach(hcpMap -> prepareHcpAndLoad(hcpMap, session));
    } finally {
        // BUG FIX: close the cluster even when loading throws; the original
        // leaked the connection on any failure.
        cluster.close();
    }
}
I got the above-mentioned exception, so I replaced parallelStream().forEach() with:
// Sequential fallback: iterating on the caller thread means the shared JPA
// EntityManager inside prepareHcpAndLoad() is never touched concurrently,
// which is why this version works where parallelStream() failed.
for (Map<String,Object> hcpMap : hcpDataList) {
prepareHcpAndLoad(hcpMap, session);
}
The enhanced for loop works perfectly for me, but I need the multi-threaded behavior. How can I solve this problem so that I can still use parallelStream().forEach()?
/**
 * Maps one source row (column name -> value) onto the shared hcpMasterData
 * bean, resolves its MDM id, and saves it via the Cassandra mapper.
 *
 * NOTE(review): hcpMasterData and mapper are static shared state mutated
 * here; invoking this from parallelStream() means concurrent writes to the
 * same bean -- not thread-safe. Confirm whether a per-row bean is intended.
 * NOTE(review): the loop overwrites hcpHceId on every entry, so only the
 * map's last entry wins -- presumably each map carries a single relevant
 * column; verify against resultSetToList().
 */
private static void prepareHcpAndLoad(Map<String, Object> hcpMap, Session session) {
String hcpHceId = "";
for (Map.Entry<String, Object> entry : hcpMap.entrySet()) {
String colName = entry.getKey();
Object hcpTableRow = entry.getValue();
hcpMasterData.setHcpHceId(hcpTableRow.toString());
hcpHceId = hcpTableRow.toString();
}
/** Get MDM_id */
MdmId mdm = new MdmId();
mdm.setHcpHceId(hcpHceId);
String mdmId = getMdmId(mdm);
/** update mdmId */
hcpMasterData.setMdmId(mdmId);
mapper.save(hcpMasterData);
}
//#PersistenceContext
#PersistenceContext(type = PersistenceContextType.TRANSACTION)
private static EntityManager em = getEntityManager();
/**
 * Persists the given MdmId and returns its generated id as a String.
 *
 * Uses a dedicated EntityManager per invocation: a single shared static
 * EntityManager is not thread-safe, and sharing it across
 * parallelStream() threads is what produced
 * "TransactionException: Already have an associated managed connection".
 *
 * @param mdm entity to persist
 * @return the generated id, or "" when persistence fails
 */
public static String getMdmId(MdmId mdm) {
    String mdmId = "";
    // One EntityManager per call -> safe under parallelStream().
    EntityManager localEm = getEntityManager();
    EntityTransaction tr = localEm.getTransaction();
    try {
        tr.begin();
        localEm.persist(mdm);
        localEm.flush(); // force INSERT so the generated id is available
        mdmId = Long.toString(mdm.getId());
        tr.commit();
    } catch (Exception error) {
        // BUG FIX: the original left the transaction open on failure.
        if (tr.isActive()) {
            tr.rollback();
        }
        logger.error(error.getMessage());
        error.printStackTrace();
    } finally {
        // Release the underlying connection.
        localEm.close();
    }
    return mdmId;
}
/**
 * Builds a brand-new EntityManagerFactory AND EntityManager on every call.
 * NOTE(review): an EntityManagerFactory is heavyweight and meant to be
 * created once per persistence unit; creating one per call leaks pooled
 * connections. Cache the factory and create only EntityManagers here.
 */
private static EntityManager getEntityManager() {
return Persistence.createEntityManagerFactory("sql-connection").createEntityManager();
}
Related
I am using Hibernate and I have Many methods of this type:
/**
 * Sets the last-change user id on the SchmEventsTable row whose schmCode
 * matches, relying on Hibernate dirty checking at commit time.
 *
 * @param schmCode scheme code identifying the row to update
 * @param username value written to the lchgUserId column
 */
public static void modifySchemeEvents(String schmCode, String username) {
    Session session = HibernateUtil.getSessionFactory().openSession();
    Transaction tx = null;
    try {
        tx = session.beginTransaction();
        Criteria cr = session.createCriteria(SchmEventsTable.class);
        cr.add(Restrictions.eq("schmCode", schmCode));
        SchmEventsTable evt = (SchmEventsTable) cr.uniqueResult();
        // BUG FIX: uniqueResult() returns null when no row matches; the
        // original dereferenced it unconditionally (NPE).
        if (evt != null) {
            // Mutating the attached entity is enough; commit flushes it.
            evt.setLchgUserId(username);
        }
        tx.commit();
    } catch (HibernateException asd) {
        log.debug(asd.getMessage());
        if (tx != null) {
            tx.rollback();
        }
    } finally {
        session.close();
    }
}
This updated the database with the username where schmCode is <schmCode> I am attempting to convert to generic so that I do not have to write a separate method for all updates so I have come up with this generic method:
/**
 * Generic lookup: opens a session, applies equality restrictions built from
 * params, and returns the unique matching entity.
 *
 * NOTE(review): the returned entity is DETACHED (its session is closed by
 * the time this method returns), so mutating it afterwards is never
 * persisted -- that is why the caller's setter has no effect. Apply the
 * update inside the transaction (e.g. accept a mutation callback) or
 * re-attach the entity and commit again.
 *
 * @param c      entity class to query
 * @param params property-name -> expected-value equality restrictions
 * @return the unique matching entity, or null when none matches or on error
 */
public static <T> T getUpdateObject(Class<T> c, Map<String, ?> params) {
    Session session = HibernateUtil.getSessionFactory().openSession();
    Transaction tx = null;
    T value = null;
    try {
        tx = session.beginTransaction();
        Criteria cr = session.createCriteria(c);
        for (Map.Entry<String, ?> entry : params.entrySet()) {
            cr.add(Restrictions.eq(entry.getKey(), entry.getValue()));
        }
        // Class.cast avoids the unchecked cast of the raw original.
        value = c.cast(cr.uniqueResult());
        tx.commit();
    } catch (HibernateException asd) {
        log.debug(asd.getMessage());
        // BUG FIX: the original called tx.commit() here; a failed
        // transaction must be rolled back, not committed again.
        if (tx != null) {
            tx.rollback();
        }
    } finally {
        session.close();
    }
    return value;
}
I am attempting to use it this way:
// Example call site. NOTE(review): the entity returned here is detached --
// its session was closed inside getUpdateObject() -- so the setter call
// below is never flushed to the database. That is why this snippet does not
// update the table like the original non-generic method did.
Map<String, String> schemeEventMap = new HashMap<>(); // was raw 'new HashMap()'
schemeEventMap.put("schmCode", "CA201");
SchmEventsTable evt = DataOperation.getUpdateObject(SchmEventsTable.class, schemeEventMap);
evt.setLchgUserId(username);
This does not update the table like the first method. I am wondering how to pass the parameters in the generic Method Dynamically.
I am trying to aggregate data from a file in HDFS.
I need to add some details from those datas with value on a specific Table in hbase.
but I have the exception :
org.apache.spark.SparkException: Task not serializable
at org.apache.spark.util.ClosureCleaner$.ensureSerializable(ClosureCleaner.scala:166)
at org.apache.spark.util.ClosureCleaner$.clean(ClosureCleaner.scala:158)
at org.apache.spark.SparkContext.clean(SparkContext.scala:1623)
at org.apache.spark.rdd.RDD.map(RDD.scala:286)
at org.apache.spark.api.java.JavaRDDLike$class.mapToPair(JavaRDDLike.scala:113)
at org.apache.spark.api.java.AbstractJavaRDDLike.mapToPair(JavaRDDLike.scala:46)
at ......
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:497)
at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:577)
at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:174)
at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:197)
at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:112)
at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
Caused by: java.io.NotSerializableException: org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation
Serialization stack:
at org.apache.spark.serializer.SerializationDebugger$.improveException(SerializationDebugger.scala:38)
at org.apache.spark.serializer.JavaSerializationStream.writeObject(JavaSerializer.scala:47)
at org.apache.spark.serializer.JavaSerializerInstance.serialize(JavaSerializer.scala:80)
at org.apache.spark.util.ClosureCleaner$.ensureSerializable(ClosureCleaner.scala:164)
I know that the problem occured when we try to access to the hbase during the map function.
My question is: how to complete my RDDs with the value contains on the hbase Table.
for example:
file in hdfs are csv:
Name;Number1;Number2
toto;1;2
in hbase we have data associate to the name toto.
i need to retrieve the sum of Number1 and Number 2 (that the easiest part)
and aggregate with the data in the table.
for example:
the key for the reducer will be tata and be retrieve by get the rowkey toto in the hbase table.
Any suggestions?
Finally a colleague solved it, thanks to your advice:
so this is the code of the map that permits to aggregate a file with datas from the hbase table.
// NOTE(review): a non-static, non-transient SLF4J Logger field on a Spark
// function can itself trigger NotSerializableException; prefer
// 'private static final'. Verify against the Logger implementation used.
private final Logger LOGGER = LoggerFactory.getLogger(AbtractGetSDMapFunction.class);
/**
 * Namespace name
 */
public static final String NAMESPACE = "NameSpace";
private static final String ID = "id";
// HBase connection; deliberately left null on the driver and (re)created on
// the executor inside call() via initConnection(), so it is never shipped
// through Spark's closure serializer.
private Connection connection = null;
private static final String LINEID = "l";
private static final String CHANGE_LINE_ID = "clid";
private static final String CHANGE_LINE_DATE = "cld";
// ZooKeeper/HBase connection settings, injected through the constructor so
// they serialize with the function instance.
private String constClientPortHBase;
private String constQuorumHBase;
private int constTimeOutHBase;
private String constZnodeHBase;
/**
 * Builds the HBase configuration from the injected settings and opens a
 * connection. Called from call() on the executor, so the non-serializable
 * connection is never part of the serialized closure.
 *
 * NOTE(review): HConnectionManager is a deprecated API -- prefer
 * ConnectionFactory.createConnection(conf). On failure the error is only
 * logged and 'connection' stays null; getTable() later surfaces that as a
 * TechnicalException.
 */
public void initConnection() {
Configuration conf = HBaseConfiguration.create();
conf.setInt("timeout", constTimeOutHBase);
conf.set("hbase.zookeeper.quorum", constQuorumHBase);
conf.set("hbase.zookeeper.property.clientPort", constClientPortHBase);
conf.set("zookeeper.znode.parent", constZnodeHBase);
try {
connection = HConnectionManager.createConnection(conf);
} catch (Exception e) {
LOGGER.error("Error in the configuration of the connection with HBase.", e);
}
}
/**
 * Spark map-function body (sketch -- the '....do your work' lines are
 * pseudocode, so this fragment does not compile as shown). Opens the HBase
 * connection lazily on the executor, then enriches the input row with
 * state read from HBase.
 *
 * NOTE(review): as written this opens a new connection for EVERY record;
 * guard initConnection() with a null check, or switch to mapPartitions so
 * one connection serves a whole partition.
 */
public Tuple2<String, myInput> call(String row) throws Exception {
//this is where you need to init the connection for hbase to avoid serialization problem
initConnection();
....do your work
State state = getCurrentState(myInput.getKey());
....do your work
}
/**
 * Captures the HBase/ZooKeeper connection settings; these plain fields are
 * serializable, so the function can be shipped to executors safely.
 *
 * @param constClientPortHBase ZooKeeper client port
 * @param constQuorumHBase     ZooKeeper quorum hosts
 * @param constZnodeHBase      parent znode of the HBase cluster
 * @param constTimeOutHBase    value set under the "timeout" conf key
 */
public AbtractGetSDMapFunction( String constClientPortHBase, String constQuorumHBase, String constZnodeHBase, int constTimeOutHBase) {
this.constClientPortHBase = constClientPortHBase;
this.constQuorumHBase = constQuorumHBase;
this.constZnodeHBase = constZnodeHBase;
this.constTimeOutHBase = constTimeOutHBase;
}
/***************************************************************************/
/**
 * Table Name
 */
// Combined with NAMESPACE to resolve the state table inside getTable().
public static final String TABLE_NAME = "Table";
/**
 * Reads the current state for the given business key from HBase.
 * The row key is the SHA-1 hash of the key (see buildRowKey); only the
 * ID qualifier of the state column family is fetched.
 *
 * @param key business key to look up
 * @return a State whose currentId is populated from HBase (fields stay
 *         unset when the row key or column family is unavailable)
 * @throws TechnicalException on HBase I/O failure or missing connection
 */
public State getCurrentState(String key) throws TechnicalException {
    // NOTE(review): return type was lowercase 'state' in the paste -- fixed
    // to match the State type used throughout this class.
    LOGGER.debug("start key {}", key);
    String buildRowKey = buildRowKey(key);
    State currentState = new State();
    String columnFamily = State.getColumnFamily();
    if (!StringUtils.isEmpty(buildRowKey) && null != columnFamily) {
        Get scan = new Get(Bytes.toBytes(buildRowKey));
        scan.addFamily(Bytes.toBytes(columnFamily));
        addColumnsToScan(scan, columnFamily, ID);
        // BUG FIX: Table is AutoCloseable and the original leaked one
        // handle per call; try-with-resources closes it deterministically.
        try (Table table = getTable()) {
            Result result = table.get(scan);
            currentState.setCurrentId(getLong(result, columnFamily, ID));
        } catch (IOException ex) {
            throw new TechnicalException(ex);
        }
        LOGGER.debug("end ");
    }
    return currentState;
}
/***********************************************************/
/**
 * Resolves the HBase Table handle for NAMESPACE:TABLE_NAME.
 *
 * @return the table handle (caller is responsible for closing it)
 * @throws TechnicalException when no HBase connection is available
 * @throws IOException        when the table lookup fails
 */
private Table getTable() throws IOException, TechnicalException {
    Connection conn = getConnection();
    // Guard clause: fail fast when initConnection() never succeeded.
    if (conn == null) {
        throw new TechnicalException("Connection to Hbase not available");
    }
    return conn.getTable(TableName.valueOf(NAMESPACE, TABLE_NAME));
}
/****************************************************************/
/**
 * Reads the given family:qualifier cell from the Result as a Long.
 * Returns null when the family, the qualifier, or the cell is absent.
 */
private Long getLong(Result result, String columnFamily, String qualifier) {
    if (columnFamily == null || qualifier == null) {
        return null;
    }
    byte[] raw = result.getValue(Bytes.toBytes(columnFamily), Bytes.toBytes(qualifier));
    return raw == null ? null : Bytes.toLong(raw);
}
/**
 * Reads the given family:qualifier cell from the Result as a String.
 * Returns null when the family, the qualifier, or the cell is absent.
 */
private String getString(Result result, String columnFamily, String qualifier) {
    if (columnFamily == null || qualifier == null) {
        return null;
    }
    byte[] raw = result.getValue(Bytes.toBytes(columnFamily), Bytes.toBytes(qualifier));
    return raw == null ? null : Bytes.toString(raw);
}
/** @return the current HBase connection, or null if not yet initialized. */
public Connection getConnection() {
return connection;
}
/** Replaces the HBase connection held by this function instance. */
public void setConnection(Connection connection) {
this.connection = connection;
}
/** Narrows the Get to family:qualifier when both names are non-empty. */
private void addColumnsToScan(Get scan, String family, String qualifier) {
    boolean bothPresent = org.apache.commons.lang.StringUtils.isNotEmpty(family)
            && org.apache.commons.lang.StringUtils.isNotEmpty(qualifier);
    if (!bothPresent) {
        return;
    }
    scan.addColumn(Bytes.toBytes(family), Bytes.toBytes(qualifier));
}
/**
 * Derives the HBase row key as the SHA-1 hash of the business key.
 * String.valueOf preserves the original StringBuilder behavior of
 * rendering a null hash as the literal string "null".
 */
private String buildRowKey(String key) throws TechnicalException {
    return String.valueOf(HashFunction.makeSHA1Hash(key));
}
I'm calling the class this way, but I still can't figure out what is wrong with my understanding of the concept:
public class MMTUtil{
private static Map<String, String> domainDocumentationMap = null;
static{
domainDocumentationMap = new TreeMap<String, String>();
}
/** @return the shared (objectType + objectName) -> documentation-link map. */
public static Map<String, String> getDomainDocumentationMap() {
return domainDocumentationMap;
}
/** Replaces the shared documentation map wholesale. */
public static void setDomainDocumentationMap(Map<String, String> domainDocumentationMap) {
MMTUtil.domainDocumentationMap = domainDocumentationMap;
}
How shall I use the setDomainDocumentation and getDomainDocumentation effectively with the code so that my Treemap has the following set of values
(objectType + objectname,DocumentationLink)
/**
 * Inserts a documentation record and mirrors it into the in-memory
 * domainDocumentationMap cache.
 *
 * NOTE(review): documentationLink, objectName and objectType are assigned
 * null immediately before being dereferenced in the put(...) call, so as
 * written this must throw NullPointerException -- presumably they should be
 * populated from documentationDTO; confirm and fix.
 * NOTE(review): the try block has no catch/finally (this will not compile
 * as shown), the session is never closed, there is no rollback path, and
 * 'ro' is not declared in this fragment.
 */
public UMRResultObject insertDocumentation(UMRDocumentationDTO documentationDTO)
{
Session session = UMRHibernateUtil.getUmrSession();
Transaction tx = null;
documentationLink = null;
objectName = null;
objectType = null;
try{
tx = session.beginTransaction();
dao.insertDocumentation(documentationDTO, session);
MMTUtil.getDomainDocumentationMap().put(objectName.getDomainName()+objectType.getDomainType(),documentationLink.getDocumentationLink());
tx.commit();
ro.setSuccess(true);
ro.getMessages().add("Record Inserted Successfully");
}
}
// Sketch of the server-startup call (not valid Java as written: 'main()'
// lacks 'public static void main(String[] args)' and 'model' comes from
// elsewhere). Intent: build the EntityManagerFactory once, from credentials
// loaded at startup.
main()
{
createEntityManagerFactory(model.getDriver(), model.getUrl(), model.getUser(), model.getPassword());
}
#SuppressWarnings("unused")
public static void createEntityManagerFactory(String driver, String url, String username, String password) {
Map<String, String> persistenceMap = new HashMap<String, String>();
persistenceMap.put("javax.persistence.jdbc.driver", driver);
persistenceMap.put("javax.persistence.jdbc.url", url);
persistenceMap.put("javax.persistence.jdbc.user", username);
persistenceMap.put("javax.persistence.jdbc.password", password);
try {
if (entityManagerFactory == null)
entityManagerFactory = Persistence.createEntityManagerFactory("XYZAPI", persistenceMap);
} catch (Exception e) {
e.printStackTrace();
throw e;
}
}
/** @return the shared factory built at startup, or null if never created. */
public static EntityManagerFactory getEntityManagerFactory() {
return entityManagerFactory;
}
This is my code which call on server Startup.
And this my get Entity Manager function.
/**
 * Creates an EntityManager from the shared factory and begins a
 * transaction on it before returning.
 *
 * NOTE(review): beginning the transaction here obliges every caller to
 * commit/rollback and close; confirm callers actually do.
 *
 * @return a new EntityManager with an active transaction, or null when the
 *         shared factory is absent or closed
 */
public static EntityManager getDBManager() {
    EntityManager entityManager = null;
    EntityManagerFactory entityManagerFactory =
        Configuration.getEntityManagerFactory();
    // BUG FIX: also check isOpen() -- createEntityManager() on a closed
    // factory throws "IllegalStateException: EntityManagerFactory is
    // closed", which is exactly the failure seen on the second request.
    if (entityManagerFactory != null && entityManagerFactory.isOpen()) {
        entityManager = entityManagerFactory.createEntityManager();
        entityManager.getTransaction().begin();
    }
    return entityManager;
}
But when i hit first time then i am getting data and response able connect database but in second time when i hit then it give exception java.lang.IllegalStateException: EntityManagerFactory is closed
And i am getting Exception here
entityManager = entityManagerFactory.createEntityManager();
at this line. Please suggest how to fix this issue; I have to load the MySQL user id and password at server startup time.
While developing the backend for a web app, we used hibernate to handle the database. So, while testing, we had a test failing.
package com.app.db.hibernate.test;
import java.util.HashMap;
import com.app.db.hibernate.UserManager;
import com.app.db.utils.TestUtils;
import org.junit.*;
import static org.junit.Assert.*;
public class HibernateUserTest {
public final UserManager um = new UserManager();
public Integer id;
public HashMap<String, Object> userParams;
#Before
public void setup(){
this.userParams = TestUtils.loadParams();
this.id = this.um.agregar(this.userParams);
}
#Test
public void deleteUserTest(){
//Se elimina el usuario de la db:
um.deleteUser(userParams);
//Se comprueba que no esta:
Integer id_ = um.search(userParams);
assertNull(id_);
}
#After
public void cleanUp(){
um.reset();
}
}
So here we add a user to the database, call the deleteUser() method, and check if it works (search(userParams) should return null). The thing is that the test fails since, even though um.deleteUser(userParams) deletes the dummy user from the database, um.search(userParams) still "finds" it (by returning the id of the user just deleted, I checked that by debugging the code), I do not know from where, thus failing. Here's the code for delete() and search():
/**
 * Deletes the user matched by params (via search()) inside its own
 * session/transaction. A no-op when no matching user exists.
 *
 * @param params lookup parameters forwarded to search()
 */
public void deleteUser(HashMap<String, Object> params){
    Session sesion = sessionFactory.openSession();
    Transaction tx = null;
    try{
        tx = sesion.beginTransaction();
        // BUG FIX: search() may return null (no/multiple matches), and
        // get() may return null for a stale id; the original passed both
        // through unconditionally, blowing up inside Hibernate.
        Integer userId = search(params);
        if (userId != null) {
            User u = (User)sesion.get(User.class, userId);
            if (u != null) {
                sesion.delete(u);
            }
        }
        tx.commit();
    }catch(HibernateException he){
        // BUG FIX: roll back the open transaction instead of abandoning it.
        if (tx != null) {
            tx.rollback();
        }
        he.printStackTrace();
    }finally{sesion.close();}
}
/**
 * Returns the id of the single User whose name equals params["name"], or
 * null when zero/multiple rows match or on error.
 *
 * NOTE(review): this is the pre-fix version from the question -- the query
 * runs outside any transaction, which is why it kept "finding" the user the
 * test had just deleted; the reworked version later in this file wraps the
 * read in a transaction.
 */
public Integer search(HashMap<String, Object> params) {
Session sesion = sessionFactory.openSession();
Integer id = null;
try{
List lista = sesion.createCriteria(User.class).add(
Restrictions.eq("name",(String)params.get("name"))).list();
if(lista.size() == 1){id = ((User)lista.get(0)).getId();}
}catch(HibernateException he){
he.printStackTrace();
}finally{sesion.close();}
return id;
}
reset() just cleans the users table:
/**
 * Empties the users table (test cleanup) with a bulk HQL delete.
 */
public void reset() {
    Session session = sessionFactory.openSession();
    try {
        session.beginTransaction();
        Query q = session.createQuery("delete from User");
        q.executeUpdate();
        session.getTransaction().commit();
    } finally {
        // BUG FIX: the original never closed the session, leaking one
        // connection per call.
        session.close();
    }
}
So, my question is: Is there something i'm missing here to make the test pass? Thanks in advance.
well, the trick was to add some lines, to get the transaction from the session, and commiting, to go from this:
/**
 * Pre-fix search(), repeated here verbatim as the "before" half of the
 * before/after comparison in the answer: no transaction around the read,
 * raw List, name-only restriction.
 */
public Integer search(HashMap<String, Object> params) {
Session sesion = sessionFactory.openSession();
Integer id = null;
try{
List lista = sesion.createCriteria(User.class).add(
Restrictions.eq("name",(String)params.get("name"))).list();
if(lista.size() == 1){id = ((User)lista.get(0)).getId();}
}catch(HibernateException he){
he.printStackTrace();
}finally{sesion.close();}
return id;
}
to this:
/**
 * Transactional search: returns the id of the single User matching both
 * the name and email in params, or null when zero/multiple rows match or
 * on error. Wrapping the read in a transaction is what made the delete
 * test see fresh data.
 *
 * @param params expects "name" and "email" keys
 */
public Integer search(HashMap<String, Object> params) {
    Session sesion = sessionFactory.openSession();
    Integer id = null;
    //get the transaction object:
    Transaction tx = sesion.beginTransaction();
    try{
        //a new restriction added to the search:
        Criteria criteria = sesion.createCriteria(User.class);
        criteria.add(Restrictions.eq("name", (String)params.get("name")));
        criteria.add(Restrictions.eq("email", (String)params.get("email")));
        // List<?> instead of the raw List of the original; the element cast
        // below is unchanged.
        List<?> lista = criteria.list();
        //commit:
        tx.commit();
        //only one should be found, this is yet to be implemented:
        if(lista.size() == 1){id = ((User)lista.get(0)).getId();}
    }catch(HibernateException he){
        // BUG FIX: rollback() on an already-committed transaction itself
        // throws; only roll back while the transaction is still active.
        if (tx.isActive()) {
            tx.rollback();
        }
        he.printStackTrace();
    }finally{sesion.close();}
    return id;
}
That made the test pass :)