Example usage for org.hibernate Session setCacheMode

Introduction

This page collects real-world usage examples of org.hibernate Session setCacheMode, drawn from open-source projects.

Prototype

void setCacheMode(CacheMode cacheMode);

Document

Set the cache mode.
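
As a quick orientation, most of the examples below follow the same pattern: remember the session's current cache mode, switch it (usually to CacheMode.IGNORE) for the duration of some bulk work, and restore it afterwards. Here is a minimal, self-contained sketch of that pattern; the sessionFactory argument and the User entity are hypothetical placeholders, not taken from the sources below.

import org.hibernate.CacheMode;
import org.hibernate.Session;
import org.hibernate.SessionFactory;

public class CacheModeExample {

    // Counts entities without touching the second-level cache.
    // 'User' is a hypothetical mapped entity used only for illustration.
    public static long countUsers(SessionFactory sessionFactory) {
        Session session = sessionFactory.openSession();
        try {
            CacheMode previous = session.getCacheMode(); // remember the current mode
            try {
                session.setCacheMode(CacheMode.IGNORE); // neither read from nor write to the cache
                return (Long) session.createQuery("select count(*) from User").uniqueResult();
            } finally {
                session.setCacheMode(previous); // always restore the original mode
            }
        } finally {
            session.close();
        }
    }
}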

Usage

From source file:org.alfresco.repo.workflow.jbpm.JBPMEngine.java

License:Open Source License

@SuppressWarnings("unchecked")
public List<WorkflowTask> getStartTasks(final List<String> workflowInstanceIds, final boolean sameSession) {
    try {
        return (List<WorkflowTask>) jbpmTemplate.execute(new JbpmCallback() {
            public Object doInJbpm(JbpmContext context) {
                List<Long> jbpmIds = new ArrayList<Long>(workflowInstanceIds.size());
                Set<String> startTaskNames = new TreeSet<String>();
                for (String workflowInstanceId : workflowInstanceIds) {
                    // retrieve process instance
                    GraphSession graphSession = context.getGraphSession();
                    ProcessInstance processInstance = getProcessInstanceIfExists(graphSession,
                            workflowInstanceId);
                    if (processInstance != null) {
                        jbpmIds.add(processInstance.getId());
                        Task startTask = processInstance.getProcessDefinition().getTaskMgmtDefinition()
                                .getStartTask();
                        startTaskNames.add(startTask.getName());
                    }
                }

                if (jbpmIds.isEmpty()) {
                    return Collections.emptyList();
                }

                // retrieve tasks
                Session session = context.getSession();
                Criteria taskCriteria = session.createCriteria(TaskInstance.class);
                taskCriteria.add(Restrictions.in("name", startTaskNames));
                Criteria process = taskCriteria.createCriteria("processInstance");
                process.add(Restrictions.in("id", jbpmIds));

                // Read from the second-level cache but never add to it (GET), so this query does not pollute the cache
                CacheMode cacheMode = session.getCacheMode();
                try {
                    session.setCacheMode(CacheMode.GET);
                    List<TaskInstance> tasks = process.list();
                    return getWorkflowTasks(tasks, sameSession);
                } finally {
                    session.setCacheMode(cacheMode);
                }
            }
        });
    } catch (JbpmException e) {
        String msg = messageService.getMessage(ERR_QUERY_TASKS, workflowInstanceIds);
        throw new WorkflowException(msg, e);
    }
}
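
Note the choice of CacheMode.GET rather than IGNORE in this example: GET lets the session read entities that are already in the second-level cache but never adds new ones, whereas IGNORE (used by most of the other examples on this page) skips cache reads as well. Saving the old mode and restoring it in a finally block keeps the change scoped to this single query.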

From source file:org.castafiore.persistence.LoadHibernateSessionFilter.java

License:Open Source License

public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain)
        throws IOException, ServletException {

    SessionFactory factory = SpringUtil.getBeanOfType(SessionFactory.class);
    Map castaSession = SpringUtil.getBean("casta_session");
    Session s = null;
    if (!castaSession.containsKey("hibernate.session")) {
        s = factory.openSession();
        s.setFlushMode(FlushMode.COMMIT);
        s.setCacheMode(CacheMode.IGNORE);
        castaSession.put("hibernate.session", s);

    } else {
        s = (Session) castaSession.get("hibernate.session");
    }

    try {
        // 't' (the current Hibernate Transaction) and 'creatorHash' are instance fields of this filter
        if (t == null) {
            t = s.beginTransaction();
            creatorHash = request.hashCode();
        }

        if (!t.isActive()) {
            t.begin();
            creatorHash = request.hashCode();
        }

        chain.doFilter(request, response);

        if (t.isActive()) {
            if (creatorHash == request.hashCode()) {
                s.flush();
                t.commit();
                s.clear();

            }
        }

    } catch (Exception e) {
        e.printStackTrace();
        if (t != null && t.isActive()) {
            if (creatorHash == request.hashCode()) {
                t.rollback();
                s.clear();
            }
        }

    }

}

From source file:org.compass.gps.device.hibernate.indexer.PaginationHibernateIndexEntitiesIndexer.java

License:Apache License

public void performIndex(CompassSession session, IndexEntity[] entities) {
    for (IndexEntity entity : entities) {
        EntityInformation entityInfo = (EntityInformation) entity;
        int fetchCount = device.getFetchCount();
        int current = 0;
        while (true) {
            if (!device.isRunning()) {
                return;
            }
            Session hibernateSession = device.getSessionFactory().openSession();
            hibernateSession.setCacheMode(CacheMode.IGNORE);
            Transaction hibernateTransaction = null;
            try {
                hibernateTransaction = hibernateSession.beginTransaction();
                if (log.isDebugEnabled()) {
                    log.debug(device.buildMessage("Indexing entity [" + entityInfo.getName() + "] range ["
                            + current + "-" + (current + fetchCount) + "]"));
                }
                List values;
                Criteria criteria = entityInfo.getQueryProvider().createCriteria(hibernateSession, entityInfo);
                if (criteria != null) {
                    criteria.setFetchSize(device.getFetchCount());
                    criteria.setFirstResult(current);
                    criteria.setMaxResults(fetchCount);
                    values = criteria.list();
                } else {
                    Query query = entityInfo.getQueryProvider().createQuery(hibernateSession, entityInfo)
                            .setFirstResult(current).setMaxResults(fetchCount);
                    values = query.list();
                }
                for (Object value : values) {
                    session.create(value);
                }
                session.evictAll();
                hibernateTransaction.commit();
                session.close();
                current += fetchCount;
                if (values.size() < fetchCount) {
                    break;
                }
            } catch (Exception e) {
                log.error(device.buildMessage("Failed to index the database"), e);
                if (hibernateTransaction != null) {
                    try {
                        hibernateTransaction.rollback();
                    } catch (Exception e1) {
                        log.warn("Failed to rollback Hibernate", e1);
                    }
                }
                if (!(e instanceof HibernateGpsDeviceException)) {
                    throw new HibernateGpsDeviceException(device.buildMessage("Failed to index the database"),
                            e);
                }
                throw (HibernateGpsDeviceException) e;
            } finally {
                hibernateSession.close();
            }
        }
    }
}

From source file:org.compass.gps.device.hibernate.indexer.ScrollableHibernateIndexEntitiesIndexer.java

License:Apache License

public void performIndex(CompassSession session, IndexEntity[] entities) {
    for (IndexEntity entity : entities) {
        EntityInformation entityInformation = (EntityInformation) entity;
        if (device.isFilteredForIndex(entityInformation.getName())) {
            continue;
        }
        if (!device.isRunning()) {
            return;
        }
        ScrollableResults cursor = null;
        Session hibernateSession = device.getSessionFactory().openSession();
        hibernateSession.setCacheMode(CacheMode.IGNORE);
        Transaction hibernateTransaction = null;
        try {
            hibernateTransaction = hibernateSession.beginTransaction();
            if (log.isDebugEnabled()) {
                log.debug(device.buildMessage("Indexing entities [" + entityInformation.getName()
                        + "] using query [" + entityInformation.getQueryProvider() + "]"));
            }

            Criteria criteria = entityInformation.getQueryProvider().createCriteria(hibernateSession,
                    entityInformation);
            if (criteria != null) {
                if (performOrderById) {
                    Boolean performOrder = performOrderByPerEntity.get(entityInformation.getName());
                    if (performOrder == null || performOrder) {
                        ClassMetadata metadata = hibernateSession.getSessionFactory()
                                .getClassMetadata(entityInformation.getName());
                        String idPropName = metadata.getIdentifierPropertyName();
                        if (idPropName != null) {
                            criteria.addOrder(Order.asc(idPropName));
                        }
                    }
                }
                criteria.setFetchSize(device.getFetchCount());
                cursor = criteria.scroll(ScrollMode.FORWARD_ONLY);
            } else {
                Query query = entityInformation.getQueryProvider().createQuery(hibernateSession,
                        entityInformation);
                cursor = query.scroll(ScrollMode.FORWARD_ONLY);
            }

            // store things in row buffer to allow using batch fetching in Hibernate
            RowBuffer buffer = new RowBuffer(session, hibernateSession, device.getFetchCount());
            Object prev = null;
            while (true) {
                try {
                    if (!cursor.next()) {
                        break;
                    }
                } catch (ObjectNotFoundException e) {
                    continue;
                }
                Object item = cursor.get(0);
                if (prev != null && item != prev) {
                    buffer.put(prev);
                }
                prev = item;
                if (buffer.shouldFlush()) {
                    // put also the item/prev since we are clearing the session
                    // in the flush process
                    buffer.put(prev);
                    buffer.flush();
                    prev = null;
                }
            }
            if (prev != null) {
                buffer.put(prev);
            }
            buffer.close();
            cursor.close();

            hibernateTransaction.commit();
        } catch (Exception e) {
            log.error(device.buildMessage("Failed to index the database"), e);
            if (cursor != null) {
                try {
                    cursor.close();
                } catch (Exception e1) {
                    log.warn(device.buildMessage("Failed to close cursor on error, ignoring"), e1);
                }
            }
            if (hibernateTransaction != null) {
                try {
                    hibernateTransaction.rollback();
                } catch (Exception e1) {
                    log.warn("Failed to rollback Hibernate", e1);
                }
            }
            if (!(e instanceof HibernateGpsDeviceException)) {
                throw new HibernateGpsDeviceException(device.buildMessage("Failed to index the database"), e);
            }
            throw (HibernateGpsDeviceException) e;
        } finally {
            hibernateSession.close();
            session.close();
        }
    }
}

From source file:org.gbif.portal.dao.impl.hibernate.SimpleQueryDAOImpl.java

License:Open Source License

/**
 * @see org.gbif.portal.dao.SimpleQueryDAO#outputResultsForQuery(java.lang.String, java.util.List, java.lang.Integer, java.lang.Integer)
 */
public void outputResultsForQuery(final String queryString, final List<Object> parameters,
        final Integer startIndex, final Integer maxResults, final ResultsOutputter resultsOutputter)
        throws IOException {

    Session session = getSession();
    session.setCacheMode(CacheMode.IGNORE);

    if (logger.isDebugEnabled())
        logger.debug("getByQuery queryString " + queryString);
    Query query = createQuery(queryString, parameters, startIndex, maxResults, session);
    DAOUtils.scrollResults(resultsOutputter, session, query, associationTraverser, batchSize);
}

From source file:org.hyperic.hq.events.server.session.EventLogDAO.java

License:Open Source License

/**
 * Insert the event logs in batch, with batch size specified by the
 * <code>hibernate.jdbc.batch_size</code> configuration property.
 *
 * @param eventLogs The event logs to insert.
 */
void insertLogs(EventLog[] eventLogs) {
    Session session = getSession();

    FlushMode flushMode = session.getFlushMode();
    CacheMode cacheMode = session.getCacheMode();

    try {
        session.setFlushMode(FlushMode.MANUAL);

        // We do not want to update the 2nd level cache with these event
        // logs
        session.setCacheMode(CacheMode.IGNORE);

        for (int i = 0; i < eventLogs.length; i++) {
            create(eventLogs[i]);
        }

        session.flush();
        session.clear();
    } finally {
        session.setFlushMode(flushMode);
        session.setCacheMode(cacheMode);
    }
}
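
The batch size mentioned in the comment above is read from Hibernate's configuration. As a hedged sketch of how that property can be set programmatically (the property name is standard Hibernate; hibernate.cfg.xml and the rest of the setup are assumed):

import org.hibernate.SessionFactory;
import org.hibernate.cfg.Configuration;

public class BatchConfigExample {

    // Builds a SessionFactory with JDBC batching enabled.
    public static SessionFactory build() {
        Configuration cfg = new Configuration().configure(); // reads hibernate.cfg.xml from the classpath
        cfg.setProperty("hibernate.jdbc.batch_size", "50"); // flush inserts/updates to the database in groups of 50
        return cfg.buildSessionFactory();
    }
}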

From source file:org.life.sl.importers.CalculateODMatrix.java

License:Open Source License

public CalculateODMatrix() {
    Timer timer = new Timer();
    timer.init();
    ids_edges = this.loadEdgesFromOSM();
    timer.getRunTime(true, "Edges read from database: " + ids_edges.size());
    ids_nodes = this.loadNodesFromOSM();
    timer.getRunTime(true, "Nodes read from database: " + ids_nodes.size());

    psg = new PathSegmentGraph(1); // 1 = read from database...
    timer.getRunTime(true, "PathSegmentGraph initialized");
    psg.calculateDistances();

    float[][] dist = psg.getAPSDistancesArr(); // all-pairs shortest-path distances between nodes

    Session session = HibernateUtil.getSessionFactory().getCurrentSession();
    // first, empty the database table:
    session.beginTransaction();
    session.setCacheMode(CacheMode.IGNORE);
    int nDel = session.createQuery("delete ShortestPathLength").executeUpdate();
    session.flush();
    System.out.println("Deleted " + nDel + " records from shortestpathlength");

    int osmNodeID1 = 0, osmNodeID2 = 0;
    float length = 0.f;

    System.out.println("Starting database export...");
    // parse hibernate.jdbc.batch_size from the Hibernate configuration, defaulting to 30
    // (Integer.getInteger would look up a system property, not parse this value)
    String batchSizeProp = new Configuration().getProperty("hibernate.jdbc.batch_size");
    Integer batchSize = (batchSizeProp != null) ? Integer.valueOf(batchSizeProp) : 30;
    System.out.println("Database batch size: " + batchSize);

    timer.init(2.5, 50.);
    double nn = dist.length * dist.length / 2; // approximate number of steps
    long n = 0, nc = 0;
    for (int i = 0; i < dist.length - 1; i++) { // outer loop over all nodes
        osmNodeID1 = i;
        if (osmNodeID1 >= 0) { // check if node exists in OSM network at all...
            for (int j = i + 1; j < dist.length; j++) { // inner loop over all nodes
                if (i != j) { // no connection to self!
                    length = dist[i][j]; // the path length
                    if (length > 1.e-8 && length < .5f * Float.MAX_VALUE) { // ignore 0 (= self) and infinity (= no connection)
                        osmNodeID2 = j;

                        // store length(n1, n2)
                        if (osmNodeID2 >= 0) {
                            // TODO: can this be optimized by reusing sPl1 instead of creating it (new)?
                            ShortestPathLength sPl1 = new ShortestPathLength(osmNodeID1, osmNodeID2, length);
                            session.save(sPl1);
                            if (++n % batchSize == 0) {
                                session.flush();
                                session.clear();
                            }

                            // the same path in reverse direction: not necessary
                            /*ShortestPathLength sPl2 = new ShortestPathLength(osmNodeID2, osmNodeID1, length);
                            session.save(sPl2);*/
                        }
                    }
                    nc++;
                }
            }
        }
        timer.showProgress(nc / nn);
    }
    session.getTransaction().commit(); // TODO: complete the transaction in the outer loop above, to prevent it from getting too big?
    timer.getRunTime(true, "... finished");
    System.out.println("YEAH !");

}

From source file:org.life.sl.importers.ShapeFileImporter.java

License:Open Source License

public void dumpToPostgresql() {
    ArrayList<Node> nodes = psg.getNodes();
    Iterator<Node> iter = nodes.iterator();

    session = HibernateUtil.getSessionFactory().getCurrentSession();
    session.beginTransaction();

    // first, empty the database tables:
    session.setCacheMode(CacheMode.IGNORE);
    int nDel = session.createQuery("delete OSMNode").executeUpdate();
    session.flush();
    logger.info("Deleted " + nDel + " records from OSMNode");
    nDel = session.createQuery("delete OSMEdge").executeUpdate();
    session.flush();
    logger.info("Deleted " + nDel + " records from OSMEdge");

    logger.info("Writing nodes...");
    // parse hibernate.jdbc.batch_size from the Hibernate configuration, defaulting to 50
    String batchSizeProp = new Configuration().getProperty("hibernate.jdbc.batch_size");
    Integer batchSize = (batchSizeProp != null) ? Integer.valueOf(batchSizeProp) : 50;
    logger.info("Database batch size: " + batchSize);

    Timer timer = new Timer();
    timer.init();

    int nNodes = 0; // will be used as node ID
    double nNodesMax = nodes.size();
    HashMap<Node, Integer> node__nodeId = new HashMap<Node, Integer>();
    while (iter.hasNext()) {
        Node n = iter.next();
        nNodes++;

        OSMNode osmNode = new OSMNode();
        osmNode.setGeometry(fact.createPoint(n.getCoordinate()));
        osmNode.setId(nNodes); //  todo set this properly

        node__nodeId.put(n, nNodes);
        session.save(osmNode);
        if (nNodes % batchSize == 0) {
            session.flush();
            session.clear();
        }
        timer.showProgress((double) nNodes / nNodesMax);
    }
    if (nNodes % batchSize != 0) { // flush the remainder not covered by the in-loop batches
        session.flush();
        session.clear();
    }
    System.out.println();

    Collection<Edge> edges = psg.getEdges();
    Iterator<Edge> iter2 = edges.iterator();

    logger.info("Writing edges...");
    int nEdges = 0; // will be used as edge ID
    double nEdgesMax = edges.size();

    while (iter2.hasNext()) {
        Edge e = iter2.next();
        nEdges++;

        OSMEdge osmEdge = new OSMEdge();
        @SuppressWarnings("unchecked")
        HashMap<String, Object> data2 = (HashMap<String, Object>) e.getData();
        Object geom = data2.get("geometry");

        /*Object bctyp_s = data2.get("BICYCLETYP");
        if (bctyp_s != null) {
           Integer bint = new Integer((Integer) bctyp_s);
           Short bicycle = bint.shortValue();
           osmEdge.setBicycletype(bicycle);
        }
                
        Object cycleway_s = data2.get("CYCLEWAYTY");
        if (cycleway_s != null) {
           Integer cwint = new Integer((Integer) cycleway_s);
           Short cycleway = cwint.shortValue();
           osmEdge.setCyclewaytype(cycleway);
        }
                
        Object foottype_s = data2.get("FOOTTYPE");
        if (foottype_s != null) {
           Integer fint = new Integer((Integer) foottype_s);
           Short foot = fint.shortValue();
           osmEdge.setFoottype(foot);
        }
                
        Object hwt_s = data2.get("HIGHWAYTYP");
        if (hwt_s != null) {
           Integer hwt = new Integer((Integer) hwt_s);
           Short highway = hwt.shortValue();
           osmEdge.setHighwaytype(highway);
        }
                
        Object sgrd_s = data2.get("SEGREGATED");
        if (sgrd_s != null) {
           Integer sgrd = new Integer((Integer) sgrd_s);
           Short segregated = sgrd.shortValue();
           osmEdge.setSegregatedtype(segregated);
        }
                
        Object roadname_o = data2.get("ROADNAME");
        if (roadname_o != null) {
           String roadname = (String) roadname_o;
           osmEdge.setRoadname(roadname);
        }*/

        // The HSP-specific stuff

        Short envType = ((Integer) data2.get("EnvType")).shortValue();
        Short cykType = ((Integer) data2.get("CykType")).shortValue();

        Double groenPct = (Double) data2.get("GroenPct");
        Double groenM = (Double) data2.get("GroenM");

        LineString ls = (LineString) geom;

        osmEdge.setGeometry(ls);
        osmEdge.setId(nEdges);
        osmEdge.setEnvtype(envType);
        osmEdge.setCyktype(cykType);
        osmEdge.setGroenpct(groenPct.floatValue());
        osmEdge.setGroenm(groenM.floatValue());

        Node from_node = e.getDirEdge(0).getFromNode();
        Node to_node = e.getDirEdge(0).getToNode();

        Integer from_node_id = node__nodeId.get(from_node);
        Integer to_node_id = node__nodeId.get(to_node);

        osmEdge.setFromnode(from_node_id);
        osmEdge.setTonode(to_node_id);
        osmEdge.setLength((float) ls.getLength());

        session.save(osmEdge);
        if (nEdges % batchSize == 0) {
            session.flush();
            session.clear();
        }
        timer.showProgress((double) nEdges / nEdgesMax);
    }
    if (nEdges % batchSize != 0) { // flush the remainder not covered by the in-loop batches
        session.flush();
        session.clear();
    }
    System.out.println();

    session.getTransaction().commit();
    logger.info("Imported " + nNodes + " nodes, " + nEdges + " edges");
    timer.getRunTime(true, "... import finished");
}

From source file:org.openmrs.module.idcards.db.hibernate.HibernateIdcardsDAO.java

License:Open Source License

/**
 * @see org.openmrs.module.idcards.db.IdcardsDAO#generateIdentifiers(int, int)
 */
public void generateIdentifiers(int identifierStartValue, int quantityToGenerate) throws DAOException {
    Date generatedDate = new Date();
    String date = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(generatedDate);
    Integer currentUserId = Context.getAuthenticatedUser().getUserId();

    Session currentSession = sessionFactory.getCurrentSession();
    currentSession.setCacheMode(CacheMode.IGNORE);

    String insertPrefix = "insert into idcards_generated_identifier (id, generator, date_generated) values ";
    String valuesPrefix = "(";
    String valuesSuffix = ", " + currentUserId + ", '" + date + "')";

    // if we're in mysql, do extended inserts to speed things up
    SessionFactoryImplementor implementor = (SessionFactoryImplementor) sessionFactory;
    Dialect dialect = implementor.getDialect();
    boolean isMySQLDialect = MySQLDialect.class.getName().equals(dialect.getClass().getName());

    if (isMySQLDialect) {
        String sql = null;
        // loop over the list of numbers and get/insert the string identifier
        for (int x = identifierStartValue; x < identifierStartValue + quantityToGenerate; x++) {
            if (sql == null) // first element: start a new insert statement
                sql = insertPrefix;
            else // otherwise separate the value tuples with a comma
                sql += ",";

            sql += valuesPrefix + x + valuesSuffix;

            // send to the database every 100 entries or at the end
            if (x % 100 == 0 || x == (identifierStartValue + quantityToGenerate - 1)) {
                try {
                    SQLQuery query = currentSession.createSQLQuery(sql);
                    query.executeUpdate();
                    sql = null; // reset the sql string
                    currentSession.flush();
                } catch (ConstraintViolationException cve) {
                    log.error("Sql: " + sql);
                    throw new DAOException("Error creating an identifier between " + x + " and " + (x - 1001)
                            + " because it already exists in the system", cve);
                }
            }
        }

    } else {
        for (int x = identifierStartValue; x < identifierStartValue + quantityToGenerate; x++) {
            String sql = insertPrefix + valuesPrefix + x + valuesSuffix;

            try {
                SQLQuery query = currentSession.createSQLQuery(sql);
                query.executeUpdate();
            } catch (ConstraintViolationException cve) {
                throw new DAOException(
                        "Unable to create identifier: " + x + " because it already exists in the system", cve);
            }

            // control the number of objects in memory
            if (x % 500 == 0 || x == (identifierStartValue + quantityToGenerate - 1)) {
                currentSession.flush();
                currentSession.clear();
            }
        }
    }

}

From source file:org.opentox.toxotis.persistence.db.DeleteTool.java

License:Open Source License

private synchronized Session getSession() {
    Session session = null;
    if (local != null) { // 'local' is presumably a ThreadLocal<Session> field caching one session per thread
        session = local.get();
        if (session == null) {
            session = HibernateUtil.getSessionFactory().openSession();
            session.setCacheMode(CacheMode.IGNORE);
        }
        local.set(session);
    } else {
        session = HibernateUtil.getSessionFactory().openSession();
        session.setCacheMode(CacheMode.IGNORE);
    }
    return session;
}