Example usage for org.hibernate CacheMode IGNORE

List of usage examples for org.hibernate CacheMode IGNORE

Introduction

On this page you can find usage examples for org.hibernate CacheMode IGNORE.

Prototype

CacheMode IGNORE


Document

The session will never interact with the cache, except to invalidate cache items when updates occur.
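
A minimal sketch of applying this mode to a session follows; the method name and the SessionFactory argument are illustrative, not part of the examples below.

// Minimal sketch: 'sessionFactory' is assumed to be an already-built Hibernate SessionFactory.
private void doWorkWithoutCache(SessionFactory sessionFactory) {
    Session session = sessionFactory.openSession();
    session.setCacheMode(CacheMode.IGNORE); // skip second-level cache reads and puts
    try {
        // ... load or save entities; the second-level cache is neither read nor populated,
        // but cache entries are still invalidated when updates occur ...
    } finally {
        session.close();
    }
}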

Usage

From source file:com.openkm.servlet.admin.RebuildIndexesServlet.java

License:Open Source License

/**
 * FlushToIndexes implementation.
 */
@SuppressWarnings("rawtypes")
private void luceneIndexesFlushToIndexes(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {
    log.debug("luceneIndexesFlushToIndexes({}, {})", request, response);
    PrintWriter out = response.getWriter();
    response.setContentType(MimeTypeConfig.MIME_HTML);
    header(out, "Rebuild Lucene indexes", breadcrumb);
    out.flush();

    FullTextSession ftSession = null;
    Session session = null;
    Transaction tx = null;

    // Activity log
    UserActivity.log(request.getRemoteUser(), "ADMIN_FORCE_REBUILD_INDEXES", null, null, null);

    try {
        Config.SYSTEM_MAINTENANCE = true;
        Config.SYSTEM_READONLY = true;
        out.println("<ul>");
        out.println("<li>System into maintenance mode</li>");
        FileLogger.info(BASE_NAME, "BEGIN - Rebuild Lucene indexes");

        session = HibernateUtil.getSessionFactory().openSession();
        ftSession = Search.getFullTextSession(session);
        ftSession.setFlushMode(FlushMode.MANUAL);
        ftSession.setCacheMode(CacheMode.IGNORE);
        tx = ftSession.beginTransaction();
        Map<String, Long> total = new HashMap<String, Long>();

        // Calculate number of entities
        for (Class cls : classes) {
            String nodeType = cls.getSimpleName();
            out.println("<li>Calculate " + nodeType + "</li>");
            out.flush();
            long partial = NodeBaseDAO.getInstance().getCount(nodeType);
            FileLogger.info(BASE_NAME, "Number of {0}: {1}", nodeType, partial);
            out.println("<li>Number of " + nodeType + ": " + partial + "</li>");
            out.flush();
            total.put(nodeType, partial);
        }

        // Rebuild indexes
        out.println("<li>Rebuilding indexes</li>");
        out.flush();

        // Scrollable results will avoid loading too many objects in memory
        for (Class cls : classes) {
            String nodeType = cls.getSimpleName();
            out.println("<li>Indexing " + nodeType + "</li>");
            out.flush();

            ProgressMonitor monitor = new ProgressMonitor(out, nodeType, total.get(nodeType));
            ScrollableResults results = ftSession.createCriteria(cls)
                    .setFetchSize(Config.HIBERNATE_INDEXER_BATCH_SIZE_LOAD_OBJECTS)
                    .scroll(ScrollMode.FORWARD_ONLY);
            int index = 0;

            while (results.next()) {
                monitor.documentsAdded(1);
                ftSession.index(results.get(0)); // Index each element

                if (index++ % Config.HIBERNATE_INDEXER_BATCH_SIZE_LOAD_OBJECTS == 0) {
                    ftSession.flushToIndexes(); // Apply changes to indexes
                    ftSession.clear(); // Free memory since the queue is processed
                }
            }
        }

        tx.commit();

        Config.SYSTEM_READONLY = false;
        Config.SYSTEM_MAINTENANCE = false;
        out.println("<li>System out of maintenance mode</li>");
        out.flush();

        // Finalized
        FileLogger.info(BASE_NAME, "END - Rebuild Lucene indexes");
        out.println("<li>Index rebuilding completed!</li>");
        out.println("</ul>");
        out.flush();
    } catch (Exception e) {
        if (tx != null) {
            tx.rollback();
        }
        out.println("<div class=\"warn\">Exception: " + e.getMessage() + "</div>");
        out.flush();
    } finally {
        Config.SYSTEM_READONLY = false;
        Config.SYSTEM_MAINTENANCE = false;
        HibernateUtil.close(ftSession);
        HibernateUtil.close(session);
    }

    // End page
    footer(out);
    out.flush();
    out.close();

    log.debug("luceneIndexesFlushToIndexes: void");
}

From source file:com.oracle.coherence.hibernate.cachestore.HibernateCacheLoader.java

License:CDDL license

/**
 * Load a collection of Hibernate entities given a set of ids (keys)
 *
 * @param keys  the cache keys; specifically, the entity ids
 *
 * @return      the corresponding Hibernate entity instances
 */
public Map loadAll(Collection keys) {
    ensureInitialized();

    Map results = new HashMap();

    Transaction tx = null;

    Session session = openSession();
    SessionImplementor sessionImplementor = (SessionImplementor) session;

    try {
        tx = session.beginTransaction();

        // Create the query
        String sQuery = getLoadAllQuery();
        Query query = session.createQuery(sQuery);

        // Prevent Hibernate from caching the results
        query.setCacheMode(CacheMode.IGNORE);
        query.setCacheable(false);
        query.setReadOnly(true);

        // Parameterize the query (where :keys = keys)
        query.setParameterList(PARAM_IDS, keys);

        // Need a way to extract the key from an entity that we know
        // nothing about.
        ClassMetadata classMetaData = getEntityClassMetadata();

        // Iterate through the results and place into the return map
        for (Iterator iter = query.list().iterator(); iter.hasNext();) {
            Object entity = iter.next();
            Object id = classMetaData.getIdentifier(entity, sessionImplementor);
            results.put(id, entity);
        }

        tx.commit();
    } catch (Exception e) {
        if (tx != null) {
            tx.rollback();
        }

        throw ensureRuntimeException(e);
    } finally {
        closeSession(session);
    }

    return results;
}

From source file:com.rdsic.pcm.common.GenericHql.java

/**
 * Execute an HQL query.
 *
 * @param hql    the HQL query string
 * @param params list of parameters and values: par1,val1,par2,val2,...
 * @return the list of matching results
 */
public List<T> query(String hql, Object... params) {
    Query q = HibernateUtil.currentSession().createQuery(hql);
    HibernateUtil.currentSession().setCacheMode(CacheMode.IGNORE);
    if (params != null) {
        for (int i = 0; i < params.length; i++) {
            q.setParameter(params[i].toString(), params[++i]);
        }
    }
    return q.list();
}
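
For reference, a minimal call sketch for the parameter-pair convention used above. The entity, field, and parameter names are hypothetical, and dao is assumed to be an existing GenericHql instance typed to that entity:

// Hypothetical entity/field names; 'dao' is an assumed GenericHql<Document> instance.
private List<Document> findActiveByAuthor(GenericHql<Document> dao, String author) {
    return dao.query(
            "from Document d where d.author = :author and d.status = :status",
            "author", author,      // parameter name followed by its value
            "status", "ACTIVE");
}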

From source file:com.rdsic.pcm.common.GenericHql.java

/**
 * Execute a native SQL query.
 *
 * @param sql    the SQL select query
 * @param maxRow limit on the number of rows returned
 * @param params list of parameters and values: name1,value1,name2,value2,...
 * @return the query results as a list of column-name/value maps
 */
public List<Map<String, T>> querySQL(String sql, int maxRow, Object... params) {
    Query q = HibernateUtil.currentSession().createSQLQuery(sql);
    q.setResultTransformer(AliasToEntityMapResultTransformer.INSTANCE);
    q.setMaxResults(maxRow);

    HibernateUtil.currentSession().setCacheMode(CacheMode.IGNORE);

    if (params != null) {
        for (int i = 0; i < params.length; i++) {
            q.setParameter(params[i].toString(), params[++i]);
        }
    }
    return q.list();
}

From source file:com.rdsic.pcm.common.GenericHql.java

/**
 * Execute an SQL update statement (INSERT, UPDATE, or DELETE).
 *
 * @param sql        the SQL statement to execute
 * @param autoCommit whether to run the statement in its own transaction and commit it
 * @param params     list of parameters and values: name1,value1,name2,value2,...
 * @return the number of affected records
 */
public int updateSQL(String sql, boolean autoCommit, Object... params) {
    Query q = HibernateUtil.currentSession().createSQLQuery(sql);

    HibernateUtil.currentSession().setCacheMode(CacheMode.IGNORE);

    if (params != null) {
        for (int i = 0; i < params.length; i++) {
            q.setParameter(params[i].toString(), params[++i]);
        }
    }

    if (autoCommit) {
        HibernateUtil.beginTransaction();
    }
    int numOfRecords = 0;
    try {
        numOfRecords = q.executeUpdate();
        if (autoCommit) {
            HibernateUtil.commit();
        }
    } catch (Exception e) {
        if (autoCommit) {
            HibernateUtil.rollback();
        }
        throw e;
    }
    return numOfRecords;
}

From source file:com.rdsic.pcm.common.HibernateUtil.java

public static Session currentSession() {
    initSessionFactory("hibernate.cfg.xml");
    Session s = (Session) session.get();
    if ((s == null) || (!s.isConnected())) {
        s = sessionFactory.openSession();
        s.setFlushMode(FlushMode.COMMIT);
        s.setCacheMode(CacheMode.IGNORE);
        session.set(s);
    }
    return s;
}

From source file:com.rdsic.pcm.common.HibernateUtil.java

public static Session currentSession(String cfgFile) {
    initSessionFactory(cfgFile);
    Session s = (Session) session.get();
    if ((s == null) || (!s.isConnected())) {
        s = sessionFactory.openSession();
        s.setFlushMode(FlushMode.COMMIT);
        s.setCacheMode(CacheMode.IGNORE);
        session.set(s);
    }
    return s;
}

From source file:com.sos.hibernate.layer.SOSHibernateDBLayer.java

License:Apache License

private void initSessionEx() throws Exception {
    session = SosHibernateSession.getInstance(SosHibernateSession.configurationFile);
    if (session == null) {
        String s = String.format("Could not initiate session for database using file %s",
                SosHibernateSession.configurationFile);
        throw new Exception(s);
    } else {
        session.setCacheMode(CacheMode.IGNORE);
    }

}

From source file:com.square.core.agent.RebuildingIndexAgentJmxThead.java

License:Open Source License

/**
 * Start manual indexing driven by a query.
 */
private void runManualIndexer(Session session) {
    final FullTextSession fullTextSession = Search.getFullTextSession(session);
    try {
        fullTextSession.setFlushMode(FlushMode.MANUAL);
        fullTextSession.setCacheMode(CacheMode.IGNORE);
        final Transaction transaction = fullTextSession.beginTransaction();
        // Scrollable results will avoid loading too many objects in memory
        final ScrollableResults results = fullTextSession.createQuery(agent.getRequete())
                .setFetchSize(agent.getBatchSizeToLoad()).scroll(ScrollMode.FORWARD_ONLY);
        int index = 0;
        while (results.next()) {
            index++;
            logger.debug(agent.getMessageSourceUtil().get(AgentJmxKeyUtil.MESSAGE_INDEXATION_DE) + " "
                    + results.get(0) + " (id = " + ((BaseModel) results.get(0)).getId() + ")");
            fullTextSession.index(results.get(0)); // index each element
            if (index % agent.getBatchSizeToLoad() == 0) {
                fullTextSession.flushToIndexes(); // apply changes to indexes
                fullTextSession.clear(); // free memory since the queue is processed
            }
        }
        transaction.commit();
    } catch (SearchException e) {
        e.printStackTrace();
    }
}

From source file:com.square.core.agent.RebuildingIndexAgentJmxThead.java

License:Open Source License

/**
 * Start mass indexing.
 */
private void runMassIndexer(Session session, List<Class<? extends ModelData>> entities) {
    FullTextSession fullTextSession = null;
    try {
        fullTextSession = Search.getFullTextSession(session);
        for (int indexEntite = 0; indexEntite < entities.size() && !agent.isStopping(); indexEntite++) {
            String etat = "Execute rebuild index to " + entities.get(indexEntite).getSimpleName();
            final MassIndexer indexer = fullTextSession.createIndexer(entities.get(indexEntite))
                    .batchSizeToLoadObjects(agent.getBatchSizeToLoad())
                    .threadsToLoadObjects(agent.getThreadsToLoad())
                    .threadsForSubsequentFetching(agent.getThreadsForSubsequentFetching())
                    .cacheMode(CacheMode.IGNORE);
            if (agent.getLimitIndexedObjectsTo() != null && agent.getLimitIndexedObjectsTo().intValue() > 0) {
                indexer.limitIndexedObjectsTo(agent.getLimitIndexedObjectsTo());
                etat += " (Limite : " + agent.getLimitIndexedObjectsTo() + " )";
            }
            agent.setEtat(etat);
            indexer.startAndWait();
        }
    } catch (InterruptedException e) {
        logger.error(e);
    } catch (SearchException e) {
        e.printStackTrace();
    }
}