Example usage for org.hibernate CacheMode IGNORE

Introduction

On this page you can find example usage of org.hibernate.CacheMode.IGNORE.

Prototype

CacheMode IGNORE

Documentation

The session will never interact with the cache, except to invalidate cache items when updates occur.
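
As orientation before the examples below, here is a minimal sketch of the batch-processing setup that typically pairs CacheMode.IGNORE with manual flushing. The Session and Query setters are standard Hibernate API of the era used in these examples; the Invoice entity and the processing loop are illustrative assumptions only.

import org.hibernate.CacheMode;
import org.hibernate.FlushMode;
import org.hibernate.ScrollMode;
import org.hibernate.ScrollableResults;
import org.hibernate.Session;

public class CacheModeIgnoreSketch {

    // Iterate over all rows of a hypothetical Invoice entity without
    // reading from or populating the second-level cache.
    public static void process(final Session session) {
        session.setCacheMode(CacheMode.IGNORE); // bypass the second-level cache for this session
        session.setFlushMode(FlushMode.MANUAL); // batch code flushes explicitly
        final ScrollableResults results = session.createQuery("from Invoice")
                .setCacheMode(CacheMode.IGNORE) // the same setting is also available per query
                .scroll(ScrollMode.FORWARD_ONLY);
        while (results.next()) {
            final Object obj = results.get(0);
            // ... process obj ...
            session.evict(obj); // keep the first-level cache from growing
        }
        results.close();
    }
}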

Usage

From source file:org.projectforge.core.HibernateSearchDependentObjectsReindexer.java

License:Open Source License

private void reindexDependents(final HibernateTemplate hibernateTemplate, final Session session,
        final BaseDO<?> obj, final Set<String> alreadyReindexed) {
    if (alreadyReindexed.contains(getReindexId(obj))) {
        if (log.isDebugEnabled()) {
            log.debug("Object already re-indexed (skipping): " + getReindexId(obj));
        }
        return;
    }
    session.flush(); // Needed to flush the object changes!
    final FullTextSession fullTextSession = Search.getFullTextSession(session);
    fullTextSession.setFlushMode(FlushMode.AUTO);
    fullTextSession.setCacheMode(CacheMode.IGNORE);
    try {
        BaseDO<?> dbObj = (BaseDO<?>) session.get(obj.getClass(), obj.getId());
        if (dbObj == null) {
            dbObj = (BaseDO<?>) session.load(obj.getClass(), obj.getId());
        }
        fullTextSession.index(dbObj);
        alreadyReindexed.add(getReindexId(dbObj));
        if (log.isDebugEnabled()) {
            log.debug("Object added to index: " + getReindexId(dbObj));
        }
    } catch (final Exception ex) {
        // Don't fail if any exception occurs while re-indexing.
        log.info("Failed to re-index " + obj.getClass() + ": " + ex.getMessage());
    }
    // session.flush(); // clear every batchSize since the queue is processed
    final List<Entry> entryList = map.get(obj.getClass());
    reindexDependents(hibernateTemplate, session, obj, entryList, alreadyReindexed);
}

From source file:org.projectforge.database.DatabaseDao.java

License:Open Source License

private long reindexObjects(final Class<?> clazz, final ReindexSettings settings) {
    final Session session = getSession();
    Criteria criteria = createCriteria(session, clazz, settings, true);
    final Long number = (Long) criteria.uniqueResult(); // Get number of objects to re-index (select count(*) from).
    final boolean scrollMode = number > MIN_REINDEX_ENTRIES_4_USE_SCROLL_MODE;
    log.info("Starting re-indexing of " + number + " entries (total number) of type " + clazz.getName()
            + " with scrollMode=" + scrollMode + "...");
    final int batchSize = 1000; // NumberUtils.createInteger(System.getProperty("hibernate.search.worker.batch_size"))
    final FullTextSession fullTextSession = Search.getFullTextSession(session);
    fullTextSession.setFlushMode(FlushMode.MANUAL);
    fullTextSession.setCacheMode(CacheMode.IGNORE);
    long index = 0;
    if (scrollMode) {
        // Scrollable results avoid loading too many objects into memory
        criteria = createCriteria(fullTextSession, clazz, settings, false);
        final ScrollableResults results = criteria.scroll(ScrollMode.FORWARD_ONLY);
        while (results.next()) {
            final Object obj = results.get(0);
            if (obj instanceof ExtendedBaseDO<?>) {
                ((ExtendedBaseDO<?>) obj).recalculate();
            }
            fullTextSession.index(obj); // index each element
            if (index++ % batchSize == 0) {
                session.flush(); // flush every batchSize entries so the index queue is processed
            }
        }
    } else {
        criteria = createCriteria(session, clazz, settings, false);
        final List<?> list = criteria.list();
        for (final Object obj : list) {
            if (obj instanceof ExtendedBaseDO<?>) {
                ((ExtendedBaseDO<?>) obj).recalculate();
            }
            fullTextSession.index(obj);
            if (index++ % batchSize == 0) {
                session.flush(); // flush every batchSize entries so the index queue is processed
            }
        }
    }
    final SearchFactory searchFactory = fullTextSession.getSearchFactory();
    searchFactory.optimize(clazz);
    log.info("Re-indexing of " + index + " objects of type " + clazz.getName() + " done.");
    return index;
}

From source file:org.projectforge.framework.persistence.database.DatabaseDao.java

License:Open Source License

private long reindexObjects(final Class<?> clazz, final ReindexSettings settings) {
    final Session session = sessionFactory.getCurrentSession();
    Criteria criteria = createCriteria(session, clazz, settings, true);
    final Long number = (Long) criteria.uniqueResult(); // Get number of objects to re-index (select count(*) from).
    final boolean scrollMode = number > MIN_REINDEX_ENTRIES_4_USE_SCROLL_MODE;
    log.info("Starting re-indexing of " + number + " entries (total number) of type " + clazz.getName()
            + " with scrollMode=" + scrollMode + "...");
    final int batchSize = 1000; // NumberUtils.createInteger(System.getProperty("hibernate.search.worker.batch_size"))
    final FullTextSession fullTextSession = Search.getFullTextSession(session);
    HibernateCompatUtils.setFlushMode(fullTextSession, FlushMode.MANUAL);
    HibernateCompatUtils.setCacheMode(fullTextSession, CacheMode.IGNORE);
    long index = 0;
    if (scrollMode) {
        // Scrollable results avoid loading too many objects into memory
        criteria = createCriteria(fullTextSession, clazz, settings, false);
        final ScrollableResults results = criteria.scroll(ScrollMode.FORWARD_ONLY);
        while (results.next()) {
            final Object obj = results.get(0);
            if (obj instanceof ExtendedBaseDO<?>) {
                ((ExtendedBaseDO<?>) obj).recalculate();
            }
            HibernateCompatUtils.index(fullTextSession, obj);
            if (index++ % batchSize == 0) {
                session.flush(); // flush every batchSize entries so the index queue is processed
            }
        }
    } else {
        criteria = createCriteria(session, clazz, settings, false);
        final List<?> list = criteria.list();
        for (final Object obj : list) {
            if (obj instanceof ExtendedBaseDO<?>) {
                ((ExtendedBaseDO<?>) obj).recalculate();
            }
            HibernateCompatUtils.index(fullTextSession, obj);
            if (index++ % batchSize == 0) {
                session.flush(); // flush every batchSize entries so the index queue is processed
            }
        }
    }
    final SearchFactory searchFactory = fullTextSession.getSearchFactory();
    searchFactory.optimize(clazz);
    log.info("Re-indexing of " + index + " objects of type " + clazz.getName() + " done.");
    return index;
}

From source file:org.projectforge.framework.persistence.history.HibernateSearchDependentObjectsReindexer.java

License:Open Source License

private void reindexDependents(final Session session, final BaseDO<?> obj, final Set<String> alreadyReindexed) {
    if (alreadyReindexed.contains(getReindexId(obj))) {
        if (log.isDebugEnabled()) {
            log.debug("Object already re-indexed (skipping): " + getReindexId(obj));
        }
        return;
    }
    session.flush(); // Needed to flush the object changes!
    final FullTextSession fullTextSession = Search.getFullTextSession(session);

    HibernateCompatUtils.setFlushMode(fullTextSession, FlushMode.AUTO);
    HibernateCompatUtils.setCacheMode(fullTextSession, CacheMode.IGNORE);
    try {
        BaseDO<?> dbObj = session.get(obj.getClass(), obj.getId());
        if (dbObj == null) {
            dbObj = session.load(obj.getClass(), obj.getId());
        }
        HibernateCompatUtils.index(fullTextSession, dbObj);
        alreadyReindexed.add(getReindexId(dbObj));
        if (log.isDebugEnabled()) {
            log.debug("Object added to index: " + getReindexId(dbObj));
        }
    } catch (final Exception ex) {
        // Don't fail if any exception occurs while re-indexing.
        log.info("Failed to re-index " + obj.getClass() + ": " + ex.getMessage());
    }
    // session.flush(); // clear every batchSize since the queue is processed
    final List<Entry> entryList = map.get(obj.getClass());
    reindexDependents(session, obj, entryList, alreadyReindexed);
}

From source file:org.squashtest.tm.service.internal.advancedsearch.IndexationServiceImpl.java

License:Open Source License

private void indexEntities(Class<?>... T) {
    Session session = getCurrentSession();
    FullTextSession ftSession = Search.getFullTextSession(session);
    MassIndexerProgressMonitor monitor = new AdvancedSearchIndexingMonitor(Arrays.asList(T),
            this.configurationService);
    ftSession.createIndexer(T).purgeAllOnStart(true).threadsToLoadObjects(T.length)
            .typesToIndexInParallel(T.length).batchSizeToLoadObjects(MASS_INDEX_BATCH_SIZE)
            .cacheMode(CacheMode.IGNORE).progressMonitor(monitor).start();

}

From source file:org.squashtest.tm.service.internal.advancedsearch.IndexationServiceImpl.java

License:Open Source License

private FullTextSession getFullTextSession() {
    Session session = getCurrentSession();

    // get FullText session
    FullTextSession ftSession = Search.getFullTextSession(session);
    ftSession.setFlushMode(FlushMode.MANUAL);
    ftSession.setCacheMode(CacheMode.IGNORE);

    // Clear the Lucene work queue to eliminate the lazy-init bug for batch processing.
    clearLuceneQueue(ftSession);

    return ftSession;
}

From source file:org.squashtest.tm.service.internal.repository.hibernate.HibernateMilestoneDao.java

License:Open Source License

private ScrollableResults scrollableResults(Query query) throws HibernateException {
    return query.setCacheMode(CacheMode.IGNORE).scroll(ScrollMode.FORWARD_ONLY);
}
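
Callers of such a helper typically walk the scroll forward-only and clear the session at intervals. The following consumption sketch assumes session and query references and an illustrative BATCH_SIZE constant (none of these are part of the original class):

ScrollableResults results = scrollableResults(query);
int count = 0;
while (results.next()) {
    Object entity = results.get(0);
    // ... process entity ...
    if (++count % BATCH_SIZE == 0) {
        session.flush(); // push pending changes to the database
        session.clear(); // detach processed entities to bound memory use
    }
}
results.close();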

From source file:org.thelq.stackexchange.dbimport.DatabaseWriter.java

License:Apache License

public DatabaseWriter(DumpContainer container, String table) {
    this.table = table;
    this.container = container;
    session = container.getSessionFactory().openSession();
    session.setCacheMode(CacheMode.IGNORE);
    session.setFlushMode(FlushMode.MANUAL);
    session.beginTransaction();
}
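
With FlushMode.MANUAL and CacheMode.IGNORE set in the constructor, the writer has to flush and clear explicitly. A hypothetical write loop (the method names and batching are assumptions, not part of the original class) might look like this:

public void insertBatch(List<?> rows) {
    for (Object row : rows) {
        session.save(row);
    }
    session.flush(); // FlushMode.MANUAL: nothing reaches the database until this call
    session.clear(); // CacheMode.IGNORE already skips the L2 cache; this empties the L1 cache
}

public void commitAndClose() {
    session.getTransaction().commit();
    session.close();
}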

From source file:org.unitime.timetable.backup.SessionBackup.java

License:Open Source License

@Override
public void backup(OutputStream out, Progress progress, Long sessionId) throws IOException {
    iOut = CodedOutputStream.newInstance(out);
    iProgress = progress;
    iSessionId = sessionId;
    iHibSession = new _RootDAO().createNewSession();
    iHibSession.setCacheMode(CacheMode.IGNORE);
    iHibSessionFactory = iHibSession.getSessionFactory();
    try {
        iProgress.setStatus("Exporting Session");
        iProgress.setPhase("Loading Model", 3);
        TreeSet<ClassMetadata> allMeta = new TreeSet<ClassMetadata>(new Comparator<ClassMetadata>() {
            @Override
            public int compare(ClassMetadata m1, ClassMetadata m2) {
                return m1.getEntityName().compareTo(m2.getEntityName());
            }
        });
        allMeta.addAll(iHibSessionFactory.getAllClassMetadata().values());
        iProgress.incProgress();

        Queue<QueueItem> queue = new LinkedList<QueueItem>();

        queue.add(new QueueItem(iHibSessionFactory.getClassMetadata(Session.class), null, "uniqueId",
                Relation.None));

        Set<String> avoid = new HashSet<String>();
        // avoid following relations
        avoid.add(TimetableManager.class.getName() + ".departments");
        avoid.add(TimetableManager.class.getName() + ".solverGroups");
        avoid.add(DistributionType.class.getName() + ".departments");
        avoid.add(LastLikeCourseDemand.class.getName() + ".student");
        avoid.add(Student.class.getName() + ".lastLikeCourseDemands");

        Set<String> disallowedNotNullRelations = new HashSet<String>();
        disallowedNotNullRelations.add(Assignment.class.getName() + ".datePattern");
        disallowedNotNullRelations.add(Assignment.class.getName() + ".timePattern");
        disallowedNotNullRelations.add(LastLikeCourseDemand.class.getName() + ".student");
        disallowedNotNullRelations.add(OnlineSectioningLog.class.getName() + ".session");

        Map<String, List<QueueItem>> data = new HashMap<String, List<QueueItem>>();
        List<QueueItem> sessions = new ArrayList<QueueItem>();
        sessions.add(queue.peek());
        data.put(queue.peek().name(), sessions);

        QueueItem item = null;
        while ((item = queue.poll()) != null) {
            if (item.size() == 0)
                continue;
            for (ClassMetadata meta : allMeta) {
                if (meta.hasSubclasses())
                    continue;
                for (int i = 0; i < meta.getPropertyNames().length; i++) {
                    String property = meta.getPropertyNames()[i];
                    if (disallowedNotNullRelations.contains(meta.getEntityName() + "." + property)
                            || meta.getPropertyNullability()[i])
                        continue;
                    Type type = meta.getPropertyTypes()[i];
                    if (type instanceof EntityType && type.getReturnedClass().equals(item.clazz())) {
                        QueueItem qi = new QueueItem(meta, item, property, Relation.Parent);
                        if (!data.containsKey(qi.name())) {
                            List<QueueItem> items = new ArrayList<QueueItem>();
                            data.put(qi.name(), items);
                            queue.add(qi);
                            items.add(qi);
                            if (qi.size() > 0)
                                iProgress.info("Parent: " + qi);
                        }
                    }
                }
            }
        }
        iProgress.incProgress();

        for (List<QueueItem> list : data.values())
            queue.addAll(list);

        // The following part is needed to ensure that instructor distribution preferences are saved including their distribution types 
        List<QueueItem> distributions = new ArrayList<QueueItem>();
        for (QueueItem instructor : data.get(DepartmentalInstructor.class.getName())) {
            QueueItem qi = new QueueItem(iHibSessionFactory.getClassMetadata(DistributionPref.class),
                    instructor, "owner", Relation.Parent);
            distributions.add(qi);
            queue.add(qi);
            if (qi.size() > 0)
                iProgress.info("Extra: " + qi);
        }
        data.put(DistributionPref.class.getName(), distributions);

        while ((item = queue.poll()) != null) {
            if (item.size() == 0)
                continue;
            for (int i = 0; i < item.meta().getPropertyNames().length; i++) {
                String property = item.meta().getPropertyNames()[i];
                Type type = item.meta().getPropertyTypes()[i];
                if (type instanceof EntityType) {
                    if (avoid.contains(item.name() + "." + property))
                        continue;

                    ClassMetadata meta = iHibSessionFactory.getClassMetadata(type.getReturnedClass());
                    if (item.contains(meta.getEntityName()))
                        continue;

                    QueueItem qi = new QueueItem(meta, item, property, Relation.One);
                    List<QueueItem> items = data.get(qi.name());
                    if (items == null) {
                        items = new ArrayList<QueueItem>();
                        data.put(qi.name(), items);
                    }
                    queue.add(qi);
                    items.add(qi);

                    if (qi.size() > 0)
                        iProgress.info("One: " + qi);
                }
                if (type instanceof CollectionType) {
                    if (avoid.contains(item.name() + "." + property))
                        continue;

                    ClassMetadata meta = iHibSessionFactory.getClassMetadata(((CollectionType) type)
                            .getElementType((SessionFactoryImplementor) iHibSessionFactory).getReturnedClass());
                    if (meta == null || item.contains(meta.getEntityName()))
                        continue;

                    QueueItem qi = new QueueItem(meta, item, property, Relation.Many);
                    List<QueueItem> items = data.get(qi.name());
                    if (items == null) {
                        items = new ArrayList<QueueItem>();
                        data.put(qi.name(), items);
                    }
                    queue.add(qi);
                    items.add(qi);

                    if (qi.size() > 0)
                        iProgress.info("Many: " + qi);
                }
            }
        }
        iProgress.incProgress();

        Map<String, Set<Serializable>> allExportedIds = new HashMap<String, Set<Serializable>>();
        for (String name : new TreeSet<String>(data.keySet())) {
            List<QueueItem> list = data.get(name);
            Map<String, TableData.Table.Builder> tables = new HashMap<String, TableData.Table.Builder>();
            for (QueueItem current : list) {
                if (current.size() == 0)
                    continue;
                iProgress.info("Loading " + current);
                List<Object> objects = current.list();
                if (objects == null || objects.isEmpty())
                    continue;
                iProgress.setPhase(current.abbv() + " [" + objects.size() + "]", objects.size());
                objects: for (Object object : objects) {
                    iProgress.incProgress();

                    // Get meta data (check for sub-classes)
                    ClassMetadata meta = iHibSessionFactory.getClassMetadata(object.getClass());
                    if (meta == null)
                        meta = current.meta();
                    if (meta.hasSubclasses()) {
                        for (Iterator i = iHibSessionFactory.getAllClassMetadata().entrySet().iterator(); i
                                .hasNext();) {
                            Map.Entry entry = (Map.Entry) i.next();
                            ClassMetadata classMetadata = (ClassMetadata) entry.getValue();
                            if (classMetadata.getMappedClass().isInstance(object)
                                    && !classMetadata.hasSubclasses()) {
                                meta = classMetadata;
                                break;
                            }
                        }
                    }

                    // Get unique identifier
                    Serializable id = meta.getIdentifier(object, (SessionImplementor) iHibSession);

                    // Check if already exported
                    Set<Serializable> exportedIds = allExportedIds.get(meta.getEntityName());
                    if (exportedIds == null) {
                        exportedIds = new HashSet<Serializable>();
                        allExportedIds.put(meta.getEntityName(), exportedIds);
                    }
                    if (!exportedIds.add(id))
                        continue;

                    // Check relation to an academic session (if exists)
                    for (String property : meta.getPropertyNames()) {
                        Type type = meta.getPropertyType(property);
                        if (type instanceof EntityType && type.getReturnedClass().equals(Session.class)) {
                            Session s = (Session) meta.getPropertyValue(object, property);
                            if (s != null && !s.getUniqueId().equals(iSessionId)) {
                                iProgress.warn(meta.getEntityName()
                                        .substring(meta.getEntityName().lastIndexOf('.') + 1) + "@" + id
                                        + " belongs to a different academic session (" + s + ")");
                                continue objects; // wrong session
                            }
                        }
                    }

                    // Get appropriate table
                    TableData.Table.Builder table = tables.get(meta.getEntityName());
                    if (table == null) {
                        table = TableData.Table.newBuilder();
                        tables.put(meta.getEntityName(), table);
                        table.setName(meta.getEntityName());
                    }

                    // Export object
                    TableData.Record.Builder record = TableData.Record.newBuilder();
                    record.setId(id.toString());
                    for (String property : meta.getPropertyNames()) {
                        Type type = meta.getPropertyType(property);
                        Object value = meta.getPropertyValue(object, property);
                        if (value == null)
                            continue;
                        TableData.Element.Builder element = TableData.Element.newBuilder();
                        element.setName(property);
                        if (type instanceof PrimitiveType) {
                            element.addValue(((PrimitiveType) type).toString(value));
                        } else if (type instanceof StringType) {
                            element.addValue(((StringType) type).toString((String) value));
                        } else if (type instanceof BinaryType) {
                            element.addValueBytes(ByteString.copyFrom((byte[]) value));
                        } else if (type instanceof TimestampType) {
                            element.addValue(((TimestampType) type).toString((Date) value));
                        } else if (type instanceof DateType) {
                            element.addValue(((DateType) type).toString((Date) value));
                        } else if (type instanceof EntityType) {
                            List<Object> ids = current.relation(property, id, false);
                            if (ids != null)
                                for (Object i : ids)
                                    element.addValue(i.toString());
                            iHibSession.evict(value);
                        } else if (type instanceof CustomType && value instanceof Document) {
                            if (object instanceof CurriculumClassification && property.equals("students"))
                                continue;
                            StringWriter w = new StringWriter();
                            XMLWriter x = new XMLWriter(w, OutputFormat.createCompactFormat());
                            x.write((Document) value);
                            x.flush();
                            x.close();
                            element.addValue(w.toString());
                        } else if (type instanceof CollectionType) {
                            List<Object> ids = current.relation(property, id, false);
                            if (ids != null)
                                for (Object i : ids)
                                    element.addValue(i.toString());
                        } else if (type instanceof EmbeddedComponentType
                                && property.equalsIgnoreCase("uniqueCourseNbr")) {
                            continue;
                        } else {
                            iProgress.warn("Unknown data type: " + type + " (property " + meta.getEntityName()
                                    + "." + property + ", class " + value.getClass() + ")");
                            continue;
                        }
                        record.addElement(element.build());

                    }
                    table.addRecord(record.build());
                    iHibSession.evict(object);
                }
                current.clearCache();
            }

            for (TableData.Table.Builder table : tables.values()) {
                add(table.build());
            }
        }

        /*
        // Skip ConstraintInfo
        if (!iData.containsKey(ConstraintInfo.class.getName()))
            iData.put(ConstraintInfo.class.getName(), new QueueItem(iHibSessionFactory.getClassMetadata(ConstraintInfo.class), null, null, Relation.Empty));

        for (String name : items)
            export(iData.get(name));

        while (true) {
            List<Object> objects = new ArrayList<Object>();
            ClassMetadata meta = null;
            for (Entity e : iObjects) {
                if (e.exported()) continue;
                if (objects.isEmpty() || meta.getEntityName().equals(e.name())) {
                    meta = e.meta();
                    objects.add(e.object());
                    e.notifyExported();
                }
            }
            if (objects.isEmpty()) break;
            export(meta, objects, null);
        }
        */
        iProgress.setStatus("All done.");
    } finally {
        iHibSession.close();
    }
}

From source file:org.unitime.timetable.backup.SessionRestore.java

License:Open Source License

public void restore(InputStream input, Progress progress)
        throws IOException, InstantiationException, IllegalAccessException, DocumentException {
    iIn = input;
    iProgress = progress;
    iHibSession = new _RootDAO().createNewSession();
    iHibSession.setCacheMode(CacheMode.IGNORE);
    iHibSessionFactory = iHibSession.getSessionFactory();
    try {
        CodedInputStream cin = CodedInputStream.newInstance(iIn);
        cin.setSizeLimit(1024 * 1024 * 1024); // 1 GB

        iProgress.setPhase("Loading data", 1);
        TableData.Table t = null;
        while ((t = readTable(cin)) != null) {
            if (iDebug != null) {
                iDebug.println("## " + t.getName() + " ##");
                iDebug.print(t.toString());
                iDebug.flush();
            }
            create(t);
        }
        iProgress.incProgress();

        iHibSession.setFlushMode(FlushMode.MANUAL);
        iProgress.setPhase("Fixing", iAllEntitites.size());
        for (Iterator<Entity> i = iAllEntitites.iterator(); i.hasNext();) {
            iProgress.incProgress();
            if (!fix(i.next()))
                i.remove();
        }

        iProgress.setPhase("Saving (not-null)", iAllEntitites.size());
        List<Entity> save = new ArrayList<Entity>(iAllEntitites);
        boolean saved = true;
        while (!save.isEmpty() && saved) {
            saved = false;
            for (Iterator<Entity> i = save.iterator(); i.hasNext();) {
                Entity e = i.next();
                if (e.canSave() == null) {
                    iProgress.incProgress();
                    e.fixRelationsNullOnly();
                    iHibSession.save(e.getObject());
                    i.remove();
                    saved = true;
                }
            }
            iHibSession.flush();
        }

        iProgress.setPhase("Saving (all)", iAllEntitites.size());
        for (Entity e : iAllEntitites) {
            iProgress.incProgress();
            String property = e.canSave();
            if (property == null) {
                e.fixRelations();
                iHibSession.update(e.getObject());
            } else {
                message("Skipping " + e.getAbbv() + " (missing not-null relation " + property + ")", e.getId());
                continue;
            }
        }

        iProgress.setPhase("Flush", 1);
        iHibSession.flush();
        iProgress.incProgress();

        printMessages();

        iProgress.setStatus("All done.");
    } finally {
        iHibSession.close();
    }
}