Example usage for org.hibernate CacheMode IGNORE

List of usage examples for org.hibernate CacheMode IGNORE

Introduction

In this page you can find the example usage for org.hibernate CacheMode IGNORE.

Prototype

CacheMode IGNORE

To view the source code for org.hibernate CacheMode IGNORE, use the Source Link below.

Click Source Link

Document

The session will never interact with the cache, except to invalidate cache items when updates occur.

Usage

From source file:org.gbif.portal.dao.impl.hibernate.SimpleQueryDAOImpl.java

License:Open Source License

/**
 * Executes the supplied query with the session-level cache disabled and
 * streams every matching row to the given {@link ResultsOutputter}.
 *
 * @see org.gbif.portal.dao.SimpleQueryDAO#outputResultsForQuery(java.lang.String, java.util.List, java.lang.Integer, java.lang.Integer)
 */
public void outputResultsForQuery(final String queryString, final List<Object> parameters,
        final Integer startIndex, final Integer maxResults, final ResultsOutputter resultsOutputter)
        throws IOException {

    final Session session = getSession();
    // Bulk read-through: keep these entities out of the second-level cache.
    session.setCacheMode(CacheMode.IGNORE);

    if (logger.isDebugEnabled()) {
        logger.debug("getByQuery queryString " + queryString);
    }

    final Query hqlQuery = createQuery(queryString, parameters, startIndex, maxResults, session);
    DAOUtils.scrollResults(resultsOutputter, session, hqlQuery, associationTraverser, batchSize);
}

From source file:org.glite.security.voms.admin.persistence.dao.VOMSUserDAO.java

License:Apache License

/**
 * Streams all VOMS users ordered by surname via a forward-only cursor,
 * bypassing the second-level cache.
 *
 * @return a forward-only {@link ScrollableResults} over every user
 */
public ScrollableResults findAllWithCursor() {

    Query q = HibernateFactory.getSession().createQuery("select u from VOMSUser u order by u.surname asc");

    // Bug fix: the original called scroll(ScrollMode.FORWARD_ONLY), discarded
    // that cursor, and then returned a SECOND q.scroll() opened with the
    // default scroll mode — leaking one cursor and losing FORWARD_ONLY.
    // Open a single forward-only cursor and return it.
    return q.setCacheMode(CacheMode.IGNORE).scroll(ScrollMode.FORWARD_ONLY);
}

From source file:org.goobi.production.chart.HibernateProjectTaskList.java

License:Open Source License

/**
 * Accumulates step-progress statistics for every task of the given project
 * into {@code myTaskList}, one aggregated {@link ProjectTask} per distinct
 * (truncated) step title.
 *
 * @param inProject   project whose tasks are counted
 * @param myTaskList  aggregate list, updated in place
 * @param countImages when {@code true}, weight each step by its process's
 *                    sort-helper image count instead of counting 1 per step
 * @param inMax       currently unused; kept for interface compatibility
 */
private synchronized void calculate(Project inProject, List<IProjectTask> myTaskList, Boolean countImages,
        Integer inMax) {
    Session session = Helper.getHibernateSession();
    Criteria crit = session.createCriteria(Task.class);
    crit.addOrder(Order.asc("ordering"));
    crit.createCriteria("process", "proz");
    crit.add(Restrictions.eq("proz.template", Boolean.FALSE));
    crit.add(Restrictions.eq("proz.project", inProject));

    // Forward-only scroll with the cache ignored keeps memory flat for large projects.
    ScrollableResults list = crit.setCacheMode(CacheMode.IGNORE).scroll(ScrollMode.FORWARD_ONLY);
    try {
        while (list.next()) {
            Task step = (Task) list.get(0);
            // Truncate to 60 chars so long variants of the same title aggregate together.
            String shorttitle = (step.getTitle().length() > 60 ? step.getTitle().substring(0, 60) + "..."
                    : step.getTitle());

            // Find the existing aggregate for this title, or create one.
            IProjectTask pt = null;
            for (IProjectTask task : myTaskList) {
                if (task.getTitle().equals(shorttitle)) {
                    pt = task;
                    break;
                }
            }
            if (pt == null) {
                pt = new ProjectTask(shorttitle, 0, 0);
                myTaskList.add(pt);
            }

            // Completed count only advances for DONE steps.
            if (step.getProcessingStatusEnum() == TaskStatus.DONE) {
                if (countImages) {
                    pt.setStepsCompleted(pt.getStepsCompleted() + step.getProcess().getSortHelperImages());
                } else {
                    pt.setStepsCompleted(pt.getStepsCompleted() + 1);
                }
            }

            // Maximum count advances for every step.
            if (countImages) {
                pt.setStepsMax(pt.getStepsMax() + step.getProcess().getSortHelperImages());
            } else {
                pt.setStepsMax(pt.getStepsMax() + 1);
            }
        }
    } finally {
        // Bug fix: the cursor was never closed, leaking the underlying JDBC result set.
        list.close();
    }
}

From source file:org.grouter.common.hibernatesearch.FullIndexHandler.java

License:Apache License

/**
 * Creates index/*  w  w w. ja v a  2s.  c om*/
 *
 * @param batchSize batch index parameter
 * @param session the Hibernate session
 * @param theIndexClass classes to index
 */
public void doFullIndex(int batchSize, Session session, Class... theIndexClass) {
    FullTextSession fullTextSession = Search.createFullTextSession(session);
    fullTextSession.setFlushMode(FlushMode.MANUAL);
    fullTextSession.setCacheMode(CacheMode.IGNORE);
    //Scrollable results will avoid loading too many objects in memory
    for (Class theClass : theIndexClass) {
        ScrollableResults results = fullTextSession.createCriteria(theClass).scroll(ScrollMode.FORWARD_ONLY);
        int index = 0;
        while (results.next()) {
            index++;
            fullTextSession.index(results.get(0)); //index each element
            if (index % batchSize == 0) {
                session.clear(); //clear every batchSize since the queue is processed
            }
        }
    }
}

From source file:org.grouter.domain.dao.spring.SystemDAOImpl.java

License:Apache License

/**
 * Rebuilds the full-text index for the given entity class by scrolling all
 * rows with a forward-only cursor and indexing them in batches.
 *
 * @param batchSize     number of entities to index before flushing/clearing
 * @param theIndexClass entity class whose instances are (re)indexed
 * @param session       Hibernate session to wrap with a full-text session
 */
public void doFullIndex(int batchSize, Class theIndexClass, Session session)
{
    FullTextSession fullTextSession = Search.createFullTextSession(session);
    fullTextSession.setFlushMode(FlushMode.MANUAL);
    fullTextSession.setCacheMode(CacheMode.IGNORE);
    // Scrollable results avoid loading too many objects in memory.
    ScrollableResults results = fullTextSession.createCriteria(theIndexClass).scroll(ScrollMode.FORWARD_ONLY);
    try {
        int index = 0;
        while (results.next()) {
            index++;
            fullTextSession.index(results.get(0)); //index each element
            if (index % batchSize == 0) {
                // Bug fix: flush the pending index queue before clearing the
                // session, otherwise queued index work accumulates unbounded.
                fullTextSession.flushToIndexes();
                session.clear(); //clear every batchSize since the queue is processed
            }
        }
    } finally {
        // Bug fix: release the cursor's underlying result set.
        results.close();
    }
}

From source file:org.headsupdev.agile.app.search.Reindex.java

License:Open Source License

/**
 * Re-indexes every persisted entity type for full-text search.
 * Each entity class is processed in its own transaction: rows are scrolled
 * with a forward-only cursor, entities annotated {@code @Indexed} are
 * re-indexed, and the Lucene work queue is flushed and the session cleared
 * every {@code BATCH_SIZE} rows. Any failure aborts the run and is logged.
 */
public void run() {
    // Register a task so the reindex is visible in the manager's task list;
    // it is removed again at the end, even on failure.
    Task reindex = new ReindexTask();
    Manager.getInstance().addTask(reindex);

    try {
        for (String className : HibernateUtil.getEntityClassNames()) {
            Session session = HibernateUtil.getCurrentSession();
            // Unwrap the proxy: Hibernate Search needs the real session.
            FullTextSession fullTextSession = org.hibernate.search.Search
                    .createFullTextSession(((SessionProxy) session).getRealSession());
            Transaction tx = fullTextSession.beginTransaction();

            // Manual flushing plus cache bypass keep the bulk scroll cheap.
            fullTextSession.setFlushMode(FlushMode.MANUAL);
            fullTextSession.setCacheMode(CacheMode.IGNORE);

            Manager.getLogger(getClass().getName()).debug("  object type " + className);

            // Scrollable results avoid loading too many objects in memory.
            ScrollableResults results = fullTextSession.createCriteria(className).setFetchSize(BATCH_SIZE)
                    .scroll(ScrollMode.FORWARD_ONLY);

            int index = 0;
            while (results.next()) {
                Object o = results.get(0);

                index++;
                // Only entities annotated @Indexed participate in search.
                if (o.getClass().isAnnotationPresent(Indexed.class)) {
                    if (HeadsUpConfiguration.isDebug()) {
                        System.out.print(".");
                    }
                    fullTextSession.index(o); //index each element
                }
                if (index % BATCH_SIZE == 0) {
                    fullTextSession.flushToIndexes(); //apply changes to indexes
                    fullTextSession.clear(); //clear since the queue is processed
                }
            }
            // Commit flushes the remaining (partial) batch to the index.
            tx.commit();

            if (HeadsUpConfiguration.isDebug()) {
                System.out.println();
            }
        }
    } catch (Exception e) {
        Manager.getLogger(getClass().getName()).error("Failed to reindex search data", e);
    }

    Manager.getInstance().removeTask(reindex);
}

From source file:org.hyperic.hq.events.server.session.EventLogDAO.java

License:Open Source License

/**
 * Persists the given event logs in batches sized by the
 * <code>hibernate.jdbc.batch_size</code> configuration property.
 * Flushing is manual and the second-level cache is bypassed for the duration
 * of the insert; the session's previous flush and cache modes are restored
 * afterwards, even on failure.
 *
 * @param eventLogs The event logs to insert.
 */
void insertLogs(EventLog[] eventLogs) {
    Session session = getSession();

    // Remember the caller's session configuration so it can be restored.
    FlushMode previousFlushMode = session.getFlushMode();
    CacheMode previousCacheMode = session.getCacheMode();

    try {
        session.setFlushMode(FlushMode.MANUAL);

        // We do not want to update the 2nd level cache with these event logs.
        session.setCacheMode(CacheMode.IGNORE);

        for (EventLog eventLog : eventLogs) {
            create(eventLog);
        }

        session.flush();
        session.clear();
    } finally {
        session.setFlushMode(previousFlushMode);
        session.setCacheMode(previousCacheMode);
    }
}

From source file:org.inbio.neoportal.index.Importer.java

License:Open Source License

/**
 * /*from   w w  w. j ava  2 s .  c om*/
 */
@Transactional
public void indexOccurrences() {

    Session session = sessionFactory.getCurrentSession();
    //      FullTextSession fullTextSession = Search.getFullTextSession(session);

    // config session for bash job
    session.setFlushMode(FlushMode.MANUAL);
    //      fullTextSession.setFlushMode(FlushMode.MANUAL);

    logger.log(org.apache.log4j.Level.DEBUG, "Starting importOccurrences process");

    //get current date for dateLastModified field
    DateFormat dateFormat = new SimpleDateFormat("dd/MM/yyyy");
    Date date = new Date();
    String dateLastModified = dateFormat.format(date);

    DateFormat sourceDateFormat = new SimpleDateFormat("dd-MMM-yy", Locale.ENGLISH);

    int firstResult = 0;
    int setCounter = 0; //every 100 (the jdbc batch size) call flush 

    DataProvider dp = dataProviderDAO.findAll().get(0);

    logger.log(org.apache.log4j.Level.DEBUG, "importOccurrences Begin Transaction");

    ScrollableResults scroll = session.createCriteria(ImportDwc.class).setFetchSize(BATCH_SIZE)
            .setCacheMode(CacheMode.IGNORE).setReadOnly(true).scroll(ScrollMode.FORWARD_ONLY);

    boolean update;

    int batch = 0;
    int rowsCounter = 0;

    while (scroll.next()) {
        batch++;
        rowsCounter++;

        ImportDwc importDwc = (ImportDwc) scroll.get(0);
        logger.trace("ImportDwc after scroll.get");

        try {

            //avoid repeated occurrenceId
            OccurrenceDwc occurrence = occurrenceDAO
                    .findByCatalogNumberHql(importDwc.getCatalogNumber().replace("\"", ""));
            logger.trace("OccurrenceDwc after findByCatalogNumber "
                    + importDwc.getCatalogNumber().replace("\"", ""));

            if (occurrence != null) {

                update = true;
                //                    continue;
            } else {
                update = false;
                occurrence = new OccurrenceDwc();
            }

            Taxon taxon = null;
            //check if taxonId is empty (unidentify specimens)
            if (importDwc.getTaxonId().isEmpty()) {
                taxon = null;
            } else {
                // check if is the same taxon already associated with the occurrence
                if (update && occurrence.getTaxonId().equals(importDwc.getTaxonId().replace("\"", ""))) {
                    taxon = occurrence.getTaxon();
                    logger.trace("Occurrence update with same taxon");
                } else {

                    // find taxon entity
                    // taxon = taxonNewDAO.findById(new BigDecimal(importDwc.getTaxonId().replace("\"", "")));
                    List<Taxon> taxonList = taxonDAO.findByDefaultName(importDwc.getScientificName());
                    logger.trace("Taxon after findByDefaultName");
                    if (taxonList.size() == 1)
                        taxon = taxonList.get(0);
                    else if (taxonList.size() > 1) {
                        for (Taxon taxon2 : taxonList) {
                            if (taxon2.getKingdom().equals(importDwc.getKingdom())) {
                                taxon = taxon2;
                                break;
                            }
                        }
                    }
                }
            }

            // TODO: fix, use specimenId instead
            occurrence.setOccurrenceId(importDwc.getCatalogNumber().replace("\"", ""));

            occurrence.setDataProvider(dp);
            occurrence.setTaxon(taxon);

            //find or create location
            Location location = locationDAO.findById(new BigDecimal(importDwc.getLocationId()));
            logger.trace("Location after findById");

            if (location == null) {
                location = new Location(new BigDecimal(importDwc.getLocationId()));
                location.setHigherGeographyId(importDwc.getHigherGeographyId());
                location.setHigherGeography(importDwc.getHigherGeography());
                location.setContinent(importDwc.getContinent());
                location.setWaterBody(importDwc.getWaterBody());
                location.setIslandGroup(importDwc.getIslandGroup());
                location.setIsland(importDwc.getIsland());
                location.setCountry(importDwc.getCountry());
                location.setCountryCode(importDwc.getCountryCode());
                location.setStateProvince(importDwc.getStateProvince());
                location.setCounty(importDwc.getCounty());
                location.setMunicipality(importDwc.getMunicipality());
                location.setLocality(importDwc.getLocality());
                location.setVerbatimLocality(importDwc.getVerbatimLocality());
                location.setVerbatimElevation(importDwc.getVerbatimElevation());
                location.setMinimumElevationInMeters(importDwc.getMinimumElevationInMeters());
                location.setMaximumElevationInMeters(importDwc.getMaximumElevationInMeters());
                location.setVerbatimDepth(importDwc.getVerbatimDepth());
                location.setMinimumDepthInMeters(importDwc.getMinimumDepthInMeters());
                location.setMaximumDepthInMeters(importDwc.getMaximumDepthInMeters());
                location.setMinimumDistanceAboveSurfaceInMeters(
                        importDwc.getMinimumDistanceAboveSurfaceInMeters());
                location.setMaximumDistanceAboveSurfaceInMeters(
                        importDwc.getMaximumDistanceAboveSurfaceInMeters());
                location.setLocationAccordingTo(importDwc.getLocationAccordingTo());
                location.setLocationRemarks(importDwc.getLocationRemarks());
                location.setVerbatimCoordinates(importDwc.getVerbatimCoordinates());
                location.setVerbatimLatitude(importDwc.getVerbatimLatitude());
                location.setVerbatimLongitude(importDwc.getVerbatimLongitude());
                location.setVerbatimCoordinateSystem(importDwc.getVerbatimCoordinateSystem());
                location.setVerbatimSRS(importDwc.getVerbatimSRS());
                if (!importDwc.getDecimalLatitude().isEmpty())
                    location.setDecimalLatitude(Double.valueOf(importDwc.getDecimalLatitude()));
                if (!importDwc.getDecimalLongitude().isEmpty())
                    location.setDecimalLongitude(Double.valueOf(importDwc.getDecimalLongitude()));
                location.setGeodeticDatum(importDwc.getGeodeticDatum());
                location.setCoordinateUncertaintyInMeters(importDwc.getCoordinateUncertaintyInMeters());
                location.setCoordinatePrecision(importDwc.getCoordinatePrecision());
                location.setPointRadiusSpatialFit(importDwc.getPointRadiusSpatialFit());
                location.setFootprintWKT(importDwc.getFootprintWKT());
                location.setFootprintSRS(importDwc.getFootprintSRS());
                location.setFootprintSpatialFit(importDwc.getFootprintSpatialFit());
                location.setGeoreferencedBy(importDwc.getGeoreferencedBy());
                location.setGeoreferencedDate(importDwc.getGeoreferencedDate());
                location.setGeoreferenceProtocol(importDwc.getGeoreferenceProtocol());
                location.setGeoreferenceSources(importDwc.getGeoreferenceSources());
                location.setGeoreferenceVerificationStatus(importDwc.getGeoreferenceVerificationStatus());
                location.setGeoreferenceRemarks(importDwc.getGeoreferenceRemarks());

                locationDAO.create(location);
                // increment batch because location should be inserted
                batch++;
            }
            occurrence.setLocation(location);

            occurrence.setType(importDwc.getType());
            occurrence.setModified(importDwc.getModified());
            occurrence.setLanguage(importDwc.getLanguage());
            occurrence.setRights(importDwc.getRights());
            occurrence.setRightsHolder(importDwc.getRightsHolder());
            occurrence.setAccessRights(importDwc.getAccessRights());
            occurrence.setBibliographicCitation(importDwc.getBibliographicCitation());
            occurrence.setReferences(importDwc.getReferences());
            occurrence.setInstitutionId(importDwc.getInstitutionId());
            occurrence.setCollectionId(importDwc.getCollectionId());
            occurrence.setDatasetId(importDwc.getDatasetId());
            occurrence.setInstitutionCode(importDwc.getInstitutionCode());
            occurrence.setCollectionCode(importDwc.getCollectionCode());
            occurrence.setDatasetName(importDwc.getDatasetName());
            occurrence.setOwnerInstitutionCode(importDwc.getOwnerInstitutionCode());
            occurrence.setBasisOfRecord(importDwc.getBasisOfRecord());
            occurrence.setInformationWithheld(importDwc.getInformationWithheld());
            occurrence.setDataGeneralizations(importDwc.getDataGeneralizations());
            occurrence.setDynamicProperties(importDwc.getDynamicProperties());

            occurrence.setOccurrenceId(importDwc.getOccurrenceId().toString());
            occurrence.setCatalogNumber(importDwc.getCatalogNumber());
            occurrence.setOccurrenceRemarks(importDwc.getOccurrenceRemarks());
            occurrence.setRecordNumber(importDwc.getRecordNumber());
            occurrence.setRecordedBy(importDwc.getRecordedBy());
            occurrence.setIndividualId(importDwc.getIndividualId());
            occurrence.setIndividualCount(importDwc.getIndividualCount());
            occurrence.setSex(importDwc.getSex());
            occurrence.setLifeStage(importDwc.getLifeStage());
            occurrence.setReproductiveCondition(importDwc.getReproductiveCondition());
            occurrence.setBehavior(importDwc.getBehavior());
            occurrence.setEstablishmentMeans(importDwc.getEstablishmentMeans());
            occurrence.setOccurrenceStatus(importDwc.getOccurrenceStatus());
            occurrence.setPreparations(importDwc.getPreparations());
            occurrence.setDisposition(importDwc.getDisposition());
            occurrence.setOtherCatalogNumbers(importDwc.getOtherCatalogNumbers());
            occurrence.setPreviousIdentifications(importDwc.getPreviousIdentifications());
            occurrence.setAssociatedMedia(importDwc.getAssociatedMedia());
            occurrence.setAssociatedReferences(importDwc.getAssociatedReferences());
            occurrence.setAssociatedOccurrences(importDwc.getAssociatedOccurrences());
            occurrence.setAssociatedSequences(importDwc.getAssociatedSequences());
            occurrence.setAssociatedTaxa(importDwc.getAssociatedTaxa());

            occurrence.setEventId(importDwc.getEventId());
            occurrence.setSamplingProtocol(importDwc.getSamplingProtocol());
            occurrence.setSamplingEffort(importDwc.getSamplingEffort());
            occurrence.setEventDate(importDwc.getEventDate());
            occurrence.setEventTime(importDwc.getEventTime());
            occurrence.setStartDayOfYear(importDwc.getStartDayOfYear());
            occurrence.setEndDayOfYear(importDwc.getEndDayOfYear());
            occurrence.setYear(importDwc.getYear());
            occurrence.setMonth(importDwc.getMonth());
            occurrence.setDay(importDwc.getDay());
            occurrence.setVerbatimEventDate(importDwc.getVerbatimEventDate());
            occurrence.setHabitat(importDwc.getHabitat());
            occurrence.setFieldNotes(importDwc.getFieldNumber());
            occurrence.setFieldNotes(importDwc.getFieldNotes());
            occurrence.setEventRemarks(importDwc.getEventRemarks());

            occurrence.setGeologicalContextId(importDwc.getGeologicalContextId());
            occurrence.setEarliestEonOrLowestEonothem(importDwc.getEarliestEonOrLowestEonothem());
            occurrence.setLatestEonOrHighestEonothem(importDwc.getLatestEonOrHighestEonothem());
            occurrence.setEarliestEraOrLowestErathem(importDwc.getEarliestEraOrLowestErathem());
            occurrence.setLatestEraOrHighestErathem(importDwc.getLatestEraOrHighestErathem());
            occurrence.setEarliestPeriodOrLowestSystem(importDwc.getEarliestPeriodOrLowestSystem());
            occurrence.setLatestPeriodOrHighestSystem(importDwc.getLatestPeriodOrHighestSystem());
            occurrence.setEarliestEpochOrLowestSeries(importDwc.getEarliestEpochOrLowestSeries());
            occurrence.setLatestEpochOrHighestSeries(importDwc.getLatestEpochOrHighestSeries());
            occurrence.setEarliestAgeOrLowestStage(importDwc.getEarliestAgeOrLowestStage());
            occurrence.setLatestAgeOrHighestStage(importDwc.getLatestAgeOrHighestStage());
            occurrence.setLowestBiostratigraphicZone(importDwc.getLowestBiostratigraphicZone());
            occurrence.setHighestBiostratigraphicZone(importDwc.getHighestBiostratigraphicZone());
            occurrence.setLithostratigraphicTerms(importDwc.getLithostratigraphicTerms());
            occurrence.setGroup(importDwc.getGroup());
            occurrence.setFormation(importDwc.getFormation());
            occurrence.setMember(importDwc.getMember());
            occurrence.setBed(importDwc.getBed());

            occurrence.setIdentificationId(importDwc.getIdentificationId());
            occurrence.setIdentifiedBy(importDwc.getIdentifiedBy());
            if (importDwc.getDateIdentified() != null && importDwc.getDateIdentified().length() > 0)
                occurrence.setDateIdentified(sourceDateFormat.parse(importDwc.getDateIdentified()));
            occurrence.setIdentificationReferences(importDwc.getIdentificationReferences());
            occurrence.setIdentificationVerificationStatus(importDwc.getIdentificationVerificationStatus());
            occurrence.setIdentificationRemarks(importDwc.getIdentificationRemarks());
            occurrence.setIdentificationQualifier(importDwc.getIdentificationQualifier());
            occurrence.setTypeStatus(importDwc.getTypeStatus());

            occurrence.setTaxonId(importDwc.getTaxonId());
            occurrence.setScientificNameId(importDwc.getScientificNameId());
            occurrence.setAcceptedNameUsageId(importDwc.getAcceptedNameUsageId());
            occurrence.setParentNameUsageId(importDwc.getParentNameUsageId());
            occurrence.setOriginalNameUsageId(importDwc.getOriginalNameUsageId());
            occurrence.setNameAccordingToId(importDwc.getNameAccordingToId());
            occurrence.setNamePublishedInId(importDwc.getNamePublishedInId());
            occurrence.setTaxonConceptId(importDwc.getTaxonConceptId());
            occurrence.setScientificName(importDwc.getScientificName());
            occurrence.setAcceptedNameUsage(importDwc.getAcceptedNameUsage());
            occurrence.setParentNameUsage(importDwc.getParentNameUsage());
            occurrence.setOriginalNameUsage(importDwc.getOriginalNameUsage());
            occurrence.setNameAccordingTo(importDwc.getNameAccordingTo());
            occurrence.setNamePublishedIn(importDwc.getNamePublishedIn());
            occurrence.setNamePublishedInYear(importDwc.getNamePublishedInYear());
            occurrence.setHigherClassification(importDwc.getHigherClassification());
            occurrence.setKingdom(importDwc.getKingdom());
            occurrence.setPhylum(importDwc.getPhylum());
            occurrence.setClass_(importDwc.getClass_());
            occurrence.setOrder(importDwc.getOrder());
            occurrence.setFamily(importDwc.getFamily());
            occurrence.setGenus(importDwc.getGenus());
            occurrence.setSubgenus(importDwc.getSubgenus());
            occurrence.setSpecificEpithet(importDwc.getSpecificEpithet());
            occurrence.setInfraspecificEpithet(importDwc.getInfraspecificEpithet());
            occurrence.setTaxonRank(importDwc.getTaxonRank());
            occurrence.setVerbatimTaxonRank(importDwc.getVerbatimTaxonRank());
            occurrence.setScientificNameAuthorship(importDwc.getScientificNameAuthorship());
            occurrence.setVernacularName(importDwc.getVernacularName());
            occurrence.setNomenclaturalCode(importDwc.getNomenclaturalCode());
            occurrence.setTaxonomicStatus(importDwc.getTaxonomicStatus());
            occurrence.setNomenclaturalStatus(importDwc.getNomenclaturalStatus());
            occurrence.setTaxonRemarks(importDwc.getTaxonRemarks());

            if (!update)
                occurrenceDAO.create(occurrence);
            else
                occurrenceDAO.update(occurrence);

            // clear objects 
            occurrence.setImages(null);
            occurrence = null;
            taxon = null;
            location = null;

        } catch (NonUniqueResultException ex) {
            logger.warn("NonUniqueResultException occurrenceId " + importDwc.getCatalogNumber());

        } catch (NumberFormatException ex) {
            logger.error("NumberFormatException occurrenceId " + importDwc.getCatalogNumber());
            ex.printStackTrace();

            System.exit(-1);
        } catch (ParseException e) {
            e.printStackTrace();
        }
        //            } // end for, 1000 importDwc rows

        session.evict(importDwc);

        if (batch >= BATCH_SIZE) {
            batch = 0;

            SessionStatistics statistics = session.getStatistics();
            logger.trace("Entities before flush: " + String.valueOf(statistics.getEntityCount()));

            session.flush();

            logger.trace("Entities before clear: " + String.valueOf(statistics.getEntityCount()));

            //               fullTextSession.flushToIndexes();
            session.clear();

            logger.trace("Entities after clear: " + String.valueOf(statistics.getEntityCount()));

            //               System.exit(1);
        }

        if (rowsCounter % maxResults == 0) {
            //              fullTextSession.flushToIndexes();
            logger.info("Occurrences added " + rowsCounter);

            SessionStatistics statistics = session.getStatistics();
            logger.debug("Entities: " + String.valueOf(statistics.getEntityCount()));

            logger.debug("Collections: " + String.valueOf(statistics.getCollectionCount()));

        }

        // ******* for debug only ***********
        //            if(rowsCounter == 1) {
        //               session.getTransaction().rollback();
        //               scroll.close();
        //               System.exit(1);
        //            }

        //            firstResult += maxResults;

        //            occurrencesDwcList =
        //                    importDwcDAO.scrollAll(ImportDwc.class,
        //                        maxResults,
        //                        firstResult);
    } // end while, no more importDwc rows

    scroll.close();
    //        transactionManager.commit(status);
    session.flush();
    session.clear();

    logger.info("Total occurrences processed " + rowsCounter);
    //        session.getTransaction().commit();
    //        session.close();
}

From source file:org.j2free.jpa.Controller.java

License:Apache License

/**
 * Rebuilds the Hibernate Search index for every instance of the given entity
 * class, scrolling the table with a forward-only cursor.
 *
 * It is critical that batchSize matches the hibernate.search.worker.batch_size you set
 *
 * @param <T>         the entity type
 * @param entityClass the class whose instances are indexed
 * @param batchSize   number of entities indexed per flush/clear cycle
 */
public <T> void hibernateSearchIndex(Class<T> entityClass, int batchSize) {
    FullTextSession indexer = org.hibernate.search.Search.getFullTextSession(getSession());
    indexer.setFlushMode(FlushMode.MANUAL);
    indexer.setCacheMode(CacheMode.IGNORE);

    ScrollableResults cursor = indexer.createCriteria(entityClass).setFetchSize(batchSize)
            .scroll(ScrollMode.FORWARD_ONLY);

    try {
        int processed = 0;
        while (cursor.next()) {
            indexer.index(cursor.get(0)); // index each element

            // Flush the index queue and clear every batchSize entities.
            if (++processed % batchSize == 0) {
                indexer.flushToIndexes();
                indexer.clear();
            }
        }
    } finally {
        cursor.close();
    }
}

From source file:org.life.sl.importers.CalculateODMatrix.java

License:Open Source License

/**
 * Computes the all-pairs shortest-path matrix for the OSM network and stores
 * every finite node-pair distance as a {@code ShortestPathLength} row.
 * The target table is emptied first; inserts are batched according to the
 * <code>hibernate.jdbc.batch_size</code> property (default 30). Only one
 * direction of each pair is stored, since lengths are symmetric.
 */
public CalculateODMatrix() {
    Timer timer = new Timer();
    timer.init();
    ids_edges = this.loadEdgesFromOSM();
    timer.getRunTime(true, "Edges read from database: " + ids_edges.size());
    ids_nodes = this.loadNodesFromOSM();
    timer.getRunTime(true, "Nodes read from database: " + ids_nodes.size());

    psg = new PathSegmentGraph(1); // 1 = read from database...
    timer.getRunTime(true, "PathSegmentGraph initialized");
    psg.calculateDistances();

    // All-pairs shortest-path distances, indexed by node position.
    float dist[][] = psg.getAPSDistancesArr();

    Session session = HibernateUtil.getSessionFactory().getCurrentSession();
    // first, empty the database table:
    session.beginTransaction();
    session.setCacheMode(CacheMode.IGNORE); // bulk job: bypass the 2nd-level cache
    int nDel = session.createQuery("delete ShortestPathLength").executeUpdate();
    session.flush();
    System.out.println("Deleted " + nDel + " records from shortestpathlength");

    int osmNodeID1 = 0, osmNodeID2 = 0;
    float length = 0.f;

    System.out.println("Starting database export...");
    // Batch size for flush/clear cycles; falls back to 30 if the property is unset.
    Integer batchSize = Integer.getInteger(new Configuration().getProperty("hibernate.jdbc.batch_size"), 30);
    System.out.println("Database batch size: " + batchSize);

    timer.init(2.5, 50.);
    double nn = dist.length * dist.length / 2; // approximate number of steps
    long n = 0, nc = 0;
    for (int i = 0; i < dist.length - 1; i++) { // outer loop over all nodes
        osmNodeID1 = i;
        if (osmNodeID1 >= 0) { // check if node exists in OSM network at all...
            for (int j = i + 1; j < dist.length; j++) { // inner loop over all nodes
                if (i != j) { // no connection to self!
                    length = dist[i][j]; // the path length
                    if (length > 1.e-8 && length < .5f * Float.MAX_VALUE) { // ignore 0 (= self) and infinity (= no connection)
                        osmNodeID2 = j;

                        // store length(n1, n2)
                        if (osmNodeID2 >= 0) {
                            // TODO: can this be optimized by reusing sPl1 instead of creating it (new)?
                            ShortestPathLength sPl1 = new ShortestPathLength(osmNodeID1, osmNodeID2, length);
                            session.save(sPl1);
                            // Flush and clear periodically to keep the session small.
                            if (++n % batchSize == 0) {
                                session.flush();
                                session.clear();
                            }

                            // the same path in reverse direction: not necessary
                        }
                    }
                    nc++;
                }
            }
        }
        timer.showProgress(nc / nn);
    }
    session.getTransaction().commit(); // TODO: complete the transaction in the outer loop above, to prevent it from getting too big?
    timer.getRunTime(true, "... finished");
    System.out.println("YEAH !");

}