Example usage for org.hibernate ScrollableResults close

List of usage examples for org.hibernate ScrollableResults close

Introduction

On this page you can find example usages of org.hibernate ScrollableResults close.

Prototype

void close();

Document

Release resources immediately.
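
Because close() releases the underlying JDBC cursor, callers typically guard it with a finally block so the cursor is freed even when iteration fails. A minimal sketch of that idiom, assuming a mapped entity named Person and an open Session (both hypothetical):

ScrollableResults results = session.createQuery("FROM Person p").setReadOnly(true)
        .scroll(ScrollMode.FORWARD_ONLY);
try {
    while (results.next()) {
        Object person = results.get()[0]; // single-entity projection
        // ... process the row ...
    }
} finally {
    results.close(); // release the JDBC cursor immediately
}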

Usage

From source file:de.tudarmstadt.ukp.lmf.transform.DBToXMLTransformer.java

License:Apache License

protected void doTransform(boolean includeAxes, final Lexicon... includeLexicons) throws SAXException {
    final int bufferSize = 100;
    commitCounter = 1;

    writeStartElement(lexicalResource);

    // Iterate over all lexicons
    if (includeLexicons == null || includeLexicons.length > 0) {
        for (Lexicon lexicon : lexicalResource.getLexicons()) {
            String lexiconName = lexicon.getName();

            // Check if we want to include this lexicon.
            if (includeLexicons != null) {
                boolean found = false;
                for (Lexicon l : includeLexicons) {
                    if (lexiconName.equals(l.getName())) {
                        found = true;
                        break;
                    }
                }
                if (!found) {
                    continue;
                }
            }

            logger.info("Processing lexicon: " + lexiconName);
            writeStartElement(lexicon);

            // Iterate over all possible sub-elements of this Lexicon and
            // write them to the XML
            Class<?>[] lexiconClassesToSave = { LexicalEntry.class, SubcategorizationFrame.class,
                    SubcategorizationFrameSet.class, SemanticPredicate.class, Synset.class,
                    SynSemCorrespondence.class,
                    //ConstraintSet.class
            };

            //  "Unfortunately, MySQL does not treat large offset values efficiently by default and will still read all the rows prior to an offset value. It is common to see a query with an offset above 100,000 take over 20 times longer than an offset of zero!"
            // http://www.numerati.com/2012/06/26/reading-large-result-sets-with-hibernate-and-mysql/
            for (Class<?> clazz : lexiconClassesToSave) {
                /*DetachedCriteria criteria = DetachedCriteria.forClass(clazz)
                      .add(Restrictions.sqlRestriction("lexiconId = '" + lexicon.getId() + "'"));
                CriteriaIterator<Object> iter = new CriteriaIterator<Object>(criteria, sessionFactory, bufferSize);
                while (iter.hasNext()) {
                   Object obj = iter.next();
                   writeElement(obj);
                   session.evict(obj);
                   commitCounter++;
                   if (commitCounter % 1000 == 0)
                      logger.info("progress: " + commitCounter  + " class instances written to file");
                }*/
                Session lookupSession = sessionFactory.openSession();
                Query query = lookupSession.createQuery("FROM " + clazz.getSimpleName() + " WHERE lexiconId = '"
                        + lexicon.getId() + "' ORDER BY id");
                query.setReadOnly(true);
                if (DBConfig.MYSQL.equals(dbConfig.getDBType())) {
                    query.setFetchSize(Integer.MIN_VALUE); // MIN_VALUE gives hint to JDBC driver to stream results
                } else {
                    query.setFetchSize(1000);
                }
                ScrollableResults results = query.scroll(ScrollMode.FORWARD_ONLY);
                while (results.next()) {
                    // For streamed query results, no further queries are allowed (incl. lazy proxy queries!)
                    // Detach the object from the lookup session and reload it using the "official" session.
                    Object row = results.get()[0];
                    lookupSession.evict(row);
                    row = session.get(row.getClass(), ((IHasID) row).getId());
                    writeElement(row);
                    session.evict(row);
                    row = null;
                    commitCounter++;
                    if (commitCounter % 1000 == 0) {
                        logger.info("progress: " + commitCounter + " class instances written to file");
                    }
                    if (commitCounter % 10000 == 0) {
                        closeSession();
                        openSession();
                    }
                }
                results.close();
                lookupSession.close();
            }
            writeEndElement(lexicon);
        }
    }

    // Iterate over the sense axes and write them to XML as well, unless
    // only the lexicons are to be converted
    if (includeAxes) {
        logger.info("Processing sense axes");
        DetachedCriteria criteria = DetachedCriteria.forClass(SenseAxis.class)
                .add(Restrictions.sqlRestriction("lexicalResourceId = '" + lexicalResource.getName() + "'"));
        CriteriaIterator<Object> iter = new CriteriaIterator<Object>(criteria, sessionFactory, bufferSize);
        while (iter.hasNext()) {
            Object obj = iter.next();
            writeElement(obj);
            session.evict(obj);
            commitCounter++;
            if (commitCounter % 1000 == 0) {
                logger.info("progress: " + commitCounter + " class instances written to file");
            }
        }

        logger.info("Processing predicateargument axes");
        DetachedCriteria criteria2 = DetachedCriteria.forClass(PredicateArgumentAxis.class)
                .add(Restrictions.sqlRestriction("lexicalResourceId = '" + lexicalResource.getName() + "'"));
        CriteriaIterator<Object> iter2 = new CriteriaIterator<Object>(criteria2, sessionFactory, bufferSize);
        while (iter2.hasNext()) {
            Object obj = iter2.next();
            writeElement(obj);
            session.evict(obj);
            commitCounter++;
            if (commitCounter % 1000 == 0) {
                logger.info("progress: " + commitCounter + " class instances written to file");
            }
        }

    }
    writeEndElement(lexicalResource);

    writeEndDocument();
}
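
The method above combines three ingredients worth isolating: a FORWARD_ONLY scroll, a fetch-size hint (Integer.MIN_VALUE is the MySQL Connector/J convention for streaming rows instead of buffering the whole result set), and eviction so the session stays small. A condensed sketch of just that pattern, assuming the LexicalEntry entity from the example and a sessionFactory field (everything else hypothetical):

Session lookupSession = sessionFactory.openSession();
try {
    Query query = lookupSession.createQuery("FROM LexicalEntry ORDER BY id");
    query.setReadOnly(true);
    query.setFetchSize(Integer.MIN_VALUE); // MySQL: stream rows; other drivers take a positive hint
    ScrollableResults results = query.scroll(ScrollMode.FORWARD_ONLY);
    try {
        while (results.next()) {
            Object entry = results.get()[0];
            // ... write the entry ...
            lookupSession.evict(entry); // keep the persistence context from growing
        }
    } finally {
        results.close();
    }
} finally {
    lookupSession.close();
}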

From source file:edu.emory.library.tast.util.CSVUtils.java

License:Open Source License

private static DictionaryInfo[] getAllData(Session sess, TastDbQuery query, boolean useSQL,
        ZipOutputStream zipStream, boolean codes, String conditions) throws FileNotFoundException, IOException {

    SimpleDateFormat dateFormatter = new SimpleDateFormat(
            AppConfig.getConfiguration().getString(AppConfig.FORMAT_DATE_CVS));

    //insert the bom - byte order marker
    final byte[] bom = new byte[] { (byte) 0xEF, (byte) 0xBB, (byte) 0xBF };
    zipStream.write(bom);
    CSVWriter writer = new CSVWriter(new OutputStreamWriter(zipStream, encoding), ',');

    //CSVWriter writer = new CSVWriter(new OutputStreamWriter(zipStream), ',');
    ScrollableResults queryResponse = null;

    Map dictionaries = new HashMap();

    try {
        queryResponse = query.executeScrollableQuery(sess, useSQL);

        Attribute[] populatedAttrs = query.getPopulatedAttributes();

        if (conditions != "") {
            String[] con = new String[1];
            con[0] = conditions;
            writer.writeNext(con);
        }

        String[] row = new String[populatedAttrs.length - 1];
        for (int i = 1; i < populatedAttrs.length; i++) {
            row[i - 1] = populatedAttrs[i].getName();
        }

        writer.writeNext(row);

        int cnt = 0;

        while (queryResponse.next()) {

            cnt++;

            Object[] result = queryResponse.get();

            row = new String[populatedAttrs.length - 1];
            for (int j = 1; j < populatedAttrs.length; j++) {
                if (result[j] == null) {
                    row[j - 1] = "";
                } else {
                    if (!codes) {
                        if (result[j] instanceof Date)
                            row[j - 1] = dateFormatter.format(result[j]);
                        else
                            row[j - 1] = result[j].toString();
                        if (result[j] instanceof Dictionary) {
                            if (dictionaries.containsKey(populatedAttrs[j].toString())) {
                                DictionaryInfo info = (DictionaryInfo) dictionaries
                                        .get(populatedAttrs[j].toString());
                                if (!info.attributes.contains(populatedAttrs[j])) {
                                    info.attributes.add(populatedAttrs[j]);
                                }
                            } else {
                                DictionaryInfo info = new DictionaryInfo();
                                info.attributes.add(populatedAttrs[j]);
                                info.dictionary = result[j].getClass();
                                dictionaries.put(populatedAttrs[j].toString(), info);
                            }
                        }
                    } else {
                        if (result[j] instanceof Dictionary) {
                            row[j - 1] = ((Dictionary) result[j]).getId().toString();
                            if (dictionaries.containsKey(populatedAttrs[j].toString())) {
                                DictionaryInfo info = (DictionaryInfo) dictionaries
                                        .get(populatedAttrs[j].toString());
                                if (!info.attributes.contains(populatedAttrs[j])) {
                                    info.attributes.add(populatedAttrs[j]);
                                }
                            } else {
                                DictionaryInfo info = new DictionaryInfo();
                                info.attributes.add(populatedAttrs[j]);
                                info.dictionary = result[j].getClass();
                                dictionaries.put(populatedAttrs[j].toString(), info);
                            }
                        } else {
                            if (result[j] instanceof Date)
                                row[j - 1] = dateFormatter.format(result[j]);
                            else
                                row[j - 1] = result[j].toString();
                        }
                    }
                }
            }
            writer.writeNext(row);
        }

        writer.writeNext(new String[] { "The number of total records: " + cnt });

        writer.flush();
        return (DictionaryInfo[]) dictionaries.values().toArray(new DictionaryInfo[] {});

    } finally {
        if (queryResponse != null) {
            queryResponse.close();
        }
    }
}
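
The null-guarded finally above is the safest idiom when the query itself may fail before scroll() returns. On Hibernate 6, where ScrollableResults implements AutoCloseable, the same cleanup can be written as try-with-resources; a sketch under that assumption (verify your Hibernate version before relying on it):

try (ScrollableResults<Object[]> results = query.scroll(ScrollMode.FORWARD_ONLY)) {
    while (results.next()) {
        Object[] row = results.get(); // Hibernate 6: get() returns the typed row directly
        // ... write the row ...
    }
} // close() runs automatically, even on exceptions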

From source file:edu.emory.library.tast.util.CSVUtils.java

License:Open Source License

private static void getAllData(Session sess, TastDbQuery query, boolean useSQL, ZipOutputStream zipStream,
        boolean codes) throws FileNotFoundException, IOException {
    SimpleDateFormat dateFormatter = new SimpleDateFormat(
            AppConfig.getConfiguration().getString(AppConfig.FORMAT_DATE_CVS));
    //insert the bom - byte order marker
    final byte[] bom = new byte[] { (byte) 0xEF, (byte) 0xBB, (byte) 0xBF };
    zipStream.write(bom);
    CSVWriter writer = new CSVWriter(new OutputStreamWriter(zipStream, encoding), ',');

    //TODO this snippet below is used for testing purposes only 
    /*File file = new File("c:\\tmp\\voyage.csv");
    FileOutputStream fout = new FileOutputStream(file);
    final byte[] bom = new byte[] { (byte)0xEF, (byte)0xBB, (byte)0xBF };              
     fout.write(bom);       
    CSVWriter writer = new CSVWriter(new OutputStreamWriter(fout, encoding), ',');*/

    ScrollableResults queryResponse = null;

    Map dictionaries = new HashMap();

    try {
        //query to retrieve users for the submissions 
        HashMap users = getUsersForSubmissions(sess);
        boolean usersExist = false;
        if (users != null && users.size() > 0) {
            usersExist = true;
        }
        //query for all the voyages
        queryResponse = query.executeScrollableQuery(sess, useSQL);

        Attribute[] populatedAttrs = query.getPopulatedAttributes();

        String[] row = new String[populatedAttrs.length + 1];
        int i;
        for (i = 0; i < populatedAttrs.length; i++) {
            row[i] = populatedAttrs[i].getName();
        }
        row[i] = "username";
        writer.writeNext(row);

        int cnt = 0;
        String userName = null;
        while (queryResponse.next()) {
            cnt++;
            Object[] result = queryResponse.get();

            row = new String[populatedAttrs.length + 1];
            int j;
            for (j = 0; j < populatedAttrs.length; j++) {
                if (populatedAttrs[j].getName().equals("iid")) {
                    userName = null;
                    if (usersExist) {
                        userName = (String) users.get(result[j]);
                    }
                }
                if (result[j] == null) {
                    row[j] = "";
                } else if (result[j] instanceof Date) {
                    row[j] = dateFormatter.format(result[j]);
                } else if (codes) {
                    if (result[j] instanceof Dictionary) {
                        row[j] = ((Dictionary) result[j]).getId().toString();
                    } else {
                        row[j] = result[j].toString();
                    }
                } else {//labels
                    row[j] = result[j].toString();
                }
            }
            if (userName != null) {
                row[j++] = userName;
            }
            writer.writeNext(row);
        }

        writer.flush();
    } catch (IOException io) {
        io.printStackTrace();
    } finally {
        if (queryResponse != null) {
            queryResponse.close();
        }
    }
}

From source file:gallery.service.rss.RssServiceImpl.java

License:Apache License

@Override
@Transactional(readOnly = true)
public void create() {
    if (generating) {
        logger.info("xml is allready generating ...");
        return;
    }
    try {
        generating = true;
        logger.info("start generate xml");
        long time = System.currentTimeMillis();

        File img_dir = new File(wallpaper_service.getStorePath(), Config.ENCLOSURE_IMG_SUBDIR);

        //get main wallpaper page
        Channel chan;
        List<Pages> temp = pages_service.getByPropertiesValueOrdered(null, MAIN_PSEUDONYMES, MAIN_VALUES, null,
                null);
        if (temp.isEmpty()) {
            chan = new Channel(gallery.web.Config.SITE_NAME, gallery.web.Config.SITE_NAME,
                    gallery.web.Config.SITE_NAME);
        } else {
            //TODO localize it !!!
            IAutoreplaceService.IReplacement repl = autoreplace_service.getAllReplacements("ru");
            String title = repl.replaceAll(temp.get(0).getTitle());
            String description = repl.replaceAll(temp.get(0).getDescription());
            chan = new Channel(title, gallery.web.Config.SITE_NAME, description);
        }

        RSS rss = new RSS();

        chan.setImage(new Channel.Image(gallery.web.Config.SITE_NAME + Config.LOGO_WEBDIR, chan.getTitle(),
                chan.getLink(), 0, 0, null));
        chan.setLastBuildDate(new java.util.Date());
        rss.addChannel(chan);

        ScrollableResults sr = wallpaper_service.getScrollableResults(
                "id, id_pages, description, title, date_upload, name", "active", Boolean.TRUE,
                new String[] { "date_upload" }, new String[] { "DESC" });
        try {
            int max_elements = 100;
            sr.beforeFirst();
            while (sr.next() && (max_elements-- > 0)) {
                //TODO: mb add some logging per item here
                Item item = new Item(sr.getString(2), gallery.web.Config.SITE_NAME + "index.htm?id_pages_nav="
                        + sr.getLong(1) + "&id_photo_nav=" + sr.getLong(0), sr.getString(3));
                item.setPubDate(sr.getDate(4));
                long fileLen = (new File(img_dir, sr.getString(5))).length();
                if (fileLen > 0) {
                    item.setEnclosure(new Item.Enclosure(
                            gallery.web.Config.SITE_NAME + Config.ENCLOSURE_IMG_WEBDIR + sr.getString(5),
                            fileLen, "image/jpeg"));
                }
                //item.addCategory(new Item.Category("test"));
                chan.addItem(item);
            }
        } finally {
            sr.close(); // release the cursor even if building an item fails
        }
        try {
            new RSSFeedGeneratorImpl().generateToFile(rss, new File(path, Config.RSS_FILE_NAME), "UTF-8");
        } catch (Exception e) {
            logger.error("error while saving rss to file", e);
        }
        time = System.currentTimeMillis() - time;
        logger.info("end generate xml. generated in: " + time);
    } finally {
        generating = false;
    }
}
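
The feed loop reads scalar columns positionally (sr.getLong(0), sr.getString(2), sr.getDate(4)), which only works when the select list has a fixed column order. A reduced sketch of that access style with a hypothetical projection; note that these typed positional getters exist in Hibernate 3.x through 5.x but were dropped in Hibernate 6:

ScrollableResults sr = session
        .createQuery("select w.id, w.title, w.dateUpload from Wallpaper w")
        .scroll(ScrollMode.FORWARD_ONLY);
try {
    while (sr.next()) {
        Long id = sr.getLong(0);        // column 0 of the select list
        String title = sr.getString(1); // column 1
        Date uploaded = sr.getDate(2);  // column 2
        // ... build the feed item ...
    }
} finally {
    sr.close();
}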

From source file:gr.abiss.calipso.service.impl.UserServiceImpl.java

License:Open Source License

@Override
@Transactional(readOnly = false)
public void expireResetPasswordTokens() {
    // get a hibernate session suitable for read-only access to large datasets
    StatelessSession session = ((Session) this.repository.getEntityManager().getDelegate()).getSessionFactory()
            .openStatelessSession();
    Date yesterday = DateUtils.addDays(new Date(), -1);

    // send email notifications for account confirmation tokens that expired
    org.hibernate.Query query = session.createQuery(
            "SELECT new gr.abiss.calipso.model.UserDTO(u.id, u.firstName, u.lastName,u.username, u.email, u.emailHash) FROM User u "
                    + "WHERE u.password IS NULL and u.resetPasswordTokenCreated IS NOT NULL and u.resetPasswordTokenCreated  < :yesterday");
    query.setParameter("yesterday", yesterday);
    query.setFetchSize(Integer.valueOf(1000));
    query.setReadOnly(true);
    query.setLockMode("a", LockMode.NONE);
    ScrollableResults results = query.scroll(ScrollMode.FORWARD_ONLY);
    while (results.next()) {
        UserDTO dto = (UserDTO) results.get(0);
        // TODO: send expiration email
        this.emailService.sendAccountConfirmationExpired(new User(dto));
    }
    results.close();
    session.close();

    // expire tokens, including password reset requests
    this.repository.expireResetPasswordTokens(yesterday);
}
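
A StatelessSession suits this job because it keeps no first-level cache, so nothing has to be evicted while scrolling; the cursor and the session are simply closed when the scan ends. The skeleton of that combination, reduced to its essentials (the projection and cutoff are hypothetical):

StatelessSession session = sessionFactory.openStatelessSession();
try {
    Date cutoff = DateUtils.addDays(new Date(), -1); // e.g. one day ago, as in the example
    org.hibernate.Query query = session.createQuery(
            "SELECT u.email FROM User u WHERE u.resetPasswordTokenCreated < :cutoff");
    query.setParameter("cutoff", cutoff);
    query.setReadOnly(true);
    query.setFetchSize(1000);
    ScrollableResults results = query.scroll(ScrollMode.FORWARD_ONLY);
    try {
        while (results.next()) {
            String email = (String) results.get(0); // first projection column
            // ... notify the user ...
        }
    } finally {
        results.close();
    }
} finally {
    session.close();
}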

From source file:monasca.thresh.infrastructure.persistence.hibernate.AlarmDefinitionSqlImpl.java

License:Apache License

@SuppressWarnings("unchecked")
private List<SubExpression> findSubExpressions(final Session session, final String alarmDefId) {

    final List<SubExpression> subExpressions = Lists.newArrayList();
    Map<String, Map<String, String>> dimensionMap = Maps.newHashMap();

    final DetachedCriteria subAlarmDefinitionCriteria = DetachedCriteria
            .forClass(SubAlarmDefinitionDb.class, "sad").createAlias("alarmDefinition", "ad")
            .add(Restrictions.conjunction(Restrictions.eqProperty("sad.alarmDefinition.id", "ad.id"),
                    Restrictions.eq("sad.alarmDefinition.id", alarmDefId)))
            .addOrder(Order.asc("sad.id")).setProjection(Projections.property("sad.id"));

    final ScrollableResults subAlarmDefinitionDimensionResult = session
            .createCriteria(SubAlarmDefinitionDimensionDb.class).add(Property
                    .forName("subAlarmDefinitionDimensionId.subExpression.id").in(subAlarmDefinitionCriteria))
            .setReadOnly(true).scroll(ScrollMode.FORWARD_ONLY);

    final ScrollableResults subAlarmDefinitionResult = session
            .getNamedQuery(SubAlarmDefinitionDb.Queries.BY_ALARMDEFINITION_ID).setString("id", alarmDefId)
            .setReadOnly(true).scroll(ScrollMode.FORWARD_ONLY);

    while (subAlarmDefinitionDimensionResult.next()) {

        final SubAlarmDefinitionDimensionDb dim = (SubAlarmDefinitionDimensionDb) subAlarmDefinitionDimensionResult
                .get()[0];
        final SubAlarmDefinitionDimensionId id = dim.getSubAlarmDefinitionDimensionId();

        final String subAlarmId = (String) session.getIdentifier(id.getSubExpression());
        final String name = id.getDimensionName();
        final String value = dim.getValue();

        if (!dimensionMap.containsKey(subAlarmId)) {
            dimensionMap.put(subAlarmId, Maps.<String, String>newTreeMap());
        }
        dimensionMap.get(subAlarmId).put(name, value);

        session.evict(dim);
    }

    while (subAlarmDefinitionResult.next()) {
        final SubAlarmDefinitionDb def = (SubAlarmDefinitionDb) subAlarmDefinitionResult.get()[0];

        final String id = def.getId();
        final AggregateFunction function = AggregateFunction.fromJson(def.getFunction());
        final String metricName = def.getMetricName();
        final AlarmOperator operator = AlarmOperator.fromJson(def.getOperator());
        final Double threshold = def.getThreshold();
        final Integer period = def.getPeriod();
        final Integer periods = def.getPeriods();
        final Boolean deterministic = def.isDeterministic();

        Map<String, String> dimensions = dimensionMap.get(id);

        if (dimensions == null) {
            dimensions = Collections.emptyMap();
        }

        subExpressions.add(new SubExpression(id,
                new AlarmSubExpression(function, new MetricDefinition(metricName, dimensions), operator,
                        threshold, period, periods, deterministic)));

        session.evict(def);
    }

    subAlarmDefinitionDimensionResult.close();
    subAlarmDefinitionResult.close();

    return subExpressions;
}

From source file:nl.strohalm.cyclos.utils.lucene.IndexOperationRunner.java

License:Open Source License

/**
 * Recreates an index. If the force parameter is false, execute only if the index is corrupt or missing
 */
private void rebuild(final Class<? extends Indexable> entityType, final boolean force,
        final boolean createAlert) {
    boolean execute = true;
    // When not forced, run only
    if (!force) {
        final IndexStatus status = indexHandler.getIndexStatus(entityType);
        execute = status != IndexStatus.CORRUPT && status != IndexStatus.MISSING;
    }
    if (!execute) {
        return;
    }

    if (createAlert) {
        // Create the alert for index rebuilding
        createAlert(SystemAlert.Alerts.INDEX_REBUILD_START, entityType);
    }

    IndexWriter indexWriter = cachedWriters.get(entityType);
    if (indexWriter != null) {
        try {
            indexWriter.close();
        } catch (final Exception e) {
            // Silently ignore
        }
        cachedWriters.remove(entityType);
    }
    // Remove all files and recreate the directory
    final File dir = indexHandler.getIndexDir(entityType);
    try {
        FileUtils.deleteDirectory(dir);
    } catch (final IOException e) {
        // Silently ignore
    }
    dir.mkdirs();

    final DocumentMapper documentMapper = indexHandler.getDocumentMapper(entityType);
    final IndexWriter writer = getWriter(entityType);

    // Now, we should add all entities to the index
    boolean success = readonlyTransactionTemplate.execute(new TransactionCallback<Boolean>() {
        public Boolean doInTransaction(final TransactionStatus status) {
            Session session = getSession();
            ScrollableResults scroll = session.createQuery(resolveHql(entityType))
                    .scroll(ScrollMode.FORWARD_ONLY);

            try {
                int index = 0;
                while (scroll.next()) {
                    Indexable entity = (Indexable) scroll.get(0);
                    Document document = documentMapper.map(entity);
                    try {
                        writer.addDocument(document);
                    } catch (CorruptIndexException e) {
                        handleIndexCorrupted(entityType);
                        return false;
                    } catch (IOException e) {
                        LOG.error("Error while adding document to index after rebuilding "
                                + ClassHelper.getClassName(entityType), e);
                        return false;
                    }
                    // Every batch, clear the session and commit the writer
                    if (++index % 30 == 0) {
                        session.clear();
                        commit(entityType, writer);
                    }
                }
                return true;
            } finally {
                scroll.close();
            }
        }
    });

    // Finish the writer operation
    try {
        if (success) {
            commit(entityType, writer);
        } else {
            rollback(entityType, writer);
        }
    } finally {
        if (createAlert) {
            // Create the alert for index rebuilding
            createAlert(SystemAlert.Alerts.INDEX_REBUILD_END, entityType);
        }
    }
}

From source file:nl.strohalm.cyclos.utils.lucene.IndexOperationRunner.java

License:Open Source License

private boolean rebuildMemberAds(final Long userId, final Analyzer analyzer, final Session session) {
    final Class<? extends Indexable> entityType = Ad.class;
    final IndexWriter writer = getWriter(entityType);
    boolean success = true; // flipped to false on any indexing error below

    DocumentMapper documentMapper = indexHandler.getDocumentMapper(entityType);
    try {
        writer.deleteDocuments(new Term("owner", userId.toString()));
    } catch (CorruptIndexException e) {
        handleIndexCorrupted(entityType);
        success = false;
    } catch (IOException e) {
        LOG.error("Error while reindexing a member's advertisements", e);
        success = false;
    }

    ScrollableResults scroll = session
            .createQuery("from Ad a where a.deleteDate is null and a.owner.id = " + userId)
            .scroll(ScrollMode.FORWARD_ONLY);

    try {
        int index = 0;
        while (scroll.next()) {
            Indexable entity = (Indexable) scroll.get(0);
            Document document = documentMapper.map(entity);
            try {
                writer.addDocument(document, analyzer);
            } catch (CorruptIndexException e) {
                handleIndexCorrupted(entityType);
                success = false;
                break;
            } catch (IOException e) {
                LOG.error("Error while adding advertisements to index", e);
                success = false;
                break;
            }
            // Every batch, clear the session and commit the writer
            if (++index % 30 == 0) {
                session.clear();
            }
        }
    } finally {
        scroll.close();
    }

    // Finish the writer operation
    if (success) {
        commit(entityType, writer);
        return true;
    } else {
        rollback(entityType, writer);
        return false;
    }
}

From source file:nl.strohalm.cyclos.utils.lucene.IndexOperationRunner.java

License:Open Source License

private boolean rebuildMemberRecords(final Long userId, final Analyzer analyzer, final Session session) {
    final Class<? extends Indexable> entityType = MemberRecord.class;
    final IndexWriter writer = getWriter(entityType);
    boolean success = true; // flipped to false on any indexing error below

    DocumentMapper documentMapper = indexHandler.getDocumentMapper(entityType);
    try {
        writer.deleteDocuments(new Term("element", userId.toString()));
    } catch (CorruptIndexException e) {
        handleIndexCorrupted(entityType);
        success = false;
    } catch (IOException e) {
        LOG.error("Error while reindexing an user's records", e);
        success = false;
    }

    ScrollableResults scroll = session.createQuery("from MemberRecord mr where mr.element.id = " + userId)
            .scroll(ScrollMode.FORWARD_ONLY);

    try {
        int index = 0;
        while (scroll.next()) {
            Indexable entity = (Indexable) scroll.get(0);
            Document document = documentMapper.map(entity);
            try {
                writer.addDocument(document, analyzer);
            } catch (CorruptIndexException e) {
                handleIndexCorrupted(entityType);
                success = false;
                break;
            } catch (IOException e) {
                LOG.error("Error while adding member records to index", e);
                success = false;
                break;
            }
            // Every batch, clear the session and commit the writer
            if (++index % 30 == 0) {
                session.clear();
            }
        }
    } finally {
        scroll.close();
    }

    // Finish the writer operation
    if (success) {
        commit(entityType, writer);
        return true;
    } else {
        rollback(entityType, writer);
        return false;
    }
}

From source file:org.candlepin.gutterball.curator.ComplianceSnapshotCurator.java

License:Open Source License

/**
 * Retrieves the compliance status counts over the given time span with the specified criteria.
 * The counts are returned in a map of maps, with the outer map mapping the dates to the inner
 * map which maps the statuses to their respective counts.
 * <p></p>
 * If the start and/or end dates are null, the time span will be similarly unrestricted. Note
 * that the time within a given Date object is ignored. If neither the start nor end dates are
 * provided, all known compliance status data will be used.
 *
 * @param startDate
 *  The date at which the time span should begin. If null, all compliance statuses before the
 *  end date (if provided) will be used.
 *
 * @param endDate
 *  The date at which the time span should end. If null, all compliance statuses after the
 *  start date (if provided) will be used.
 *
 * @param sku
 *  A subscription sku to use to filter compliance status counts. If provided, only consumers
 *  using the specified sku will be counted.
 *
 * @param subscriptionName
 *  A subscription name to use to filter compliance status counts. If provided, only consumers
 *  using subscriptions with the specified product name will be counted.
 *
 * @param productName
 *  A product name to use to filter compliance status counts. If provided, only consumers with
 *  an installed product with the specified product name will be counted.
 *
 * @param attributes
 *  A map of entitlement attributes to use to filter compliance status counts. If provided, only
 *  consumers with entitlements having the specified values for the given attributes will be
 *  counted.
 *
 * @param ownerKey
 *  An owner key to use to filter compliance status counts. If provided, only consumers
 *  associated with the specified owner key/account will be counted.
 *
 * @param pageRequest
 *  A PageRequest instance containing paging information from the request. If null, no paging
 *  will be performed.
 *
 * @return
 *  A page containing a map of maps containing the compliance status counts, grouped by day. If
 *  no counts were found for the given time span, the page will contain an empty map.
 */
public Page<Map<Date, Map<String, Integer>>> getComplianceStatusCounts(Date startDate, Date endDate,
        String ownerKey, List<String> consumerUuids, String sku, String subscriptionName, String productName,
        Map<String, String> attributes, PageRequest pageRequest) {

    Page<Map<Date, Map<String, Integer>>> page = new Page<Map<Date, Map<String, Integer>>>();
    page.setPageRequest(pageRequest);

    // Build our query...
    // Impl note: This query's results MUST be sorted by date in ascending order. If it's not,
    // the algorithm below breaks.
    Query query = this.buildComplianceStatusCountQuery(this.currentSession(), startDate, endDate, ownerKey,
            consumerUuids, sku, subscriptionName, productName, attributes);

    // Clamp our dates so they're no further out than "today."
    Date today = new Date();
    if (startDate != null && startDate.after(today)) {
        startDate = today;
    }

    if (endDate != null && endDate.after(today)) {
        endDate = today;
    }

    // Execute & process results...
    Map<Date, Map<String, Integer>> resultmap = new TreeMap<Date, Map<String, Integer>>();
    Map<String, Object[]> cstatusmap = new HashMap<String, Object[]>();

    // Step through our data and do our manual aggregation bits...
    ScrollableResults results = query.scroll(ScrollMode.FORWARD_ONLY);

    if (results.next()) {
        Calendar date = Calendar.getInstance();

        Object[] row = results.get();
        String uuid = (String) row[0];
        row[1] = ((String) row[1]).toLowerCase();
        date.setTime((Date) row[2]);

        // Prime the calendars here...
        Calendar cdate = Calendar.getInstance();
        cdate.setTime(startDate != null ? startDate : date.getTime());
        cdate.set(Calendar.HOUR_OF_DAY, 23);
        cdate.set(Calendar.MINUTE, 59);
        cdate.set(Calendar.SECOND, 59);
        cdate.set(Calendar.MILLISECOND, 999);

        Calendar end = Calendar.getInstance();
        end.setTimeInMillis(endDate != null ? endDate.getTime() : Long.MAX_VALUE);

        for (; this.compareCalendarsByDate(cdate, end) <= 0; cdate.add(Calendar.DATE, 1)) {
            while (this.compareCalendarsByDate(date, cdate) <= 0) {
                // Date is before our current date. Store the uuid's status so we can add it to
                // our counts later.
                cstatusmap.put(uuid, row);

                if (!results.next()) {
                    if (endDate == null) {
                        end.setTimeInMillis(cdate.getTimeInMillis());
                    }

                    break;
                }

                row = results.get();
                uuid = (String) row[0];
                row[1] = ((String) row[1]).toLowerCase();
                date.setTime((Date) row[2]);
            }

            Date hashdate = cdate.getTime();
            Map<String, Integer> statusmap = new HashMap<String, Integer>();

            // Go through and add up all our counts for the day.
            for (Object[] cstatus : cstatusmap.values()) {
                if (cstatus[3] == null || this.compareDatesByDate(hashdate, (Date) cstatus[3]) < 0) {
                    Integer count = statusmap.get((String) cstatus[1]);
                    statusmap.put((String) cstatus[1], (count != null ? count + 1 : 1));
                }
            }

            resultmap.put(hashdate, statusmap);
        }
    }

    results.close();

    // Pagination
    // This is horribly inefficient, but the only way to do it with the current implementation.
    if (pageRequest != null && pageRequest.isPaging()) {
        page.setMaxRecords(resultmap.size());

        int offset = (pageRequest.getPage() - 1) * pageRequest.getPerPage();
        int nextpage = offset + pageRequest.getPerPage();

        // Trim results. :(
        Iterator<Date> iterator = resultmap.keySet().iterator();
        for (int pos = 0; iterator.hasNext(); ++pos) {
            iterator.next();

            if (pos < offset || pos >= nextpage) {
                iterator.remove();
            }
        }
    }

    page.setPageData(resultmap);
    return page;
}
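
A sketch of how a caller might consume the result, based on the javadoc above; the getPageData() accessor, the curator reference, and the argument values are assumptions for illustration:

Date startDate = null; // null = unbounded start, per the javadoc
Date endDate = new Date();
Page<Map<Date, Map<String, Integer>>> page = curator.getComplianceStatusCounts(
        startDate, endDate, "some_owner_key", null, null, null, null, null, null);

for (Map.Entry<Date, Map<String, Integer>> day : page.getPageData().entrySet()) {
    // one entry per day in the requested span
    for (Map.Entry<String, Integer> status : day.getValue().entrySet()) {
        System.out.println(day.getKey() + " " + status.getKey() + " = " + status.getValue());
    }
}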