Example usage for org.hibernate FlushMode COMMIT

List of usage examples for org.hibernate FlushMode COMMIT

Introduction

In this page you can find the example usage for org.hibernate FlushMode COMMIT.

Prototype

FlushMode COMMIT

To view the source code for org.hibernate FlushMode COMMIT, use the Source Link below.

Click Source Link

Document

The Session is flushed when Transaction#commit is called.

Usage

From source file:org.unitime.timetable.solver.TimetableDatabaseSaver.java

License:Open Source License

/**
 * Saves the solver solution(s) to the database: persists the timetable, optionally
 * commits the new solution(s) (uncommitting any previously committed ones of the same
 * owner), refreshes the affected course solutions, and finally stores the solver log.
 * The Hibernate session is always closed in the finally block because this code may
 * run in a separate thread.
 */
public void save() {
    org.hibernate.Session hibSession = null;
    Transaction tx = null;
    try {
        TimetableManagerDAO dao = new TimetableManagerDAO();
        hibSession = dao.getSession();
        // Bypass the second-level cache and defer flushing until commit for bulk-save speed.
        hibSession.setCacheMode(CacheMode.IGNORE);
        hibSession.setFlushMode(FlushMode.COMMIT);

        tx = hibSession.beginTransaction();

        // Persist the timetable itself; returns the ids of the saved solutions (may be null).
        Long[] solutionIds = save(hibSession);

        tx.commit();

        // Ids of previously committed solutions that were uncommitted and need a refresh.
        HashSet<Long> refreshIds = new HashSet<Long>();
        if (iCommitSolution && solutionIds != null) {
            HashSet<Solution> touchedSolutions = new HashSet<Solution>();
            // Use a fresh session for the commit phase.
            if (hibSession != null && hibSession.isOpen())
                hibSession.close();
            hibSession = dao.getSession();

            iProgress.setPhase("Committing solution ...", 2 * solutionIds.length);
            tx = hibSession.beginTransaction();
            // First pass: uncommit any currently committed solution of the same owner.
            for (int i = 0; i < solutionIds.length; i++) {
                Solution solution = (new SolutionDAO()).get(solutionIds[i]);
                Solution committedSolution = solution.getOwner().getCommittedSolution();
                if (committedSolution != null) {
                    committedSolution.uncommitSolution(hibSession,
                            getModel().getProperties().getProperty("General.OwnerPuid"));
                    refreshIds.add(committedSolution.getUniqueId());
                    touchedSolutions.add(committedSolution);
                }
                touchedSolutions.add(solution);
                iProgress.incProgress();
            }
            // Second pass: commit the newly saved solutions.
            for (int i = 0; i < solutionIds.length; i++) {
                Solution solution = (new SolutionDAO()).get(solutionIds[i]);
                List<String> messages = new ArrayList<String>();
                solution.commitSolution(messages, hibSession,
                        getModel().getProperties().getProperty("General.OwnerPuid"));
                touchedSolutions.add(solution);
                for (String m : messages) {
                    iProgress.error("Unable to commit: " + m);
                }
                hibSession.update(solution);
                iProgress.incProgress();
            }
            tx.commit();
            // Optional externally configured post-commit hook.
            String className = ApplicationProperty.ExternalActionSolutionCommit.value();
            if (className != null && className.trim().length() > 0) {
                ExternalSolutionCommitAction commitAction = (ExternalSolutionCommitAction) (Class
                        .forName(className).newInstance());
                commitAction.performExternalSolutionCommitAction(touchedSolutions, hibSession);
            }
        }

        // BUGFIX: guard against solutionIds == null -- the original dereferenced
        // solutionIds.length here unconditionally even though it checks for null elsewhere.
        if (solutionIds != null) {
            iProgress.setPhase("Refreshing solution ...", solutionIds.length + refreshIds.size());
            for (Long solutionId : refreshIds) {
                try {
                    // BUGFIX: this call used to sit OUTSIDE an empty try block, so a refresh
                    // failure aborted the whole save instead of being downgraded to a warning.
                    refreshCourseSolution(solutionId);
                } catch (Exception e) {
                    iProgress.warn("Unable to refresh solution " + solutionId + ", reason:" + e.getMessage(), e);
                }
                iProgress.incProgress();
            }
            for (int i = 0; i < solutionIds.length; i++) {
                try {
                    refreshCourseSolution(solutionIds[i]);
                } catch (Exception e) {
                    iProgress.warn("Unable to refresh solution " + solutionIds[i] + ", reason:" + e.getMessage(),
                            e);
                }
                iProgress.incProgress();
            }

            getModel().getProperties().setProperty("General.SolutionId", solutionIds);
            iProgress.info("Solution successfully saved.");

            // Store the solver log in a fresh session, one transaction per solution.
            if (hibSession != null && hibSession.isOpen())
                hibSession.close();
            hibSession = dao.getSession();

            for (int i = 0; i < solutionIds.length; i++) {
                tx = hibSession.beginTransaction();
                Solution solution = (new SolutionDAO()).get(solutionIds[i]);
                LogInfo lInfo = new LogInfo();
                lInfo.setLog(iProgress.getLog());
                SolutionInfo logInfo = new SolutionInfo();
                logInfo.setDefinition(SolverInfoDef.findByName(hibSession, "LogInfo"));
                logInfo.setOpt(null);
                logInfo.setSolution(solution);
                logInfo.setInfo(lInfo, getFileProxy());
                hibSession.save(logInfo);
                tx.commit();
            }
        }
    } catch (Exception e) {
        iProgress.fatal("Unable to save timetable, reason: " + e.getMessage(), e);
        sLog.error(e.getMessage(), e);
        // BUGFIX: tx is null if the failure happened before beginTransaction().
        if (tx != null)
            tx.rollback();
    } finally {
        // here we need to close the session since this code may run in a separate thread
        if (hibSession != null && hibSession.isOpen())
            hibSession.close();
    }
}

From source file:owldb.util.HibernateUtil.java

License:Open Source License

/**
 * Executes the given Hibernate wrapper.
 * /*from w  w  w . j a v a 2  s  .co  m*/
 * @param <T> The type of the result value
 * @param factory A session factory
 * @param wrapper The wrapper to execute
 * @return The result object
 */
@SuppressWarnings("unchecked")
public static <T> T executeTransaction(final SessionFactory factory, final HibernateWrapper<T> wrapper) {
    final Session session = factory.openSession();
    session.setFlushMode(FlushMode.COMMIT);
    Transaction t = null;
    try {
        t = session.beginTransaction();
        final Object result = wrapper.doInHibernate(session);
        t.commit();
        return (T) result;
    } catch (final RuntimeException ex) {
        if (t != null)
            t.rollback();
        throw ex;
    } finally {
        session.close();
    }
}

From source file:ubic.gemma.persistence.persister.ArrayDesignPersister.java

License:Apache License

/**
 * Persist an entirely new array design, including composite sequences and any
 * associated new sequences. Flushing is deferred until commit while the design
 * graph is assembled, and the default flush mode is restored in all cases.
 *
 * @param arrayDesign the transient design to persist; may be null
 * @return the persisted design, or null if the input was null
 * @throws IllegalArgumentException if the design has no primary taxon
 */
private ArrayDesign persistNewArrayDesign(ArrayDesign arrayDesign) {
    if (arrayDesign == null) {
        return null;
    }

    AbstractPersister.log.info("Persisting new platform " + arrayDesign.getName());

    try {
        this.getSessionFactory().getCurrentSession().setFlushMode(FlushMode.COMMIT);

        if (arrayDesign.getDesignProvider() != null) {
            arrayDesign.setDesignProvider(this.persistContact(arrayDesign.getDesignProvider()));
        }

        if (arrayDesign.getPrimaryTaxon() == null) {
            throw new IllegalArgumentException("Primary taxon cannot be null");
        }
        arrayDesign.setPrimaryTaxon((Taxon) this.persist(arrayDesign.getPrimaryTaxon()));

        for (DatabaseEntry reference : arrayDesign.getExternalReferences()) {
            reference.setExternalDatabase(this.persistExternalDatabase(reference.getExternalDatabase()));
        }

        AbstractPersister.log.info("Persisting " + arrayDesign);

        if (arrayDesign.getAuditTrail() != null && this.isTransient(arrayDesign.getAuditTrail())) {
            arrayDesign.getAuditTrail().setId(null);
        }

        // Detach the composite sequences, create the bare design first, then
        // re-attach them and persist the sequence associations.
        Collection<CompositeSequence> detachedSequences = new ArrayList<>(arrayDesign.getCompositeSequences());
        arrayDesign.getCompositeSequences().clear();
        arrayDesign = arrayDesignDao.create(arrayDesign);
        arrayDesign.getCompositeSequences().addAll(detachedSequences);
        arrayDesign = this.persistArrayDesignCompositeSequenceAssociations(arrayDesign);
        arrayDesignDao.update(arrayDesign);

    } finally {
        // Always restore the session's default flush behavior.
        this.getSessionFactory().getCurrentSession().setFlushMode(FlushMode.AUTO);
    }
    return arrayDesign;
}

From source file:ubic.gemma.persistence.persister.ExpressionPersister.java

License:Apache License

@Override
@Transactional
public ExpressionExperiment persist(ExpressionExperiment ee, ArrayDesignsForExperimentCache cachedArrays) {

    // Nothing to do for null or already-persistent experiments.
    if (ee == null)
        return null;
    if (!this.isTransient(ee))
        return ee;

    this.clearCache();

    // An existing experiment with the same short name is returned as-is; this
    // method only creates, it does not update.
    ExpressionExperiment existingEE = expressionExperimentDao.findByShortName(ee.getShortName());
    if (existingEE != null) {
        AbstractPersister.log.warn("Expression experiment with same short name exists (" + existingEE
                + "), returning it (this method does not handle updates)");
        return existingEE;
    }

    try {

        AbstractPersister.log.info(">>>>>>>>>> Persisting " + ee);

        // Defer flushing until commit: the experiment graph is persisted piecewise
        // below, and premature flushes would hit incomplete associations.
        this.getSessionFactory().getCurrentSession().setFlushMode(FlushMode.COMMIT);

        // Persist the simple associations first.
        ee.setPrimaryPublication((BibliographicReference) this.persist(ee.getPrimaryPublication()));
        ee.setOwner((Contact) this.persist(ee.getOwner()));
        ee.setTaxon(this.persistTaxon(ee.getTaxon()));

        this.persistCollectionElements(ee.getQuantitationTypes());
        this.persistCollectionElements(ee.getOtherRelevantPublications());

        if (ee.getAccession() != null) {
            this.fillInDatabaseEntry(ee.getAccession());
        }

        // This has to come first and be persisted, so our FactorValues get persisted before we process the
        // BioAssays.
        if (ee.getExperimentalDesign() != null) {
            ExperimentalDesign experimentalDesign = ee.getExperimentalDesign();
            experimentalDesign.setId(null); // in case of retry.
            this.processExperimentalDesign(experimentalDesign);
            assert experimentalDesign.getId() != null;
            ee.setExperimentalDesign(experimentalDesign);
        }

        this.checkExperimentalDesign(ee);

        // This does most of the preparatory work.
        this.processBioAssays(ee, cachedArrays);

        ee = expressionExperimentDao.create(ee);

    } finally {
        // Restore the default flush mode even if persisting failed part-way.
        this.getSessionFactory().getCurrentSession().setFlushMode(FlushMode.AUTO);
    }

    this.clearCache();
    AbstractPersister.log.info("<<<<<< FINISHED Persisting " + ee);
    return ee;
}

From source file:ubic.gemma.persistence.persister.GenomePersister.java

License:Apache License

/**
 * Creates the gene if no persistent counterpart exists, otherwise updates the
 * existing one with the transient instance's data.
 *
 * @param gene transient instance that will be used to provide information to update persistent version.
 * @return new or updated gene instance.
 */
private Gene persistOrUpdateGene(Gene gene) {
    if (gene == null) {
        return null;
    }

    // Prefer a direct load by id when available; otherwise search by the gene's fields.
    Gene existingGene = (gene.getId() != null) ? geneDao.load(gene.getId()) : geneDao.find(gene);

    // No persistent counterpart: create a brand new gene.
    if (existingGene == null) {
        return this.persistGene(gene, false);
    }

    if (AbstractPersister.log.isDebugEnabled()) {
        AbstractPersister.log.debug("Updating " + existingGene);
    }

    /*
     * This allows stale data to exist in this Session, but flushing prematurely causes constraint violations.
     * Probably we should fix this some other way.
     */
    this.getSession().setFlushMode(FlushMode.COMMIT);

    return this.updateGene(existingGene, gene);
}

From source file:ubic.gemma.persistence.persister.PersisterHelper.java

License:Apache License

/**
 * Persists any entity, first ensuring that auditable entities carry a
 * persistent audit trail. Flushing is deferred until commit for the duration
 * of the call and the default flush mode is restored afterwards.
 *
 * @param entity the entity to persist
 * @return the persisted entity as produced by the superclass
 */
@Override
@Transactional
public Object persist(Object entity) {
    try {
        this.getSessionFactory().getCurrentSession().setFlushMode(FlushMode.COMMIT);

        if (entity instanceof Auditable) {
            Auditable auditable = (Auditable) entity;

            // Attach a fresh audit trail if none exists yet, then make sure the
            // trail itself is persistent before the entity is saved.
            if (auditable.getAuditTrail() == null) {
                auditable.setAuditTrail(AuditTrail.Factory.newInstance());
            }
            auditable.setAuditTrail(persistAuditTrail(auditable.getAuditTrail()));
        }

        return super.persist(entity);
    } finally {
        // Restore the default flush behavior regardless of the outcome.
        this.getSessionFactory().getCurrentSession().setFlushMode(FlushMode.AUTO);
    }
}

From source file:won.protocol.util.hibernate.FlushModeSettingHibernateJpaDialect.java

License:Apache License

/**
 * Adjusts the session's flush mode for the upcoming transaction and remembers
 * the mode to restore afterwards (null if nothing was changed).
 *
 * @param entityManager the entity manager whose underlying session is adjusted
 * @param readOnly whether the transaction is read-only
 * @param name the transaction name (unused here)
 * @return a SessionTransactionData holding the session and the mode to restore
 */
public Object prepareTransaction(EntityManager entityManager, boolean readOnly, String name)
        throws PersistenceException {

    final Session session = getSession(entityManager);
    final FlushMode modeBefore = session.getFlushMode();
    FlushMode restoreTo = null;

    if (getFlushMode() != null) {
        // An explicitly configured flush mode always wins.
        session.setFlushMode(flushMode);
        restoreTo = modeBefore;
    } else if (readOnly) {
        // We should suppress flushing for a read-only transaction.
        session.setFlushMode(FlushMode.MANUAL);
        restoreTo = modeBefore;
    } else if (modeBefore.lessThan(FlushMode.COMMIT)) {
        // We need AUTO or COMMIT for a non-read-only transaction.
        session.setFlushMode(FlushMode.AUTO);
        restoreTo = modeBefore;
    }
    return new SessionTransactionData(session, restoreTo);
}

From source file:zeroMQ.messageQueue.CompletePipelineWorker.java

License:Open Source License

/**
 * Thread run method.
 * Pops serialized Gate documents from the message queue in an endless loop and,
 * for each one: opens a Hibernate session, deserializes the document, optionally
 * archives input/preprocessed/classified/result files, runs preprocessing and
 * sentiment classification, then commits the document metadata to the database.
 * Per-document failures are logged and the loop continues with the next message.
 */
@Override
public void run() {
    DocumentMetaData dbDocument = null;

    boolean loop = true;

    // Wall-clock instrumentation shared with the preprocessing and classification stages.
    PerformanceMeasurement timeMeasurement = null;
    try {
        timeMeasurement = new PerformanceMeasurement(this.getName());
        this.preprocessing.setTimeMeasurement(timeMeasurement);
        this.classification.setTimeMeasurement(timeMeasurement);
    } catch (NullPointerException e1) {
        log.error(e1.getClass().getName() + " occured on initializing PerformanceMeasurement");
        log.error(e1.getMessage());
        log.error("System exit - Nullpointer on initializing PerformanceMeasurement");
        // NOTE(review): MIN_PRIORITY (Thread constant, value 1) is used as the exit
        // code here -- presumably just "non-zero"; confirm this is intentional.
        System.exit(MIN_PRIORITY);
    } catch (IOException e1) {
        log.error(e1.getClass().getName() + " occured on initializing PerformanceMeasurement");
        log.error(e1.getMessage());
        log.error("System exit - IOException on initializing PerformanceMeasurement");
        System.exit(MIN_PRIORITY);
    }

    while (loop) {
        String currentMessageString = null;
        Document gateDoc = null;
        Long docID = null;

        try {
            // Blocks until a serialized Gate document is available on the queue.
            currentMessageString = queue.popMessage();

            log.info(
                    "*******************************************************************************************");
            log.info("Thread " + this.getName() + " with ID: " + this.getId()
                    + " startet executing preprocessing");
            timeMeasurement.startTimeMeasurementLoop();

            // NOTE(review): a new session is opened every iteration but never closed
            // in this method -- presumably closed via clean()/databaseConn; verify,
            // otherwise this leaks a session per document.
            hibernateSession = databaseConn.openSession();
            hibernateSession.setFlushMode(FlushMode.COMMIT);
            // Temporarily relax the isolation level; the original level is restored
            // after the commit below.
            int level = hibernateSession.connection().getTransactionIsolation();
            log.trace("TransactionIsolationLevel: " + level);
            hibernateSession.connection().setTransactionIsolation(Connection.TRANSACTION_READ_UNCOMMITTED);
            databaseConn.setHibernateSession(hibernateSession);

            long startLoadDocument = System.currentTimeMillis();

            log.debug("Starting load Gate-Document for preprocessing on: " + startLoadDocument);

            // Deserialize the queued XML back into a Gate document.
            gateDoc = Factory.newDocument(currentMessageString);

            long endLoadDocument = System.currentTimeMillis();
            log.debug("End load Gate-Document for preprocessing on: " + endLoadDocument);
            long loadGateDocumentDuration = endLoadDocument - startLoadDocument;
            log.debug("Loading Gate-Document needs : " + loadGateDocumentDuration + " ms ");

            dbDocument = classification.extractDocumentMetaData(gateDoc);

            docID = dbDocument.getId();

            Date publicationDate = dbDocument.getPublicationDate();

            // Optionally archive the raw input document (plain or zipped).
            boolean writeInputFile = Boolean.parseBoolean(configProperties.get("writeInputFile").toString());
            if (writeInputFile) {
                boolean zipFile = Boolean.parseBoolean(configProperties.get("zipFile").toString());

                String suffix = ".gate-xml.xml";

                if (zipFile) {
                    String fileName = preprocessing.extractIDFromGateFeature(gateDoc);

                    File outputDir = GlobalParameters.createTmpDirectoriesFromDate(publicationDate);
                    File outputZipFile = new File(outputDir, fileName + suffix);
                    outputZipFile = GlobalParameters.zipStringToFile(gateDoc.toXml(), outputZipFile);
                    networkConfig.copyFile(outputZipFile, true);

                    DatabaseFacade.getSingletonFacade().createDocumentVersion("gate-xml", outputZipFile,
                            dbDocument);
                } else {

                    File outputDir = GlobalParameters.createTmpDirectoriesFromDate(publicationDate);
                    File outputFile = preprocessing.writeGateDocumentXML(gateDoc, suffix, outputDir);
                    networkConfig.copyFile(outputFile, true);

                    DatabaseFacade.getSingletonFacade().createDocumentVersion("gate-xml", outputFile,
                            dbDocument);
                }
            }

            // Run the preprocessing pipeline on the document.
            gateDoc = preprocessing.executeDocument(gateDoc);

            String preprocessedDocumentStr = gateDoc.toXml();

            // Optionally archive the preprocessed document (plain or zipped).
            boolean writepreprocessingOutputFile = Boolean
                    .parseBoolean(configProperties.get("writeOutputFile").toString());
            if (writepreprocessingOutputFile) {
                boolean zipFile = Boolean.parseBoolean(configProperties.get("zipFile").toString());

                if (zipFile) {
                    String fileName = preprocessing.extractIDFromGateFeature(gateDoc);

                    File outputDir = GlobalParameters.createTmpDirectoriesFromDate(publicationDate);
                    File outputZipFile = new File(outputDir, fileName + ".preprocessed.xml");
                    outputZipFile = GlobalParameters.zipStringToFile(preprocessedDocumentStr, outputZipFile);
                    networkConfig.copyFile(outputZipFile, true);
                    DatabaseFacade.getSingletonFacade().createDocumentVersion("preprocessed", outputZipFile,
                            dbDocument);
                } else {
                    String suffix = ".preprocessed.xml";
                    File outputDir = GlobalParameters.createTmpDirectoriesFromDate(publicationDate);
                    File outputFile = preprocessing.writeGateDocumentXML(gateDoc, suffix, outputDir);
                    networkConfig.copyFile(outputFile, true);
                    DatabaseFacade.getSingletonFacade().createDocumentVersion("preprocessed", outputFile,
                            dbDocument);
                }
            }

            // Choose the classification strategy based on whether the document
            // contains "SO" (sentiment object) annotations.
            AnnotationSet docAnnotSet = gateDoc.getAnnotations();
            Set<String> annotationTypes = docAnnotSet.getAllTypes();

            if (!annotationTypes.contains("SO")) {
                /**
                 * No Sentimentobject 
                 * -> POS/NEG ORIENTATION TERM WORD COUNT AGGREGATION
                 */
                log.info(
                        "Input Document contains no SO -> starting with PosNegWordRatio Sentiment on database DocumentMetaData-object: "
                                + docID);
                // Remove stale sentiments of this classifier type before recomputing.
                DatabaseFacade.getSingletonFacade().deleteSentimentsWithClassifierType(dbDocument,
                        "PosNegWordRatio", "CRISP");

                try {
                    this.calculatePosNegWordRatioSentiment(gateDoc, dbDocument, classification);

                } catch (HibernateException hibex) {
                    log.error("Hibernateexception on calculating Pos/Neg Word-Ratio Sentiment on document: "
                            + gateDoc.getName() + " in databaseObject with ID: " + docID);
                    log.error(hibex.getMessage());
                    // Reset the Gate application state so the next document starts clean.
                    GateInitSingleton.executeResetApplication(gateDoc);
                }
            } else {
                /*
                 * Sentimentobject extracted
                 * KnowledgebasedCrisp Sentimentextraction
                 */
                log.info(
                        "Input Document contains SO -> starting with Knowledgebased CRISP Sentimentclassification on database DocumentMetaData-object: "
                                + docID);

                gateDoc = classification.executeDocument(gateDoc, dbDocument);

                // Optionally archive the classified document and the result XML.
                boolean writeOutputFile = Boolean
                        .parseBoolean(configProperties.get("writeOutputFile").toString());
                if (writeOutputFile) {
                    boolean zipFile = Boolean.parseBoolean(configProperties.get("zipFile").toString());

                    if (zipFile) {
                        String currentDocString = gateDoc.toXml();
                        String fileName = classification.extractIDFromGateFeature(gateDoc);

                        File outputDir = GlobalParameters.createTmpDirectoriesFromDate(publicationDate);
                        File outputZipFile = new File(outputDir, fileName + ".classified.xml");
                        outputZipFile = GlobalParameters.zipStringToFile(currentDocString, outputZipFile);
                        networkConfig.copyFile(outputZipFile, true);
                        DatabaseFacade.getSingletonFacade().createDocumentVersion("classified", outputZipFile,
                                dbDocument);
                    } else {
                        String suffix = ".classified.xml";
                        File outputDir = GlobalParameters.createTmpDirectoriesFromDate(publicationDate);
                        File outputFile = classification.writeGateDocumentXML(gateDoc, suffix, outputDir);
                        networkConfig.copyFile(outputFile, true);
                        DatabaseFacade.getSingletonFacade().createDocumentVersion("classified", outputFile,
                                dbDocument);
                    }

                    boolean writeResultXML = Boolean
                            .parseBoolean(configProperties.get("writeResultXML").toString());
                    if (writeResultXML) {
                        boolean zipResultFile = Boolean
                                .parseBoolean(configProperties.get("zipFile").toString());

                        String fileName = classification.extractIDFromGateFeature(gateDoc);
                        File outputDir = GlobalParameters.createTmpDirectoriesFromDate(publicationDate);
                        File classificationResultXML = new File(outputDir, fileName + ".result.xml");
                        log.info("Starting marshalling Result of Classification to: "
                                + classificationResultXML.getAbsolutePath());
                        ClassifiedDocument result = classification.getClassifiedDocument();
                        JAXB.marshal(result, classificationResultXML);

                        // NOTE(review): the unzipped result XML is only copied/registered
                        // when zipResultFile is true -- confirm this asymmetry is intended.
                        if (zipResultFile) {
                            GlobalParameters.zipFile(classificationResultXML);

                            classificationResultXML = new File(
                                    classificationResultXML.getAbsolutePath() + ".zip");
                            networkConfig.copyFile(classificationResultXML, true);
                            DatabaseFacade.getSingletonFacade().createDocumentVersion("result",
                                    classificationResultXML, dbDocument);
                        }
                    }
                }
            }

            //Write txt-File with DocumentContent
            String txtFileName = classification.extractIDFromGateFeature(gateDoc);
            File outputDir = GlobalParameters.createTmpDirectoriesFromDate(publicationDate);
            File txtContentFile = new File(outputDir, txtFileName + ".txt");

            String gateDocContent = gateDoc.getContent().toString();
            GlobalParameters.zipStringToFile(gateDocContent, txtContentFile);
            txtContentFile = new File(txtContentFile.getAbsolutePath() + ".zip");
            networkConfig.copyFile(txtContentFile, true);

            DatabaseFacade.getSingletonFacade().createDocumentVersion("txt", txtContentFile, dbDocument);

            // Persist the document metadata in its own transaction and time the commit.
            databaseConn.startTransaction();
            tx = databaseConn.getTx();
            databaseConn.saveOrUpdateObjectToDatabase(dbDocument);
            log.trace("Starting commit");
            long start = System.currentTimeMillis();
            tx.commit();
            long end = System.currentTimeMillis();
            long commitTime = end - start;
            log.trace("Commit time for one doucment: " + commitTime);
            // Restore the isolation level saved at the top of the iteration.
            hibernateSession.connection().setTransactionIsolation(level);
            log.info("Added and committet new Document in Database with ID: " + dbDocument.getId()
                    + " sucessfully");
            dbDocument = null;
        } catch (NullPointerException npe) {
            log.error("NullPointerException: " + npe.getClass().getName()
                    + " occured during processing Document");
            if (npe.getMessage() != null) {
                log.error(npe.getMessage());
            } else {
                log.error("NullPointerException without message occured");
                npe.printStackTrace();
            }
            continue;
        } catch (HibernateException hibex) {
            log.error("HibernateException: " + hibex.getClass().getName()
                    + " occured during processing Document");
            log.error(hibex.getMessage());
            log.error("Continue with next message");

            // Roll back the document transaction if one was started.
            if (tx != null) {
                tx.rollback();
            }
            continue;
        } catch (RuntimeException runtEx) {
            log.error(
                    "RuntimeException: " + runtEx.getClass().getName() + " occured during processing Document");
            log.error(runtEx.getMessage());
            log.error("Continue with next message");

            if (tx != null) {
                tx.rollback();
            }
            continue;
        } catch (InterruptedException iex) {
            log.error("take message from Messageque interrupted");
            log.error(iex.getMessage());
            log.error("Continue with next message");
            continue;
        } catch (ExecutionException executionEx) {
            log.error("ExecutionException on preprocessing occured");
            log.error(executionEx.getMessage());
            continue;
        } catch (Exception e) {
            log.error("Exception " + e.getClass().getName() + " occured on preprocessing document: ");
            log.error(e.getMessage());
            log.error("Continue with next message");
            continue;
        } finally {
            // Release per-document resources (Gate document, measurement state) every iteration.
            dbDocument = null;
            clean(timeMeasurement, gateDoc);
            log.info("Finished with current Document withID: " + docID);
        }
    }

    log.trace("End of run-Method in PreprocessingWorkerThread: " + this.getName());
}