Example usage for org.hibernate Session flush

List of usage examples for org.hibernate Session flush

Introduction

In this page you can find the example usage for org.hibernate Session flush.

Prototype

void flush() throws HibernateException;

Source Link

Document

Force this session to flush.

Usage

From source file:au.org.theark.admin.model.dao.AdminDao.java

License:Open Source License

/**
 * Replaces the set of roles linked to the given module with the selected roles,
 * and seeds a default READ permission policy for each new module/role pairing.
 *
 * @param arkModule        the module whose role links are being (re)created
 * @param selectedArkRoles the roles that should be linked to the module
 */
public void createArkModuleRole(ArkModule arkModule, Collection<ArkRole> selectedArkRoles) {
    Session session = getSession();

    // Remove previous list of ArkRoles so the selected set fully replaces them
    List<ArkModuleRole> arkModuleRoles = getArkModuleRoleByArkModule(arkModule);
    for (ArkModuleRole arkModuleRoleToRemove : arkModuleRoles) {
        session.delete(arkModuleRoleToRemove);
    }

    ArkPermission arkPermission = getArkPermissionByName("READ");

    // The function list depends only on the module, so fetch it once here
    // instead of once per selected role (it was previously inside the loop).
    List<ArkFunction> arkFunctions = getArkFunctionListByArkModule(arkModule);

    // Insert the ArkRoles for the ArkModule
    for (ArkRole arkRole : selectedArkRoles) {
        ArkModuleRole arkModuleRole = new ArkModuleRole();
        arkModuleRole.setArkModule(arkModule);
        arkModuleRole.setArkRole(arkRole);

        session.save(arkModuleRole);

        // Add a default READ permission to NEW module/roles
        for (ArkFunction arkFunction : arkFunctions) {
            ArkRolePolicyTemplate arkRolePolicyTemplate = new ArkRolePolicyTemplate();
            arkRolePolicyTemplate.setArkRole(arkRole);
            arkRolePolicyTemplate.setArkModule(arkModule);
            arkRolePolicyTemplate.setArkFunction(arkFunction);
            arkRolePolicyTemplate.setArkPermission(arkPermission);

            session.save(arkRolePolicyTemplate);
        }
    }

    // Flush must be the last thing to call. If there is any other code/logic to be added make sure session.flush() is invoked after that.
    session.flush();
}

From source file:au.org.theark.admin.model.dao.AdminDao.java

License:Open Source License

/**
 * Synchronises the roles linked to the given module: every existing link is
 * removed and a fresh link row is created for each selected role.
 *
 * @param arkModule        the module whose role links are being updated
 * @param selectedArkRoles the roles that should remain linked to the module
 */
public void updateArkModuleRole(ArkModule arkModule, Collection<ArkRole> selectedArkRoles) {
    Session session = getSession();

    // Drop every current role link for this module first.
    for (ArkModuleRole existingLink : getArkModuleRoleByArkModule(arkModule)) {
        session.delete(existingLink);
    }

    // Then create a new link row for each selected role.
    for (ArkRole selectedRole : selectedArkRoles) {
        ArkModuleRole newLink = new ArkModuleRole();
        newLink.setArkModule(arkModule);
        newLink.setArkRole(selectedRole);
        session.save(newLink);
    }

    // Flush must remain the last call; insert any further logic before it.
    session.flush();
}

From source file:au.org.theark.core.dao.CSVLoaderDao.java

License:Open Source License

/**
 * Calls Hibernate and inserts the data into the database.
 *
 * @param statement the raw SQL statement to execute
 */
/**
 * Executes the given raw SQL statement against the current Hibernate session
 * and flushes the session.
 *
 * @param statement the complete SQL statement to execute (logged verbatim)
 */
private void insertIntoDatabaseByCreateSQLQuery(String statement) {
    log.info(statement);
    Session session = getSession();
    // NOTE(review): a transaction is begun here but never explicitly committed
    // in this method — presumably commit/rollback is handled by surrounding
    // transaction management (e.g. Spring); confirm, otherwise the update may
    // never be committed.
    session.beginTransaction();
    // NOTE(review): the statement is executed as-is; callers must supply only
    // trusted, internally-built SQL (no user input), or this is injectable.
    session.createSQLQuery(statement).executeUpdate();
    session.flush();
    log.info("SQL insertIntoDatabase SUCCEEDED");
}

From source file:au.org.theark.core.dao.CSVLoaderDao.java

License:Open Source License

/**
 * Loads the temporarily created file back into the database, into a temporary table, using the [LOAD DATA INFILE] SQL statement.
 *
 * @param temporaryFileName the delimited file to load
 * @param databaseName the schema holding the temporary table
 * @param temporaryTableName the table to load the file into
 * @return the number of rows in the table
 */
/**
 * Loads the temporary delimited file into the given temporary table using
 * MySQL's LOAD DATA LOCAL INFILE, then returns the number of rows in the table.
 *
 * @param temporaryFileName  path of the delimited file to load
 * @param databaseName       schema that holds the temporary table
 * @param temporaryTableName table to load the file into
 * @return the row count of the table after the load, or 0 on failure
 */
public int loadTempFileToDatabase(String temporaryFileName, String databaseName, String temporaryTableName) {
    int rowCount = 0;

    StringBuilder tableName = new StringBuilder();
    tableName.append(databaseName);
    tableName.append(".");
    tableName.append(temporaryTableName);

    // BUG FIX: the original condition tested temporaryTableName twice; the
    // file name must be non-null as well before attempting the load.
    if (temporaryFileName != null && temporaryTableName != null) {
        Session session = getSession();
        session.beginTransaction();
        // NOTE(review): file and table names are concatenated into the SQL;
        // they must come from trusted internal callers, never user input.
        StringBuilder sqlTempFileToTable = new StringBuilder();
        sqlTempFileToTable.append("LOAD DATA LOCAL INFILE '");
        sqlTempFileToTable.append(temporaryFileName);
        sqlTempFileToTable.append("' INTO TABLE ");
        sqlTempFileToTable.append(tableName.toString());
        sqlTempFileToTable.append(" FIELDS TERMINATED BY '");
        sqlTempFileToTable.append(delimiterCharacter);
        sqlTempFileToTable.append("' ENCLOSED BY '\"' ");
        sqlTempFileToTable.append("LINES TERMINATED BY '\\n' ");
        sqlTempFileToTable.append("IGNORE 1 LINES;"); // skip the header row

        try {
            log.info("Loading data into temporary table: " + tableName);
            session.createSQLQuery(sqlTempFileToTable.toString()).executeUpdate();
            log.info("select count(*) from " + tableName);
            BigInteger rowInteger = (BigInteger) session
                    .createSQLQuery("SELECT count(*) from " + tableName.toString()).uniqueResult();
            rowCount = rowInteger.intValue();

            log.info("SQL loadTempFileToDatabase SUCCEEDED");
        } catch (JDBCException e) {
            log.error(e.getMessage());
            log.error("SQL loadTempFileToDatabase FAILED");
        } finally {
            // Flush regardless of outcome so the session state stays consistent.
            session.flush();
        }
    }
    log.info("Rowcount: " + rowCount);

    return rowCount;
}

From source file:au.org.theark.core.dao.CSVLoaderDao.java

License:Open Source License

/**
 * Creates a temporary table to store data loaded from an external file.
 *
 * @param databaseName the schema in which to create the table
 * @param temporaryTableName the name of the temporary table to create
 * @param columnNameList the column names for the table
 */
/**
 * Creates a temporary table (dropping any previous one of the same name)
 * with one varchar(255) column per supplied column name.
 *
 * @param databaseName       schema in which to create the table
 * @param temporaryTableName name of the temporary table
 * @param columnNameList     ordered column names for the table
 */
public void createTemporaryTable(String databaseName, String temporaryTableName, List<String> columnNameList) {
    if (temporaryTableName != null && !columnNameList.isEmpty()) {
        Session session = getSession();
        session.beginTransaction();

        // StringBuilder: no synchronization needed for these local buffers.
        StringBuilder sqlDropTemporyTable = new StringBuilder();
        sqlDropTemporyTable.append("DROP TABLE ");
        sqlDropTemporyTable.append(databaseName);
        sqlDropTemporyTable.append(".");
        sqlDropTemporyTable.append(temporaryTableName);

        StringBuilder sqlCreateTemporyTable = new StringBuilder();
        sqlCreateTemporyTable.append("CREATE TEMPORARY TABLE ");
        sqlCreateTemporyTable.append(databaseName);
        sqlCreateTemporyTable.append(".");
        sqlCreateTemporyTable.append(temporaryTableName);
        sqlCreateTemporyTable.append(" (");

        // Build "name varchar(255)<delimiter>\n" per column, then strip the
        // trailing delimiter left after the final column.
        StringBuilder colNameAndType = new StringBuilder();
        for (String columnName : columnNameList) {
            colNameAndType.append(columnName);
            colNameAndType.append(" varchar(255)");
            colNameAndType.append(delimiterCharacter);
            colNameAndType.append("\n");
        }
        colNameAndType.deleteCharAt(colNameAndType.lastIndexOf(","));

        sqlCreateTemporyTable.append(colNameAndType);
        sqlCreateTemporyTable.append(");");

        // Drop failure is expected on first run (table does not exist yet),
        // so it is logged but not fatal.
        try {
            session.createSQLQuery(sqlDropTemporyTable.toString()).executeUpdate();
            log.info("SQL dropTemporaryTable SUCCEEDED");
        } catch (JDBCException e) {
            log.error(e.getMessage());
            log.info("SQL dropTemporaryTable FAILED");
        }

        try {
            session.createSQLQuery(sqlCreateTemporyTable.toString()).executeUpdate();
            log.info("SQL createTemporaryTable SUCCEEDED");
        } catch (JDBCException e) {
            log.error(e.getMessage());
            log.info("SQL createTemporaryTable FAILED");
        } finally {
            session.flush();
        }
    }
}

From source file:au.org.theark.lims.model.dao.InventoryDao.java

License:Open Source License

/**
 * Updates an inventory site and synchronises its study links with the studies
 * selected in the model: links are created for newly selected studies, and
 * links to studies that are no longer selected are deleted.
 *
 * @param modelObject carries the InvSite to update and the selected studies
 */
public void updateInvSite(LimsVO modelObject) {

    InvSite invSite = modelObject.getInvSite();
    Session session = getSession();
    session.update(invSite);
    // Flush and refresh so the entity reflects the persisted state before we
    // inspect its current study links.
    session.flush();
    session.refresh(invSite);
    List<StudyInvSite> existingInvSites = invSite.getStudyInvSites();

    // Ids of selected studies that already have a link (and must be kept).
    List<Long> selectedAndExistingStudies = new ArrayList<Long>();
    List<Study> selectedStudies = modelObject.getSelectedStudies();

    for (Study selectedStudy : selectedStudies) {
        boolean studyAlreadyLinked = false;
        log.info("selected =" + selectedStudy.getId());

        for (StudyInvSite sis : existingInvSites) {
            Study existingStudy = sis.getStudy();
            // BUG FIX: previously logged the selected study's id under the
            // "existing" label, which made the log misleading.
            log.info("  existing=" + existingStudy.getId());
            if (existingStudy.getId().equals(selectedStudy.getId())) {
                log.info("found a match for " + selectedStudy.getId());
                studyAlreadyLinked = true;
                selectedAndExistingStudies.add(selectedStudy.getId());
                break; // already linked - leave it alone
            }
        }

        if (!studyAlreadyLinked) {
            log.info("about to create" + selectedStudy.getId());
            StudyInvSite studyInvSite = new StudyInvSite();
            studyInvSite.setStudy(selectedStudy);
            studyInvSite.setInvSite(invSite);
            session.save(studyInvSite);
        }

    }

    // Delete any pre-existing link whose study was not among the selected ones.
    for (StudyInvSite sis : existingInvSites) {
        log.info("about to investigate for deletion existing study " + sis.getStudy().getId());
        boolean deletePreviouslyExistingSiteAsItWasNotSelected = true;
        for (Long selectedId : selectedAndExistingStudies) {
            log.info("compare it to selected " + selectedId);
            if (selectedId.equals(sis.getStudy().getId())) {
                log.info("recommending you don't delete");
                deletePreviouslyExistingSiteAsItWasNotSelected = false;
            } else {
                log.info("match not found.");
            }
        }
        if (deletePreviouslyExistingSiteAsItWasNotSelected) {
            log.info("deleting " + sis.getStudy().getId());
            session.delete(sis);
        }
    }
    session.flush();
    session.refresh(invSite);
}

From source file:au.org.theark.phenotypic.model.dao.PhenotypicDao.java

License:Open Source License

/**
 * Persists each phenotypic collection together with its data points. The data
 * set is detached from the collection before the initial save, then each data
 * row is re-attached to the freshly persisted collection and saved.
 *
 * @param phenoCollectionsWithTheirDataToInsert collections (with data) to persist
 * @param study the study context (currently unused by this method)
 */
public void processPhenoCollectionsWithTheirDataToInsertBatch(
        List<PhenoDataSetCollection> phenoCollectionsWithTheirDataToInsert, Study study) {
    Session session = getSession();
    for (PhenoDataSetCollection collection : phenoCollectionsWithTheirDataToInsert) {
        //TODO : investigate more efficient way to deal with null parent entity
        Set<PhenoDataSetData> detachedData = collection.getPhenoDataSetData();
        collection.setPhenoDataSetData(new HashSet<PhenoDataSetData>());

        session.save(collection);
        session.refresh(collection);

        // Re-attach and save each data row against the persisted collection.
        for (PhenoDataSetData dataRow : detachedData) {
            dataRow.setPhenoDataSetCollection(collection);
            session.save(dataRow);
        }
    }
    session.flush();
    session.clear();
}

From source file:au.org.theark.phenotypic.model.dao.PhenotypicDao.java

License:Open Source License

@Override
public void createPhenoDataSetFieldGroup(PhenoDataSetFieldGroupVO phenoDataSetFieldGroupVO)
        throws EntityExistsException, ArkSystemException {
    PhenoDataSetGroup phenoDataSetGroup = phenoDataSetFieldGroupVO.getPhenoDataSetGroup();
    Session session = getSession();
    session.save(phenoDataSetGroup);/*from  w  ww . j av  a 2 s .  co m*/
    session.flush();
    insertToDispalyAndDeleteFromLinkAndPicked(phenoDataSetFieldGroupVO, phenoDataSetGroup, session);
    log.debug("Saved All PhenoDataSetDisplays for PhenoDataSet Group");
}

From source file:au.org.theark.phenotypic.model.dao.PhenotypicDao.java

License:Open Source License

@Override
public void updatePhenoDataSetFieldGroup(PhenoDataSetFieldGroupVO phenoDataSetFieldGroupVO)
        throws EntityExistsException, ArkSystemException {
    PhenoDataSetGroup phenoDataSetGroup = phenoDataSetFieldGroupVO.getPhenoDataSetGroup();
    Session session = getSession();
    session.saveOrUpdate(phenoDataSetGroup);//Update phenoDataSetGroup
    Collection<PhenoDataSetFieldDisplay> phenoDataSetFieldDisplayToRemove = getPhenoFieldDisplayToRemove(
            phenoDataSetGroup);//from   w w  w .j  a  v  a2s  .co m
    for (PhenoDataSetFieldDisplay phenoDataSetFieldDisplay : phenoDataSetFieldDisplayToRemove) {
        session.delete(phenoDataSetFieldDisplay);
        session.flush();
    }
    insertToDispalyAndDeleteFromLinkAndPicked(phenoDataSetFieldGroupVO, phenoDataSetGroup, session);
    log.debug("Update PhenoDataSetFieldDisplay for PhenoDataSet Group");

}

From source file:au.org.theark.study.model.dao.StudyDao.java

License:Open Source License

/**
 * {@inheritDoc}
 */
/**
 * Persists pedigree parent links and twin links in batches, flushing and
 * clearing the session every 50 saves to keep memory bounded.
 *
 * @param parentsToInsert parent/child pedigree links to save
 * @param twinsToInsert   twin links to save
 */
public void processPedigreeBatch(List<LinkSubjectPedigree> parentsToInsert,
        List<LinkSubjectTwin> twinsToInsert) {
    Session session = getSession();
    saveInBatches(session, parentsToInsert);
    saveInBatches(session, twinsToInsert);
    // Final flush/clear picks up any partial batch remaining.
    session.flush();
    session.clear();

}

/**
 * Saves each entity, flushing and clearing the session every 50 saves,
 * matching the recommended hibernate.jdbc.batch_size of 50. Extracted to
 * remove the duplicated batching loop for the two entity lists.
 */
private void saveInBatches(Session session, List<?> entitiesToInsert) {
    int count = 0;
    for (Object entityToInsert : entitiesToInsert) {
        session.save(entityToInsert);
        count++;
        if (count % 50 == 0) {
            log.info("Flushing batch of 50 saved entities");
            session.flush();
            session.clear();
        }
    }
}