Example usage for org.hibernate Session doWork

List of usage examples for org.hibernate Session doWork

Introduction

On this page you can find an example usage for org.hibernate Session doWork.

Prototype

void doWork(Work work) throws HibernateException;

Source Link

Document

Controller for allowing users to perform JDBC related work using the Connection managed by this Session.

Usage

From source file:com.quartzdesk.executor.dao.schema.DatabaseSchemaDao.java

License:MIT License

/**
 * Initializes, or upgrades the current QuartzDesk database schema by
 * executing the specified list of SQL scripts.
 *
 * @param scriptUrls    the list of SQL scripts to execute.
 * @param schemaVersion the version of the schema after the specified SQL scripts have been applied.
 */
public void initializeOrUpgradeSchema(final List<URL> scriptUrls, final Version schemaVersion) {
    final Session session = getSessionFactory().getCurrentSession();
    session.doWork(new Work() {
        @Override
        public void execute(Connection connection) throws SQLException {
            // Run every provided script against the JDBC connection managed by this session.
            DatabaseScriptExecutor executor = new DatabaseScriptExecutor();
            executor.addScriptUrls(scriptUrls);
            executor.executeScripts(connection);

            // Record the schema version reached after the scripts were applied.
            SchemaUpdate update = new SchemaUpdate()
                    .withMajor(schemaVersion.getMajor())
                    .withMinor(schemaVersion.getMinor())
                    .withMaintenance(schemaVersion.getMaintenance())
                    .withAppliedAt(Calendar.getInstance());
            insertSchemaUpdate(update);
        }
    });
}

From source file:com.scopix.periscope.extractionservicesserversmanagement.dao.ExtractionPlanDetailDAO.java

License:Open Source License

/**
 * Persists a single {@code ExtractionPlanDetail} row plus one {@code evidence_request}
 * row per child {@code EvidenceRequest}, using hand-built SQL executed over the JDBC
 * connection managed by the Hibernate {@code Session} ({@code doWork}).
 *
 * <p>The primary key is pre-fetched from {@code extraction_plan_detail_seq} and
 * assigned to the entity before the insert.
 *
 * @param extractionPlanDetail the detail (and its evidence requests) to insert.
 * @throws ScopixException if the sequence value cannot be obtained or a JDBC error occurs.
 */
public void saveExtractionPlanDetail(ExtractionPlanDetail extractionPlanDetail) throws ScopixException {
    log.debug("init");
    ResultSet rs = null;
    Session session = this.getSession();

    try {
        int counter = 1;
        int epdId = 0;
        // Fetch the next primary key value from the dedicated sequence.
        String sql = "select nextval('extraction_plan_detail_seq') as SEC";
        SelectWork selectWork = new SelectWork();
        selectWork.setSql(sql);
        session.doWork(selectWork);
        rs = selectWork.getResultSet();
        rs.next();
        epdId = rs.getInt("SEC");
        if (epdId == 0) {
            throw new ScopixException("No es posible obtener la secuencia");
        }
        closeConnection(null, rs, null);
        rs = null; // already closed above; avoids the redundant close in the finally block

        // NOTE(review): these INSERTs are built by string concatenation. The interpolated
        // values appear to be numeric ids, enum names and formatted timestamps, but
        // switching to PreparedStatement parameters would be safer — confirm none of
        // these values can originate from user input.
        StringBuilder insertEPD = new StringBuilder(
                "INSERT INTO extraction_plan_detail(id,extraction_plan_id) ");
        insertEPD.append("VALUES (").append(epdId).append(",")
                .append(extractionPlanDetail.getExtractionPlan().getId()).append(")");
        extractionPlanDetail.setId(epdId);
        UpdateWork updateWork = new UpdateWork();
        updateWork.setSql(insertEPD.toString());
        session.doWork(updateWork);
        log.debug("extraction_plan_detail insertdas " + updateWork.getRowsAffected());

        log.debug("ExtractionPalnDetail id = " + epdId);
        for (EvidenceRequest er : extractionPlanDetail.getEvidenceRequests()) {
            er.setExtractionPlanDetail(extractionPlanDetail);

            // One INSERT per evidence request; the id comes from evidence_requests_seq
            // inline, the detail FK is the epdId fetched above.
            StringBuilder insertER = new StringBuilder(
                    "INSERT INTO evidence_request(id,request_type,requested_time,");
            insertER.append("duration,device_id,");
            insertER.append(
                    "business_services_request_id,day_of_week,extraction_plan_detail_id, type, priorization)");

            insertER.append("VALUES (nextval('evidence_requests_seq'),'").append(er.getRequestType())
                    .append("',");
            insertER.append("to_timestamp('").append(DateFormatUtils.format(er.getRequestedTime(), "HH:mm"));
            insertER.append("', 'HH24:MI'),");
            insertER.append(er.getDuration()).append(",");
            insertER.append(er.getDeviceId()).append(",");
            insertER.append(er.getBusinessServicesRequestId()).append(",");
            insertER.append(er.getDayOfWeek()).append(",");
            insertER.append(epdId).append(", '").append(er.getType().name()).append("', ");
            insertER.append(er.getPriorization()).append(")");

            updateWork.setSql(insertER.toString());
            session.doWork(updateWork);
            log.debug("evidence_request insertdas " + updateWork.getRowsAffected());
            log.debug("EvidenceRequest Count = " + counter++);
        }

    } catch (SQLException e) {
        throw new ScopixException(e);
    } finally {
        if (rs != null) {
            try {
                rs.close();
                rs = null;
            } catch (SQLException ex) {
                log.error("rs error " + ex);
            }
        }
        this.releaseSession(session);
    }
    log.info("end");
}

From source file:com.scopix.periscope.extractionservicesserversmanagement.dao.ExtractionPlanDetailDAO.java

License:Open Source License

/**
 * Bulk-inserts a list of {@code ExtractionPlanDetail} rows together with their
 * child {@code evidence_request} rows, using hand-built multi-row INSERT
 * statements executed through {@code Session#doWork}.
 *
 * <p>A single value is fetched from {@code extraction_plan_detail_seq} up front;
 * subsequent detail ids are assigned by incrementing it locally, and the sequence
 * is restarted past the last used id at the end so future {@code nextval()} calls
 * do not collide.
 *
 * <p>NOTE(review): the statements are built by string concatenation; the
 * interpolated values appear to be ids, enum names and formatted times, but
 * parameterized statements would be safer — confirm none can carry user input.
 *
 * @param extractionPlanDetails the plan details (with their evidence requests) to insert.
 * @throws ScopixException if the sequence value cannot be obtained or a JDBC error occurs.
 */
public void saveAllExtractionPlanDetail(List<ExtractionPlanDetail> extractionPlanDetails)
        throws ScopixException {
    log.info("start");
    ResultSet rs = null;
    Session session = this.getSession();
    // Accumulators for the multi-row VALUES clauses of the two INSERT statements.
    StringBuilder valuesPlanDetail = new StringBuilder();
    StringBuilder valuesRequest = new StringBuilder();
    int countSql = 0;
    UpdateWork updateWork = new UpdateWork();
    try {
        int epdId = 0;
        // Fetch the starting primary key; later details use epdId+1, epdId+2, ...
        String sql = "select nextval('extraction_plan_detail_seq') as SEC";
        SelectWork selectWork = new SelectWork();
        selectWork.setSql(sql);
        session.doWork(selectWork);
        rs = selectWork.getResultSet();
        rs.next();
        epdId = rs.getInt("SEC");
        if (epdId == 0) {
            throw new ScopixException("No es posible obtener la secuencia");
        }
        String baseInsertPlanDetail = "INSERT INTO extraction_plan_detail(id,extraction_plan_id) VALUES";
        String baseInsertRequest = "INSERT INTO evidence_request(id,request_type,requested_time, duration,"
                + "device_id,business_services_request_id,day_of_week,extraction_plan_detail_id, type, live, priorization) VALUES";
        closeConnection(null, rs, null);
        log.info("epdId " + epdId);
        // Walk every plan detail and accumulate its VALUES tuples.
        for (ExtractionPlanDetail epd : extractionPlanDetails) {
            // Assign the locally computed sequence id to the entity.
            try {
                epd.setId(epdId);

                // Append this detail's tuple to the plan-detail batch
                // (tuples are comma-separated within one multi-row INSERT).
                if (valuesPlanDetail.length() > 0) {
                    valuesPlanDetail.append(",");
                }
                valuesPlanDetail.append("(").append(epdId).append(",");
                valuesPlanDetail.append(epd.getExtractionPlan().getId()).append(")");
                // Count queued tuples; drives the flush threshold below.
                countSql++;
                // Append one tuple per evidence request of this plan detail.
                for (EvidenceRequest er : epd.getEvidenceRequests()) {
                    er.setExtractionPlanDetail(epd);
                    if (valuesRequest.length() > 0) {
                        valuesRequest.append(",");
                    }
                    valuesRequest.append(" (nextval('evidence_requests_seq'),'").append(er.getRequestType())
                            .append("',");
                    valuesRequest.append("to_timestamp('")
                            .append(DateFormatUtils.format(er.getRequestedTime(), "HH:mm"));
                    valuesRequest.append("', 'HH24:MI'),");
                    valuesRequest.append(er.getDuration()).append(",");
                    valuesRequest.append(er.getDeviceId()).append(",");
                    valuesRequest.append(er.getBusinessServicesRequestId()).append(",");
                    valuesRequest.append(er.getDayOfWeek()).append(",");
                    valuesRequest.append(epdId).append(", '").append(er.getType().name()).append("', ");
                    valuesRequest.append(er.getLive()).append(",");
                    valuesRequest.append(er.getPriorization()).append(")");
                    countSql++;
                }
                epdId++;

            } catch (HibernateException e) {
                // Best-effort: skip this plan and keep going with the rest.
                log.error("NO es posible generar data para plan ");
            }

            try {
                // Flush to the database once more than 5000 tuples have accumulated.
                // NOTE(review): the original (Spanish) comment said 1000 — confirm
                // which threshold is intended.
                if (countSql > 5000) {
                    updateWork.setSql(baseInsertPlanDetail + valuesPlanDetail.toString() + ";");
                    session.doWork(updateWork);
                    updateWork.setSql(baseInsertRequest + valuesRequest.toString() + ";");
                    session.doWork(updateWork);

                    valuesRequest.setLength(0);
                    valuesPlanDetail.setLength(0);
                    countSql = 0;

                    log.info("insert " + epdId);
                }
            } catch (HibernateException e) {
                // Best-effort: log the failed batch and drop it rather than aborting.
                log.error("No se pudo ejecutar insert masivo " + baseInsertPlanDetail
                        + valuesPlanDetail.toString());
                log.error("No se pudo ejecutar insert masivo " + baseInsertRequest + valuesRequest.toString());
                log.error(e, e);
                valuesRequest.setLength(0);
                valuesPlanDetail.setLength(0);
                countSql = 0;
            }
        }

        try {
            // Final flush for any tuples remaining below the threshold.
            if (countSql > 0) {
                updateWork.setSql(baseInsertPlanDetail + valuesPlanDetail.toString() + ";");
                session.doWork(updateWork);
                updateWork.setSql(baseInsertRequest + valuesRequest.toString() + ";");
                session.doWork(updateWork);

                log.info("insert " + epdId);
            }
        } catch (HibernateException e) {
            log.error(
                    "No se pudo ejecutar insert masivo " + baseInsertPlanDetail + valuesPlanDetail.toString());
            log.error("No se pudo ejecutar insert masivo " + baseInsertRequest + valuesRequest.toString());
            log.error(e, e);
            valuesRequest.setLength(0);
            valuesPlanDetail.setLength(0);
            countSql = 0;
        }

        // Restart the sequence past the highest id handed out locally so the next
        // nextval() cannot collide with the ids used in this batch.
        log.debug("before update secuencia extraction_plan_detail_seq" + (epdId + 1));
        updateWork.setSql("ALTER SEQUENCE extraction_plan_detail_seq RESTART WITH " + (epdId + 1) + ";");
        session.doWork(updateWork);

    } catch (SQLException e) {
        log.error("no se puede obtener indice " + e, e);
        throw new ScopixException(e);
    } finally {
        closeConnection(null, rs, null);
        this.releaseSession(session);

    }

    log.info("end");
}

From source file:com.sinosoft.one.data.jade.dataaccess.DataAccessImpl.java

License:Apache License

/**
 *  2012-08-16/*from   w w  w . j  a  v  a 2s  .  co  m*/
 */
public <T> List<T> select(String sql, Object[] args, RowMapper<?> rowMapper) {
    log.info(sql);
    Session session = em.unwrap(Session.class);
    SelectWork<T> work = new SelectWork<T>(sql, args, rowMapper);
    session.doWork(work);
    return work.results;
}

From source file:com.sinosoft.one.data.jade.dataaccess.DataAccessImpl.java

License:Apache License

/**
 * Executes a paged query: first runs {@code countSql} to get the total row
 * count, then rewrites {@code sql} for the requested page via
 * {@code RenderSqlWork} and fetches just that page.
 *
 * @param pageable  the requested page (offset/size).
 * @param sql       the data query; rewritten in-place for paging.
 * @param countSql  the companion COUNT query.
 * @param args      positional statement arguments shared by both queries.
 * @param rowMapper maps each result-set row to a page element.
 * @return the page of mapped rows plus the total element count.
 */
public <T> Page<T> selectByPage(Pageable pageable, String sql, String countSql, Object[] args,
        RowMapper<?> rowMapper) {
    Session session = em.unwrap(Session.class);
    SingleColumnRowMapper<BigDecimal> scrm = new SingleColumnRowMapper<BigDecimal>();
    List<BigDecimal> totals = select(countSql, args, scrm);
    // Rewrite the query so the database only returns the requested page.
    RenderSqlWork psw = new RenderSqlWork(sql, pageable, null);
    session.doWork(psw);
    sql = psw.getSql();
    List<T> content = select(sql, args, rowMapper);
    if (content == null) {
        content = new ArrayList<T>();
    }
    // Guard against an empty/null count result instead of failing with
    // IndexOutOfBoundsException (the original called totals.get(0) unchecked).
    long total = 0L;
    if (totals != null && !totals.isEmpty() && totals.get(0) != null) {
        total = totals.get(0).longValue();
    }
    return new PageImpl<T>(content, pageable, total);
}

From source file:com.sinosoft.one.data.jade.dataaccess.DataAccessImpl.java

License:Apache License

/**
 * Executes the given query with an ORDER BY derived from {@code sort}: the SQL
 * is first rewritten by {@code RenderSqlWork}, then run through {@link #select}.
 *
 * @param sort      the requested ordering.
 * @param sql       the SQL select statement; rewritten in-place for sorting.
 * @param args      positional statement arguments.
 * @param rowMapper maps each result-set row to a result element.
 * @return the mapped rows, or an empty list when the select yields none.
 */
public <T> List<?> selectBySort(Sort sort, String sql, Object[] args, RowMapper<?> rowMapper) {
    RenderSqlWork renderWork = new RenderSqlWork(sql, null, sort);
    em.unwrap(Session.class).doWork(renderWork);
    List<T> rows = select(renderWork.getSql(), args, rowMapper);
    return rows == null ? new ArrayList<T>() : rows;
}

From source file:com.sinosoft.one.data.jade.dataaccess.DataAccessImpl.java

License:Apache License

/**
 * Executes the given DML statement over the JDBC connection managed by the
 * current JPA session, capturing generated keys into the supplied holder.
 *
 * @param sql                the INSERT/UPDATE/DELETE statement to run.
 * @param args               positional statement arguments.
 * @param generatedKeyHolder receives any database-generated keys.
 * @return the number of rows affected.
 */
@Transactional
public int update(String sql, Object[] args, KeyHolder generatedKeyHolder) {
    log.info(sql);
    UpdateWork updateWork = new UpdateWork(sql, args, generatedKeyHolder);
    em.unwrap(Session.class).doWork(updateWork);
    return updateWork.number;
}

From source file:com.sinosoft.one.data.jade.dataaccess.DataAccessImpl.java

License:Apache License

/**
 * Invokes a stored procedure over the JDBC connection managed by the current
 * JPA session; result sets are mapped via the supplied factory into
 * {@code rsprs}.
 *
 * @param sql              the procedure call statement.
 * @param args             positional call arguments.
 * @param rowMapperFactory produces row mappers for the returned result sets.
 * @param rsprs            receives the mapped procedure results.
 */
public void call(String sql, Object[] args, RowMapperFactory rowMapperFactory,
        ResultSetProcedureResult[] rsprs) {
    CallWork callWork = new CallWork(sql, args, rsprs, rowMapperFactory);
    em.unwrap(Session.class).doWork(callWork);
}

From source file:com.sonymobile.backlogtool.dbupdate.UpdateVersion1_2.java

License:Open Source License

/**
 * Widens the description columns of the stories/epics/themes tables to the
 * lengths declared on the model classes, using DDL appropriate for the
 * configured Hibernate dialect (PostgreSQL or HSQL).
 *
 * @param sessionFactory used to open the session whose connection runs the DDL.
 * @return {@code true} when all three ALTER TABLE statements succeeded.
 */
@Override
public boolean update(SessionFactory sessionFactory) {
    boolean success = true;
    Properties propertiesFile = new Properties();
    InputStream input = null;
    try {
        String backlogconf = System.getProperty("catalina.home") + File.separator
                + "conf/backlogtool.properties";
        input = new FileInputStream(backlogconf);
        propertiesFile.load(input);
    } catch (Exception e) {
        e.printStackTrace();
    } finally {
        // Always release the file handle (the original leaked it).
        if (input != null) {
            try {
                input.close();
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    }
    final String dialect = (String) propertiesFile.get("db.hibernate.dialect");

    Transaction tx = null;
    Session session = sessionFactory.openSession();
    try {
        tx = session.beginTransaction();

        session.doWork(new Work() {
            @Override
            public void execute(Connection connection) throws SQLException {
                String storyQuery = null;
                String epicQuery = null;
                String themeQuery = null;
                // Guard against a missing dialect property: the original threw an
                // NPE here; now it falls through to the explicit SQLException below.
                if (dialect != null && dialect.contains("PostgreSQL")) {
                    storyQuery = "ALTER TABLE stories ALTER COLUMN description TYPE varchar(%d)";
                    epicQuery = "ALTER TABLE epics ALTER COLUMN description TYPE varchar(%d)";
                    themeQuery = "ALTER TABLE themes ALTER COLUMN description TYPE varchar(%d)";
                } else if (dialect != null && dialect.contains("HSQL")) {
                    storyQuery = "ALTER TABLE stories ALTER COLUMN description varchar(%d)";
                    epicQuery = "ALTER TABLE epics ALTER COLUMN description varchar(%d)";
                    themeQuery = "ALTER TABLE themes ALTER COLUMN description varchar(%d)";
                }
                if (storyQuery == null) {
                    throw new SQLException("Unable to find a matching query for the specified database type");
                } else {
                    storyQuery = String.format(storyQuery, Story.DESCRIPTION_LENGTH);
                    epicQuery = String.format(epicQuery, Epic.DESCRIPTION_LENGTH);
                    themeQuery = String.format(themeQuery, Theme.DESCRIPTION_LENGTH);

                    executeDdl(connection, storyQuery);
                    executeDdl(connection, epicQuery);
                    executeDdl(connection, themeQuery);
                }
            }
        });
        tx.commit();

    } catch (Exception e) {
        e.printStackTrace();
        if (tx != null) {
            tx.rollback();
        }
        success = false;
    } finally {
        session.close();
    }
    return success;
}

/** Runs a single DDL statement, always closing it (the original leaked the statements). */
private static void executeDdl(Connection connection, String ddl) throws SQLException {
    PreparedStatement statement = connection.prepareStatement(ddl);
    try {
        statement.executeUpdate();
    } finally {
        statement.close();
    }
}

From source file:com.thesett.catalogue.config.CatalogueConfigBeanImpl.java

License:Apache License

/**
 * Initializes all enumeration reference types in the database.
 *
 * <p>For every enumeration type in the model this opens a secondary session,
 * instantiates the generated enumeration bean for each possible value, and
 * writes the value into the {@code <EnumName>_enumeration} table.
 */
private void createEnumerationReferenceTypes() {
    log.debug("private void createEnumerationReferenceTypes(): called");

    final AtomicLong id = new AtomicLong();

    for (EnumeratedStringAttribute.EnumeratedStringType enumType : model.getAllEnumTypes()) {
        Session session = hibernateBean.getSecondarySession();
        try {
            Transaction transaction = session.beginTransaction();

            for (Iterator<EnumeratedStringAttribute> enumIterator = enumType
                    .getAllPossibleValuesIterator(false); enumIterator.hasNext();) {
                final EnumeratedStringAttribute enumAttribute = enumIterator.next();
                String enumName = enumAttribute.getType().getName();
                String enumClassName = StringUtils.toCamelCaseUpper(enumName);

                // Instantiate the generated enumeration bean via its constructor
                // taking an EnumeratedStringAttribute.
                Class theBeanClass = ReflectionUtils.forName(model.getModelPackage() + "." + enumClassName);
                Class[] arguments = new Class[] { EnumeratedStringAttribute.class };
                Constructor beanConstructor = ReflectionUtils.getConstructor(theBeanClass, arguments);

                enumAttribute.setId(id.get());

                Object theBean = ReflectionUtils.newInstance(beanConstructor, new Object[] { enumAttribute });
                log.debug("Created enum bean: " + theBean);

                // Store the value in the database.
                final String tableName = enumClassName + "_enumeration";

                // NOTE(review): the DELETE runs once per enumeration VALUE, so each
                // iteration wipes the rows inserted for previous values of the same
                // type — only the last value survives. Confirm whether the DELETE
                // was intended to run once per table instead.
                session.doWork(new Work() {
                    public void execute(Connection connection) throws SQLException {
                        PreparedStatement delete = connection.prepareStatement("DELETE FROM " + tableName);
                        try {
                            delete.execute();
                        } finally {
                            delete.close(); // the original never closed this statement
                        }

                        PreparedStatement insert = connection
                                .prepareStatement("INSERT INTO " + tableName + " VALUES (?, ?)");
                        try {
                            insert.setLong(1, id.getAndIncrement());
                            insert.setString(2, enumAttribute.getStringValue());
                            insert.execute();
                        } finally {
                            insert.close(); // the original never closed this statement
                        }
                    }
                });
            }

            transaction.commit();
        } finally {
            // Close the session even when bean creation or the JDBC work throws
            // (the original leaked the session on exception).
            session.close();
        }
    }
}