Example usage for org.hibernate Session doWork


Introduction

This page collects example usages of org.hibernate.Session#doWork from open-source projects.

Prototype

void doWork(Work work) throws HibernateException;

Document

Controller for allowing users to perform JDBC related work using the Connection managed by this Session.
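
Before the project-specific examples, here is a minimal self-contained sketch of the basic pattern. It assumes a SessionFactory named sessionFactory and a hypothetical table some_table (neither comes from the examples below); Work is org.hibernate.jdbc.Work, whose single execute(Connection) method receives the JDBC Connection managed by the Session.

import java.sql.Connection;
import java.sql.SQLException;
import java.sql.Statement;

import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.hibernate.jdbc.Work;

public class DoWorkSketch {

    // Runs a plain JDBC statement on the Connection managed by the Session.
    public void touchTable(SessionFactory sessionFactory) {
        Session session = sessionFactory.openSession();
        try {
            session.doWork(new Work() {
                @Override
                public void execute(Connection connection) throws SQLException {
                    // some_table is a hypothetical table used only for illustration.
                    try (Statement st = connection.createStatement()) {
                        st.execute("UPDATE some_table SET processed = 1");
                    }
                }
            });
        } finally {
            session.close();
        }
    }
}

Because doWork returns void, the examples below typically pass results back out of the callback through a final holder: an array, a mutable bean, or a field on the Work implementation itself.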

Usage

From source file:managebeans.relatorioDeCidades.java

public void emitir() {

    Map<String, Object> parametros = new HashMap<>();

    ExecutorRelatorio relatorio = new ExecutorRelatorio("/relatorios/relatorio_cidades.jasper", this.response,
            parametros, "Relatorio de cidades.pdf");

    Session session = manager.unwrap(Session.class);
    session.doWork(relatorio);

    facesContext.responseComplete();
}

From source file:org.ambraproject.models.UserRoleTest.java

License:Apache License

@Test
public void testUnknownPermission() {
    Set<UserRole.Permission> permsOrig = new HashSet<UserRole.Permission>();

    permsOrig.add(UserRole.Permission.CROSS_PUB_ARTICLES);
    permsOrig.add(UserRole.Permission.VIEW_UNPUBBED_ARTICLES);
    permsOrig.add(UserRole.Permission.MANAGE_ANNOTATIONS);
    permsOrig.add(UserRole.Permission.MANAGE_CACHES);

    UserRole role = new UserRole("admin3", permsOrig.toArray(new UserRole.Permission[permsOrig.size()]));

    final Serializable id = hibernateTemplate.save(role);

    Session session = hibernateTemplate.getSessionFactory().openSession();
    session.doWork(new Work() {
        @Override
        public void execute(Connection connection) throws SQLException {
            connection.createStatement()
                    .execute("insert into userRolePermission values(" + id + ", 'FOOBAR_PERMISSION');");
        }
    });
    session.close();

    UserRole storedRole = hibernateTemplate.get(UserRole.class, id);
    //This should not throw an exception with the unknown permission value
    Set<UserRole.Permission> permsResult = storedRole.getPermissions();

    //Make sure results match expected
    assertEquals(permsResult, permsOrig);
}

From source file:org.ambraproject.testutils.HibernateTestSessionFactory.java

License:Apache License

@Override
public void afterPropertiesSet() throws Exception {
    super.afterPropertiesSet();
    try {
        HibernateTemplate hibernateTemplate = new HibernateTemplate((SessionFactory) getObject());
        // Create an admin user to test admin functions
        UserRole adminRole = new UserRole("admin");

        Set<UserRole.Permission> perms = new HashSet<UserRole.Permission>();
        perms.add(UserRole.Permission.ACCESS_ADMIN);
        perms.add(UserRole.Permission.INGEST_ARTICLE);
        perms.add(UserRole.Permission.MANAGE_FLAGS);
        perms.add(UserRole.Permission.MANAGE_ANNOTATIONS);
        perms.add(UserRole.Permission.MANAGE_USERS);
        perms.add(UserRole.Permission.MANAGE_ROLES);
        perms.add(UserRole.Permission.MANAGE_JOURNALS);
        perms.add(UserRole.Permission.MANAGE_SEARCH);
        perms.add(UserRole.Permission.MANAGE_ARTICLE_LISTS);
        perms.add(UserRole.Permission.MANAGE_CACHES);
        perms.add(UserRole.Permission.CROSS_PUB_ARTICLES);
        perms.add(UserRole.Permission.DELETE_ARTICLES);
        perms.add(UserRole.Permission.VIEW_UNPUBBED_ARTICLES);

        adminRole.setPermissions(perms);
        hibernateTemplate.save(adminRole);

        UserProfile admin = new UserProfile();
        admin.setAuthId(BaseTest.DEFAULT_ADMIN_AUTHID);
        admin.setEmail("admin@test.org");
        admin.setDisplayName("testAdmin");
        admin.setPassword("adminPass");
        admin.setRoles(new HashSet<UserRole>(1));
        admin.getRoles().add(adminRole);
        hibernateTemplate.save(admin);

        UserRole editorialRole = new UserRole("editorial");
        perms = new HashSet<UserRole.Permission>();
        perms.add(UserRole.Permission.ACCESS_ADMIN);
        perms.add(UserRole.Permission.VIEW_UNPUBBED_ARTICLES);
        editorialRole.setPermissions(perms);
        hibernateTemplate.save(editorialRole);

        UserProfile editorial = new UserProfile();
        editorial.setAuthId(BaseTest.DEFAULT_EDITORIAL_AUTHID);
        editorial.setEmail("editorial@test.org");
        editorial.setDisplayName("editorialAdmin");
        editorial.setPassword("pass");
        editorial.setRoles(new HashSet<UserRole>(1));
        editorial.getRoles().add(editorialRole);
        hibernateTemplate.save(editorial);

        UserProfile nonAdmin = new UserProfile();
        nonAdmin.setAuthId(BaseTest.DEFAULT_USER_AUTHID);
        nonAdmin.setEmail("nonAdmin@test.org");
        nonAdmin.setDisplayName("testNonAdmin");
        nonAdmin.setPassword("nonAdminPass");
        hibernateTemplate.save(nonAdmin);
        //save the default journal
        hibernateTemplate.save(BaseTest.defaultJournal);

        //turn off foreign keys
        hibernateTemplate.execute(new HibernateCallback() {
            @Override
            public Object doInHibernate(Session session) throws HibernateException, SQLException {
                session.doWork(new Work() {
                    @Override
                    public void execute(Connection connection) throws SQLException {
                        connection.createStatement().execute("SET DATABASE REFERENTIAL INTEGRITY FALSE;");
                    }
                });
                return null;
            }
        });
    } catch (DataAccessException ex) {
        //must've already inserted the users
    }
}

From source file:org.arsenico.database.HibernateUtil.java

License:Apache License

/**
 * Indicates whether the table passed as an argument exists in the database the session is open on.
 *
 * @param session
 *       the session
 * @param tableName
 *       the table to check
 * @return
 *       true if the table exists.
 * @throws HibernateException
 * @throws SQLException
 */
public static boolean tableExists(final Session session, final String tableName)
        throws HibernateException, SQLException {
    final TableCheck result = new TableCheck();
    session.doWork(new Work() {

        public void execute(Connection connection) throws SQLException {
            if (connection != null) {
                ResultSet tables = connection.getMetaData().getTables(null, null, tableName, null);
                while (tables.next()) {
                    String currentTableName = tables.getString("TABLE_NAME");
                    if (currentTableName.equals(tableName)) {
                        result.setTableExist(true);
                    }
                }
                tables.close();
            }

        }
    });

    return result.isTableExist();
}

From source file:org.arsenico.SqlDeployer.java

License:Apache License

/**
 * Runs the main task:
 * <ul>
 * <li>Checks whether the database contains the registry table. If it does not
 * exist, it is created.</li>
 * <li>Retrieves the list of files that still need to be processed.</li>
 * <li>Executes every statement found in those files and records the outcome
 * in the database.</li>
 * </ul>
 *
 * @throws Exception
 */
public void execute() throws Exception {
    Session session;

    final Settings s = Settings.getInstance();

    // normalize svnUrl
    String svnUrl = s.getSvnUrl();

    if (svnUrl == null)
        svnUrl = "";
    svnUrl = svnUrl.replace("scm:svn:", "");
    s.setSvnUrl(svnUrl);

    buildDynamicIgnoredErrors();

    // Check whether the database contains the registry table. If it does not
    // exist, it is created.
    session = createSession();
    final Logger log = s.getLogger();

    try {
        // retrieve the list of files that still need to be processed
        List<String> listaFileDaAnalizzare = retrieveFileToAnalyze(session);
        Timestamp now = new Timestamp(System.currentTimeMillis());

        final Session sqlSession = session;

        for (String item : listaFileDaAnalizzare) {

            final LogRegistry registryLog = new LogRegistry();

            // the error count starts at 0
            registryLog.setErrors(0L);

            final String fileName = item;
            final StringBuilder buffer = new StringBuilder();
            final List<String> sql = FileParser.getSplittedContents(new File(s.getDirectorySource(), item),
                    "/");

            log.debug(" ");
            log.debug("-----------------------------");
            log.info("Analisi file " + fileName);

            session.doWork(new Work() {
                /*
                 * (non-Javadoc)
                 * 
                 * @see org.hibernate.jdbc.Work#execute(java.sql.Connection)
                 */
                public void execute(Connection connection) throws SQLException {

                    SqlType sqlType;
                    // for each file, open it and execute the individual
                    // statements
                    for (String linea : sql) {
                        final String currentSql = linea;
                        sqlType = determineType(currentSql);

                        if (currentSql.trim().length() > 0) {
                            try {
                                Statement st = connection.createStatement();

                                log.debug("SQL (" + sqlType + ") = " + currentSql);

                                st.execute(currentSql);

                                log.debug("Esecuzione... OK");
                                buffer.append("/* ESEGUITO CON SUCCESSO */\n");
                            } catch (SQLException e) {
                                // for DDL we can try to carry on;
                                // in all other cases an error is reported.

                                if (ErroreCodeAnalizer.canIgnore(e)) {
                                    String messaggioErrore = e.getMessage().replace("\n", "").trim();
                                    // increment the error count on the bean
                                    registryLog.setErrors(registryLog.getErrors() + 1);
                                    log.warn("SQL ERR (" + sqlType + "), SQL-CODE " + e.getSQLState() + " = "
                                            + currentSql);
                                    log.warn("Errore NON BLOCCANTE durante l'esecuzione di un " + sqlType + ": "
                                            + messaggioErrore);
                                    log.warn("VENDOR SQL ERROR CODE = " + e.getErrorCode());
                                    log.warn("GENERIC SQL ERROR CODE = " + e.getSQLState());

                                    buffer.append("/* NON ESEGUITO CON ERROR-CODE = " + e.getErrorCode() + " ("
                                            + messaggioErrore + ") */\n");
                                } else {
                                    log.error("SQL ERR (" + sqlType + "), SQL-CODE " + e.getSQLState() + " = "
                                            + currentSql);
                                    log.error("Errore durante l'esecuzione di un " + sqlType + ": "
                                            + e.getMessage());
                                    log.error("VENDOR SQL ERROR CODE = " + e.getErrorCode());
                                    log.error("GENERIC SQL ERROR CODE = " + e.getSQLState());
                                    throw (e);
                                }
                            }

                            buffer.append(currentSql);
                            buffer.append("\n/\n");

                        }
                    }
                }

            });

            registryLog.setSvnUrl(s.getSvnUrl());

            registryLog.setVersion(s.getVersion());

            registryLog.setBuildNumber(s.getBuildNumber());
            registryLog.setDirectory(s.getDirectorySource());
            registryLog.setFileName(fileName);
            registryLog.setMessages(buffer.toString());

            registryLog.setInsertDate(now);

            // record the entry in the registry
            session.beginTransaction();
            sqlSession.save(registryLog);
            session.getTransaction().commit();
        }

    } catch (Exception e) {
        log.error(e.getMessage());
        throw (e);
    } finally {
        session.close();
    }
}

From source file:org.glite.security.voms.admin.persistence.deployer.SchemaDeployer.java

License:Apache License

private DatabaseMetaData getDatabaseMetadata(Session s) {

    GetDatabaseMetadataWork w = new GetDatabaseMetadataWork();

    s.doWork(w);

    return w.getMetadata();

}
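
GetDatabaseMetadataWork is not included in this listing. A plausible sketch of such a helper, assuming it simply stores the connection's DatabaseMetaData in a field for the caller to read after doWork returns, could look like the following (the body is an assumption, not the project's actual source):

import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.SQLException;

import org.hibernate.jdbc.Work;

public class GetDatabaseMetadataWork implements Work {

    // Filled in while execute() runs inside session.doWork().
    private DatabaseMetaData metadata;

    @Override
    public void execute(Connection connection) throws SQLException {
        // Capture the metadata of the JDBC connection managed by the session.
        metadata = connection.getMetaData();
    }

    public DatabaseMetaData getMetadata() {
        return metadata;
    }
}

Note that DatabaseMetaData stays tied to the underlying connection, so it should be read while the session is still open.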

From source file:org.gluewine.persistence_jpa_hibernate.impl.SessionAspectProvider.java

License:Apache License

/**
 * Checks whether the statements have already been executed, and executes them if not.
 */
private void checkStatements() {
    final Session session = factory.openSession();
    session.doWork(new Work() {
        @Override
        @edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "SQL_NONCONSTANT_STRING_PASSED_TO_EXECUTE")
        public void execute(Connection conn) throws SQLException {
            Iterator<Map<String, List<SQLStatement>>> stiter = statements.values().iterator();
            while (stiter.hasNext()) // Bundle Level.
            {
                Map<String, List<SQLStatement>> entry = stiter.next();
                stiter.remove();
                for (List<SQLStatement> stmts : entry.values()) // File level within a bundle, executing in the same transaction.
                {
                    session.beginTransaction();
                    boolean commit = true;
                    for (SQLStatement st : stmts) // Statement level.
                    {
                        SQLStatement st2 = (SQLStatement) session.get(SQLStatement.class, st.getId());
                        if (st2 == null) {
                            try {
                                logger.info("Executing SQL statement " + st.getStatement());
                                conn.createStatement().execute(st.getStatement());
                                st.setSuccess(true);
                                st.setExecutionTime(new Date());
                            } catch (Throwable e) {
                                st.setMessage(e.getMessage());
                                st.setSuccess(false);
                                logger.warn(e);
                                commit = false;
                                break;
                            }
                        }
                    }

                    if (commit) {
                        // Save all statements.
                        session.getTransaction().commit();
                        try {
                            Session session2 = factory.openSession();
                            session2.beginTransaction();
                            for (SQLStatement st : stmts)
                                session2.saveOrUpdate(st);
                            session2.getTransaction().commit();
                            session2.close();
                        } catch (Throwable e) {
                            commit = false;
                        }
                    } else
                        session.getTransaction().rollback();
                }
            }
        }
    });
    session.close();

}

From source file:org.inbio.neoportal.core.dao.impl.ImportDwcDAOImpl.java

License:Open Source License

@Override
public long copy(final Reader fileReader) {
    final long[] copyRows = { 0 };
    Session session = getSessionFactory().getCurrentSession();
    session.doWork(new Work() {

        @Override
        public void execute(Connection connection) throws SQLException {
            try {
                Method m = PGConnection.class.getMethod("getCopyAPI", new Class[0]);
                Object[] arg = new Object[0];
                CopyManager copyManager = (CopyManager) ((NewProxyConnection) connection)
                        .rawConnectionOperation(m, NewProxyConnection.RAW_CONNECTION, arg);

                String sql = "COPY import_dwc (\"type\",\"modified\",\"language\",\"rights\",\"rightsHolder\",\"accessRights\",\"bibliographicCitation\",\"references\",\"institutionID\",\"collectionID\",\"datasetID\",\"institutionCode\",\"collectionCode\",\"datasetName\",\"ownerInstitutionCode\",\"basisOfRecord\",\"informationWithheld\",\"dataGeneralizations\",\"dynamicProperties\",\"occurrenceID\",\"catalogNumber\",\"occurrenceRemarks\",\"recordNumber\",\"recordedBy\",\"individualID\",\"individualCount\",\"sex\",\"lifeStage\",\"reproductiveCondition\",\"behavior\",\"establishmentMeans\",\"occurrenceStatus\",\"preparations\",\"disposition\",\"otherCatalogNumbers\",\"previousIdentifications\",\"associatedMedia\",\"associatedReferences\",\"associatedOccurrences\",\"associatedSequences\",\"associatedTaxa\",\"eventID\",\"samplingProtocol\",\"samplingEffort\",\"eventDate\",\"eventTime\",\"startDayOfYear\",\"endDayOfYear\",\"year\",\"month\",\"day\",\"verbatimEventDate\",\"habitat\",\"fieldNumber\",\"fieldNotes\",\"eventRemarks\",\"locationID\",\"higherGeographyID\",\"higherGeography\",\"continent\",\"waterBody\",\"islandGroup\",\"island\",\"country\",\"countryCode\",\"stateProvince\",\"county\",\"municipality\",\"locality\",\"verbatimLocality\",\"verbatimElevation\",\"minimumElevationInMeters\",\"maximumElevationInMeters\",\"verbatimDepth\",\"minimumDepthInMeters\",\"maximumDepthInMeters\",\"minimumDistanceAboveSurfaceInMeters\",\"maximumDistanceAboveSurfaceInMeters\",\"locationAccordingTo\",\"locationRemarks\",\"verbatimCoordinates\",\"verbatimLatitude\",\"verbatimLongitude\",\"verbatimCoordinateSystem\",\"verbatimSRS\",\"decimalLongitude\",\"decimalLatitude\",\"geodeticDatum\",\"coordinateUncertaintyInMeters\",\"coordinatePrecision\",\"pointRadiusSpatialFit\",\"footprintWKT\",\"footprintSRS\",\"footprintSpatialFit\",\"georeferencedBy\",\"georeferencedDate\",\"georeferenceProtocol\",\"georeferenceSources\",\"georeferenceVerificationStatus\",\"georeferenceRemarks\",\"identificationID\",\"identifiedBy\", \"dateIdentified\",\"identificationReferences\",\"identificationVerificationStatus\",\"identificationRemarks\",\"identificationQualifier\",\"typeStatus\",\"taxonID\",\"scientificNameID\",\"acceptedNameUsageID\",\"parentNameUsageID\",\"originalNameUsageID\",\"nameAccordingToID\",\"namePublishedInID\",\"taxonConceptID\",\"scientificName\",\"acceptedNameUsage\",\"parentNameUsage\",\"originalNameUsage\",\"nameAccordingTo\",\"namePublishedIn\",\"namePublishedInYear\",\"higherClassification\",\"kingdom\",\"phylum\",\"class\",\"order\",\"family\",\"genus\",\"subgenus\",\"specificEpithet\",\"infraspecificEpithet\",\"taxonRank\",\"verbatimTaxonRank\",\"scientificNameAuthorship\",\"vernacularName\",\"nomenclaturalCode\",\"taxonomicStatus\",\"nomenclaturalStatus\",\"taxonRemarks\",\"taxonCategoryID\",\"oldTaxonID\") "
                        + "FROM STDIN " + "(FORMAT CSV, HEADER, ESCAPE '\\')";

                copyRows[0] = copyManager.copyIn(sql, fileReader);
            } catch (IOException | SecurityException | NoSuchMethodException | IllegalArgumentException
                    | IllegalAccessException | InvocationTargetException e) {
                // Reflection-based lookup of the CopyManager, or the COPY itself, failed.
                e.printStackTrace();
            }
        }
    });

    return copyRows[0];
}
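
As a side note, the copyRows holder array above is needed because doWork returns void. Hibernate 4 and later also expose Session.doReturningWork(ReturningWork), which lets the callback return a value directly. A minimal sketch of that alternative, counting rows of a hypothetical table my_table rather than reproducing the COPY logic, might look like this:

import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

import org.hibernate.Session;
import org.hibernate.jdbc.ReturningWork;

public class RowCountSketch {

    // Returns a value from the JDBC callback without a holder variable.
    public long countRows(Session session) {
        return session.doReturningWork(new ReturningWork<Long>() {
            @Override
            public Long execute(Connection connection) throws SQLException {
                // my_table is a hypothetical table used only for illustration.
                try (Statement st = connection.createStatement();
                        ResultSet rs = st.executeQuery("SELECT COUNT(*) FROM my_table")) {
                    rs.next();
                    return rs.getLong(1);
                }
            }
        });
    }
}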

From source file:org.jboss.dashboard.database.DatabaseAutoSynchronizer.java

License:Apache License

protected void runDDL(final String ddl) throws Exception {
    String separator = ddl.startsWith(customDelimiterEnabler) ? customDelimiter : ";";
    String[] statements = splitString(ddl, separator);
    for (int i = 0; i < statements.length; i++) {
        final String ddlStatement = removeComments(statements[i]).trim();
        if (ddlStatement.length() == 0 || ArrayUtils.contains(excludedScriptStatements, ddlStatement)) {
            continue;
        }

        if (log.isDebugEnabled()) {
            log.debug("Running statement: " + ddlStatement);
        }
        new HibernateTxFragment() {
            protected void txFragment(Session session) throws Exception {
                Work w = new Work() {
                    public void execute(Connection connection) throws SQLException {
                        Statement statement = null;
                        try {
                            statement = connection.createStatement();
                            statement.execute(ddlStatement);
                        } catch (Exception e) {
                            Throwable root = ErrorManager.lookup().getRootCause(e);
                            log.error("Error executing " + ddlStatement + ": " + root.getMessage());
                        } finally {
                            if (statement != null) {
                                statement.close();
                            }
                        }
                    }
                };
                session.doWork(w);
                session.flush();
            }
        }.execute();
    }
}

From source file:org.jboss.dashboard.database.DatabaseAutoSynchronizer.java

License:Apache License

/**
 * Check if the dashbuilder installed modules table exist.
 *
 * BZ-1030424: Added <code>default_schema</code> argument to allow finding tables only for a given database schema.
 *
 * @param default_schema If specified, look up the table only for the specified schema.
 * @return If exist dashbuilder installed modules table
 * @throws Exception
 */
protected boolean existsModulesTable(final String default_schema) throws Exception {
    final boolean[] returnValue = { false };
    new HibernateTxFragment(true) {
        protected void txFragment(Session session) throws Exception {
            Work w = new Work() {
                public void execute(Connection connection) throws SQLException {
                    // IMPORTANT NOTE: SQL Server driver closes the previous result set. So it's very important to read the
                    // data from the first result set before opening a new one. If not an exception is thrown.

                    DatabaseMetaData metaData = connection.getMetaData();
                    String schema = default_schema != null && default_schema.trim().length() > 0
                            ? default_schema
                            : null;
                    returnValue[0] = metaData.getTables(null, schema, installedModulesTable.toLowerCase(), null)
                            .next();
                    if (!returnValue[0])
                        returnValue[0] = metaData
                                .getTables(null, schema, installedModulesTable.toUpperCase(), null).next();
                }
            };
            session.doWork(w);
        }
    }.execute();
    return returnValue[0];
}