Example usage for org.hibernate Session createSQLQuery

List of usage examples for org.hibernate Session createSQLQuery

Introduction

On this page you can find example usages of org.hibernate Session.createSQLQuery.

Prototype

@Override
    NativeQuery createSQLQuery(String queryString);

Source Link

Usage

From source file: at.ac.tuwien.ifs.tita.dao.test.util.NativeSqlExecutorDao.java

License: Apache License

/** {@inheritDoc} */
@Override
@Transactional(propagation = Propagation.REQUIRED)
public void executeSql(String query) {
    // NOTE(review): the statement is upper-cased wholesale before execution,
    // which would also upper-case any quoted literals — confirm intended.
    SQLQuery nativeQuery = getSession().createSQLQuery(query.toUpperCase());
    nativeQuery.executeUpdate();
}

From source file: at.ac.tuwien.ifs.tita.dao.test.util.NativeSqlExecutorDao.java

License: Apache License

/** {@inheritDoc} */
@Override//
@SuppressWarnings("unchecked")
@Transactional(propagation = Propagation.REQUIRED)
public List<Object> getQueryList(String str) {
    // Execute the (upper-cased) native SQL and hand back the raw result rows.
    SQLQuery nativeQuery = getSession().createSQLQuery(str.toUpperCase());
    return nativeQuery.list();
}

From source file: au.com.nicta.ct.experiment.coordinates.CtLimits.java

License: Open Source License

/**
 * Computes, per coordinate type, the min and max coordinate value observed in
 * the given experiment's images, resolves each extreme back to a concrete
 * CtCoordinates row, and saves one CtExperimentsAxes record pairing the two.
 *
 * @param e the experiment whose axis limits are (re)computed
 */
protected void insertLimits(CtExperiments e) {

    // pk_experiment is numeric, so concatenating it here is injection-safe.
    String sql = " SELECT c.fk_coordinate_type, MIN( c.value ) as limit1, MAX( c.value ) as limit2 "
            + " FROM ct_images_coordinates ic "
            + " INNER JOIN ct_coordinates c ON ic.fk_coordinate = c.pk_coordinate"
            + " INNER JOIN ct_images i ON ic.fk_image = i.pk_image" + " WHERE fk_experiment = "
            + e.getPkExperiment() //--- for efficiency only update specific expt
            + " GROUP BY i.fk_experiment, fk_coordinate_type" + " ORDER BY i.fk_experiment, fk_coordinate_type";

    Session session = CtSession.Current();

    // 2 queries: first is a native one which is tuned to efficiently get
    // the PKs of the sequence, in order..
    // Sorry - 1 + 3n queries where n is number of dimensions...
    Query query = session.createSQLQuery(sql);

    Iterator i = query.list().iterator();

    while (i.hasNext()) {

        // Row shape: [fk_coordinate_type, MIN(value), MAX(value)].
        Object[] o = (Object[]) i.next();

        int fkCoordinateType = ((Integer) o[0]).intValue();
        int limit1 = ((Integer) o[1]).intValue();
        int limit2 = ((Integer) o[2]).intValue();

        // Second query: map each extreme value back to a representative
        // coordinate PK (the min pk when several coordinates share the value).
        sql = " SELECT min( pk_coordinate ), value " + " FROM ct_images_coordinates ic "
                + " INNER JOIN ct_coordinates c ON ic.fk_coordinate = c.pk_coordinate "
                + " INNER JOIN ct_images i on ic.fk_image = i.pk_image" + " WHERE fk_experiment = "
                + e.getPkExperiment()// --- for efficiency only update specific expt
                + " AND c.fk_coordinate_type = " + fkCoordinateType + " AND (c.value = " + limit1
                + " OR c.value = " + limit2 + " ) " + " GROUP BY c.value " + " ORDER BY c.value ";

        Query query2 = session.createSQLQuery(sql);

        Iterator i2 = query2.list().iterator();

        int pkCoordinate1 = -1;
        int pkCoordinate2 = -1;

        // should be 2 rows (only 1 when limit1 == limit2):
        int row = 0;

        while (i2.hasNext()) {
            Object[] o2 = (Object[]) i2.next();

            int pkCoordinate = ((Integer) o2[0]).intValue();

            if (row == 0) {
                pkCoordinate1 = pkCoordinate;
            } else {
                pkCoordinate2 = pkCoordinate;
            }

            ++row;
        }

        CtCoordinates c1 = (CtCoordinates) session.get(CtCoordinates.class, pkCoordinate1);
        CtCoordinates c2 = null;

        if (limit1 == limit2) {
            // Degenerate axis: a single distinct value; reuse the same coordinate.
            c2 = c1;
        } else {
            c2 = (CtCoordinates) session.get(CtCoordinates.class, pkCoordinate2);
        }

        CtExperimentsAxes ea = new CtExperimentsAxes();

        ea.setCtExperiments(e);
        ea.setCtCoordinatesByFkCoordinate1(c1);
        ea.setCtCoordinatesByFkCoordinate2(c2);

        // One short transaction per axis row.
        Transaction t = session.beginTransaction();
        session.save(ea);
        //            session.persist( ea );
        t.commit(); //you might even want to wrap this in another try/catch block.
        //            session.delete( ea );
        //            session.save( ea );
        //            session.getTransaction().commit();
    }

    //        session.getTransaction().commit();
}

From source file: au.com.nicta.ct.experiment.coordinates.CtLimits.java

License: Open Source License

/**
 * Removes every ct_experiments_axes row belonging to the given experiment.
 *
 * @param e the experiment whose axis limit rows are deleted
 */
protected void deleteLimits(CtExperiments e) {

    Session session = CtSession.Current();

    session.beginTransaction();

    // pk_experiment is numeric, so concatenating it here is injection-safe.
    Query query = session
            .createSQLQuery(" delete from ct_experiments_axes where fk_experiment = " + e.getPkExperiment());

    query.executeUpdate();

    session.getTransaction().commit();
}

From source file: au.com.nicta.ct.solution.lineage.CtLineageModel.java

License: Open Source License

/**
 * Finds all ordered pairs of distinct tracks in the given solution that share
 * at least one detection (i.e. tracks that intersect), sorted by track PK.
 *
 * @param s the solution whose tracks are examined
 * @return list of (track, track) pairs that share a detection
 */
static ArrayList<CtAbstractPair<CtTracks, CtTracks>> findIntersectingTracks(CtSolutions s) {

    // working sql:
    //with tracks( pk_track, fk_detection ) AS
    //(
    // select t1.pk_track, td1.fk_detection from ct_tracks t1
    // inner join ct_tracks_detections td1 on td1.fk_track = t1.pk_track
    // where t1.fk_solution = 11
    //)
    //select distinct t1.pk_track, t2.pk_track
    //from tracks t1, tracks t2
    //where ( t2.pk_track <> t1.pk_track )
    //and ( t2.fk_detection = t1.fk_detection )
    //order by t1.pk_track, t2.pk_track
    int pkSolution = s.getPkSolution();

    //        String hql = " with tracks1( pkTrack, ctDetection ) as "
    //                   + "( "
    //                   + " select t1.pkTrack, td1.ctDetection from ctTracks t1 "
    //                   + " inner join t1.ctTracksDetectionses td1 "
    //                   + " inner join t1.ctSolutions s "
    //                   + " where s.pkSolution = " + pkSolution + " "
    //                   + ") "
    //                   + "select x1.pkTrack, x2.pkTrack "
    //                   + " from tracks1 x1, tracks1 x2 "// inner join tracks1 x2 on x1.ctDetection = x2.ctDetection "
    //                   + "where ( x2.pkTrack <> x1.pkTrack ) "
    ////                   + "and ( t2.fk_detection = t1.fk_detection ) "
    //                   + "order by x1.pkTrack, x2.pkTrack ";

    // Self-join of tracks via their detections; pkSolution is numeric, so
    // concatenating it here is injection-safe.
    String sql = " select t1.pk_track as dave, t2.pk_track as matt" + " from ct_tracks t1 "
            + " inner join ct_tracks_detections td1 on td1.fk_track = t1.pk_track " + " inner join ( "
            + " ct_tracks t2 " + " inner join ct_tracks_detections td2 on td2.fk_track = t2.pk_track "
            + " ) on td1.fk_detection = td2.fk_detection " + " where t1.fk_solution = " + pkSolution + " "
            + " and t2.fk_solution = " + pkSolution + " " + " and t2.pk_track <> t1.pk_track "
            + "order by t1.pk_track, t2.pk_track ";

    /*        String sql = " with tracks1( pk_track, fk_detection ) as "
           + "( "
           + " select t1.pk_track, td1.fk_detection from ct_tracks t1 "
           + " inner join ct_tracks_detections td1 on td1.fk_track = t1.pk_track "
           + " where t1.fk_solution = "+pkSolution+" "
           + "),"
           + " tracks2( pk_track, fk_detection ) as "
           + "( "
           + " select t1.pk_track, td1.fk_detection from ct_tracks t1 "
           + " inner join ct_tracks_detections td1 on td1.fk_track = t1.pk_track "
           + " where t1.fk_solution = "+pkSolution+" "
           + ") "
           + "select t1.pk_track, t2.pk_track "
    //                   + "from tracks t1, tracks t2 "
           + " from tracks1 t1 inner join tracks2 t2 on t1.fk_detection = t2.fk_detection "
           + "where ( t2.pk_track <> t1.pk_track ) "
    //                   + "and ( t2.fk_detection = t1.fk_detection ) "
           + "order by t1.pk_track, t2.pk_track ";*/
    //        String hql = "select t1.pkTrack,t2.pkTrack from CtTracks t1 "
    //                + " inner join t1.ctTracksDetectionses td1 "
    //                + " inner join t1.ctSolutions s "
    //                + " where s.pkSolution = '" + s.getPkSolution() + "'"
    //                + " and exists( "
    //                + " select t2.pkTrack from CtTracks t2 "
    //                + " inner join t2.ctTracksDetectionses td2 "
    //                + " inner join t2.ctSolutions s "
    //                + " where s.pkSolution = '" + s.getPkSolution() + "'"
    //                + " and t2.pkTrack <> t1.pkTrack "
    //                + " and td2.ctDetections = td1.ctDetections "
    //                + " ) ";

    Session session = CtSession.Current();
    session.beginTransaction();
    //        Query q = session.createQuery( hql );
    SQLQuery q = session.createSQLQuery(sql);
    List results = q.list();
    session.getTransaction().commit();

    ArrayList<CtAbstractPair<CtTracks, CtTracks>> al = new ArrayList<CtAbstractPair<CtTracks, CtTracks>>();

    Iterator i = results.iterator();

    while (i.hasNext()) {
        // Row shape: [t1.pk_track, t2.pk_track].
        Object[] os = (Object[]) i.next();
        Integer n1 = (Integer) os[0];
        Integer n2 = (Integer) os[1];
        //System.out.println( "n1="+n1+" n2="+n2);
        // Resolve the PKs back to entity objects.
        CtTracks t1 = (CtTracks) CtSession.getObject(CtTracks.class, n1);
        CtTracks t2 = (CtTracks) CtSession.getObject(CtTracks.class, n2);
        CtAbstractPair<CtTracks, CtTracks> ap = new CtAbstractPair<CtTracks, CtTracks>(t1, t2);
        al.add(ap);
    }

    return al;
}

From source file: au.edu.anu.metadatastores.store.people.PersonService.java

License: Open Source License

/**
 * Creates a stub of a person carrying only a generated external id, the given
 * name and surname, and the staff type "Unknown".
 *
 * @param givenName The given name of the person
 * @param surname The surname of the person
 * @return The created person object
 */
private Person createBasicPerson(String givenName, String surname) {
    Session session = StoreHibernateUtil.getSessionFactory().openSession();
    try {
        // Draw the next value from the person sequence to build the ext id.
        BigInteger nextId = (BigInteger) session.createSQLQuery("SELECT nextval('person_seq')").uniqueResult();

        Person person = new Person();
        person.setExtId("mu" + nextId.toString());
        person.setGivenName(givenName);
        person.setSurname(surname);
        person.setStaffType("Unknown");

        return person;
    } finally {
        session.close();
    }
}

From source file: au.edu.anu.metadatastores.store.publication.PublicationService.java

License: Open Source License

/**
 * Saves the publication, creating a new PublicationItem (keyed by a generated
 * sequence id) when none exists for the publication's Aries id, then syncs its
 * attributes and author relations.
 *
 * @param publication The publication to save
 * @param userUpdated Indicates whether the update is user updated
 * @return The publication item, or null when the publication has no title
 */
public PublicationItem savePublication(Publication publication, Boolean userUpdated) {
    if (publication.getTitle() == null || publication.getTitle().trim().length() == 0) {
        return null;
    }

    Session session = StoreHibernateUtil.getSessionFactory().openSession();
    try {
        session.beginTransaction();
        session.enableFilter("attributes");

        //note this may need to be updated if we retrieve publications from other systems without an aries id
        Query query = session.createQuery(
                "SELECT pi FROM PublicationItem as pi inner join pi.itemAttributes as pia WHERE pia.attrType = :attrType and pia.attrValue = :attrValue");
        query.setParameter("attrType", StoreAttributes.ARIES_ID);
        query.setParameter("attrValue", publication.getAriesId());

        PublicationItem item = (PublicationItem) query.uniqueResult();

        Date lastModified = new Date();
        ItemTraitParser parser = new ItemTraitParser();
        Item newItem = null;
        try {
            newItem = parser.getItem(publication, userUpdated, lastModified);
        } catch (Exception e) {
            LOGGER.error("Exception transforming grant to an item", e);
        }

        if (item == null) {
            // No existing record: mint a new ext id from the sequence.
            item = new PublicationItem();
            Query idQuery = session.createSQLQuery("SELECT nextval('publication_seq')");
            BigInteger id = (BigInteger) idQuery.uniqueResult();
            item.setExtId("p" + id.toString());
            item.setTitle(publication.getTitle());
            item = (PublicationItem) session.merge(item);
        } else if (publication.getTitle() != null && publication.getTitle().trim().length() > 0) {
            item.setTitle(publication.getTitle());
        }
        updateAttributesFromItem(item, newItem, session, lastModified);

        //TODO remove people who are no longer related
        // Resolve the publication's authors to existing person items.
        Item personItem = null;
        ItemRelation itemRelation = null;
        ItemRelationId id = null;
        List<Item> peopleItems = new ArrayList<Item>();
        for (Person person : publication.getAuthors()) {
            personItem = personService_.getPersonItem(person.getUid());
            if (personItem != null) {
                peopleItems.add(personItem);
            } else {
                LOGGER.error("No person found to add relation for id: {}", person.getUid());
            }
        }
        // Add an author relation for each person not already related.
        boolean hasPerson = false;
        for (Item item2 : peopleItems) {
            for (ItemRelation relation : item.getItemRelationsForIid()) {
                if (relation.getId().getRelatedIid().equals(item2.getIid())) {
                    hasPerson = true;
                    break;
                }
            }
            if (!hasPerson) {
                itemRelation = new ItemRelation();
                id = new ItemRelationId(item.getIid(), StoreProperties.getProperty("publication.author.type"),
                        item2.getIid());
                itemRelation.setId(id);
                item.getItemRelationsForIid().add(itemRelation);
            }
            hasPerson = false;
        }

        item = (PublicationItem) session.merge(item);

        session.getTransaction().commit();

        return item;
    } finally {
        session.close();
    }
}

From source file: au.edu.uts.eng.remotelabs.schedserver.dataaccess.dao.tests.RigDaoTester.java

License: Open Source License

/**
 * Test method for {@link au.edu.uts.eng.remotelabs.schedserver.dataaccess.dao.RigDao#findFreeinType(au.edu.uts.eng.remotelabs.schedserver.dataaccess.entities.RigType)}.
 * <p>
 * Persists five rigs of one type with differing online/active/in-session
 * flags and verifies that only the two genuinely free rigs are returned.
 */
@Test
public void testFindFreeinType() {
    /* Add a rig. */
    long ids[] = new long[5];
    Session ses = this.dao.getSession();
    ses.beginTransaction();
    RigType type = new RigType();
    type.setName("rig_test");
    type.setLogoffGraceDuration(600);
    ses.save(type);

    RigCapabilities caps = new RigCapabilities("a,b,c,d,e,f");
    ses.save(caps);
    ses.getTransaction().commit();

    // Rig 1: online, active, managed, not in session -> expected free.
    Rig rig = new Rig();
    rig.setName("rig_name_test_1");
    rig.setRigType(type);
    rig.setContactUrl("http://url");
    rig.setRigCapabilities(caps);
    Timestamp ts = new Timestamp(System.currentTimeMillis());
    ts.setNanos(0); // Need to trunc time as nanoseconds aren't stored in the DB.
    rig.setLastUpdateTimestamp(ts);
    rig.setOnline(true);
    rig.setActive(true);
    rig.setManaged(true);
    this.dao.persist(rig);
    ids[0] = rig.getId();

    // Rig 2: same flags as rig 1 -> expected free.
    rig = new Rig();
    rig.setName("rig_name_test_2");
    rig.setRigType(type);
    rig.setContactUrl("http://url");
    rig.setRigCapabilities(caps);
    rig.setLastUpdateTimestamp(ts);
    rig.setOnline(true);
    rig.setActive(true);
    rig.setManaged(true);
    this.dao.persist(rig);
    ids[1] = rig.getId();

    // Rig 3: in session -> not free.
    rig = new Rig();
    rig.setName("rig_name_test_3");
    rig.setRigType(type);
    rig.setContactUrl("http://url");
    rig.setRigCapabilities(caps);
    rig.setLastUpdateTimestamp(ts);
    rig.setActive(true);
    rig.setOnline(true);
    rig.setInSession(true);
    this.dao.persist(rig);
    ids[2] = rig.getId();

    // Rig 4: in session (and not flagged active) -> not free.
    rig = new Rig();
    rig.setName("rig_name_test_4");
    rig.setRigType(type);
    rig.setContactUrl("http://url");
    rig.setRigCapabilities(caps);
    rig.setLastUpdateTimestamp(ts);
    rig.setOnline(true);
    rig.setInSession(true);
    this.dao.persist(rig);
    ids[3] = rig.getId();

    // Rig 5: in session (and not flagged online) -> not free.
    rig = new Rig();
    rig.setName("rig_name_test_5");
    rig.setRigType(type);
    rig.setContactUrl("http://url");
    rig.setRigCapabilities(caps);
    rig.setLastUpdateTimestamp(ts);
    rig.setActive(true);
    rig.setInSession(true);
    this.dao.persist(rig);
    ids[4] = rig.getId();

    // Re-open the DAO so the query runs against a fresh session.
    this.dao.closeSession();
    this.dao = new RigDao();

    List<Rig> free = this.dao.findFreeinType(type);
    assertNotNull(free);
    assertEquals(2, free.size());
    Rig fr = free.get(0);
    assertTrue(fr.isActive());
    assertTrue(fr.isOnline());
    assertFalse(fr.isInSession());
    assertTrue(fr.isManaged());
    assertNull(fr.getMeta());
    fr = free.get(1);
    assertTrue(fr.isActive());
    assertTrue(fr.isOnline());
    assertFalse(fr.isInSession());
    assertTrue(fr.isManaged());
    assertNull(fr.getMeta());

    List<String> names = new ArrayList<String>(2);
    for (Rig r : free) {
        names.add(r.getName());
    }
    assertTrue(names.contains("rig_name_test_1"));
    assertTrue(names.contains("rig_name_test_2"));

    // Clean up the fixture rows; ids are numeric so concatenation is safe.
    ses = this.dao.getSession();
    ses.beginTransaction();
    for (long i : ids) {
        ses.createSQLQuery("DELETE FROM rig WHERE id=" + i).executeUpdate();
    }
    ses.createSQLQuery("DELETE FROM rig_capabilities WHERE id=" + caps.getId()).executeUpdate();
    ses.createSQLQuery("DELETE FROM rig_type WHERE id=" + type.getId()).executeUpdate();
    ses.getTransaction().commit();
}

From source file: au.org.theark.core.dao.CSVLoaderDao.java

License: Open Source License

/**
 * Calls hibernate and inserts the data into the database by executing the
 * given native SQL statement, then flushes the session.
 * <p>
 * NOTE(review): a transaction is begun but never explicitly committed in this
 * method; presumably an enclosing transaction manager commits it — confirm.
 *
 * @param statement the native SQL statement to execute (logged verbatim)
 */
private void insertIntoDatabaseByCreateSQLQuery(String statement) {
    log.info(statement);
    Session session = getSession();
    session.beginTransaction();
    session.createSQLQuery(statement).executeUpdate();
    session.flush();
    log.info("SQL insertIntoDatabase SUCCEEDED");
}

From source file: au.org.theark.core.dao.CSVLoaderDao.java

License: Open Source License

/**
 * Loads the temporary created file back into the database temporary table
 * using the MySQL {@code LOAD DATA LOCAL INFILE} statement, then counts the
 * rows actually loaded.
 *
 * @param temporaryFileName  path of the delimited file to load
 * @param databaseName       database (schema) containing the temporary table
 * @param temporaryTableName name of the temporary table to load into
 * @return the number of rows in the table after the load, or 0 on failure
 */
public int loadTempFileToDatabase(String temporaryFileName, String databaseName, String temporaryTableName) {
    int rowCount = 0;

    StringBuilder tableName = new StringBuilder();
    tableName.append(databaseName);
    tableName.append(".");
    tableName.append(temporaryTableName);

    // BUG FIX: the original condition tested temporaryTableName twice; the
    // file name — which is interpolated into the SQL — was never checked.
    if (temporaryFileName != null && temporaryTableName != null) {
        Session session = getSession();
        session.beginTransaction();
        // NOTE(review): the file and table names are concatenated into the
        // statement; they are built internally, but keep user input away from
        // them (SQL injection risk).
        StringBuilder sqlTempFileToTable = new StringBuilder();
        sqlTempFileToTable.append("LOAD DATA LOCAL INFILE '");
        sqlTempFileToTable.append(temporaryFileName);
        sqlTempFileToTable.append("' INTO TABLE ");
        sqlTempFileToTable.append(tableName.toString());
        sqlTempFileToTable.append(" FIELDS TERMINATED BY '");
        sqlTempFileToTable.append(delimiterCharacter);
        sqlTempFileToTable.append("' ENCLOSED BY '\"' ");
        sqlTempFileToTable.append("LINES TERMINATED BY '\\n' ");
        sqlTempFileToTable.append("IGNORE 1 LINES;");

        try {
            log.info("Loading data into temporary table: " + tableName);
            session.createSQLQuery(sqlTempFileToTable.toString()).executeUpdate();
            log.info("select count(*) from " + tableName);
            BigInteger rowInteger = (BigInteger) session
                    .createSQLQuery("SELECT count(*) from " + tableName.toString()).uniqueResult();
            rowCount = rowInteger.intValue();

            log.info("SQL loadTempFileToDatabase SUCCEEDED");
        } catch (JDBCException e) {
            log.error(e.getMessage());
            log.error("SQL loadTempFileToDatabase FAILED");
        } finally {
            session.flush();
        }
    }
    log.info("Rowcount: " + rowCount);

    return rowCount;
}