List of usage examples for org.hibernate SQLQuery setLong
@Deprecated @SuppressWarnings("unchecked") default Query<R> setLong(int position, long val) — deprecated since Hibernate 5.2; the recommended replacement is setParameter(int, Object).
From source file:com.tysanclan.site.projectewok.util.forum.ShadowForumViewContext.java
License:Open Source License
/**
 * Loads one page of posts for a thread. Posts flagged as "shadow" are only
 * visible to their own poster; everything else is visible to the viewer.
 *
 * @param sess    Hibernate session used to run the native query
 * @param context the thread whose posts are listed
 * @param viewer  the user viewing the thread (controls shadow visibility)
 * @param offset  number of rows to skip (paging)
 * @param count   maximum number of rows to return (paging)
 * @return the requested page of posts, oldest first
 */
@Override
public List<ForumPost> getPosts(Session sess, ForumThread context, User viewer, long offset, long count) {
    String sql = "SELECT * FROM FORUMPOST FP WHERE (fp.shadow = false OR fp.poster_id = :viewer)"
            + " AND fp.thread_id = :thread "
            + "ORDER BY time ASC LIMIT :count OFFSET :offset";
    SQLQuery query = sess.createSQLQuery(sql);
    // Bind every named parameter referenced by the statement above.
    query.setLong("viewer", viewer.getId());
    query.setLong("thread", context.getId());
    query.setLong("offset", offset);
    query.setLong("count", count);
    query.addEntity(ForumPost.class);
    return listOf(query);
}
From source file:com.viettel.hqmc.DAOHE.FeePaymentInfoDAOHE.java
/** * danh sach hoa don doanh nghiep/*from w ww. j a v a2 s .c om*/ * * @param fileId * @param start * @param count * @param sortField * @return */ public GridResult getLstPayment(Long fileId, int start, int count, String sortField) { // String sql = "from Fee f left join Fee_Payment_Info fpi on f.fee_Id = fpi.fee_Id where f.fee_Id " // + "in (select fp.fee_Id from Fee_Procedure fp where fp.procedure_Id = ((select fi.file_Type from Files fi where fi.file_Id = ?)) union " // + "select ff.fee_Id from Fee_File ff where ff.file_Id = ? ) and f.is_Active = 1"; String sql = "from fee f inner join fee_payment_info fpi on f.fee_id = fpi.fee_id where fpi.file_id = ? and f.is_Active=1 and fpi.is_Active=1"; SQLQuery countQuery = (SQLQuery) getSession().createSQLQuery("select count(*) " + sql); SQLQuery query = (SQLQuery) getSession().createSQLQuery( "select f.fee_Id,f.fee_Name,f.description,fpi.cost,f.fee_Type,fpi.status,fpi.fee_Payment_Type_Id, f.price,fpi.payment_Person,fpi.payment_Date,fpi.payment_Info,fpi.bill_path " + sql); query.setLong(0, fileId); //query.setLong(1, fileId); countQuery.setParameter(0, fileId); //countQuery.setParameter(1, fileId); query.setFirstResult(start); query.setMaxResults(count); int total = Integer.parseInt(countQuery.uniqueResult().toString()); List lstResult = query.list(); FeePaymentFileForm item = new FeePaymentFileForm(); List result = new ArrayList<FeePaymentFileForm>(); for (int i = 0; i < lstResult.size(); i++) { Object[] row = (Object[]) lstResult.get(i); if (row.length > 0) { if (row[0] != null && !"".equals(row[0])) { item.setFeeId(Long.parseLong(row[0].toString())); } if (row[1] != null && !"".equals(row[1])) { item.setFeeName(row[1].toString()); } if (row[2] != null && !"".equals(row[2])) { item.setDescription(row[2].toString()); } if (row[7] != null && !"".equals(row[7])) { item.setPrice(Long.parseLong(row[7].toString())); } if (row[4] != null && !"".equals(row[4])) { item.setFeeType(Long.parseLong(row[4].toString())); } if (row[5] 
!= null && !"".equals(row[5])) { item.setStatus(Long.parseLong(row[5].toString())); } if (row[6] != null && !"".equals(row[6])) { item.setFeePaymentType(Long.parseLong(row[6].toString())); } if (row[8] != null && !"".equals(row[8])) { item.setPaymentPerson(row[8].toString()); } if (row[9] != null && !"".equals(row[9])) { item.setPaymentDate(row[9].toString()); } if (row[10] != null && !"".equals(row[10])) { item.setPaymentInfo(row[10].toString()); } if (row[11] != null && !"".equals(row[11])) { item.setBillPath(row[11].toString()); } } result.add(item); item = new FeePaymentFileForm(); } GridResult gr = new GridResult(total, result); return gr; }
From source file:de.csw.expertfinder.expertise.ExpertiseModel.java
License:Open Source License
/**
 * Returns the TF/IDF weighting for the given word in the given document.
 * Does no word normalization in terms of lemmatization or stemming! If
 * normalization is needed, it has to be done on the word before calling
 * this method.
 *
 * @param documentId the id of the document
 * @param word       the word (lemma or stem)
 * @return the TF/IDF
 */
public double getTFIDFWeight(Long documentId, String word) {
    persistenceStore.beginTransaction();

    // How often the word occurs (non-deleted) in this document.
    SQLQuery query = persistenceStore.createSQLQuery("select count(*) as count from ( "
            + "select word from word w, revision r "
            + "where w.id_revision_created = r.id "
            + "and w.id_revision_deleted is null "
            + "and r.id_document = :documentId "
            + "and word = :word) words");
    query.setLong("documentId", documentId).setString("word", word);
    query.addScalar("count", Hibernate.INTEGER);
    int wordDocumentFreq = (Integer) query.uniqueResult();

    // Total number of non-deleted words in this document.
    query = persistenceStore.createSQLQuery("select count(*) as count from ( "
            + "select word from word w, revision r "
            + "where w.id_revision_created = r.id "
            + "and w.id_revision_deleted is null "
            + "and r.id_document = :documentId) words");
    query.setLong("documentId", documentId);
    query.addScalar("count", Hibernate.INTEGER);
    int allDocumentFreq = (Integer) query.uniqueResult();

    // Total number of documents in the corpus.
    query = persistenceStore.createSQLQuery("select count(*) as count from document");
    query.addScalar("count", Hibernate.INTEGER);
    int documentCount = (Integer) query.uniqueResult();

    // Number of documents in which the word occurs (non-deleted).
    query = persistenceStore.createSQLQuery("select count(*) as count from ( "
            + " select distinct d.id from document d, revision rc, word w "
            + " where w.word=:word "
            + " and w.id_revision_created = rc.id "
            + " and w.id_revision_deleted is null "
            + " and rc.id_document = d.id) word");
    query.setString("word", word);
    query.addScalar("count", Hibernate.INTEGER);
    int wordCorpusFreq = (Integer) query.uniqueResult();

    persistenceStore.endTransaction();

    // NOTE(review): if allDocumentFreq or wordCorpusFreq is 0 this yields
    // NaN/Infinity — presumably callers only pass words known to occur; confirm.
    double tf = (double) wordDocumentFreq / (double) allDocumentFreq;
    double idf = Math.log((double) documentCount / (double) wordCorpusFreq);
    return tf * idf;
}
From source file:de.csw.expertfinder.persistence.PersistenceStoreFacade.java
License:Open Source License
/**
 * Returns all distinct words as a set of Strings for a given document.
 * A word is included when it was created in some revision of the document
 * and has no deleting revision.
 *
 * @param document the document whose current bag of words is wanted
 * @return all distinct words as a list of Strings for the given document
 */
public List<String> getBagOfWordsForLatestRevision(Document document) {
    Session session = sessionFactory.getCurrentSession();
    String sql = "select distinct word from word w, revision r "
            + "where w.id_revision_created = r.id "
            + "and w.id_revision_deleted is null "
            + "and r.id_document = :documentId";
    SQLQuery query = session.createSQLQuery(sql);
    query.setLong("documentId", document.getId());
    query.addScalar("word", Hibernate.STRING);
    return query.list();
}
From source file:de.csw.expertfinder.persistence.PersistenceStoreFacade.java
License:Open Source License
/** * /*from w ww.j a va2s . co m*/ * @param concept * @param author * @return */ public List<Object[]> getContributionForConcept(long conceptId, long authorId) { Session session = sessionFactory.getCurrentSession(); SQLQuery query = session.createSQLQuery( "select d.id as documentId, rc.count as rcCount, rd.count as rdCount, rd.id_author as deletor " + "from word w " + "join revision rc " + "on w.id_revision_created = rc.id " + "and rc.id_author = :authorId " + "join document d " + "on rc.id_document = d.id " + "left outer join revision rd " + "on w.id_revision_deleted = rd.id " + "where w.id_concept = :conceptId"); query.setLong("authorId", authorId); query.setLong("conceptId", conceptId); query.addScalar("documentId", Hibernate.LONG); query.addScalar("rcCount", Hibernate.LONG); query.addScalar("rdCount", Hibernate.LONG); query.addScalar("deletor", Hibernate.LONG); return query.list(); }
From source file:de.csw.expertfinder.persistence.PersistenceStoreFacade.java
License:Open Source License
/**
 * Gets all contributions by a given author to a given concept by section
 * (or document, because the top level section spans and has the same title
 * as the document itself).<br/>
 * For performance reasons, this method does not return an object graph but
 * a list of arrays of ids and other numbers (see returns section).
 *
 * @return A list of Object[] arrays. The items in each object array are:<br/>
 *         0: the document id (Long)<br/>
 *         1: the section id (Long)<br/>
 *         2: the section level (Integer)<br/>
 *         3: the id of the revision where the contribution was added (Long)<br/>
 *         4: the id of the revision where the contribution was deleted or
 *         null if the contribution has not been deleted (Long)<br/>
 *         5: the id of the author who has deleted the contribution or null
 *         if the contribution has not been deleted (Long)<br/>
 *         6: a similarity value between 0.0 and 1.0 if a section could not
 *         be mapped to any concept but one of its parent sections or the
 *         document itself (Double). If one or more concepts could be found
 *         for this section, this value is null.<br/>
 */
@SuppressWarnings("unchecked")
public List<Object[]> getContributionsToSectionsWithConceptForAuthor(Concept concept, Author author) {
    Session session = sessionFactory.getCurrentSession();
    SQLQuery query = session.createSQLQuery(
            "select d.id as documentId, s.id as sectionId, s.level as sectionLevel, revCreated.count as revisionCreated, revDeleted.count as revisionDeleted, a.id as deletor, sc.similarity as similarity "
                    + "from word w "
                    + "join revision revCreated "
                    + " on w.id_revision_created = revCreated.id "
                    + " and revCreated.id_author = :authorId "
                    + "left outer join revision revDeleted "
                    + " on w.id_revision_deleted = revDeleted.id "
                    + "left outer join author a "
                    + " on revDeleted.id_author = a.id "
                    + "join section s "
                    + " on w.id_section = s.id "
                    + "join section_has_concept sc "
                    + " on sc.id_section = s.id "
                    + " and sc.id_concept = :conceptId "
                    + "join document d "
                    + " on d.id = revCreated.id_document "
                    + "where w.id_concept is null "
                    + "group by word, sectionId, revisionCreated, revisionDeleted "
                    + "order by documentId, sectionId, revisionCreated, revisionDeleted");
    // Bind parameters first, then declare the scalar result columns one by one.
    query.setLong("conceptId", concept.getId());
    query.setLong("authorId", author.getId());
    query.addScalar("documentId", Hibernate.LONG);
    query.addScalar("sectionId", Hibernate.LONG);
    query.addScalar("sectionLevel", Hibernate.INTEGER);
    query.addScalar("revisionCreated", Hibernate.LONG);
    query.addScalar("revisionDeleted", Hibernate.LONG);
    query.addScalar("deletor", Hibernate.LONG);
    query.addScalar("similarity", Hibernate.DOUBLE);
    return (List<Object[]>) query.list();
}
From source file:es.emergya.bbdd.dao.RecursoHome.java
License:Open Source License
/**
 * Points the given resource's last-GPS foreign key at the given historic
 * GPS record via a native UPDATE.
 *
 * NOTE(review): readOnly=true on a method that executes an UPDATE looks
 * contradictory — confirm the transaction settings are intended.
 */
@Transactional(readOnly = true, rollbackFor = Throwable.class, propagation = Propagation.REQUIRED)
public void updateLastGpsSQL(HistoricoGPS historicoGPS, BigInteger idRecurso) {
    SQLQuery update = getSession()
            .createSQLQuery("update recursos set fk_historico_gps = :HISTORICO where x_recurso = :ID");
    // Bind both named parameters, then execute the statement.
    update.setLong("HISTORICO", historicoGPS.getId());
    update.setBigInteger("ID", idRecurso);
    update.executeUpdate();
}
From source file:fr.gael.dhus.database.dao.FileScannerDao.java
License:Open Source License
@Override public void delete(final FileScanner scanner) { getHibernateTemplate().execute(new HibernateCallback<Void>() { @Override//from ww w. j a va2 s .co m public Void doInHibernate(Session session) throws HibernateException, SQLException { String sql = "DELETE FROM FILE_SCANNER_PREFERENCES " + "WHERE FILE_SCANNER_ID = ?"; SQLQuery query = session.createSQLQuery(sql); query.setLong(0, scanner.getId()); query.executeUpdate(); return null; } }); super.delete(scanner); }
From source file:fr.mcc.ginco.services.MistralRevServiceImpl.java
License:CeCILL license
/**
 * Writes a temporary text file listing, per thesaurus, all revision events
 * recorded after the given timestamp, and returns that file.
 *
 * Fix: the BufferedWriter was previously closed only on the success path and
 * leaked whenever an exception occurred mid-write. It is now managed with
 * try-with-resources (close() also flushes, so the explicit flush is gone).
 *
 * @param timestamp only revisions strictly newer than this are reported
 * @param language  language used when rendering the per-thesaurus events
 * @return the generated temp file (deleted on JVM exit)
 * @throws IOException declared for interface compatibility
 */
@Override
public File getAllRevisions(long timestamp, Language language) throws IOException {
    SQLQuery query = sessionFactory.getCurrentSession()
            .createSQLQuery("SELECT DISTINCT THESAURUSID FROM REVINFO WHERE REVTSTMP > :pdate");
    query.setLong("pdate", timestamp);
    List<String> allThesaurusId = query.list();
    try {
        File res = File.createTempFile("pattern", ".suffix");
        res.deleteOnExit();
        try (BufferedWriter out = new BufferedWriter(new FileWriter(res))) {
            for (String thesaurusId : allThesaurusId) {
                // Only report thesauri that are known to (and listed by) the service.
                if (thesaurusId != null && thesaurusService.getThesaurusList()
                        .contains(thesaurusService.getThesaurusById(thesaurusId))) {
                    List<CommandLine> eventsByThesaurus = getEventsByThesaurus(thesaurusId, timestamp, language);
                    if (!eventsByThesaurus.isEmpty()) {
                        out.write("-----------------------------------------");
                        out.newLine();
                        out.write(thesaurusService.getThesaurusById(thesaurusId).getTitle());
                        out.newLine();
                        out.write(thesaurusId);
                        out.newLine();
                        out.write("-----------------------------------------");
                        out.newLine();
                        for (CommandLine line : eventsByThesaurus) {
                            out.write(line.toString());
                            out.newLine();
                        }
                        out.newLine();
                    }
                }
            }
        }
        return res;
    } catch (IOException e) {
        throw new TechnicalException("Error writing audit log file", e);
    }
}
From source file:gov.nih.nci.caarray.security.AuthorizationManagerExtensions.java
License:BSD License
/**
 * Checks, via the canonical CSM tables, whether the given user holds the
 * given privilege on the protection element identified by
 * (className, attributeName, value) within the given application.
 *
 * @return true if at least one matching protection element row is found
 */
private static boolean checkPermissionWithCanonicalTable(String userName, String className,
        String attributeName, String value, String privilegeName, Application application, Session s)
        throws CSException {
    final String sql = " select pe.protection_element_id from csm_protection_element pe "
            + "inner join csm_pg_pe pgpe on pe.protection_element_id = pgpe.protection_element_id "
            + "inner join csm_user_group_role_pg ugrpg on pgpe.protection_group_id = ugrpg.protection_group_id "
            + "inner join csm_role r on ugrpg.role_id = r.role_id "
            + "inner join csm_user_group ug on ugrpg.group_id = ug.group_id "
            + "inner join csm_role_privilege rp on r.role_id = rp.role_id "
            + "inner join csm_privilege p on rp.privilege_id = p.privilege_id "
            + "inner join csm_user u on ug.user_id = u.user_id "
            + "where pe.object_id = :class_name and pe.attribute = :attr_name "
            + "and pe.attribute_value = :attr_value and u.login_name = :login_name "
            + "and p.privilege_name= :priv_name and pe.application_id = :app_id";
    SQLQuery query = s.createSQLQuery(sql);
    // Bind all named parameters in one fluent chain.
    query.setString("class_name", className)
            .setString("attr_name", attributeName)
            .setString("attr_value", value)
            .setString("login_name", userName)
            .setString("priv_name", privilegeName);
    query.setLong("app_id", application.getApplicationId());
    // The permission exists iff the join produced at least one row.
    return !query.list().isEmpty();
}