List of usage examples for org.hibernate SQLQuery addScalar
SQLQuery<T> addScalar(String columnAlias, Type type);

Declares a scalar result column of a native SQL query: the column identified by columnAlias is returned in each result row and converted with the given Hibernate Type.
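Before the harvested examples, here is a minimal, self-contained sketch of the call pattern. It is illustrative only: the person table, its full_name and age columns, and the AddScalarSketch class are hypothetical, and the non-generic SQLQuery assignment plus the org.hibernate.type classes assume a Hibernate 4.x-style API (the GBIF examples below use the Hibernate 3 constants such as Hibernate.STRING, while the generationcp examples use the same org.hibernate.type classes).

import java.util.List;

import org.hibernate.SQLQuery;
import org.hibernate.Session;
import org.hibernate.type.IntegerType;
import org.hibernate.type.StringType;

public class AddScalarSketch {

    // Hypothetical helper: selects two columns from a native query and maps
    // each one to an explicit Java type via addScalar.
    @SuppressWarnings("unchecked")
    public List<Object[]> listNamesAndAges(Session session) {
        SQLQuery query = session.createSQLQuery("select full_name, age from person");
        query.addScalar("full_name", new StringType()); // column -> java.lang.String
        query.addScalar("age", new IntegerType());      // column -> java.lang.Integer
        // Each row comes back as Object[]{String, Integer}; columns not declared
        // with addScalar are omitted from the result.
        return query.list();
    }
}

Declaring the scalars up front also spares Hibernate from inspecting the JDBC ResultSetMetaData to guess column types, which is why most of the examples below list every selected alias.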
From source file:org.gbif.portal.dao.resources.impl.hibernate.DataProviderDAOImpl.java
License:Open Source License
/**
 * @see org.gbif.portal.dao.resources.DataProviderDAO#getRolloverDates()
 */
@SuppressWarnings("unchecked")
public List<Date> getRolloverDates() {
    HibernateTemplate template = getHibernateTemplate();
    List<Date> results = (List) template.execute(new HibernateCallback() {
        public Object doInHibernate(Session session) {
            SQLQuery query = session
                    .createSQLQuery("select rollover_date from rollover order by rollover_date desc");
            query.addScalar("rollover_date", Hibernate.DATE);
            return query.list();
        }
    });
    return results;
}
From source file:org.gbif.portal.dao.resources.impl.hibernate.DataProviderDAOImpl.java
License:Open Source License
@SuppressWarnings("unchecked") public List<String> getProviderTypeCounts() { HibernateTemplate template = getHibernateTemplate(); List<Object[]> providerTypeCounts = (List<Object[]>) getHibernateTemplate() .execute(new HibernateCallback() { public Object doInHibernate(Session session) { SQLQuery query = session.createSQLQuery( "SELECT provider_type, count FROM stats_provider_type_species_counts"); query.setCacheable(true); query.addScalar("provider_type", Hibernate.STRING); query.addScalar("count", Hibernate.INTEGER); return query.list(); }/*from w w w .jav a 2s . c o m*/ }); ArrayList<String> providerC = new ArrayList<String>(); for (Object[] result : providerTypeCounts) { providerC.add(result[0].toString() + "|" + result[1].toString()); } return providerC; }
From source file:org.gbif.portal.dao.resources.impl.hibernate.DataProviderDAOImpl.java
License:Open Source License
@SuppressWarnings("unchecked") public List<String> getOcurrencePerMonthAccumulativeCounts() { HibernateTemplate template = getHibernateTemplate(); List<Object[]> monthAccumulativeCounts = (List<Object[]>) getHibernateTemplate() .execute(new HibernateCallback() { public Object doInHibernate(Session session) { SQLQuery query = session.createSQLQuery( "SELECT year, month, accumulative FROM stats_month_counts where count > 0"); query.setCacheable(true); query.addScalar("year", Hibernate.INTEGER); query.addScalar("month", Hibernate.INTEGER); query.addScalar("accumulative", Hibernate.INTEGER); return query.list(); }// w w w . j av a 2 s. c o m }); ArrayList<String> monthC = new ArrayList<String>(); for (Object[] result : monthAccumulativeCounts) { monthC.add(result[0].toString() + "-" + result[1].toString() + "|" + result[2].toString()); } return monthC; }
From source file:org.gbif.portal.dao.resources.impl.hibernate.DataProviderDAOImpl.java
License:Open Source License
@SuppressWarnings("unchecked") public List<String> getOcurrencePerMonthTriCounts() { HibernateTemplate template = getHibernateTemplate(); List<Object[]> monthTriCounts = (List<Object[]>) getHibernateTemplate().execute(new HibernateCallback() { public Object doInHibernate(Session session) { SQLQuery query = session .createSQLQuery("SELECT tri, count FROM stats_tri_month_counts where count > 0"); query.setCacheable(true);// www. java2s .c o m query.addScalar("tri", Hibernate.STRING); query.addScalar("count", Hibernate.INTEGER); return query.list(); } }); ArrayList<String> monthTC = new ArrayList<String>(); for (Object[] result : monthTriCounts) { monthTC.add(result[0].toString() + "|" + result[1].toString()); } return monthTC; }
From source file:org.gbif.portal.dao.resources.impl.hibernate.DataResourceDAOImpl.java
License:Open Source License
/**
 * @param dataResourceId
 * @param geoRefOnly
 * @return
 */
@SuppressWarnings("unchecked")
public List<Object[]> getCountryCountsForDataResource(final Long dataResourceId, final boolean geoRefOnly) {
    HibernateTemplate template = getHibernateTemplate();
    return (List) template.execute(new HibernateCallback() {
        public Object doInHibernate(Session session) {
            StringBuffer sb = new StringBuffer(
                    "select c.id as country_id, c.iso_country_code as iso_country_code, c.region as region,");
            if (geoRefOnly) {
                sb.append(" rc.occurrence_coordinate_count as the_count");
            } else {
                sb.append(" rc.count as the_count");
            }
            sb.append(" from resource_country rc inner join country c on c.iso_country_code=rc.iso_country_code");
            if (dataResourceId != null) {
                sb.append(" where rc.data_resource_id=:dataResourceId ");
            }
            if (geoRefOnly) {
                sb.append(" and rc.occurrence_coordinate_count>0");
            }
            sb.append(" order by c.iso_country_code");
            SQLQuery query = session.createSQLQuery(sb.toString());
            if (dataResourceId != null)
                query.setParameter("dataResourceId", dataResourceId);
            query.setCacheable(true);
            query.addScalar("country_id", Hibernate.LONG);
            query.addScalar("iso_country_code", Hibernate.STRING);
            query.addScalar("region", Hibernate.STRING);
            query.addScalar("the_count", Hibernate.INTEGER);
            return query.list();
        }
    });
}
From source file:org.gbif.portal.dao.taxonomy.impl.hibernate.TaxonConceptDAOImpl.java
License:Open Source License
@SuppressWarnings("unchecked") public List<String> getTaxonConceptCounts() { HibernateTemplate template = getHibernateTemplate(); List<Object[]> taxonCounts = (List<Object[]>) getHibernateTemplate().execute(new HibernateCallback() { public Object doInHibernate(Session session) { SQLQuery query = session.createSQLQuery("select taxon_name, count from stats_taxon_concept_counts"); query.setCacheable(true);//from www .j a va2s. c o m query.addScalar("taxon_name", Hibernate.STRING); query.addScalar("count", Hibernate.INTEGER); return query.list(); } }); ArrayList<String> taxaC = new ArrayList<String>(); for (Object[] result : taxonCounts) { taxaC.add(result[0].toString() + "|" + result[1].toString()); } return taxaC; }
From source file:org.generationcp.middleware.dao.dms.DmsProjectDao.java
License:Open Source License
// Loads the study instances of a dataset, mapping each aliased column to an explicit Hibernate type before building StudyInstance objects.
public List<StudyInstance> getDatasetInstances(final int datasetId) {
    try {
        final String sql = "select \n"
                + " geoloc.nd_geolocation_id as INSTANCE_DBID, \n"
                + " max(if(geoprop.type_id = 8190, loc.locid, null)) as LOCATION_ID, \n" // 8190 = cvterm for LOCATION_ID
                + " max(if(geoprop.type_id = 8190, loc.lname, null)) as LOCATION_NAME, \n"
                + " max(if(geoprop.type_id = 8190, loc.labbr, null)) as LOCATION_ABBR, \n" // 8189 = cvterm for LOCATION_ABBR
                + " max(if(geoprop.type_id = 8189, geoprop.value, null)) as CUSTOM_LOCATION_ABBR, \n" // 8189 = cvterm for CUSTOM_LOCATION_ABBR
                + " max(if(geoprop.type_id = 8583, geoprop.value, null)) as FIELDMAP_BLOCK, \n" // 8583 = cvterm for BLOCK_ID (meaning instance has fieldmap)
                + " geoloc.description as INSTANCE_NUMBER \n"
                + " from \n"
                + " nd_geolocation geoloc \n"
                + " inner join nd_experiment nde on nde.nd_geolocation_id = geoloc.nd_geolocation_id \n"
                + " inner join project proj on proj.project_id = nde.project_id \n"
                + " left outer join nd_geolocationprop geoprop on geoprop.nd_geolocation_id = geoloc.nd_geolocation_id \n"
                + " left outer join location loc on geoprop.value = loc.locid and geoprop.type_id = 8190 \n"
                + " where proj.project_id = :datasetId \n"
                + " group by geoloc.nd_geolocation_id \n"
                + " order by (1 * geoloc.description) asc ";
        final SQLQuery query = this.getSession().createSQLQuery(sql);
        query.setParameter("datasetId", datasetId);
        query.addScalar("INSTANCE_DBID", new IntegerType());
        query.addScalar("LOCATION_ID", new IntegerType());
        query.addScalar("LOCATION_NAME", new StringType());
        query.addScalar("LOCATION_ABBR", new StringType());
        query.addScalar("CUSTOM_LOCATION_ABBR", new StringType());
        query.addScalar("FIELDMAP_BLOCK", new StringType());
        query.addScalar("INSTANCE_NUMBER", new IntegerType());
        final List queryResults = query.list();
        final List<StudyInstance> instances = new ArrayList<>();
        for (final Object result : queryResults) {
            final Object[] row = (Object[]) result;
            final boolean hasFieldmap = !StringUtils.isEmpty((String) row[5]);
            final StudyInstance instance = new StudyInstance((Integer) row[0], (Integer) row[1], (String) row[2],
                    (String) row[3], (Integer) row[6], (String) row[4], hasFieldmap);
            instances.add(instance);
        }
        return instances;
    } catch (final HibernateException he) {
        throw new MiddlewareQueryException("Unexpected error in executing getDatasetInstances(datasetId = "
                + datasetId + ") query: " + he.getMessage(), he);
    }
}
From source file:org.generationcp.middleware.dao.dms.ExperimentDao.java
License:Open Source License
private SQLQuery createQueryAndAddScalar(final List<MeasurementVariableDto> selectionMethodsAndTraits,
        final String observationUnitTableQuery) {
    final SQLQuery query = this.getSession().createSQLQuery(observationUnitTableQuery);
    query.addScalar(ExperimentDao.OBS_UNIT_ID, new StringType());
    query.addScalar(ExperimentDao.ND_EXPERIMENT_ID);
    this.addScalarForTraits(selectionMethodsAndTraits, query, true);
    return query;
}
From source file:org.generationcp.middleware.dao.dms.ExperimentDao.java
License:Open Source License
// Collects phenotype values per experiment and variable, using AliasToEntityMapResultTransformer so each row comes back as a column-name-to-value map.
public Map<Integer, Map<String, List<Object>>> getValuesFromObservations(final int studyId,
        final List<Integer> datasetTypeIds, final Map<Integer, Integer> inputVariableDatasetMap) {
    final StringBuilder queryString = new StringBuilder("SELECT \n"
            + "CASE WHEN e.parent_id IS NULL THEN e.nd_experiment_id ELSE e.parent_id END as `experimentId`,\n"
            + "p.observable_id as `variableId`, \n"
            + "p.value \n"
            + "FROM nd_experiment e \n"
            + "INNER JOIN project proj ON proj.project_id = e.project_id AND proj.study_id = :studyId \n"
            + "INNER JOIN phenotype p ON p.nd_experiment_id = e.nd_experiment_id \n"
            + "WHERE proj.dataset_type_id IN (:datasetTypeIds) ");
    if (!inputVariableDatasetMap.isEmpty()) {
        queryString.append("AND (");
        final Iterator<Map.Entry<Integer, Integer>> iterator = inputVariableDatasetMap.entrySet().iterator();
        while (iterator.hasNext()) {
            final Map.Entry<Integer, Integer> entry = iterator.next();
            queryString.append(String.format("(p.observable_id = %s AND e.project_id = %s %n)", entry.getKey(),
                    entry.getValue()));
            if (iterator.hasNext()) {
                queryString.append(" OR ");
            } else {
                queryString.append(") \n");
            }
        }
    }
    queryString.append("ORDER BY `experimentId`, `variableId` ;");
    final SQLQuery q = this.getSession().createSQLQuery(queryString.toString());
    q.addScalar("experimentId", new IntegerType());
    q.addScalar("variableId", new StringType());
    q.addScalar("value", new StringType());
    q.setParameter("studyId", studyId);
    q.setParameterList("datasetTypeIds", datasetTypeIds);
    q.setResultTransformer(AliasToEntityMapResultTransformer.INSTANCE);
    final List<Map<String, Object>> results = q.list();
    final Map<Integer, Map<String, List<Object>>> map = new HashMap<>();
    for (final Map<String, Object> row : results) {
        final Integer experimentId = (Integer) row.get("experimentId");
        final String variableId = (String) row.get("variableId");
        final Object value = row.get("value");
        if (!map.containsKey(experimentId)) {
            map.put(experimentId, new HashMap<String, List<Object>>());
        }
        if (!map.get(experimentId).containsKey(variableId)) {
            map.get(experimentId).put(variableId, new ArrayList<Object>());
        }
        // Group values per variable and experimentId.
        map.get(experimentId).get(variableId).add(value);
    }
    return map;
}
From source file:org.generationcp.middleware.dao.dms.ExperimentPropertyDao.java
License:Open Source License
@SuppressWarnings("unchecked") public List<FieldMapDatasetInfo> getFieldMapLabels(final int projectId) { List<FieldMapDatasetInfo> datasets = null; try {/*from ww w .ja v a 2 s.c om*/ final String order = projectId > 0 ? "ASC" : "DESC"; final StringBuilder sql = new StringBuilder().append(" SELECT ").append(" nde.project_id AS datasetId ") .append(" , proj.name AS datasetName ").append(" , geo.nd_geolocation_id AS geolocationId ") .append(" , site.value AS siteName ").append(" , nde.nd_experiment_id AS experimentId ") .append(" , s.uniqueName AS entryNumber ").append(" , s.name AS germplasmName ") .append(" , epropRep.value AS rep ").append(" , epropPlot.value AS plotNo ") .append(" , row.value AS row ").append(" , col.value AS col ") .append(" , blk.value AS block_id ").append(" , inst.description AS trialInstance ") //Casting inst.description to signed for natural sort .append(" , CAST(inst.description as SIGNED) AS casted_trialInstance") .append(" , st.name AS studyName ").append(" , s.dbxref_id AS gid ") .append(" , st.start_date as startDate ").append(" , gpSeason.value as season ") .append(" , siteId.value AS siteId").append(" , epropBlock.value AS blockNo ") .append(" , ldp.group_name AS pedigree ").append(" , geo.obs_unit_id as obsUnitId ") .append(" FROM ").append(" nd_experiment nde ") .append(" INNER JOIN project proj on proj.project_id = nde.project_id ") .append(" INNER JOIN project st ON st.project_id = proj.study_id ") .append(" INNER JOIN stock s ON s.stock_id = nde.stock_id ") .append(" LEFT JOIN nd_experimentprop epropRep ON nde.nd_experiment_id = epropRep.nd_experiment_id ") .append(" AND epropRep.type_id = " + TermId.REP_NO.getId()) // 8210 .append(" AND epropRep.value IS NOT NULL AND epropRep.value <> '' ") .append(" LEFT JOIN nd_experimentprop epropBlock ON nde.nd_experiment_id = epropBlock.nd_experiment_id ") .append(" AND epropBlock.type_id = " + TermId.BLOCK_NO.getId()) // 8220 .append(" AND epropBlock.value IS NOT NULL AND epropBlock.value <> '' ") .append(" INNER JOIN nd_experimentprop epropPlot ON nde.nd_experiment_id = epropPlot.nd_experiment_id ") .append(" AND epropPlot.type_id IN (" + TermId.PLOT_NO.getId() + ", " + TermId.PLOT_NNO.getId() + ") ") // 8200, 8380 .append(" AND epropPlot.value IS NOT NULL AND epropPlot.value <> '' ") .append(" INNER JOIN nd_experiment geo ON nde.nd_experiment_id = geo.nd_experiment_id ") .append(" AND geo.type_id = ").append(TermId.PLOT_EXPERIMENT.getId()) .append(" INNER JOIN nd_geolocation inst ON geo.nd_geolocation_id = inst.nd_geolocation_id ") .append(" LEFT JOIN nd_geolocationprop site ON geo.nd_geolocation_id = site.nd_geolocation_id ") .append(" AND site.type_id = ").append(TermId.TRIAL_LOCATION.getId()) .append(" LEFT JOIN nd_geolocationprop siteId ON siteId.nd_geolocation_id = geo.nd_geolocation_id ") .append(" AND siteId.type_id = ").append(TermId.LOCATION_ID.getId()) .append(" LEFT JOIN nd_geolocationprop blk ON blk.nd_geolocation_id = geo.nd_geolocation_id ") .append(" AND blk.type_id = ").append(TermId.BLOCK_ID.getId()) .append(" LEFT JOIN nd_experimentprop row ON row.nd_experiment_id = nde.nd_experiment_id ") .append(" AND row.type_id = ").append(TermId.RANGE_NO.getId()) .append(" LEFT JOIN nd_experimentprop col ON col.nd_experiment_id = nde.nd_experiment_id ") .append(" AND col.type_id = ").append(TermId.COLUMN_NO.getId()) .append(" LEFT JOIN nd_geolocationprop gpSeason ON geo.nd_geolocation_id = gpSeason.nd_geolocation_id ") .append(" AND gpSeason.type_id = ").append(TermId.SEASON_VAR.getId()).append(" ") // -- 
8371 (2452) .append(" LEFT JOIN listnms lnms ON lnms.projectid = st.project_id AND lnms.listtype in ('STUDY')") .append(" LEFT JOIN listdata_project ldp on ldp.list_id = lnms.listid AND ldp.entry_id = s.uniqueName AND ldp.germplasm_id = s.dbxref_id") .append(" WHERE st.project_id = :studyId") .append(" ORDER BY casted_trialInstance, inst.description, nde.nd_experiment_id ") .append(order); final SQLQuery query = this.getSession().createSQLQuery(sql.toString()); query.addScalar("datasetId").addScalar("datasetName").addScalar("geolocationId").addScalar("siteName") .addScalar("experimentId").addScalar("entryNumber").addScalar("germplasmName").addScalar("rep") .addScalar("plotNo").addScalar("row").addScalar("col").addScalar("block_id") .addScalar("trialInstance").addScalar("studyName").addScalar("gid").addScalar("startDate") .addScalar("season").addScalar("siteId").addScalar("blockNo").addScalar("pedigree") .addScalar("obsUnitId", Hibernate.STRING); query.setParameter("studyId", projectId); final List<Object[]> list = query.list(); if (list != null && !list.isEmpty()) { datasets = this.createFieldMapDatasetInfo(list); } } catch (final HibernateException e) { final String message = "Error at getFieldMapLabels(projectId=" + projectId + ") at ExperimentPropertyDao: " + e.getMessage(); ExperimentPropertyDao.LOG.error(message, e); throw new MiddlewareQueryException(message, e); } return datasets; }