Example usage for org.hibernate SQLQuery addScalar

List of usage examples for org.hibernate SQLQuery addScalar

Introduction

On this page you can find example usage of org.hibernate SQLQuery addScalar.

Prototype

SQLQuery<T> addScalar(String columnAlias, Type type);

Document

Declare a scalar query result.
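
Before the per-project examples, here is a minimal sketch of the typical call pattern. It is not taken from any of the sources below; the session setup, the person table, and the name/total aliases are assumptions used purely for illustration. Each alias passed to addScalar must match a column alias in the SELECT list, and the Type argument tells Hibernate how to map that column into the Object[] rows returned by list().

// minimal sketch, assuming an open Session and a hypothetical "person" table
SQLQuery query = session.createSQLQuery(
        "SELECT p.name AS name, COUNT(*) AS total FROM person p GROUP BY p.name");

// each alias must appear in the SELECT list; the Type controls the Java mapping
query.addScalar("name", StandardBasicTypes.STRING);
query.addScalar("total", StandardBasicTypes.LONG);

@SuppressWarnings("unchecked")
List<Object[]> rows = query.list();
for (Object[] row : rows) {
    String name = (String) row[0]; // mapped via StandardBasicTypes.STRING
    Long total = (Long) row[1];    // mapped via StandardBasicTypes.LONG
}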

Usage

From source file:org.generationcp.middleware.dao.oms.CVTermRelationshipDao.java

License:Open Source License

@SuppressWarnings("unchecked")
protected List<String> getScaleCategoriesUsedAsEnvironmentFactors(final int scaleId) {
    final SQLQuery query = this.getSession().createSQLQuery("SELECT categ.name category "
            + " FROM cvterm_relationship scale_values "
            + " INNER JOIN cvterm categ ON categ.cvterm_id = scale_values.object_id "
            + " INNER JOIN cvterm_relationship var ON var.object_id = scale_values.subject_id and var.type_id = "
            + TermId.HAS_SCALE.getId() + " WHERE scale_values.subject_id = :scaleId AND scale_values.type_id = "
            + TermId.HAS_VALUE.getId() + " AND EXISTS ( " + "      SELECT 1 "
            + "      FROM nd_geolocationprop gp "
            + "      INNER JOIN nd_experiment ep on ep.nd_geolocation_id = gp.nd_geolocation_id "
            + "      INNER JOIN project pr ON pr.project_id =ep.project_id and pr.deleted = 0 "
            + "      WHERE gp.type_id = var.subject_id and gp.value = categ.cvterm_id)");
    query.setParameter("scaleId", scaleId);
    query.addScalar("category", CVTermRelationshipDao.STRING);
    return query.list();
}

From source file:org.generationcp.middleware.dao.oms.CVTermRelationshipDao.java

License:Open Source License

@SuppressWarnings("unchecked")
protected List<String> getScaleCategoriesUsedAsTrialDesignFactors(final int scaleId) {
    final SQLQuery query = this.getSession().createSQLQuery("SELECT categ.name category "
            + " FROM cvterm_relationship scale_values "
            + " INNER JOIN cvterm categ ON categ.cvterm_id = scale_values.object_id "
            + " INNER JOIN cvterm_relationship var ON var.object_id = scale_values.subject_id and var.type_id = "
            + TermId.HAS_SCALE.getId() + " WHERE scale_values.subject_id = :scaleId AND scale_values.type_id = "
            + TermId.HAS_VALUE.getId() + " AND EXISTS ( " + "      SELECT 1 "
            + "      FROM nd_experimentprop e "
            + "      INNER JOIN nd_experiment ep on e.nd_experiment_id = ep.nd_experiment_id "
            + "      INNER JOIN project pr ON pr.project_id =ep.project_id and pr.deleted = 0 "
            + "      WHERE e.type_id = var.subject_id and e.value = categ.cvterm_id)");
    query.setParameter("scaleId", scaleId);
    query.addScalar("category", CVTermRelationshipDao.STRING);
    return query.list();
}

From source file:org.goobi.production.flow.statistics.hibernate.StatQuestCorrections.java

License:Open Source License

@Override
public List<DataTable> getDataTables(List<? extends BaseDTO> dataSource) {

    List<DataTable> allTables = new ArrayList<>();

    // gathering IDs from the filter passed by dataSource
    List<Integer> idList = getIds(dataSource);
    if (idList == null || idList.size() == 0) {
        return null;
    }

    //TODO: replace it with some other solution
    // adding time restrictions
    String natSQL = new SQLStepRequests(this.timeFilterFrom, this.timeFilterTo, getTimeUnit(), idList)
            .getSQL(HistoryTypeEnum.taskError, null, false, false);

    Session session = Helper.getHibernateSession();

    SQLQuery query = session.createSQLQuery(natSQL);

    // needs to be there otherwise an exception is thrown
    query.addScalar("stepCount", StandardBasicTypes.DOUBLE);
    query.addScalar("intervall", StandardBasicTypes.STRING);

    @SuppressWarnings("rawtypes")
    List list = query.list();

    DataTable dtbl = new DataTable(
            StatisticsMode.getByClassName(this.getClass()).getTitle() + Helper.getTranslation("_(number)"));

    DataRow dataRow;

    // each data row comes out as an Array of Objects
    // the only way to extract the data is by knowing
    // in which order they come out
    for (Object obj : list) {
        dataRow = new DataRow(null);
        Object[] objArr = (Object[]) obj;
        try {

            // getting localized time group unit
            // setting row name with date/time extraction based on the group
            dataRow.setName(new Converter(objArr[1]).getString() + "");
            dataRow.addValue(Helper.getTranslation("Corrections/Errors"),
                    (new Converter(objArr[0]).getDouble()));
        } catch (Exception e) {
            dataRow.addValue(e.getMessage(), 0.0);
        }

        // finally adding dataRow to DataTable and fetching next row
        dtbl.addDataRow(dataRow);
    }

    // a list of DataTables is expected as return Object, even if there is
    // only one Data Table as it is here in this implementation
    dtbl.setUnitLabel(Helper.getTranslation(getTimeUnit().getSingularTitle()));
    allTables.add(dtbl);
    return allTables;
}

From source file:org.goobi.production.flow.statistics.hibernate.StatQuestProduction.java

License:Open Source License

/**
 * List objects here need to extend BaseDTO.
 *
 * <p>
 * (non-Javadoc)
 * </p>
 *
 * @see org.goobi.production.flow.statistics.IStatisticalQuestion#getDataTables(
 *      List)
 */
@Override
public List<DataTable> getDataTables(List<? extends BaseDTO> dataSource) {

    // contains an integer representing "reihenfolge" (order) in "schritte" (steps),
    // as defined for this request;
    // if it is not defined, a fallback to a different way of retrieving the
    // statistical data is triggered
    Integer exactStepDone = null;
    String stepname = null;
    List<DataTable> allTables = new ArrayList<>();

    // gathering some information from the filter passed by dataSource
    // exactStepDone is very important ...

    //TODO: find a way to replace it
    /*try {
    exactStepDone = originalFilter.stepDone();
    } catch (UnsupportedOperationException e1) {
    logger.error(e1);
    }
    try {
    stepname = originalFilter.stepDoneName();
    } catch (UnsupportedOperationException e1) {
    logger.error(e1);
    }*/

    // we have to build a query from scratch by reading the IDs
    List<Integer> idList = getIds(dataSource);
    if (idList == null || idList.size() == 0) {
        return null;
    }
    String natSQL = "";
    // adding time restrictions
    if (stepname == null) {
        natSQL = new ImprovedSQLProduction(this.timeFilterFrom, this.timeFilterTo, this.timeGrouping, idList)
                .getSQL(exactStepDone);
    } else {
        natSQL = new ImprovedSQLProduction(this.timeFilterFrom, this.timeFilterTo, this.timeGrouping, idList)
                .getSQL(stepname);
    }
    Session session = Helper.getHibernateSession();

    SQLQuery query = session.createSQLQuery(natSQL);

    // needs to be there otherwise an exception is thrown
    query.addScalar("volumes", StandardBasicTypes.INTEGER);
    query.addScalar("pages", StandardBasicTypes.INTEGER);
    query.addScalar("intervall", StandardBasicTypes.STRING);

    @SuppressWarnings("rawtypes")
    List list = query.list();

    StringBuilder title = new StringBuilder(StatisticsMode.PRODUCTION.getTitle());
    title.append(" (");
    title.append(this.cu.getTitle());
    if (stepname == null || stepname.equals("")) {
        title.append(")");
    } else {
        title.append(", ");
        title.append(stepname);
        title.append(" )");
    }

    // building a DataTable for the table view
    DataTable dtbl = new DataTable(title.toString());
    // building a second table for the chart
    DataTable dtblChart = new DataTable(title.toString());
    //
    DataRow dataRowChart;
    DataRow dataRow;

    // each data row comes out as an Array of Objects
    // the only way to extract the data is by knowing
    // in which order they come out
    for (Object obj : list) {
        dataRowChart = new DataRow(null);
        dataRow = new DataRow(null);
        Object[] objArr = (Object[]) obj;
        try {

            // getting localized time group unit

            // String identifier = timeGrouping.getTitle();
            // setting row name with localized time group and the date/time
            // extraction based on the group

            dataRowChart.setName(new Converter(objArr[2]).getString() + "");
            dataRow.setName(new Converter(objArr[2]).getString() + "");
            // dataRow.setName(new Converter(objArr[2]).getString());

            // building up row depending on requested output having
            // different fields
            switch (this.cu) {
            case volumesAndPages: {
                dataRowChart.addValue(CalculationUnit.volumes.getTitle(),
                        (new Converter(objArr[0]).getDouble()));
                dataRowChart.addValue(CalculationUnit.pages.getTitle() + " (*100)",
                        (new Converter(objArr[1]).getDouble()) / 100);
                dataRow.addValue(CalculationUnit.volumes.getTitle(), (new Converter(objArr[0]).getDouble()));
                dataRow.addValue(CalculationUnit.pages.getTitle(), (new Converter(objArr[1]).getDouble()));
            }
                break;
            case volumes: {
                dataRowChart.addValue(CalculationUnit.volumes.getTitle(),
                        (new Converter(objArr[0]).getDouble()));
                dataRow.addValue(CalculationUnit.volumes.getTitle(), (new Converter(objArr[0]).getDouble()));
            }
                break;
            case pages: {
                dataRowChart.addValue(CalculationUnit.pages.getTitle(), (new Converter(objArr[1]).getDouble()));
                dataRow.addValue(CalculationUnit.pages.getTitle(), (new Converter(objArr[1]).getDouble()));
            }
                break;
            }

            // fall back, if conversion triggers an exception
        } catch (Exception e) {
            dataRowChart.addValue(e.getMessage(), 0.0);
            dataRow.addValue(e.getMessage(), 0.0);
        }

        // finally adding dataRow to DataTable and fetching next row
        // adding the extra table
        dtblChart.addDataRow(dataRowChart);
        dtbl.addDataRow(dataRow);
    }

    // a list of DataTables is expected as return Object, even if there is
    // only one
    // Data Table as it is here in this implementation
    dtblChart.setUnitLabel(Helper.getTranslation(this.timeGrouping.getSingularTitle()));
    dtbl.setUnitLabel(Helper.getTranslation(this.timeGrouping.getSingularTitle()));

    dtblChart.setShowableInTable(false);
    dtbl.setShowableInChart(false);

    allTables.add(dtblChart);
    allTables.add(dtbl);
    return allTables;
}

From source file:org.goobi.production.flow.statistics.hibernate.StatQuestProjectProgressData.java

License:Open Source License

/**
 * Method generates a DataTable based on the input SQL. The method's success
 * depends on a very specific data structure, so don't use it if you don't
 * understand it exactly.
 *
 * @param natSQL
 *            the native SQL query used to build the table
 * @return DataTable
 */
private DataTable buildDataTableFromSQL(String natSQL) {
    Session session = Helper.getHibernateSession();

    if (this.commonWorkFlow == null) {
        return null;
    }

    DataRow headerRow = new DataRow("Header - delete again");

    for (StepInformation step : this.commonWorkFlow) {
        String stepName = step.getTitle();
        headerRow.setName("header - delete again");
        headerRow.addValue(stepName, Double.parseDouble("0"));
    }

    SQLQuery query = session.createSQLQuery(natSQL);

    // needs to be there otherwise an exception is thrown
    query.addScalar("stepCount", StandardBasicTypes.DOUBLE);
    query.addScalar("stepName", StandardBasicTypes.STRING);
    query.addScalar("intervall", StandardBasicTypes.STRING);

    @SuppressWarnings("rawtypes")
    List list = query.list();

    DataTable dtbl = new DataTable("");

    // Set columns to be removed later.
    dtbl.addDataRow(headerRow);

    DataRow dataRow = null;

    // each data row comes out as an Array of Objects
    // the only way to extract the data is by knowing
    // in which order they come out

    // checks if intervall has changed, which then triggers the start of a
    // new row
    // intervall here is the timeGroup Expression (e.g. "2006/05" or
    // "2006-10-05")
    String observeIntervall = "";

    for (Object obj : list) {
        Object[] objArr = (Object[]) obj;
        String stepName = new Converter(objArr[1]).getString();
        if (isInWorkFlow(stepName)) {
            try {
                String intervall = new Converter(objArr[2]).getString();

                if (!observeIntervall.equals(intervall)) {
                    observeIntervall = intervall;

                    // the row cannot be added before it is filled because the
                    // add process triggers a check for header alignment --
                    // this is why we add it only after iterating over it first
                    if (dataRow != null) {
                        dtbl.addDataRow(dataRow);
                    }

                    // setting row name with localized time group and the
                    // date/time extraction based on the group
                    dataRow = new DataRow(intervall);
                }
                if (dataRow != null) {
                    Double count = new Converter(objArr[0]).getDouble();
                    dataRow.addValue(stepName, count);
                }

            } catch (Exception e) {
                if (dataRow != null) {
                    dataRow.addValue(e.getMessage(), 0.0);
                }
            }
        }
    }
    // to add also the last row
    if (dataRow != null) {
        dtbl.addDataRow(dataRow);
    }

    // now removing headerRow
    dtbl.removeDataRow(headerRow);

    return dtbl;
}

From source file:org.goobi.production.flow.statistics.hibernate.StatQuestStorage.java

License:Open Source License

@Override
public List<DataTable> getDataTables(List<? extends BaseDTO> dataSource) {
    List<DataTable> allTables = new ArrayList<>();

    // gathering IDs from the filter passed by dataSource
    List<Integer> idList = getIds(dataSource);

    if (idList == null || idList.size() == 0) {
        return null;
    }

    // TODO: filter results according to date without sql query
    // adding time restrictions
    String natSQL = new SQLStorage(this.timeFilterFrom, this.timeFilterTo, this.timeGrouping, idList).getSQL();

    Session session = Helper.getHibernateSession();
    SQLQuery query = session.createSQLQuery(natSQL);

    // needs to be there otherwise an exception is thrown
    query.addScalar("storage", StandardBasicTypes.DOUBLE);
    query.addScalar("intervall", StandardBasicTypes.STRING);

    @SuppressWarnings("rawtypes")
    List list = query.list();

    DataTable dtbl = new DataTable(
            StatisticsMode.getByClassName(this.getClass()).getTitle() + " " + Helper.getTranslation("_inGB"));

    DataRow dataRow;

    // each data row comes out as an Array of Objects
    // the only way to extract the data is by knowing
    // in which order they come out
    for (Object obj : list) {
        dataRow = new DataRow(null);
        // TODO: Don't use arrays
        Object[] objArr = (Object[]) obj;
        try {
            // getting localized time group unit
            // setting row name with date/time extraction based on the group
            dataRow.setName(new Converter(objArr[1]).getString() + "");
            dataRow.addValue(Helper.getTranslation("storageDifference"), (new Converter(objArr[0]).getGB()));
        } catch (Exception e) {
            dataRow.addValue(e.getMessage(), 0.0);
        }

        // finally adding dataRow to DataTable and fetching next row
        dtbl.addDataRow(dataRow);
    }

    // a list of DataTables is expected as return Object, even if there is
    // only one Data Table as it is here in this implementation
    dtbl.setUnitLabel(Helper.getTranslation(this.timeGrouping.getSingularTitle()));
    allTables.add(dtbl);
    return allTables;
}

From source file:org.goobi.production.flow.statistics.hibernate.StatQuestThroughput.java

License:Open Source License

/**
 * Method generates a DataTable based on the input SQL. The method's success
 * depends on a very specific data structure, so don't use it if you don't
 * understand it exactly.
 *
 * @param natSQL
 *            the native SQL query used to build the table
 * @param headerFromSQL
 *            to be used if headers need to be read in first in order to get a
 *            certain sorting
 * @return DataTable
 */

// TODO Remove redundant code
private DataTable buildDataTableFromSQL(String natSQL, String headerFromSQL) {
    Session session = Helper.getHibernateSession();

    // creating the header row from headerFromSQL (gets all columns in one row)
    DataRow headerRow = null;
    if (headerFromSQL != null) {
        headerRow = new DataRow(null);
        SQLQuery headerQuery = session.createSQLQuery(headerFromSQL);

        // needs to be there otherwise an exception is thrown
        headerQuery.addScalar("stepCount", StandardBasicTypes.DOUBLE);
        headerQuery.addScalar("stepName", StandardBasicTypes.STRING);
        headerQuery.addScalar("stepOrder", StandardBasicTypes.DOUBLE);
        headerQuery.addScalar("intervall", StandardBasicTypes.STRING);

        @SuppressWarnings("rawtypes")
        List headerList = headerQuery.list();
        for (Object obj : headerList) {
            Object[] objArr = (Object[]) obj;
            try {
                headerRow.setName(new Converter(objArr[3]).getString() + "");
                headerRow.addValue(
                        new Converter(new Converter(objArr[2]).getInteger()).getString() + " ("
                                + new Converter(objArr[1]).getString() + ")",
                        (new Converter(objArr[0]).getDouble()));

            } catch (Exception e) {
                headerRow.addValue(e.getMessage(), 0.0);
            }
        }
    }

    SQLQuery query = session.createSQLQuery(natSQL);

    // needs to be there otherwise an exception is thrown
    query.addScalar("stepCount", StandardBasicTypes.DOUBLE);
    query.addScalar("stepName", StandardBasicTypes.STRING);
    query.addScalar("stepOrder", StandardBasicTypes.DOUBLE);
    query.addScalar("intervall", StandardBasicTypes.STRING);

    @SuppressWarnings("rawtypes")
    List list = query.list();

    DataTable dtbl = new DataTable("");

    // if headerRow is set then add it to the DataTable to set columns
    // needs to be removed later
    if (headerRow != null) {
        dtbl.addDataRow(headerRow);
    }

    DataRow dataRow = null;

    // each data row comes out as an Array of Objects
    // the only way to extract the data is by knowing
    // in which order they come out

    // checks if intervall has changed, which then triggers the start of a
    // new row
    // intervall here is the timeGroup Expression (e.g. "2006/05" or
    // "2006-10-05")
    String observeIntervall = "";

    for (Object obj : list) {
        Object[] objArr = (Object[]) obj;
        try {
            // objArr[3]
            if (!observeIntervall.equals(new Converter(objArr[3]).getString())) {
                observeIntervall = new Converter(objArr[3]).getString();

                // the row cannot be added before it is filled because the add
                // process triggers a check for header alignment --
                // this is why we add it only after iterating over it first
                if (dataRow != null) {
                    dtbl.addDataRow(dataRow);
                }

                dataRow = new DataRow(null);
                // setting row name with localized time group and the
                // date/time extraction based on the group
                dataRow.setName(new Converter(objArr[3]).getString() + "");
            }
            dataRow.addValue(
                    new Converter(new Converter(objArr[2]).getInteger()).getString() + " ("
                            + new Converter(objArr[1]).getString() + ")",
                    (new Converter(objArr[0]).getDouble()));

        } catch (Exception e) {
            dataRow.addValue(e.getMessage(), 0.0);
        }
    }
    // to add the last row
    if (dataRow != null) {
        dtbl.addDataRow(dataRow);
    }

    // now removing headerRow
    if (headerRow != null) {
        dtbl.removeDataRow(headerRow);
    }

    return dtbl;
}

From source file:org.goobi.production.flow.statistics.hibernate.StatQuestThroughput.java

License:Open Source License

/**
 * Method retrieves the highest step order in the requested history range.
 *
 * @param requestedType
 *            HistoryTypeEnum object
 */
private Integer getMaxStepCount(HistoryTypeEnum requestedType) {

    // adding time restrictions
    String natSQL = new SQLStepRequestsImprovedDiscrimination(this.timeFilterFrom, this.timeFilterTo,
            this.timeGrouping, this.myIDlist).getSQLMaxStepOrder(requestedType);

    Session session = Helper.getHibernateSession();
    SQLQuery query = session.createSQLQuery(natSQL);

    // needs to be there otherwise an exception is thrown
    query.addScalar("maxStep", StandardBasicTypes.DOUBLE);

    @SuppressWarnings("rawtypes")
    List list = query.list();

    if (list != null && list.size() > 0 && list.get(0) != null) {
        return new Converter(list.get(0)).getInteger();
    } else {
        return 0;
    }

}

From source file:org.goobi.production.flow.statistics.hibernate.StatQuestThroughput.java

License:Open Source License

/**
 * Method retrieves the lowest step order in the requested history range.
 *
 * @param requestedType
 *            HistoryTypeEnum object
 */
private Integer getMinStepCount(HistoryTypeEnum requestedType) {
    // adding time restrictions
    String natSQL = new SQLStepRequestsImprovedDiscrimination(this.timeFilterFrom, this.timeFilterTo,
            this.timeGrouping, this.myIDlist).getSQLMinStepOrder(requestedType);

    Session session = Helper.getHibernateSession();
    SQLQuery query = session.createSQLQuery(natSQL);

    // needs to be there otherwise an exception is thrown
    query.addScalar("minStep", StandardBasicTypes.DOUBLE);

    @SuppressWarnings("rawtypes")
    List list = query.list();

    if (list != null && list.size() > 0 && list.get(0) != null) {
        return new Converter(list.get(0)).getInteger();
    } else {
        return 0;
    }

}

From source file:org.hil.core.dao.hibernate.ChildrenDaoHibernate.java

License:Open Source License

public boolean hasChildInCommnue(Long communeId) {
    String sql = "Select c.id as cid from children c, village vl "
            + " where c.id_village =vl.id and vl.id_commune=:communeId order by c.id desc limit 1";
    SQLQuery qry = getSession().createSQLQuery(sql);
    qry.setParameter("communeId", communeId);
    log.debug("SQL: " + sql);
    qry.addScalar("cid", Hibernate.LONG);

    int result = qry.list().size();
    if (result > 0)
        return true;
    else
        return false;
}