Example usage for org.hibernate.type StandardBasicTypes STRING

List of usage examples for org.hibernate.type StandardBasicTypes STRING

Introduction

On this page you can find example usage for org.hibernate.type StandardBasicTypes STRING.

Prototype

StringType STRING
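
In the Hibernate sources this constant is declared along the following lines (quoted from memory as an approximation rather than from a specific release):

public static final StringType STRING = StringType.INSTANCE;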

Document

The standard Hibernate type for mapping String to JDBC VARCHAR (java.sql.Types#VARCHAR).
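
For orientation before the collected examples, here is a minimal sketch (not taken from the projects below) of the typical pattern: the constant is passed to SQLQuery.addScalar(...) so that a native-SQL column is mapped to a Java String. The person table, its name column, and the caller-supplied Session are assumptions made purely for illustration.

import java.util.List;

import org.hibernate.SQLQuery;
import org.hibernate.Session;
import org.hibernate.type.StandardBasicTypes;

public class StringScalarExample {

    /**
     * Reads the "name" column of a hypothetical "person" table and maps it to
     * java.lang.String via StandardBasicTypes.STRING.
     */
    @SuppressWarnings("unchecked")
    public static List<String> findNames(Session session) {
        SQLQuery query = session.createSQLQuery("select name from person")
                .addScalar("name", StandardBasicTypes.STRING);
        return query.list();
    }
}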

Usage

From source file:org.ednovo.gooru.infrastructure.persistence.hibernate.taxonomy.TaxonomyRepositoryHibernate.java

License:Open Source License

@Cacheable("gooruCache")
@Override
public String findTaxonomyRootCode(String code) {
    String hql = "select root_node_id from code where organization_uid  IN ("
            + getUserOrganizationUidsAsString() + ") and (code=:code or display_code=:displayCode)";
    Query query = getSession().createSQLQuery(hql).addScalar("root_node_id", StandardBasicTypes.STRING);
    query.setParameter("code", code);
    query.setParameter("displayCode", code);
    return (String) (query.list().size() > 0 ? query.list().get(0) : null);
}

From source file:org.ednovo.gooru.infrastructure.persistence.hibernate.taxonomy.TaxonomyRepositoryHibernate.java

License:Open Source License

@Cacheable("gooruCache")
@Override
public String findTaxonomyCodeLabels(String codeIds) {
    String sql = "select group_concat(label) as labels from code where depth = 0 and organization_uid  IN ("
            + getUserOrganizationUidsAsString() + ") and code_id  in (" + codeIds + ")";
    Query query = getSession().createSQLQuery(sql).addScalar("labels", StandardBasicTypes.STRING);
    List<?> results = query.list();
    return results.isEmpty() ? null : (String) results.get(0);
}

From source file:org.ednovo.gooru.infrastructure.persistence.hibernate.taxonomy.TaxonomyRepositoryHibernate.java

License:Open Source License

@Cacheable("gooruCache")
@Override
public String findGooruTaxonomyCourse(List<String> codeIds) {
    String sql = "select group_concat(label) as labels from code where root_node_id=20000 and organization_uid  IN ("
            + getUserOrganizationUidsAsString() + ") and code_id IN (:codeIds) ";
    Query query = getSession().createSQLQuery(sql).addScalar("labels", StandardBasicTypes.STRING);
    query.setParameterList("codeIds", codeIds);
    List<?> results = query.list();
    return results.isEmpty() ? null : (String) results.get(0);
}

From source file:org.ednovo.gooru.infrastructure.persistence.hibernate.user.UserRepositoryHibernate.java

License:Open Source License

@SuppressWarnings("unchecked")
@Override
public List<Object[]> getInactiveUsers(Integer offset, Integer limit) {
    String sql = "select user_uid as user_uid,  external_id as email_id from identity i inner join party_custom_field p on p.party_uid = i.user_uid where (date(last_login) between  date(last_login) and date_sub(now(),INTERVAL 2 WEEK) or  last_login is null) and p.optional_key = 'last_user_inactive_mail_send_date' and (p.optional_value = '-' or  date(p.optional_value) between  date(p.optional_value) and date_sub(now(),INTERVAL 2 WEEK))";
    Query query = getSession().createSQLQuery(sql).addScalar("user_uid", StandardBasicTypes.STRING)
            .addScalar("email_id", StandardBasicTypes.STRING);
    query.setFirstResult(offset);
    query.setMaxResults(limit != null ? (limit > MAX_LIMIT ? MAX_LIMIT : limit) : LIMIT);
    return query.list();
}

From source file:org.ednovo.gooru.infrastructure.persistence.hibernate.user.UserRepositoryHibernate.java

License:Open Source License

@Override
public String getUserGrade(String userUid, Integer classificationId, Integer activeFlag) {
    String sql = "select group_concat(grade) as grade from user_classification uc  where uc.user_Uid ='"
            + userUid + "' and uc.classification_type='" + classificationId + "'";
    if (activeFlag != null) {
        sql += " and uc.active_flag ='" + activeFlag + "'";
    }
    Query query = getSession().createSQLQuery(sql).addScalar("grade", StandardBasicTypes.STRING);
    return (String) query.list().get(0);
}

From source file:org.ednovo.gooru.infrastructure.persistence.hibernate.user.UserRepositoryHibernate.java

License:Open Source License

@SuppressWarnings("unchecked")
@Override
public List<Object[]> listUserByBirthDay(Integer offset, Integer limit) {
    Query query = getSession().createSQLQuery(FETCH_USERS_BY_BIRTHDAY)
            .addScalar("email_id", StandardBasicTypes.STRING).addScalar("user_id", StandardBasicTypes.STRING);
    query.setFirstResult(offset);
    query.setMaxResults(limit == null ? LIMIT : (limit > MAX_LIMIT ? MAX_LIMIT : limit));
    return query.list();
}

From source file:org.ednovo.gooru.infrastructure.persistence.hibernate.user.UserRepositoryHibernate.java

License:Open Source License

@SuppressWarnings("unchecked")
@Override
public List<Object[]> listChildUserByBirthDay() {
    Query query = getSession().createSQLQuery(FETCH_CHILD_USERS_BY_BIRTHDAY)
            .addScalar("child_user_name", StandardBasicTypes.STRING)
            .addScalar("parent_email_id", StandardBasicTypes.STRING);
    return query.list();
}

From source file:org.goobi.production.flow.statistics.hibernate.StatQuestCorrections.java

License:Open Source License

@Override
public List<DataTable> getDataTables(List<? extends BaseDTO> dataSource) {

    List<DataTable> allTables = new ArrayList<>();

    // gathering IDs from the filter passed by dataSource
    List<Integer> idList = getIds(dataSource);
    if (idList == null || idList.size() == 0) {
        return null;
    }

    //TODO: replace it with some other solution
    // adding time restrictions
    String natSQL = new SQLStepRequests(this.timeFilterFrom, this.timeFilterTo, getTimeUnit(), idList)
            .getSQL(HistoryTypeEnum.taskError, null, false, false);

    Session session = Helper.getHibernateSession();

    SQLQuery query = session.createSQLQuery(natSQL);

    // needs to be there otherwise an exception is thrown
    query.addScalar("stepCount", StandardBasicTypes.DOUBLE);
    query.addScalar("intervall", StandardBasicTypes.STRING);

    @SuppressWarnings("rawtypes")
    List list = query.list();

    DataTable dtbl = new DataTable(
            StatisticsMode.getByClassName(this.getClass()).getTitle() + Helper.getTranslation("_(number)"));

    DataRow dataRow;

    // each data row comes out as an Array of Objects
    // the only way to extract the data is by knowing
    // in which order they come out
    for (Object obj : list) {
        dataRow = new DataRow(null);
        Object[] objArr = (Object[]) obj;
        try {

            // getting localized time group unit
            // setting row name with date/time extraction based on the group
            dataRow.setName(new Converter(objArr[1]).getString() + "");
            dataRow.addValue(Helper.getTranslation("Corrections/Errors"),
                    (new Converter(objArr[0]).getDouble()));
        } catch (Exception e) {
            dataRow.addValue(e.getMessage(), 0.0);
        }

        // finally adding dataRow to DataTable and fetching next row
        dtbl.addDataRow(dataRow);
    }

    // a list of DataTables is expected as return Object, even if there is
    // only one Data Table as it is here in this implementation
    dtbl.setUnitLabel(Helper.getTranslation(getTimeUnit().getSingularTitle()));
    allTables.add(dtbl);
    return allTables;
}

From source file:org.goobi.production.flow.statistics.hibernate.StatQuestProduction.java

License:Open Source License

/**
 * List objects here need to extend BaseDTO.
 *
 * <p>
 * (non-Javadoc)
 * </p>
 *
 * @see org.goobi.production.flow.statistics.IStatisticalQuestion#getDataTables(
 *      List)
 */
@Override
public List<DataTable> getDataTables(List<? extends BaseDTO> dataSource) {

    // contains an integer representing "reihenfolge" in schritte, as defined
    // for this request
    // if not defined it will trigger a fall back on a different way of
    // retrieving the statistical data
    Integer exactStepDone = null;
    String stepname = null;
    List<DataTable> allTables = new ArrayList<>();

    // gathering some information from the filter passed by dataSource
    // exactStepDone is very important ...

    //TODO: find way to replace it
    /*try {
    exactStepDone = originalFilter.stepDone();
    } catch (UnsupportedOperationException e1) {
    logger.error(e1);
    }
    try {
    stepname = originalFilter.stepDoneName();
    } catch (UnsupportedOperationException e1) {
    logger.error(e1);
    }*/

    // we have to build a query from scratch by reading the ID's
    List<Integer> idList = getIds(dataSource);
    if (idList == null || idList.size() == 0) {
        return null;
    }
    String natSQL = "";
    // adding time restrictions
    if (stepname == null) {
        natSQL = new ImprovedSQLProduction(this.timeFilterFrom, this.timeFilterTo, this.timeGrouping, idList)
                .getSQL(exactStepDone);
    } else {
        natSQL = new ImprovedSQLProduction(this.timeFilterFrom, this.timeFilterTo, this.timeGrouping, idList)
                .getSQL(stepname);
    }
    Session session = Helper.getHibernateSession();

    SQLQuery query = session.createSQLQuery(natSQL);

    // needs to be there otherwise an exception is thrown
    query.addScalar("volumes", StandardBasicTypes.INTEGER);
    query.addScalar("pages", StandardBasicTypes.INTEGER);
    query.addScalar("intervall", StandardBasicTypes.STRING);

    @SuppressWarnings("rawtypes")
    List list = query.list();

    StringBuilder title = new StringBuilder(StatisticsMode.PRODUCTION.getTitle());
    title.append(" (");
    title.append(this.cu.getTitle());
    if (stepname == null || stepname.equals("")) {
        title.append(")");
    } else {
        title.append(", ");
        title.append(stepname);
        title.append(" )");
    }

    // building table for the Table
    DataTable dtbl = new DataTable(title.toString());
    // building a second table for the chart
    DataTable dtblChart = new DataTable(title.toString());
    //
    DataRow dataRowChart;
    DataRow dataRow;

    // each data row comes out as an Array of Objects
    // the only way to extract the data is by knowing
    // in which order they come out
    for (Object obj : list) {
        dataRowChart = new DataRow(null);
        dataRow = new DataRow(null);
        Object[] objArr = (Object[]) obj;
        try {

            // getting localized time group unit

            // String identifier = timeGrouping.getTitle();
            // setting row name with localized time group and the date/time
            // extraction based on the group

            dataRowChart.setName(new Converter(objArr[2]).getString() + "");
            dataRow.setName(new Converter(objArr[2]).getString() + "");
            // dataRow.setName(new Converter(objArr[2]).getString());

            // building up row depending on requested output having
            // different fields
            switch (this.cu) {
            case volumesAndPages: {
                dataRowChart.addValue(CalculationUnit.volumes.getTitle(),
                        (new Converter(objArr[0]).getDouble()));
                dataRowChart.addValue(CalculationUnit.pages.getTitle() + " (*100)",
                        (new Converter(objArr[1]).getDouble()) / 100);
                dataRow.addValue(CalculationUnit.volumes.getTitle(), (new Converter(objArr[0]).getDouble()));
                dataRow.addValue(CalculationUnit.pages.getTitle(), (new Converter(objArr[1]).getDouble()));
            }
                break;
            case volumes: {
                dataRowChart.addValue(CalculationUnit.volumes.getTitle(),
                        (new Converter(objArr[0]).getDouble()));
                dataRow.addValue(CalculationUnit.volumes.getTitle(), (new Converter(objArr[0]).getDouble()));
            }
                break;
            case pages: {
                dataRowChart.addValue(CalculationUnit.pages.getTitle(), (new Converter(objArr[1]).getDouble()));
                dataRow.addValue(CalculationUnit.pages.getTitle(), (new Converter(objArr[1]).getDouble()));
            }
                break;
            }

            // fall back, if conversion triggers an exception
        } catch (Exception e) {
            dataRowChart.addValue(e.getMessage(), 0.0);
            dataRow.addValue(e.getMessage(), 0.0);
        }

        // finally adding dataRow to DataTable and fetching next row
        // adding the extra table
        dtblChart.addDataRow(dataRowChart);
        dtbl.addDataRow(dataRow);
    }

    // a list of DataTables is expected as return Object, even if there is
    // only one
    // Data Table as it is here in this implementation
    dtblChart.setUnitLabel(Helper.getTranslation(this.timeGrouping.getSingularTitle()));
    dtbl.setUnitLabel(Helper.getTranslation(this.timeGrouping.getSingularTitle()));

    dtblChart.setShowableInTable(false);
    dtbl.setShowableInChart(false);

    allTables.add(dtblChart);
    allTables.add(dtbl);
    return allTables;
}

From source file:org.goobi.production.flow.statistics.hibernate.StatQuestProjectProgressData.java

License:Open Source License

/**
 * Method generates a DataTable based on the input SQL. The method's success
 * depends on a very specific data structure, so don't use it unless you
 * understand exactly how it works.
 *
 *
 * @param natSQL
 *            headerFromSQL -> to be used, if headers need to be read in first
 *            in order to get a certain sorting
 * @return DataTable
 */
private DataTable buildDataTableFromSQL(String natSQL) {
    Session session = Helper.getHibernateSession();

    if (this.commonWorkFlow == null) {
        return null;
    }

    DataRow headerRow = new DataRow("Header - delete again");

    for (StepInformation step : this.commonWorkFlow) {
        String stepName = step.getTitle();
        headerRow.setName("header - delete again");
        headerRow.addValue(stepName, Double.parseDouble("0"));
    }

    SQLQuery query = session.createSQLQuery(natSQL);

    // needs to be there otherwise an exception is thrown
    query.addScalar("stepCount", StandardBasicTypes.DOUBLE);
    query.addScalar("stepName", StandardBasicTypes.STRING);
    query.addScalar("intervall", StandardBasicTypes.STRING);

    @SuppressWarnings("rawtypes")
    List list = query.list();

    DataTable dtbl = new DataTable("");

    // Set columns to be removed later.
    dtbl.addDataRow(headerRow);

    DataRow dataRow = null;

    // each data row comes out as an Array of Objects
    // the only way to extract the data is by knowing
    // in which order they come out

    // checks if intervall has changed which then triggers the start for a
    // new row
    // intervall here is the timeGroup Expression (e.g. "2006/05" or
    // "2006-10-05")
    String observeIntervall = "";

    for (Object obj : list) {
        Object[] objArr = (Object[]) obj;
        String stepName = new Converter(objArr[1]).getString();
        if (isInWorkFlow(stepName)) {
            try {
                String intervall = new Converter(objArr[2]).getString();

                if (!observeIntervall.equals(intervall)) {
                    observeIntervall = intervall;

                    // row cannot be added before it is filled because the
                    // add process triggers
                    // a test for header alignment -- this is where we
                    // add it after iterating it first
                    if (dataRow != null) {
                        dtbl.addDataRow(dataRow);
                    }

                    // setting row name with localized time group and the
                    // date/time extraction based on the group
                    dataRow = new DataRow(intervall);
                }
                if (dataRow != null) {
                    Double count = new Converter(objArr[0]).getDouble();
                    dataRow.addValue(stepName, count);
                }

            } catch (Exception e) {
                if (dataRow != null) {
                    dataRow.addValue(e.getMessage(), 0.0);
                }
            }
        }
    }
    // to add also the last row
    if (dataRow != null) {
        dtbl.addDataRow(dataRow);
    }

    // now removing headerRow
    dtbl.removeDataRow(headerRow);

    return dtbl;
}