Usage examples for org.hibernate.type.StandardBasicTypes.DOUBLE

StandardBasicTypes.DOUBLE is the DoubleType instance Hibernate uses to map the SQL DOUBLE type to java.lang.Double. The examples below show how open-source projects use it, most often as the return type of a registered SQL function or as the scalar type of a native-query column.
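Before the project examples, here is a minimal sketch of the most common pattern they share: exposing a native-query column as a java.lang.Double via addScalar. The measurement table, its score column, and the session handling are assumptions made for illustration and are not taken from any example below.

import org.hibernate.SQLQuery;
import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.hibernate.type.StandardBasicTypes;

public class DoubleScalarExample {

    /**
     * Reads the average of a numeric column as a java.lang.Double.
     * Table and column names are placeholders for this sketch.
     */
    public static Double averageScore(SessionFactory sessionFactory) {
        Session session = sessionFactory.openSession();
        try {
            SQLQuery query = session.createSQLQuery(
                    "select avg(score) as avgScore from measurement");
            // Expose the aggregate column as a Double instead of whatever
            // numeric type the JDBC driver would pick on its own.
            query.addScalar("avgScore", StandardBasicTypes.DOUBLE);
            return (Double) query.uniqueResult();
        } finally {
            session.close();
        }
    }
}

Without the addScalar call, the JDBC driver decides the Java type of the aggregate column (often BigDecimal), so pinning it to StandardBasicTypes.DOUBLE keeps the calling code portable across databases.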
From source file: com.vmware.sqlfire.hibernate.v4.v0.SQLFireDialect.java
License: Open Source License

public SQLFireDialect() {
    super();
    LOG.info("SQLFireDialect for Hibernate 4.0 initialized.");
    registerFunction("concat", new DerbyConcatFunction());
    registerFunction("trim", new AnsiTrimFunction());
    registerFunction("value", new StandardSQLFunction("coalesce"));
    registerFunction("nvl", new NvlFunction());
    registerFunction("groups", new StandardSQLFunction("GROUPS", StandardBasicTypes.STRING));
    registerFunction("dsid", new StandardSQLFunction("DSID", StandardBasicTypes.STRING));
    registerFunction("groupsintersection",
            new StandardSQLFunction("GROUPSINTERSECTION", StandardBasicTypes.STRING));
    registerFunction("groupsintersect",
            new StandardSQLFunction("GROUPSINTERSECT", StandardBasicTypes.BOOLEAN));
    registerFunction("groupsunion", new StandardSQLFunction("GROUPSUNION", StandardBasicTypes.STRING));
    registerFunction("longint", new StandardSQLFunction("bigint", StandardBasicTypes.LONG));
    registerFunction("int", new StandardSQLFunction("integer", StandardBasicTypes.INTEGER));
    registerFunction("pi", new StandardSQLFunction("pi", StandardBasicTypes.DOUBLE));
    registerFunction("random", new NoArgSQLFunction("random", StandardBasicTypes.DOUBLE));
    registerFunction("rand", new StandardSQLFunction("rand", StandardBasicTypes.DOUBLE)); // override
    registerFunction("sinh", new StandardSQLFunction("sinh", StandardBasicTypes.DOUBLE));
    registerFunction("cosh", new StandardSQLFunction("cosh", StandardBasicTypes.DOUBLE));
    registerFunction("tanh", new StandardSQLFunction("tanh", StandardBasicTypes.DOUBLE));
    registerFunction("user", new NoArgSQLFunction("USER", StandardBasicTypes.STRING, false));
    registerFunction("current_user", new NoArgSQLFunction("CURRENT_USER", StandardBasicTypes.STRING, false));
    registerFunction("session_user", new NoArgSQLFunction("SESSION_USER", StandardBasicTypes.STRING, false));
    registerFunction("current isolation",
            new NoArgSQLFunction("CURRENT ISOLATION", StandardBasicTypes.STRING, false));
    registerFunction("current_role", new NoArgSQLFunction("CURRENT_ROLE", StandardBasicTypes.STRING, false));
    registerFunction("current schema",
            new NoArgSQLFunction("CURRENT SCHEMA", StandardBasicTypes.STRING, false));
    registerFunction("current sqlid", new NoArgSQLFunction("CURRENT SQLID", StandardBasicTypes.STRING, false));
    registerFunction("xmlexists", new StandardSQLFunction("XMLEXISTS", StandardBasicTypes.NUMERIC_BOOLEAN));
    registerFunction("xmlparse", new StandardSQLFunction("XMLPARSE", StandardBasicTypes.TEXT));
    registerFunction("xmlquery", new StandardSQLFunction("XMLQUERY", StandardBasicTypes.STRING));
    registerFunction("xmlserialize", new StandardSQLFunction("XMLSERIALIZE", StandardBasicTypes.STRING));
    registerFunction("get_current_connection",
            new NoArgSQLFunction("GET_CURRENT_CONNECTION", StandardBasicTypes.BINARY, true));
    registerFunction("identity_val_local",
            new NoArgSQLFunction("IDENTITY_VAL_LOCAL", StandardBasicTypes.BINARY, true));
}
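Once a function is registered with a return type of StandardBasicTypes.DOUBLE, Hibernate knows how to type the values it produces, so the function can appear directly in HQL projections. A hedged sketch of how the dialect above might be exercised; the Payment entity and its amount property are hypothetical and serve only to illustrate the call.

import java.util.List;

import org.hibernate.Session;

public class DialectFunctionUsage {

    /** Calls the "sinh" function registered by SQLFireDialect from an HQL projection. */
    @SuppressWarnings("unchecked")
    public static List<Double> hyperbolicSines(Session session) {
        // Because "sinh" was registered with StandardBasicTypes.DOUBLE,
        // Hibernate types each projected value as java.lang.Double.
        return session.createQuery("select sinh(p.amount) from Payment p").list();
    }
}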
From source file: dao.OperacionesDAO.java
License: GNU General Public License

/**
 * The rating user rates the rated user (the rating is inserted into the database).
 *
 * @param calificador  the user who gives the rating
 * @param calificado   the user who is being rated
 * @param calificacion the rating given to the user
 * @return the profile view of the user being rated
 */
public String califica(Usuario calificador, Usuario calificado, double calificacion) {
    if (calificacion < 1 || calificacion > 5) {
        return "error";
    }
    /* Used to check whether a previous rating exists */
    Object hayResultados = null;
    Transaction tx = session().beginTransaction();
    /* List of ratings */
    List<Double> lista;
    try {
        /* Check whether this rater has already rated this user */
        Query busca = session()
                .createSQLQuery("select * from calificacion where id_calificado = :id and id_calificador = :id2")
                .setInteger("id", calificado.getIdUsuario())
                .setInteger("id2", calificador.getIdUsuario());
        hayResultados = busca.uniqueResult();
        if (hayResultados != null) {
            /* If a rating already exists, delete it first */
            Query borra = session()
                    .createSQLQuery("DELETE FROM calificacion WHERE id_calificado = :id and id_calificador = :id2")
                    .setInteger("id", calificado.getIdUsuario())
                    .setInteger("id2", calificador.getIdUsuario());
            borra.executeUpdate();
        }
        /* Insert the new rating */
        Query inserta = session().createSQLQuery(
                "INSERT INTO calificacion (id_calificador, id_calificado, calificacion) VALUES ("
                        + calificador.getIdUsuario() + "," + calificado.getIdUsuario() + ","
                        + Double.toString(calificacion) + ")");
        inserta.executeUpdate();
        /* Fetch all ratings so the average can be recomputed */
        Query q = session()
                .createSQLQuery("select calificacion from calificacion where id_calificado = :id")
                .addScalar("calificacion", StandardBasicTypes.DOUBLE)
                .setInteger("id", calificado.getIdUsuario());
        /* The ratings used to compute the user's new reputation */
        lista = q.list();
        /* Commit the delete and insert */
        tx.commit();
    } catch (Exception e) {
        e.printStackTrace(); // Kept so the failure can be reviewed in the log.
        tx.rollback();
        return "error";
    } finally {
        closeSession();
    }
    /* Hibernate forces us to update the detached instances after the transaction,
       which makes this code ugly */
    if (calificado.esAgente()) {
        /* The user's agent instance */
        Agente c = calificado.getAgente();
        c.setReputacionAgente(promedioLista(lista));
        actualizaAgente(c);
    } else {
        /* The user's programmer instance */
        Programador c = calificado.getProgramador();
        c.setReputacionProgramador(promedioLista(lista));
        actualizaProgramador(c);
    }
    return "perfilAjeno";
}
From source file: dao.OperacionesDAO.java
License: GNU General Public License

/**
 * Computes the average rating of the Agente and updates it in the database.
 *
 * @param p the agent whose rating is averaged
 */
public void promediaAgente(Agente p) {
    Transaction tx = session().beginTransaction();
    try {
        // Alias the aggregate so addScalar("calificacion") can find the column.
        Query q = session()
                .createSQLQuery("select avg(calificacion) as calificacion from calificacion where id_calificado = :id")
                .addScalar("calificacion", StandardBasicTypes.DOUBLE)
                .setInteger("id", p.getUsuario().getIdUsuario());
        /* The agent's new reputation */
        double promedio = (Double) q.uniqueResult();
        p.setReputacionAgente(promedio);
    } catch (Exception e) {
        e.printStackTrace(); // Kept so the failure can be reviewed in the log.
        tx.rollback();
    } finally {
        // Only commit if the transaction was neither committed nor rolled back.
        if (!tx.wasCommitted() && !tx.wasRolledBack()) {
            tx.commit();
        }
        closeSession();
    }
    actualizaAgente(p);
}
From source file: dao.OperacionesDAO.java
License: GNU General Public License

/**
 * Computes the average rating of the Programador and updates it in the database.
 *
 * @param p the programmer whose rating is averaged
 */
public void promediaProgramador(Programador p) {
    Transaction tx = session().beginTransaction();
    try {
        // Alias the aggregate so addScalar("calificacion") can find the column.
        Query q = session()
                .createSQLQuery("select avg(calificacion) as calificacion from calificacion where id_calificado = :id")
                .addScalar("calificacion", StandardBasicTypes.DOUBLE)
                .setInteger("id", p.getUsuario().getIdUsuario());
        /* The programmer's new reputation */
        double promedio = (Double) q.uniqueResult();
        p.setReputacionProgramador(promedio);
    } catch (Exception e) {
        e.printStackTrace(); // Kept so the failure can be reviewed in the log.
        tx.rollback();
    } finally {
        // Only commit if the transaction was neither committed nor rolled back.
        if (!tx.wasCommitted() && !tx.wasRolledBack()) {
            tx.commit();
        }
        closeSession();
    }
    actualizaProgramador(p);
}
From source file: fr.gael.dhus.olingo.v1.SQLVisitor.java
License: Open Source License

@Override
public Object visitMethod(MethodExpression method_expression, MethodOperator method, List<Object> parameters) {
    Criterion criterion;
    switch (method) {
    // String functions
    case CONCAT: {
        criterion = Restrictions.sqlRestriction("CONCAT(?,?)",
                new Object[] { parameters.get(0), parameters.get(1) },
                new Type[] { StandardBasicTypes.STRING, StandardBasicTypes.STRING });
        break;
    }
    case INDEXOF: {
        criterion = Restrictions.sqlRestriction("LOCATE(?,?)",
                new Object[] { parameters.get(0), parameters.get(1) },
                new Type[] { StandardBasicTypes.STRING, StandardBasicTypes.STRING });
        break;
    }
    case LENGTH: {
        criterion = Restrictions.sqlRestriction("LENGTH(?)", parameters.get(0), StandardBasicTypes.STRING);
        break;
    }
    case SUBSTRING: {
        criterion = Restrictions.sqlRestriction("SUBSTR(?,?)",
                new Object[] { parameters.get(0), parameters.get(1) },
                new Type[] { StandardBasicTypes.STRING, StandardBasicTypes.STRING });
        break;
    }
    case TOUPPER: {
        criterion = Restrictions.sqlRestriction("UPPER(?)", parameters.get(0), StandardBasicTypes.STRING);
        break;
    }
    case TOLOWER: {
        criterion = Restrictions.sqlRestriction("LOWER(?)", parameters.get(0), StandardBasicTypes.STRING);
        break;
    }
    case TRIM: {
        criterion = Restrictions.sqlRestriction("TRIM(?)", parameters.get(0), StandardBasicTypes.STRING);
        break;
    }
    case ENDSWITH:
    case STARTSWITH: {
        criterion = getCriterionFunction(method, parameters.get(0), parameters.get(1));
        break;
    }
    case SUBSTRINGOF: {
        criterion = getCriterionFunction(method, parameters.get(1), parameters.get(0));
        break;
    }
    // Date functions
    case DAY: {
        criterion = Restrictions.sqlRestriction("DAYOFMONTH(?)", parameters.get(0), StandardBasicTypes.TIMESTAMP);
        break;
    }
    case HOUR: {
        criterion = Restrictions.sqlRestriction("HOUR(?)", parameters.get(0), StandardBasicTypes.TIMESTAMP);
        break;
    }
    case MINUTE: {
        criterion = Restrictions.sqlRestriction("MINUTE(?)", parameters.get(0), StandardBasicTypes.TIMESTAMP);
        break;
    }
    case MONTH: {
        criterion = Restrictions.sqlRestriction("MONTH(?)", parameters.get(0), StandardBasicTypes.TIMESTAMP);
        break;
    }
    case SECOND: {
        criterion = Restrictions.sqlRestriction("SECOND(?)", parameters.get(0), StandardBasicTypes.TIMESTAMP);
        break;
    }
    case YEAR: {
        criterion = Restrictions.sqlRestriction("YEAR(?)", parameters.get(0), StandardBasicTypes.TIMESTAMP);
        break;
    }
    // Math functions
    case CEILING: {
        criterion = Restrictions.sqlRestriction("CEILING(?)", parameters.get(0), StandardBasicTypes.DOUBLE);
        break;
    }
    case FLOOR: {
        criterion = Restrictions.sqlRestriction("FLOOR(?)", parameters.get(0), StandardBasicTypes.DOUBLE);
        break;
    }
    case ROUND: {
        criterion = Restrictions.sqlRestriction("ROUND(?)", parameters.get(0), StandardBasicTypes.DOUBLE);
        break;
    }
    default:
        throw new UnsupportedOperationException("Unsupported method: " + method.toUriLiteral());
    }
    return criterion;
}
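For context, a Criterion produced this way is normally attached to a Criteria query elsewhere in the visitor's pipeline. A minimal, hedged sketch of the general sqlRestriction pattern with a DOUBLE-typed bind parameter; the com.example.Product entity name and its size column are assumptions, not part of the DHuS code above.

import java.util.List;

import org.hibernate.Criteria;
import org.hibernate.Session;
import org.hibernate.criterion.Restrictions;
import org.hibernate.type.StandardBasicTypes;

public class SqlRestrictionUsage {

    /** Filters entities with a raw SQL fragment whose bind parameter is typed as DOUBLE. */
    @SuppressWarnings("unchecked")
    public static List<Object> productsAboveThreshold(Session session, double threshold) {
        Criteria criteria = session.createCriteria("com.example.Product")
                // {alias} is replaced by Hibernate with the root entity's SQL alias
                .add(Restrictions.sqlRestriction("FLOOR({alias}.size) >= ?",
                        threshold, StandardBasicTypes.DOUBLE));
        return criteria.list();
    }
}

Passing StandardBasicTypes.DOUBLE tells Hibernate how to bind the ? parameter, exactly as the visitor does for the CEILING, FLOOR and ROUND cases.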
From source file: org.ednovo.gooru.infrastructure.persistence.hibernate.assessment.AssessmentRepositoryHibernate.java
License: Open Source License

@Override
public Map<String, Object> getAssessmentAttemptsInfo(Integer attemptId, String gooruOAssessmentId, Integer studentId) {
    String sql = "SELECT COUNT(1) as count, AVG(attempt.score) as avg FROM assessment_attempt attempt "
            + "INNER JOIN content content ON content.gooru_oid = '" + gooruOAssessmentId + "' "
            + "INNER JOIN assessment assessment ON ( assessment.assessment_id = content.content_id "
            + "AND assessment.assessment_id = attempt.assessment_id ) "
            + "WHERE attempt.mode = 1 AND attempt.attempt_id != '" + attemptId + "' "
            + "AND attempt.student_id != " + studentId + " AND " + generateAuthSqlQueryWithData("content.");
    Query query = getSession().createSQLQuery(sql)
            .addScalar("count", StandardBasicTypes.INTEGER)
            .addScalar("avg", StandardBasicTypes.DOUBLE);
    Object[] result = (Object[]) query.uniqueResult();
    Map<String, Object> resultMap = new HashMap<String, Object>();
    resultMap.put("otherAttempts", result[0]);
    resultMap.put("othersAvg", result[1]);
    return resultMap;
}
From source file: org.goobi.production.flow.statistics.hibernate.StatQuestCorrections.java
License: Open Source License

@Override
public List<DataTable> getDataTables(List<? extends BaseDTO> dataSource) {
    List<DataTable> allTables = new ArrayList<>();

    // gathering IDs from the filter passed by dataSource
    List<Integer> idList = getIds(dataSource);
    if (idList == null || idList.size() == 0) {
        return null;
    }

    // TODO: replace it with some other solution
    // adding time restrictions
    String natSQL = new SQLStepRequests(this.timeFilterFrom, this.timeFilterTo, getTimeUnit(), idList)
            .getSQL(HistoryTypeEnum.taskError, null, false, false);

    Session session = Helper.getHibernateSession();
    SQLQuery query = session.createSQLQuery(natSQL);

    // needs to be there otherwise an exception is thrown
    query.addScalar("stepCount", StandardBasicTypes.DOUBLE);
    query.addScalar("intervall", StandardBasicTypes.STRING);

    @SuppressWarnings("rawtypes")
    List list = query.list();

    DataTable dtbl = new DataTable(
            StatisticsMode.getByClassName(this.getClass()).getTitle() + Helper.getTranslation("_(number)"));

    DataRow dataRow;

    // each data row comes out as an array of Objects; the only way to
    // extract the data is by knowing in which order they come out
    for (Object obj : list) {
        dataRow = new DataRow(null);
        Object[] objArr = (Object[]) obj;
        try {
            // getting localized time group unit
            // setting row name with date/time extraction based on the group
            dataRow.setName(new Converter(objArr[1]).getString() + "");
            dataRow.addValue(Helper.getTranslation("Corrections/Errors"),
                    (new Converter(objArr[0]).getDouble()));
        } catch (Exception e) {
            dataRow.addValue(e.getMessage(), 0.0);
        }
        // finally adding dataRow to DataTable and fetching next row
        dtbl.addDataRow(dataRow);
    }

    // a list of DataTables is expected as return Object, even if there is
    // only one DataTable as it is here in this implementation
    dtbl.setUnitLabel(Helper.getTranslation(getTimeUnit().getSingularTitle()));
    allTables.add(dtbl);
    return allTables;
}
From source file: org.goobi.production.flow.statistics.hibernate.StatQuestProjectProgressData.java
License: Open Source License

/**
 * Generates a DataTable based on the input SQL. Its success depends on a very
 * specific data structure, so do not use it unless you understand it exactly.
 *
 * @param natSQL the native SQL to run; headers are read in first if a
 *               certain sorting is needed
 * @return DataTable
 */
private DataTable buildDataTableFromSQL(String natSQL) {
    Session session = Helper.getHibernateSession();

    if (this.commonWorkFlow == null) {
        return null;
    }

    DataRow headerRow = new DataRow("Header - delete again");
    for (StepInformation step : this.commonWorkFlow) {
        String stepName = step.getTitle();
        headerRow.setName("header - delete again");
        headerRow.addValue(stepName, Double.parseDouble("0"));
    }

    SQLQuery query = session.createSQLQuery(natSQL);

    // needs to be there otherwise an exception is thrown
    query.addScalar("stepCount", StandardBasicTypes.DOUBLE);
    query.addScalar("stepName", StandardBasicTypes.STRING);
    query.addScalar("intervall", StandardBasicTypes.STRING);

    @SuppressWarnings("rawtypes")
    List list = query.list();

    DataTable dtbl = new DataTable("");
    // Set columns to be removed later.
    dtbl.addDataRow(headerRow);

    DataRow dataRow = null;

    // each data row comes out as an array of Objects; the only way to
    // extract the data is by knowing in which order they come out.
    // checks if intervall has changed, which then triggers the start of a
    // new row; intervall here is the timeGroup expression (e.g. "2006/05"
    // or "2006-10-05")
    String observeIntervall = "";

    for (Object obj : list) {
        Object[] objArr = (Object[]) obj;
        String stepName = new Converter(objArr[1]).getString();
        if (isInWorkFlow(stepName)) {
            try {
                String intervall = new Converter(objArr[2]).getString();

                if (!observeIntervall.equals(intervall)) {
                    observeIntervall = intervall;

                    // the row cannot be added before it is filled, because the
                    // add process triggers a test for header alignment -- this
                    // is why we add it after iterating it first
                    if (dataRow != null) {
                        dtbl.addDataRow(dataRow);
                    }

                    // setting row name with localized time group and the
                    // date/time extraction based on the group
                    dataRow = new DataRow(intervall);
                }

                if (dataRow != null) {
                    Double count = new Converter(objArr[0]).getDouble();
                    dataRow.addValue(stepName, count);
                }
            } catch (Exception e) {
                if (dataRow != null) {
                    dataRow.addValue(e.getMessage(), 0.0);
                }
            }
        }
    }

    // also add the last row
    if (dataRow != null) {
        dtbl.addDataRow(dataRow);
    }

    // now removing headerRow
    dtbl.removeDataRow(headerRow);
    return dtbl;
}
From source file: org.goobi.production.flow.statistics.hibernate.StatQuestStorage.java
License: Open Source License

@Override
public List<DataTable> getDataTables(List<? extends BaseDTO> dataSource) {
    List<DataTable> allTables = new ArrayList<>();

    // gathering IDs from the filter passed by dataSource
    List<Integer> idList = getIds(dataSource);
    if (idList == null || idList.size() == 0) {
        return null;
    }

    // TODO: filter results according to date without sql query
    // adding time restrictions
    String natSQL = new SQLStorage(this.timeFilterFrom, this.timeFilterTo, this.timeGrouping, idList).getSQL();

    Session session = Helper.getHibernateSession();
    SQLQuery query = session.createSQLQuery(natSQL);

    // needs to be there otherwise an exception is thrown
    query.addScalar("storage", StandardBasicTypes.DOUBLE);
    query.addScalar("intervall", StandardBasicTypes.STRING);

    @SuppressWarnings("rawtypes")
    List list = query.list();

    DataTable dtbl = new DataTable(
            StatisticsMode.getByClassName(this.getClass()).getTitle() + " " + Helper.getTranslation("_inGB"));

    DataRow dataRow;

    // each data row comes out as an array of Objects; the only way to
    // extract the data is by knowing in which order they come out
    for (Object obj : list) {
        dataRow = new DataRow(null);
        // TODO: Don't use arrays
        Object[] objArr = (Object[]) obj;
        try {
            // getting localized time group unit
            // setting row name with date/time extraction based on the group
            dataRow.setName(new Converter(objArr[1]).getString() + "");
            dataRow.addValue(Helper.getTranslation("storageDifference"), (new Converter(objArr[0]).getGB()));
        } catch (Exception e) {
            dataRow.addValue(e.getMessage(), 0.0);
        }
        // finally adding dataRow to DataTable and fetching next row
        dtbl.addDataRow(dataRow);
    }

    // a list of DataTables is expected as return Object, even if there is
    // only one DataTable as it is here in this implementation
    dtbl.setUnitLabel(Helper.getTranslation(this.timeGrouping.getSingularTitle()));
    allTables.add(dtbl);
    return allTables;
}
From source file: org.goobi.production.flow.statistics.hibernate.StatQuestThroughput.java
License: Open Source License

/**
 * Generates a DataTable based on the input SQL. Its success depends on a very
 * specific data structure, so do not use it unless you understand it exactly.
 *
 * @param natSQL        the native SQL to run
 * @param headerFromSQL to be used if headers need to be read in first in
 *                      order to get a certain sorting
 * @return DataTable
 */
// TODO Remove redundant code
private DataTable buildDataTableFromSQL(String natSQL, String headerFromSQL) {
    Session session = Helper.getHibernateSession();

    // creating header row from headerSQL (gets all columns in one row)
    DataRow headerRow = null;
    if (headerFromSQL != null) {
        headerRow = new DataRow(null);
        SQLQuery headerQuery = session.createSQLQuery(headerFromSQL);

        // needs to be there otherwise an exception is thrown
        headerQuery.addScalar("stepCount", StandardBasicTypes.DOUBLE);
        headerQuery.addScalar("stepName", StandardBasicTypes.STRING);
        headerQuery.addScalar("stepOrder", StandardBasicTypes.DOUBLE);
        headerQuery.addScalar("intervall", StandardBasicTypes.STRING);

        @SuppressWarnings("rawtypes")
        List headerList = headerQuery.list();
        for (Object obj : headerList) {
            Object[] objArr = (Object[]) obj;
            try {
                headerRow.setName(new Converter(objArr[3]).getString() + "");
                headerRow.addValue(
                        new Converter(new Converter(objArr[2]).getInteger()).getString() + " ("
                                + new Converter(objArr[1]).getString() + ")",
                        (new Converter(objArr[0]).getDouble()));
            } catch (Exception e) {
                headerRow.addValue(e.getMessage(), 0.0);
            }
        }
    }

    SQLQuery query = session.createSQLQuery(natSQL);

    // needs to be there otherwise an exception is thrown
    query.addScalar("stepCount", StandardBasicTypes.DOUBLE);
    query.addScalar("stepName", StandardBasicTypes.STRING);
    query.addScalar("stepOrder", StandardBasicTypes.DOUBLE);
    query.addScalar("intervall", StandardBasicTypes.STRING);

    @SuppressWarnings("rawtypes")
    List list = query.list();

    DataTable dtbl = new DataTable("");

    // if headerRow is set then add it to the DataTable to set columns;
    // needs to be removed later
    if (headerRow != null) {
        dtbl.addDataRow(headerRow);
    }

    DataRow dataRow = null;

    // each data row comes out as an array of Objects; the only way to
    // extract the data is by knowing in which order they come out.
    // checks if intervall has changed, which then triggers the start of a
    // new row; intervall here is the timeGroup expression (e.g. "2006/05"
    // or "2006-10-05")
    String observeIntervall = "";

    for (Object obj : list) {
        Object[] objArr = (Object[]) obj;
        try {
            // objArr[3]
            if (!observeIntervall.equals(new Converter(objArr[3]).getString())) {
                observeIntervall = new Converter(objArr[3]).getString();

                // the row cannot be added before it is filled, because the add
                // process triggers a test for header alignment -- this is why
                // we add it after iterating it first
                if (dataRow != null) {
                    dtbl.addDataRow(dataRow);
                }

                dataRow = new DataRow(null);
                // setting row name with localized time group and the
                // date/time extraction based on the group
                dataRow.setName(new Converter(objArr[3]).getString() + "");
            }
            dataRow.addValue(
                    new Converter(new Converter(objArr[2]).getInteger()).getString() + " ("
                            + new Converter(objArr[1]).getString() + ")",
                    (new Converter(objArr[0]).getDouble()));
        } catch (Exception e) {
            dataRow.addValue(e.getMessage(), 0.0);
        }
    }

    // to add the last row
    if (dataRow != null) {
        dtbl.addDataRow(dataRow);
    }

    // now removing headerRow
    if (headerRow != null) {
        dtbl.removeDataRow(headerRow);
    }
    return dtbl;
}