Example usage for java.sql ResultSet getBigDecimal

List of usage examples for java.sql ResultSet getBigDecimal

Introduction

This page lists example usages of the java.sql.ResultSet method getBigDecimal.

Prototype

BigDecimal getBigDecimal(int columnIndex) throws SQLException;

BigDecimal getBigDecimal(String columnLabel) throws SQLException;

Document

Retrieves the value of the designated column in the current row of this ResultSet object as a java.math.BigDecimal with full precision.
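
The project examples in the Usage section below use both the column-label and column-index overloads. For reference, here is a minimal, self-contained sketch of the typical call pattern; the H2 in-memory JDBC URL, the product table and its price column are hypothetical placeholders, and the table is assumed to already exist:

import java.math.BigDecimal;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

public class GetBigDecimalExample {
    public static void main(String[] args) throws SQLException {
        try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:demo");
                Statement stmt = conn.createStatement();
                ResultSet rs = stmt.executeQuery("SELECT price FROM product")) {
            while (rs.next()) {
                // Column-label form; getBigDecimal(int columnIndex) behaves the same way.
                BigDecimal price = rs.getBigDecimal("price");
                // getBigDecimal returns null for SQL NULL, so guard before using the value.
                if (price != null) {
                    System.out.println(price.toPlainString());
                }
            }
        }
    }
}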

Usage

From source file:com.nextep.designer.sqlgen.postgre.impl.PostgreSqlCapturer.java

@Override
public Collection<ISequence> getSequences(ICaptureContext context, IProgressMonitor m) {
    final IProgressMonitor monitor = new CustomProgressMonitor(SubMonitor.convert(m, 500), PROGRESS_RANGE);
    final Connection conn = (Connection) context.getConnectionObject();
    final Collection<ISequence> sequences = new ArrayList<ISequence>();
    final String seqSql = "SELECT min_value, max_value, increment_by, is_cycled, cache_value, last_value FROM "; //$NON-NLS-1$

    Statement stmt = null;
    ResultSet rset = null;
    ResultSet rsetInfo = null;
    long start = 0;
    try {
        stmt = conn.createStatement();

        if (LOGGER.isDebugEnabled())
            start = System.currentTimeMillis();

        DatabaseMetaData md = conn.getMetaData();
        rset = md.getTables(context.getCatalog(), context.getSchema(), "%", //$NON-NLS-1$
                new String[] { "SEQUENCE" }); //$NON-NLS-1$

        while (rset.next()) {
            final String name = rset.getString("TABLE_NAME"); //$NON-NLS-1$
            final String desc = rset.getString("REMARKS"); //$NON-NLS-1$
            final IVersionable<ISequence> seqV = VersionableFactory.createVersionable(ISequence.class);
            final ISequence seq = seqV.getVersionnedObject().getModel();

            seq.setName(name);
            seq.setDescription(desc);
            seq.setOrdered(false); // Ordered sequences are not supported by PostgreSQL

            try {
                rsetInfo = stmt.executeQuery(seqSql + name);

                if (rsetInfo.next()) {
                    monitor.worked(1);

                    final BigDecimal min = rsetInfo.getBigDecimal("min_value"); //$NON-NLS-1$
                    final BigDecimal max = rsetInfo.getBigDecimal("max_value"); //$NON-NLS-1$
                    final Long inc = rsetInfo.getLong("increment_by"); //$NON-NLS-1$
                    final String cycle = rsetInfo.getString("is_cycled"); //$NON-NLS-1$
                    final int cacheSize = rsetInfo.getInt("cache_value"); //$NON-NLS-1$
                    final BigDecimal seqStart = rsetInfo.getBigDecimal("last_value"); //$NON-NLS-1$

                    seq.setMinValue(min);
                    seq.setMaxValue(max);
                    seq.setIncrement(inc);
                    seq.setCycle("Y".equals(cycle)); //$NON-NLS-1$
                    seq.setCacheSize(cacheSize);
                    seq.setCached(cacheSize > 0);
                    seq.setStart(seqStart);

                    sequences.add(seq);
                }
            } catch (SQLException e) {
                LOGGER.warn(
                        MessageFormat.format(SQLGenMessages.getString("capturer.error.genericCapturerError"), //$NON-NLS-1$
                                DBVendor.POSTGRE) + e.getMessage(),
                        e);
            } finally {
                CaptureHelper.safeClose(rsetInfo, null);
            }
        }
        if (LOGGER.isDebugEnabled())
            LOGGER.debug("[Sequences] fetching time: " + (System.currentTimeMillis() - start) //$NON-NLS-1$
                    + "ms"); //$NON-NLS-1$
    } catch (SQLException e) {
        LOGGER.warn(MessageFormat.format(SQLGenMessages.getString("capturer.error.genericCapturerError"), //$NON-NLS-1$
                DBVendor.POSTGRE) + e.getMessage(), e);
    } finally {
        CaptureHelper.safeClose(rset, stmt);
    }

    return sequences;
}

From source file:ca.sqlpower.matchmaker.address.AddressPool.java

public void load(Logger engineLogger) throws SQLException, SQLObjectException {
    setCancelled(false);
    setStarted(true);
    setFinished(false);
    setProgress(0);

    SQLTable resultTable = project.getResultTable();
    Connection con = null;
    Statement stmt = null;
    ResultSet rs = null;

    setJobSize(getNumRowsToProcess());

    try {
        con = project.createResultTableConnection();

        stmt = con.createStatement();

        StringBuilder sql = new StringBuilder("SELECT * FROM ");
        appendFullyQualifiedTableName(sql, resultTable);

        rs = stmt.executeQuery(sql.toString());

        while (rs.next()) {
            List<Object> keyValues = new ArrayList<Object>();
            int numKeys = project.getSourceTableIndex().getChildCount();

            // We need to convert the column types to the base set of
            // String, Boolean, BigDecimal, and Date that we use in the
            // Transformations. Otherwise, we cannot properly compare the
            // key values of these loaded Addresses with the ones coming
            // through the transformations.
            for (int i = 0; i < numKeys; i++) {
                int type = project.getSourceTableIndex().getChild(i).getColumn().getType();
                Class c = TypeMap.typeClass(type);

                if (c == BigDecimal.class) {
                    keyValues.add(rs.getBigDecimal(SOURCE_ADDRESS_KEY_COLUMN_BASE + i));
                } else if (c == Date.class) {
                    /*
                     * KLUDGE. DateTime types are converted to Dates, thus losing
                     * the Time portion of the value. When paging through results
                     * and a DateTime column is used as part of the key, then inconsistent
                     * paging will occur as the comparison logic will be comparing just
                     * Date values. To avoid breaking any other parts of the application
                     * as it is only the paging that is affected by this change,
                     * explicitly check for the Timestamp type, and retrieve the right 
                     * type from the ResultSet here, instead of altering TypeMap.typeClass().
                     */
                    if (type == Types.TIMESTAMP) {
                        keyValues.add(rs.getTimestamp(SOURCE_ADDRESS_KEY_COLUMN_BASE + i));
                    } else {
                        keyValues.add(rs.getDate(SOURCE_ADDRESS_KEY_COLUMN_BASE + i));
                    }
                } else if (c == Boolean.class) {
                    keyValues.add(rs.getBoolean(SOURCE_ADDRESS_KEY_COLUMN_BASE + i));
                } else {
                    keyValues.add(rs.getString(SOURCE_ADDRESS_KEY_COLUMN_BASE + i));
                }
            }

            String addressLine1 = rs.getString(INPUT_ADDRESS_LINE1);
            String addressLine2 = rs.getString(INPUT_ADDRESS_LINE2);
            String municipality = rs.getString(INPUT_MUNICIPALITY);
            String province = rs.getString(INPUT_PROVINCE);
            String country = rs.getString(INPUT_COUNTRY);
            String postalCode = rs.getString(INPUT_POSTAL_CODE);

            Address address = new Address();
            address.setCountry(rs.getString(OUTPUT_COUNTRY));
            String deliveryInstallName;
            try {
                deliveryInstallName = rs.getString(OUTPUT_DELIVERY_INSTALLATION_NAME);
            } catch (SQLException e) {
                deliveryInstallName = rs.getString(OLD_OUTPUT_DELIVERY_INSTALLATION_NAME);
            }
            address.setDeliveryInstallationName(deliveryInstallName);
            String deliveryInstallType;
            try {
                deliveryInstallType = rs.getString(OUTPUT_DELIVERY_INSTALLATION_TYPE);
            } catch (SQLException e) {
                deliveryInstallType = rs.getString(OLD_OUTPUT_DELIVERY_INSTALLATION_TYPE);
            }
            address.setDeliveryInstallationType(deliveryInstallType);
            address.setDirectionPrefix(rs.getBoolean(OUTPUT_DIRECTION_PREFIX));
            address.setFailedParsingString(rs.getString(OUTPUT_FAILED_PARSING_STRING));
            address.setGeneralDeliveryName(rs.getString(OUTPUT_GENERAL_DELIVERY_NAME));
            address.setLockBoxNumber(rs.getString(OUTPUT_LOCK_BOX_NUMBER));
            address.setLockBoxType(rs.getString(OUTPUT_LOCK_BOX_TYPE));
            address.setMunicipality(rs.getString(OUTPUT_MUNICIPALITY));
            address.setPostalCode(rs.getString(OUTPUT_POSTAL_CODE));
            address.setProvince(rs.getString(OUTPUT_PROVINCE));
            address.setRuralRouteNumber(rs.getString(OUTPUT_RURAL_ROUTE_NUMBER));
            address.setRuralRouteType(rs.getString(OUTPUT_RURAL_ROUTE_TYPE));
            address.setStreet(rs.getString(OUTPUT_STREET_NAME));
            address.setStreetDirection(rs.getString(OUTPUT_STREET_DIRECTION));
            address.setStreetNumberSuffix(rs.getString(OUTPUT_STREET_NUMBER_SUFFIX));
            String streetNumSuffix;
            try {
                streetNumSuffix = rs.getString(OUTPUT_STREET_NUMBER_SUFFIX_SEPARATE);
            } catch (SQLException e) {
                streetNumSuffix = rs.getString(OLD_OUTPUT_STREET_NUMBER_SUFFIX_SEPARATE);
            }
            address.setStreetNumberSuffix(streetNumSuffix);
            address.setStreetNumber(rs.getInt(OUTPUT_STREET_NUMBER));
            address.setStreetType(rs.getString(OUTPUT_STREET_TYPE));
            address.setStreetTypePrefix(rs.getBoolean(OUTPUT_STREET_TYPE_PREFIX));
            address.setSuite(rs.getString(OUTPUT_SUITE));
            address.setSuitePrefix(rs.getBoolean(OUTPUT_SUITE_PREFIX));
            address.setSuiteType(rs.getString(OUTPUT_SUITE_TYPE));
            String typeString = rs.getString(OUTPUT_TYPE);
            if (typeString != null) {
                address.setType(RecordType.valueOf(rs.getString(OUTPUT_TYPE)));
            }
            address.setUnparsedAddressLine1(rs.getString(OUTPUT_UNPARSED_ADDRESS));
            address.setUrbanBeforeRural(rs.getBoolean(OUTPUT_URBAN_BEFORE_RURAL));

            Boolean valid = rs.getBoolean(OUTPUT_VALID);

            AddressResult result = new AddressResult(keyValues, addressLine1, addressLine2, municipality,
                    province, postalCode, country, address, valid);
            result.markClean();

            addresses.put(keyValues, result);
            incrementProgress();
        }
        engineLogger.debug("Loaded " + addresses.size() + " addresses from the result table");
    } finally {
        setFinished(true);
        if (rs != null)
            rs.close();
        if (stmt != null)
            stmt.close();
        if (con != null)
            con.close();
    }
}

From source file:org.adempiere.apps.graph.ChartBuilder.java

private void addData(MChartDatasource ds) {

    String value = ds.getValueColumn();
    String category;
    String unit = "D";

    if (!chartModel.isTimeSeries())
        category = ds.getCategoryColumn();
    else {
        if (chartModel.getTimeUnit().equals(MChart.TIMEUNIT_Week)) {
            unit = "W";
        } else if (chartModel.getTimeUnit().equals(MChart.TIMEUNIT_Month)) {
            unit = "MM";
        } else if (chartModel.getTimeUnit().equals(MChart.TIMEUNIT_Quarter)) {
            unit = "Q";
        } else if (chartModel.getTimeUnit().equals(MChart.TIMEUNIT_Year)) {
            unit = "Y";
        }

        category = " TRUNC(" + ds.getDateColumn() + ", '" + unit + "') ";
    }

    String series = DB.TO_STRING(ds.getName());
    boolean hasSeries = false;
    if (ds.getSeriesColumn() != null) {
        series = ds.getSeriesColumn();
        hasSeries = true;
    }

    String where = ds.getWhereClause();
    if (!Util.isEmpty(where)) {
        where = Env.parseContext(Env.getCtx(), chartModel.getWindowNo(), where, true);
    }

    boolean hasWhere = false;

    String sql = "SELECT " + value + ", " + category + ", " + series + " FROM " + ds.getFromClause();
    if (!Util.isEmpty(where)) {
        sql += " WHERE " + where;
        hasWhere = true;
    }

    Date currentDate = Env.getContextAsDate(Env.getCtx(), "#Date");
    Date startDate = null;
    Date endDate = null;

    int scope = chartModel.getTimeScope();
    int offset = ds.getTimeOffset();

    if (chartModel.isTimeSeries() && scope != 0) {
        offset += -scope;
        startDate = increment(currentDate, chartModel.getTimeUnit(), offset);
        endDate = increment(startDate, chartModel.getTimeUnit(), scope);
    }

    if (startDate != null && endDate != null) {
        sql += hasWhere ? " AND " : " WHERE ";
        sql += category + ">=TRUNC(" + DB.TO_DATE(new Timestamp(startDate.getTime())) + ", '" + unit
                + "') AND ";
        sql += category + "<=TRUNC(" + DB.TO_DATE(new Timestamp(endDate.getTime())) + ", '" + unit + "') ";
    }

    if (sql.indexOf('@') >= 0) {
        sql = Env.parseContext(Env.getCtx(), 0, sql, false, true);
    }

    MRole role = MRole.getDefault(Env.getCtx(), false);
    sql = role.addAccessSQL(sql, null, true, false);

    if (hasSeries)
        sql += " GROUP BY " + series + ", " + category + " ORDER BY " + series + ", " + category;
    else
        sql += " GROUP BY " + category + " ORDER BY " + category;

    log.log(Level.FINE, sql);

    PreparedStatement pstmt = null;
    ResultSet rs = null;
    TimeSeries tseries = null;
    Dataset dataset = getDataset();

    try {
        pstmt = DB.prepareStatement(sql, null);
        rs = pstmt.executeQuery();
        while (rs.next()) {

            String key = rs.getString(2);
            String seriesName = rs.getString(3);
            if (seriesName == null)
                seriesName = ds.getName();
            String queryWhere = "";
            if (hasWhere)
                queryWhere += where + " AND ";

            queryWhere += series + " = " + DB.TO_STRING(seriesName) + " AND " + category + " = ";

            if (chartModel.isTimeSeries() && dataset instanceof TimeSeriesCollection) {

                if (tseries == null || !tseries.getKey().equals(seriesName)) {
                    if (tseries != null)
                        ((TimeSeriesCollection) dataset).addSeries(tseries);

                    tseries = new TimeSeries(seriesName);
                }

                Date date = rs.getDate(2);
                RegularTimePeriod period = null;

                if (chartModel.getTimeUnit().equals(MChart.TIMEUNIT_Day))
                    period = new Day(date);
                else if (chartModel.getTimeUnit().equals(MChart.TIMEUNIT_Week))
                    period = new Week(date);
                else if (chartModel.getTimeUnit().equals(MChart.TIMEUNIT_Month))
                    period = new Month(date);
                else if (chartModel.getTimeUnit().equals(MChart.TIMEUNIT_Quarter))
                    period = new Quarter(date);
                else if (chartModel.getTimeUnit().equals(MChart.TIMEUNIT_Year))
                    period = new Year(date);

                tseries.add(period, rs.getBigDecimal(1));
                key = period.toString();
                queryWhere += DB.TO_DATE(new Timestamp(date.getTime()));
            } else {
                queryWhere += DB.TO_STRING(key);
            }

            MQuery query = new MQuery(ds.getAD_Table_ID());
            String keyCol = MTable.get(Env.getCtx(), ds.getAD_Table_ID()).getKeyColumns()[0];
            String whereClause = keyCol + " IN (SELECT " + ds.getKeyColumn() + " FROM " + ds.getFromClause()
                    + " WHERE " + queryWhere + " )";
            query.addRestriction(whereClause.toString());
            query.setRecordCount(1);

            HashMap<String, MQuery> map = getQueries();

            if (dataset instanceof DefaultPieDataset) {
                ((DefaultPieDataset) dataset).setValue(key, rs.getBigDecimal(1));
                map.put(key, query);
            } else if (dataset instanceof DefaultCategoryDataset) {
                ((DefaultCategoryDataset) dataset).addValue(rs.getBigDecimal(1), seriesName, key);
                map.put(seriesName + "__" + key, query);
            } else if (dataset instanceof TimeSeriesCollection) {
                map.put(seriesName + "__" + key, query);
            }
        }
    } catch (SQLException e) {
        throw new DBException(e, sql);
    } finally {
        DB.close(rs, pstmt);
        rs = null;
        pstmt = null;
    }

    if (tseries != null)
        ((TimeSeriesCollection) dataset).addSeries(tseries);

}

From source file:org.apache.phoenix.end2end.index.IndexExpressionIT.java

private void helpTestUpdatableViewIndex(boolean local) throws Exception {
    Connection conn = DriverManager.getConnection(getUrl());
    try {
        String ddl = "CREATE TABLE t (k1 INTEGER NOT NULL, k2 INTEGER NOT NULL, k3 DECIMAL, s1 VARCHAR, s2 VARCHAR CONSTRAINT pk PRIMARY KEY (k1, k2, k3))";
        conn.createStatement().execute(ddl);
        ddl = "CREATE VIEW v AS SELECT * FROM t WHERE k1 = 1";
        conn.createStatement().execute(ddl);
        conn.createStatement().execute("UPSERT INTO v(k2,s1,s2,k3) VALUES(120,'foo0','bar0',50.0)");
        conn.createStatement().execute("UPSERT INTO v(k2,s1,s2,k3) VALUES(121,'foo1','bar1',51.0)");
        conn.commit();

        ResultSet rs;
        conn.createStatement()
                .execute("CREATE " + (local ? "LOCAL" : "") + " INDEX i1 on v(k1+k2+k3) include (s1, s2)");
        conn.createStatement().execute("UPSERT INTO v(k2,s1,s2,k3) VALUES(120,'foo2','bar2',50.0)");
        conn.commit();

        String query = "SELECT k1, k2, k3, s1, s2 FROM v WHERE    k1+k2+k3 = 173.0";
        rs = conn.createStatement().executeQuery("EXPLAIN " + query);
        String queryPlan = QueryUtil.getExplainPlan(rs);
        if (local) {
            assertEquals(
                    "CLIENT PARALLEL 1-WAY RANGE SCAN OVER _LOCAL_IDX_T [-32768,173]\n" + "CLIENT MERGE SORT",
                    queryPlan);
        } else {
            assertEquals("CLIENT PARALLEL 1-WAY RANGE SCAN OVER _IDX_T [" + Short.MIN_VALUE + ",173]",
                    queryPlan);
        }
        rs = conn.createStatement().executeQuery(query);
        assertTrue(rs.next());
        assertEquals(1, rs.getInt(1));
        assertEquals(121, rs.getInt(2));
        assertTrue(BigDecimal.valueOf(51.0).compareTo(rs.getBigDecimal(3)) == 0);
        assertEquals("foo1", rs.getString(4));
        assertEquals("bar1", rs.getString(5));
        assertFalse(rs.next());

        conn.createStatement().execute("CREATE " + (local ? "LOCAL" : "") + " INDEX i2 on v(s1||'_'||s2)");

        query = "SELECT k1, k2, s1||'_'||s2 FROM v WHERE (s1||'_'||s2)='foo2_bar2'";
        rs = conn.createStatement().executeQuery("EXPLAIN " + query);
        if (local) {
            assertEquals(
                    "CLIENT PARALLEL 1-WAY RANGE SCAN OVER _LOCAL_IDX_T [" + (Short.MIN_VALUE + 1)
                            + ",'foo2_bar2']\n" + "    SERVER FILTER BY FIRST KEY ONLY\n" + "CLIENT MERGE SORT",
                    QueryUtil.getExplainPlan(rs));
        } else {
            assertEquals("CLIENT PARALLEL 1-WAY RANGE SCAN OVER _IDX_T [" + (Short.MIN_VALUE + 1)
                    + ",'foo2_bar2']\n" + "    SERVER FILTER BY FIRST KEY ONLY", QueryUtil.getExplainPlan(rs));
        }
        rs = conn.createStatement().executeQuery(query);
        assertTrue(rs.next());
        assertEquals(1, rs.getInt(1));
        assertEquals(120, rs.getInt(2));
        assertEquals("foo2_bar2", rs.getString(3));
        assertFalse(rs.next());
    } finally {
        conn.close();
    }
}

From source file:org.springframework.jdbc.core.AbstractBeanPropertyRowMapper.java

protected Object doMapRow(ResultSet rs, int rowNumber) throws SQLException {
    if (getMappedClass() == null)
        throw new InvalidDataAccessApiUsageException("Target class was not specified - it is mandatory");
    Object result;
    try {
        result = this.defaultConstruct.newInstance((Object[]) null);
    } catch (IllegalAccessException e) {
        throw new DataAccessResourceFailureException("Failed to load class " + this.mappedClass.getName(), e);
    } catch (InvocationTargetException e) {
        throw new DataAccessResourceFailureException("Failed to load class " + this.mappedClass.getName(), e);
    } catch (InstantiationException e) {
        throw new DataAccessResourceFailureException("Failed to load class " + this.mappedClass.getName(), e);
    }
    ResultSetMetaData rsmd = rs.getMetaData();
    int columns = rsmd.getColumnCount();
    for (int i = 1; i <= columns; i++) {
        String column = JdbcUtils.lookupColumnName(rsmd, i).toLowerCase();
        PersistentField fieldMeta = (PersistentField) this.mappedFields.get(column);
        if (fieldMeta != null) {
            BeanWrapper bw = new BeanWrapperImpl(mappedClass);
            bw.setWrappedInstance(result);
            fieldMeta.setSqlType(rsmd.getColumnType(i));
            Object value = null;
            Class fieldType = fieldMeta.getJavaType();
            if (fieldType.equals(String.class)) {
                value = rs.getString(column);
            } else if (fieldType.equals(byte.class) || fieldType.equals(Byte.class)) {
                value = new Byte(rs.getByte(column));
            } else if (fieldType.equals(short.class) || fieldType.equals(Short.class)) {
                value = new Short(rs.getShort(column));
            } else if (fieldType.equals(int.class) || fieldType.equals(Integer.class)) {
                value = new Integer(rs.getInt(column));
            } else if (fieldType.equals(long.class) || fieldType.equals(Long.class)) {
                value = new Long(rs.getLong(column));
            } else if (fieldType.equals(float.class) || fieldType.equals(Float.class)) {
                value = new Float(rs.getFloat(column));
            } else if (fieldType.equals(double.class) || fieldType.equals(Double.class)) {
                value = new Double(rs.getDouble(column));
            } else if (fieldType.equals(BigDecimal.class)) {
                value = rs.getBigDecimal(column);
            } else if (fieldType.equals(boolean.class) || fieldType.equals(Boolean.class)) {
                value = (rs.getBoolean(column)) ? Boolean.TRUE : Boolean.FALSE;
            } else if (fieldType.equals(java.util.Date.class) || fieldType.equals(java.sql.Timestamp.class)
                    || fieldType.equals(java.sql.Time.class) || fieldType.equals(Number.class)) {
                value = JdbcUtils.getResultSetValue(rs, rs.findColumn(column));
            }
            if (value != null) {
                if (bw.isWritableProperty(fieldMeta.getFieldName())) {
                    try {
                        if (logger.isDebugEnabled() && rowNumber == 0) {
                            logger.debug("Mapping column named \"" + column + "\""
                                    + " containing values of SQL type " + fieldMeta.getSqlType()
                                    + " to property \"" + fieldMeta.getFieldName() + "\"" + " of type "
                                    + fieldMeta.getJavaType());
                        }
                        bw.setPropertyValue(fieldMeta.getFieldName(), value);
                    } catch (NotWritablePropertyException ex) {
                        throw new DataRetrievalFailureException(
                                "Unable to map column " + column + " to property " + fieldMeta.getFieldName(),
                                ex);
                    }
                } else {
                    if (rowNumber == 0) {
                        logger.warn("Unable to access the setter for " + fieldMeta.getFieldName()
                                + ".  Check that " + "set" + StringUtils.capitalize(fieldMeta.getFieldName())
                                + " is declared and has public access.");
                    }
                }
            }
        }
    }
    return result;
}

From source file:com.nextep.designer.sqlgen.oracle.impl.OracleCapturer.java

@Override
public Collection<ISequence> getSequences(ICaptureContext context, IProgressMonitor m) {
    Collection<ISequence> sequences = new ArrayList<ISequence>();
    final Connection conn = (Connection) context.getConnectionObject();
    final IProgressMonitor monitor = new CustomProgressMonitor(m, PROGRESS_RANGE);
    monitor.subTask("Retrieving sequences...");
    Statement stmt = null;
    ResultSet rset = null;
    long start = 0;

    try {
        stmt = conn.createStatement();

        if (LOGGER.isDebugEnabled())
            start = System.currentTimeMillis();
        rset = stmt.executeQuery(
                "SELECT sequence_name, min_value, max_value, increment_by, cycle_flag, order_flag, cache_size, last_number " //$NON-NLS-1$
                        + "FROM user_sequences"); //$NON-NLS-1$
        if (LOGGER.isDebugEnabled())
            LOGGER.debug("[Sequences] query time: " + (System.currentTimeMillis() - start) + "ms"); //$NON-NLS-2$

        if (LOGGER.isDebugEnabled())
            start = System.currentTimeMillis();
        while (rset.next()) {
            monitor.worked(1);
            final String name = rset.getString(1);
            final BigDecimal min = rset.getBigDecimal(2);
            final BigDecimal max = rset.getBigDecimal(3);
            final Long inc = rset.getLong(4);
            final String cycle = rset.getString(5);
            final String order = rset.getString(6);
            final int cacheSize = rset.getInt(7);
            final BigDecimal seqStart = rset.getBigDecimal(8);

            IVersionable<ISequence> v = VersionableFactory.createVersionable(ISequence.class);
            ISequence seq = v.getVersionnedObject().getModel();
            seq.setName(name);
            seq.setMinValue(min);
            seq.setStart(seqStart);
            seq.setMaxValue(max);
            seq.setIncrement(inc);
            seq.setCycle("Y".equals(cycle)); //$NON-NLS-1$
            seq.setCached(cacheSize > 0);
            seq.setCacheSize(cacheSize);
            seq.setOrdered("Y".equals(order)); //$NON-NLS-1$
            sequences.add(seq);
        }
        if (LOGGER.isDebugEnabled())
            LOGGER.debug("[Sequences] fetching time: " + (System.currentTimeMillis() - start) + "ms"); //$NON-NLS-2$
    } catch (SQLException e) {
        LOGGER.warn(MessageFormat.format(SQLGenMessages.getString("capturer.error.genericCapturerError"), //$NON-NLS-1$
                context.getConnection().getDBVendor().toString()) + e.getMessage(), e);
    } finally {
        CaptureHelper.safeClose(rset, stmt);
    }

    return sequences;
}

From source file:com.alibaba.wasp.jdbc.TestJdbcResultSet.java

@Test
public void testDoubleFloat() throws SQLException, IOException {
    trace("test DOUBLE - FLOAT");

    ResultInHBasePrinter.printFMETA(TEST_UTIL.getConfiguration(), LOG);
    ResultInHBasePrinter.printMETA(TEST_UTIL.getConfiguration(), LOG);
    ResultSet rs;
    Object o;

    stat = conn.createStatement();
    stat.execute(
            "INSERT INTO test (column1,column5,column4,column2,column3) VALUES(11, -1, -1, 2, 'testDoubleFloat')");
    stat.execute(
            "INSERT INTO test (column1,column5,column4,column2,column3) VALUES(12,.0, .0, 2, 'testDoubleFloat')");
    stat.execute(
            "INSERT INTO test (column1,column5,column4,column2,column3) VALUES(13, 1., 1., 2, 'testDoubleFloat')");
    stat.execute(
            "INSERT INTO test (column1,column5,column4,column2,column3) VALUES(14, 12345678.89, 12345678.89, 2, 'testDoubleFloat')");
    stat.execute(
            "INSERT INTO test (column1,column5,column4,column2,column3) VALUES(15, 99999999.99, 99999999.99, 2, 'testDoubleFloat')");
    stat.execute(
            "INSERT INTO test (column1,column5,column4,column2,column3) VALUES(16, -99999999.99, -99999999.99, 2, 'testDoubleFloat')");
    stat.execute(
            "INSERT INTO test (column1,column5,column4,column2,column3) VALUES(17, -99999999.99, -99999999.99, 2, 'testDoubleFloat')");
    // stat.execute("INSERT INTO test (column1,column5,column4,column2,column3) VALUES(8, NULL, NULL, 2, 'testDoubleFloat')");

    rs = stat.executeQuery(
            "SELECT column1,column5,column4 FROM test where column3='testDoubleFloat'  ORDER BY column1");
    // assertResultSetMeta(rs, 3, new String[] { "ID", "D", "R" }, new int[] {
    // Types.INTEGER, Types.DOUBLE, Types.REAL }, new int[] { 10, 17, 7 },
    // new int[] { 0, 0, 0 });
    BigDecimal bd;
    rs.next();
    assertTrue(rs.getInt(1) == 11);
    assertTrue(!rs.wasNull());
    assertTrue(rs.getInt(2) == -1);
    assertTrue(rs.getInt(3) == -1);
    assertTrue(!rs.wasNull());
    bd = rs.getBigDecimal(2);
    assertTrue(bd.compareTo(new BigDecimal("-1.00")) == 0);
    assertTrue(!rs.wasNull());
    o = rs.getObject(2);
    trace(o.getClass().getName());
    assertTrue(o instanceof Double);
    assertTrue(((Double) o).compareTo(new Double("-1.00")) == 0);
    o = rs.getObject(3);
    trace(o.getClass().getName());
    assertTrue(o instanceof Float);
    assertTrue(((Float) o).compareTo(new Float("-1.00")) == 0);
    rs.next();
    assertTrue(rs.getInt(1) == 12);
    assertTrue(!rs.wasNull());
    assertTrue(rs.getInt(2) == 0);
    assertTrue(!rs.wasNull());
    assertTrue(rs.getInt(3) == 0);
    assertTrue(!rs.wasNull());
    bd = rs.getBigDecimal(2);
    assertTrue(bd.compareTo(new BigDecimal("0.00")) == 0);
    assertTrue(!rs.wasNull());
    bd = rs.getBigDecimal(3);
    assertTrue(bd.compareTo(new BigDecimal("0.00")) == 0);
    assertTrue(!rs.wasNull());
    rs.next();
    assertEquals(1.0, rs.getDouble(2));
    assertEquals(1.0f, rs.getFloat(3));
    rs.next();
    assertEquals(12345678.89, rs.getDouble(2));
    assertEquals(12345678.89f, rs.getFloat(3));
    rs.next();
    assertEquals(99999999.99, rs.getDouble(2));
    assertEquals(99999999.99f, rs.getFloat(3));
    rs.next();
    assertEquals(-99999999.99, rs.getDouble(2));
    assertEquals(-99999999.99f, rs.getFloat(3));
    // rs.next();
    // checkColumnBigDecimal(rs, 2, 0, null);
    // checkColumnBigDecimal(rs, 3, 0, null);
    // assertTrue(!rs.next());
    // stat.execute("DROP TABLE test");
}

From source file:org.latticesoft.util.resource.dao.Param.java

private Object readValue(ResultSet rs) throws SQLException {
    Object retVal = null;
    switch (this.sqlType) {
    case Types.VARCHAR:
    case Types.CHAR:
        String s = null;
        if (this.getSqlIndex() == 0) {
            s = rs.getString(this.getSqlName());
        } else {
            s = rs.getString(this.getSqlIndex());
        }
        retVal = s;
        break;
    case Types.BOOLEAN:
        boolean b = false;
        if (this.getSqlIndex() == 0) {
            b = rs.getBoolean(this.getSqlName());
        } else {
            b = rs.getBoolean(this.getSqlIndex());
        }
        retVal = new Boolean(b);
        break;
    case Types.INTEGER:
        int i = 0;
        if (this.getSqlIndex() == 0) {
            i = rs.getInt(this.getSqlName());
        } else {
            i = rs.getInt(this.getSqlIndex());
        }
        retVal = new Integer(i);
        break;
    case Types.SMALLINT:
        short ss = 0;
        if (this.getSqlIndex() == 0) {
            ss = rs.getShort(this.getSqlName());
        } else {
            ss = rs.getShort(this.getSqlIndex());
        }
        retVal = new Short(ss);
        break;
    case Types.TINYINT:
        byte bb = 0;
        if (this.getSqlIndex() == 0) {
            bb = rs.getByte(this.getSqlName());
        } else {
            bb = rs.getByte(this.getSqlIndex());
        }
        retVal = new Byte(bb);
        break;
    case Types.BIGINT:
        long l = 0;
        if (this.getSqlIndex() == 0) {
            l = rs.getLong(this.getSqlName());
        } else {
            l = rs.getLong(this.getSqlIndex());
        }
        retVal = new Long(l);
        break;
    case Types.DOUBLE:
        double dd = 0;
        if (this.getSqlIndex() == 0) {
            dd = rs.getDouble(this.getSqlName());
        } else {
            dd = rs.getDouble(this.getSqlIndex());
        }
        retVal = new Double(dd);
        break;
    case Types.FLOAT:
        float f = 0;
        if (this.getSqlIndex() == 0) {
            f = rs.getFloat(this.getSqlName());
        } else {
            f = rs.getFloat(this.getSqlIndex());
        }
        retVal = new Float(f);
        break;
    case Types.NUMERIC:
        BigDecimal bd = null;
        if (this.getSqlIndex() == 0) {
            bd = rs.getBigDecimal(this.getSqlName());
        } else {
            bd = rs.getBigDecimal(this.getSqlIndex());
        }
        retVal = bd;
        break;
    case Types.TIMESTAMP:
        Timestamp ts = null;
        if (this.getSqlIndex() == 0) {
            ts = rs.getTimestamp(this.getSqlName());
        } else {
            ts = rs.getTimestamp(this.getSqlIndex());
        }
        retVal = ts;
        break;
    default:
        if (this.getSqlIndex() == 0) {
            retVal = rs.getObject(this.getSqlName());
        } else {
            retVal = rs.getObject(this.getSqlIndex());
        }
        break;
    }
    if (log.isDebugEnabled()) {
        log.debug(this.getAttribute() + "=" + retVal);
    }
    return retVal;
}

From source file:com.streamsets.pipeline.stage.origin.jdbc.cdc.oracle.OracleCDCSource.java

private void generateRecords(Offset startingOffset, PreparedStatement selectChanges) {
    // When this is called the first time, Logminer was started either from SCN or from a start date, so we just keep
    // track of the start date etc.
    LOG.info("Attempting to generate records");
    boolean error;
    StringBuilder query = new StringBuilder();
    BigDecimal lastCommitSCN = new BigDecimal(startingOffset.scn);
    int sequenceNumber = startingOffset.sequence;
    LocalDateTime startTime = adjustStartTime(startingOffset.timestamp);
    String lastTxnId = startingOffset.txnId;
    LocalDateTime endTime = getEndTimeForStartTime(startTime);
    ResultSet resultSet = null;
    while (!getContext().isStopped()) {
        error = false;
        generationStarted = true;
        try {
            recordQueue.put(new RecordOffset(dummyRecord,
                    new Offset(version, startTime, lastCommitSCN.toPlainString(), sequenceNumber, lastTxnId)));
            selectChanges = getSelectChangesStatement();
            if (!useLocalBuffering) {
                selectChanges.setBigDecimal(1, lastCommitSCN);
                selectChanges.setInt(2, sequenceNumber);
                selectChanges.setBigDecimal(3, lastCommitSCN);
                if (shouldTrackDDL) {
                    selectChanges.setBigDecimal(4, lastCommitSCN);
                }
            }
            selectChanges.setFetchSize(configBean.jdbcFetchSize);
            resultSet = selectChanges.executeQuery();
            while (resultSet.next() && !getContext().isStopped()) {
                String queryFragment = resultSet.getString(5);
                BigDecimal scnDecimal = resultSet.getBigDecimal(1);
                String scn = scnDecimal.toPlainString();
                String xidUsn = String.valueOf(resultSet.getLong(10));
                String xidSlt = String.valueOf(resultSet.getString(11));
                String xidSqn = String.valueOf(resultSet.getString(12));
                String xid = xidUsn + "." + xidSlt + "." + xidSqn;
                // Query Fragment is not null -> we need to process
                // Query Fragment is null AND the query string buffered from previous rows due to CSF == 0 is null,
                // nothing to do, go to next row
                // Query Fragment is null, but there is previously buffered data in the query, go ahead and process.
                if (queryFragment != null) {
                    query.append(queryFragment);
                } else if (queryFragment == null && query.length() == 0) {
                    LOG.debug(READ_NULL_QUERY_FROM_ORACLE, scn, xid);
                    continue;
                }

                // CSF is 1 if the query is incomplete, so read the next row before parsing
                // CSF being 0 means query is complete, generate the record
                if (resultSet.getInt(9) == 0) {
                    if (query.length() == 0) {
                        LOG.debug(READ_NULL_QUERY_FROM_ORACLE, scn, xid);
                        continue;
                    }
                    String queryString = query.toString();
                    query.setLength(0);
                    String username = resultSet.getString(2);
                    short op = resultSet.getShort(3);
                    String timestamp = resultSet.getString(4);
                    LocalDateTime tsDate = Timestamp.valueOf(timestamp).toLocalDateTime();
                    delay.getValue().put("delay", getDelay(tsDate));
                    String table = resultSet.getString(6);
                    BigDecimal commitSCN = resultSet.getBigDecimal(7);
                    int seq = resultSet.getInt(8);

                    String rsId = resultSet.getString(13);
                    Object ssn = resultSet.getObject(14);
                    String schema = String.valueOf(resultSet.getString(15));
                    int rollback = resultSet.getInt(16);
                    String rowId = resultSet.getString(17);
                    SchemaAndTable schemaAndTable = new SchemaAndTable(schema, table);
                    TransactionIdKey key = new TransactionIdKey(xid);
                    bufferedRecordsLock.lock();
                    try {
                        if (useLocalBuffering && bufferedRecords.containsKey(key) && bufferedRecords.get(key)
                                .contains(new RecordSequence(null, null, 0, 0, rsId, ssn, null))) {
                            continue;
                        }
                    } finally {
                        bufferedRecordsLock.unlock();
                    }
                    Offset offset = null;
                    if (LOG.isDebugEnabled()) {
                        LOG.debug(
                                "Commit SCN = {}, SCN = {}, Operation = {}, Txn Id = {}, Timestamp = {}, Row Id = {}, Redo SQL = {}",
                                commitSCN, scn, op, xid, tsDate, rowId, queryString);
                    }

                    if (op != DDL_CODE && op != COMMIT_CODE && op != ROLLBACK_CODE) {
                        if (!useLocalBuffering) {
                            offset = new Offset(version, tsDate, commitSCN.toPlainString(), seq, xid);
                        }
                        Map<String, String> attributes = new HashMap<>();
                        attributes.put(SCN, scn);
                        attributes.put(USER, username);
                        attributes.put(TIMESTAMP_HEADER, timestamp);
                        attributes.put(TABLE, table);
                        attributes.put(SEQ, String.valueOf(seq));
                        attributes.put(XID, xid);
                        attributes.put(RS_ID, rsId);
                        attributes.put(SSN, ssn.toString());
                        attributes.put(SCHEMA, schema);
                        attributes.put(ROLLBACK, String.valueOf(rollback));
                        attributes.put(ROWID_KEY, rowId);
                        if (!useLocalBuffering || getContext().isPreview()) {
                            if (commitSCN.compareTo(lastCommitSCN) < 0
                                    || (commitSCN.compareTo(lastCommitSCN) == 0 && seq < sequenceNumber)) {
                                continue;
                            }
                            lastCommitSCN = commitSCN;
                            sequenceNumber = seq;
                            if (configBean.keepOriginalQuery) {
                                attributes.put(QUERY_KEY, queryString);
                            }
                            try {
                                Record record = generateRecord(queryString, attributes, op);
                                if (record != null && record.getEscapedFieldPaths().size() > 0) {
                                    recordQueue.put(new RecordOffset(record, offset));
                                }
                            } catch (UnparseableSQLException ex) {
                                LOG.error("Parsing failed", ex);
                                unparseable.offer(queryString);
                            }
                        } else {
                            bufferedRecordsLock.lock();
                            try {
                                HashQueue<RecordSequence> records = bufferedRecords.computeIfAbsent(key, x -> {
                                    x.setTxnStartTime(tsDate);
                                    return createTransactionBuffer(key.txnId);
                                });

                                int nextSeq = records.isEmpty() ? 1 : records.tail().seq + 1;
                                RecordSequence node = new RecordSequence(attributes, queryString, nextSeq, op,
                                        rsId, ssn, tsDate);
                                records.add(node);
                            } finally {
                                bufferedRecordsLock.unlock();
                            }
                        }
                    } else if (!getContext().isPreview() && useLocalBuffering
                            && (op == COMMIT_CODE || op == ROLLBACK_CODE)) {
                        // so this commit was previously processed or it is a rollback, so don't care.
                        if (op == ROLLBACK_CODE || scnDecimal.compareTo(lastCommitSCN) < 0) {
                            bufferedRecordsLock.lock();
                            try {
                                bufferedRecords.remove(key);
                            } finally {
                                bufferedRecordsLock.unlock();
                            }
                        } else {
                            bufferedRecordsLock.lock();
                            try {
                                HashQueue<RecordSequence> records = bufferedRecords.getOrDefault(key,
                                        EMPTY_LINKED_HASHSET);
                                if (lastCommitSCN.equals(scnDecimal) && xid.equals(lastTxnId)) {
                                    removeProcessedRecords(records, sequenceNumber);
                                }
                                int bufferedRecordsToBeRemoved = records.size();
                                LOG.debug(FOUND_RECORDS_IN_TRANSACTION, bufferedRecordsToBeRemoved, xid);
                                lastCommitSCN = scnDecimal;
                                lastTxnId = xid;
                                sequenceNumber = addRecordsToQueue(tsDate, scn, xid);
                            } finally {
                                bufferedRecordsLock.unlock();
                            }
                        }
                    } else {
                        offset = new Offset(version, tsDate, scn, 0, xid);
                        boolean sendSchema = false;
                        // Commit/rollback in Preview will also end up here, so don't really do any of the following in preview
                        // Don't bother with DDL events here.
                        if (!getContext().isPreview()) {
                            // Event is sent on every DDL, but schema is not always sent.
                            // Schema sending logic:
                            // CREATE/ALTER: Schema is sent if the schema after the ALTER is newer than the cached schema
                            // (which we would have sent as an event earlier, at the last alter)
                            // DROP/TRUNCATE: Schema is not sent, since they don't change schema.
                            DDL_EVENT type = getDdlType(queryString);
                            if (type == DDL_EVENT.ALTER || type == DDL_EVENT.CREATE) {
                                sendSchema = refreshSchema(scnDecimal, new SchemaAndTable(schema, table));
                            }
                            recordQueue.put(new RecordOffset(createEventRecord(type, queryString,
                                    schemaAndTable, offset.toString(), sendSchema, timestamp), offset));
                        }
                    }
                    query.setLength(0);
                }
            }
        } catch (SQLException ex) {
            error = true;
            // force a restart from the same timestamp.
            if (ex.getErrorCode() == MISSING_LOG_FILE) {
                LOG.warn("SQL Exception while retrieving records", ex);
                addToStageExceptionsQueue(new StageException(JDBC_86, ex));
            } else if (ex.getErrorCode() != RESULTSET_CLOSED_AS_LOGMINER_SESSION_CLOSED) {
                LOG.warn("SQL Exception while retrieving records", ex);
            } else if (ex.getErrorCode() == QUERY_TIMEOUT) {
                LOG.warn("LogMiner select query timed out");
            } else if (ex.getErrorCode() == LOGMINER_START_MUST_BE_CALLED) {
                LOG.warn("Last LogMiner session did not start successfully. Will retry", ex);
            } else {
                LOG.error("Error while reading data", ex);
                addToStageExceptionsQueue(new StageException(JDBC_52, ex));
            }
        } catch (StageException e) {
            LOG.error("Error while reading data", e);
            error = true;
            addToStageExceptionsQueue(e);
        } catch (InterruptedException ex) {
            LOG.error("Interrupted while waiting to add data");
            Thread.currentThread().interrupt();
        } catch (Exception ex) {
            LOG.error("Error while reading data", ex);
            error = true;
            addToStageExceptionsQueue(new StageException(JDBC_52, ex));
        } finally {
            // If an incomplete batch is seen, it means we are going to move the window forward
            // Ending this session and starting a new one helps reduce PGA memory usage.
            try {
                if (resultSet != null && !resultSet.isClosed()) {
                    resultSet.close();
                }
                if (selectChanges != null && !selectChanges.isClosed()) {
                    selectChanges.close();
                }
            } catch (SQLException ex) {
                LOG.warn("Error while attempting to close SQL statements", ex);
            }
            try {
                endLogMnr.execute();
            } catch (SQLException ex) {
                LOG.warn("Error while trying to close logminer session", ex);
            }
            try {
                if (error) {
                    resetConnectionsQuietly();
                } else {
                    discardOldUncommitted(startTime);
                    startTime = adjustStartTime(endTime);
                    endTime = getEndTimeForStartTime(startTime);
                }
                startLogMinerUsingGivenDates(startTime.format(dateTimeColumnHandler.dateFormatter),
                        endTime.format(dateTimeColumnHandler.dateFormatter));
            } catch (SQLException ex) {
                LOG.error("Error while attempting to start LogMiner", ex);
                addToStageExceptionsQueue(new StageException(JDBC_52, ex));
            } catch (StageException ex) {
                LOG.error("Error while attempting to start logminer for redo log dictionary", ex);
                addToStageExceptionsQueue(ex);
            }
        }
    }
}

From source file:pt.ist.fenixedu.contracts.tasks.giafsync.ImportPersonFunctionsAccumulationsFromGiaf.java

@Override
public List<Modification> processChanges(GiafMetadata metadata, PrintWriter log, Logger logger)
        throws Exception {
    List<Modification> modifications = new ArrayList<>();
    PersistentSuportGiaf oracleConnection = PersistentSuportGiaf.getInstance();
    PreparedStatement preparedStatement = oracleConnection.prepareStatement(getQuery());
    ResultSet result = preparedStatement.executeQuery();
    int count = 0;
    int news = 0;
    int notImported = 0;
    int dontExist = 0;
    Set<Person> importedButInvalid = new HashSet<Person>();
    while (result.next()) {
        count++;
        String numberString = result.getString("emp_num");
        Person person = metadata.getPerson(numberString, logger);
        if (person == null) {
            logger.debug("Invalid person with number: " + numberString);
            dontExist++;
            continue;
        }

        PersonProfessionalData personProfessionalData = person.getPersonProfessionalData();
        if (personProfessionalData == null) {
            logger.debug("Empty personProfessionalData: " + numberString);
            dontExist++;
            continue;
        }

        final GiafProfessionalData giafProfessionalData = personProfessionalData
                .getGiafProfessionalDataByGiafPersonIdentification(numberString);
        if (giafProfessionalData == null) {
            logger.debug("Empty giafProfessionalData: " + numberString);
            dontExist++;
            continue;
        }

        final String functionsAccumulationGiafId = result.getString("TIPO");
        final FunctionsAccumulation functionsAccumulation = metadata.accumulation(functionsAccumulationGiafId);
        if (functionsAccumulation == null) {
            logger.debug("Empty FunctionsAccumulation: " + functionsAccumulationGiafId + " for person number: "
                    + numberString);
            importedButInvalid.add(person);
        }
        String beginDateString = result.getString("DATA_INICIO");
        final LocalDate beginDate = StringUtils.isEmpty(beginDateString) ? null
                : new LocalDate(Timestamp.valueOf(beginDateString));
        if (beginDate == null) {
            logger.debug("Empty beginDate: " + numberString + " FunctionsAccumulation: "
                    + functionsAccumulationGiafId);
            importedButInvalid.add(person);
        }

        String endDateString = result.getString("DATA_FIM");
        final LocalDate endDate = StringUtils.isEmpty(endDateString) ? null
                : new LocalDate(Timestamp.valueOf(endDateString));
        if (beginDate != null && endDate != null) {
            if (beginDate != null && beginDate.isAfter(endDate)) {
                logger.debug("BeginDate after EndDate. Person number: " + numberString + ". begin: " + beginDate
                        + " end:" + endDate);
                importedButInvalid.add(person);
            }
        }
        final BigDecimal hours = result.getBigDecimal("horas_sem");

        final String professionalRegimeGiafId = result.getString("EMP_REGIME");
        final ProfessionalRegime professionalRegime = metadata.regime(professionalRegimeGiafId);
        if (professionalRegime == null) {
            logger.debug("Empty regime: " + numberString);
            importedButInvalid.add(person);
        }
        String creationDateString = result.getString("data_criacao");
        if (StringUtils.isEmpty(creationDateString)) {
            logger.debug("Empty creation Date. Person number: " + numberString + " FunctionsAccumulation: "
                    + functionsAccumulationGiafId);
            notImported++;
            continue;
        }
        final DateTime creationDate = new DateTime(Timestamp.valueOf(creationDateString));

        String modifiedDateString = result.getString("data_alteracao");
        final DateTime modifiedDate = StringUtils.isEmpty(modifiedDateString) ? null
                : new DateTime(Timestamp.valueOf(modifiedDateString));

        if (!hasPersonFunctionsAccumulations(giafProfessionalData, beginDate, endDate, hours,
                functionsAccumulation, functionsAccumulationGiafId, professionalRegime,
                professionalRegimeGiafId, creationDate, modifiedDate)) {
            modifications.add(new Modification() {
                @Override
                public void execute() {
                    new PersonFunctionsAccumulation(giafProfessionalData, beginDate, endDate, hours,
                            functionsAccumulation, functionsAccumulationGiafId, professionalRegime,
                            professionalRegimeGiafId, creationDate, modifiedDate);
                }
            });
            news++;
        }
    }
    result.close();
    preparedStatement.close();

    int deleted = 0;
    int totalInFenix = 0;
    int repeted = 0;

    for (GiafProfessionalData giafProfessionalData : Bennu.getInstance().getGiafProfessionalDataSet()) {
        for (final PersonFunctionsAccumulation personFunctionsAccumulation : giafProfessionalData
                .getPersonFunctionsAccumulationsSet()) {
            if (personFunctionsAccumulation.getAnulationDate() == null) {
                int countThisPersonFunctionsAccumulationOnGiaf = countThisPersonFunctionsAccumulationOnGiaf(
                        oracleConnection, personFunctionsAccumulation, logger);
                if (countThisPersonFunctionsAccumulationOnGiaf == 0) {
                    modifications.add(new Modification() {
                        @Override
                        public void execute() {
                            personFunctionsAccumulation.setAnulationDate(new DateTime());
                        }
                    });
                    deleted++;
                } else {
                    totalInFenix++;
                    if (countThisPersonFunctionsAccumulationOnGiaf > 1) {
                        repeted += countThisPersonFunctionsAccumulationOnGiaf - 1;
                    }
                }
            }
        }
    }

    oracleConnection.closeConnection();
    log.println("-- Function accumuations --");
    log.println("Total GIAF: " + count);
    log.println("New: " + news);
    log.println("Deleted: " + deleted);
    log.println("Not imported: " + notImported);
    log.println("Imported with errors: " + importedButInvalid.size());
    log.println("Repeted: " + repeted);
    log.println("Invalid persons: " + dontExist);
    log.println("Total Fnix: " + totalInFenix);
    log.println("Total Fnix without errors: " + (totalInFenix - importedButInvalid.size()));
    log.println("Missing in Fnix: " + (count - totalInFenix));
    return modifications;
}