Example usage for java.sql ResultSet getBytes

List of usage examples for java.sql ResultSet getBytes

Introduction

On this page you can find example usages of java.sql.ResultSet.getBytes.

Prototype

byte[] getBytes(String columnLabel) throws SQLException;

Document

Retrieves the value of the designated column in the current row of this ResultSet object as a byte array in the Java programming language.
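
Before the project examples under Usage, here is a minimal, self-contained sketch of the call. The JDBC URL (jdbc:h2:mem:demo), the documents table, and the content column are placeholder names for illustration only; they are not taken from any of the projects listed below.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

public class GetBytesSketch {
    public static void main(String[] args) throws SQLException {
        // Placeholder JDBC URL, table, and column names; replace with your own.
        String url = "jdbc:h2:mem:demo";
        String sql = "SELECT content FROM documents WHERE id = ?";

        try (Connection conn = DriverManager.getConnection(url);
                PreparedStatement ps = conn.prepareStatement(sql)) {
            ps.setLong(1, 42L);
            try (ResultSet rs = ps.executeQuery()) {
                if (rs.next()) {
                    // getBytes returns the raw column value as a byte[], or null for SQL NULL.
                    // An overload taking a 1-based column index, getBytes(int), is also available.
                    byte[] content = rs.getBytes("content");
                    System.out.println(content == null ? "NULL" : content.length + " bytes");
                }
            }
        }
    }
}

Note that getBytes maps SQL NULL to a Java null, so callers should check for null before using the array.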

Usage

From source file:wikipedia.sql.Links.java

/** Returns titles of destination (to) pages by id of source pages (pl_from); the pagelinks table is used.
 * SQL: SELECT pl_title FROM pagelinks WHERE pl_from IN (18991, 22233) AND pl_namespace = 0;
 *  @param namespace only pages with this namespace will be selected, value defined in PageTable.NS_MAIN, etc.
 *  Returns an empty array if pl_from={0}.
 *
 * SELECT MAX(LENGTH(pl_title)) FROM pagelinks WHERE pl_namespace = 0;
 * ruwiki: 255, real application: 92,52
 *
 * Test size of max_titles_len
 * Robot=11651
 * Russian=8811
 * TODO: replace titles ArrayList<String>() with a huge static array StringBuffer[][256];
 */
public static String[] getTitleToByIDFrom(SessionHolder session, int[] pl_from, PageNamespace namespace) {
    if (null == pl_from || (1 == pl_from.length && 0 == pl_from[0])) {
        return NULL_STRING_ARRAY;
    }

    Statement s = null;
    ResultSet rs = null;
    List<String> titles = new ArrayList<String>();

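    // sb is presumably a class-level string builder (declared elsewhere in this class) that is reused across calls.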
    sb.setLength(0);
    sb.append("SELECT pl_title FROM pagelinks WHERE pl_from IN (");

    // Prepare SQL IN(...) via pl_from[].page_id
    for (int i = 0; i < pl_from.length - 1; i++) {
        sb.append(pl_from[i]);
        sb.append(",");
    }
    sb.append(pl_from[pl_from.length - 1]); // skip last comma
    sb.append(") AND pl_namespace=");
    sb.append(namespace.toInt());

    int size, i = 0;
    //String str_sql = null;
    try {
        s = session.connect.conn.createStatement();
        //str_sql = "SELECT pl_title FROM pagelinks WHERE " + sb.toString() + " AND pl_namespace="+namespace;

        //System.out.print("GetTitleToByIDFrom sql="+sb.toString());
        s.executeQuery(sb.toString());
        //GetTitleToByIDFromQuery(rs, s, sb);
        //System.out.println(" OK.");

        rs = s.getResultSet();

        while (rs.next()) {
            Encodings e = session.connect.enc;
            String db_str = Encodings.bytesTo(rs.getBytes("pl_title"), e.GetDBEnc());
            String utf8_str = e.EncodeFromDB(db_str);
            if (!session.skipTitle(utf8_str)) {
                titles.add(utf8_str);
                //titles.add(connect.enc.EncodeFromDB(rs.getString("pl_title")));
            }

            /*if(max_pl_title_len < utf8_str.length()) {
            max_pl_title_len = utf8_str.length();
            System.out.println("GetTitleToByIDFrom max_pl_title_len="+max_pl_title_len);
            }*/
        }
        /*if(max_titles_len < titles.size()) {
        max_titles_len = titles.size();
        System.out.println("GetTitleToByIDFrom max_titles_len="+max_titles_len);
        }*/

    } catch (SQLException ex) {
        System.err.println(
                "SQLException (Links.java GetTitleToByIDFrom): sql='" + sb.toString() + "' " + ex.getMessage());
    } finally {
        if (rs != null) {
            try {
                rs.close();
            } catch (SQLException sqlEx) {
            }
            rs = null;
        }
        if (s != null) {
            try {
                s.close();
            } catch (SQLException sqlEx) {
            }
            s = null;
        }
    }
    return (String[]) titles.toArray(NULL_STRING_ARRAY);
}

From source file:org.apache.hawq.pxf.plugins.jdbc.JdbcReadResolver.java

@Override
public List<OneField> getFields(OneRow row) throws Exception {
    ResultSet result = (ResultSet) row.getData();
    LinkedList<OneField> fields = new LinkedList<>();

    for (int i = 0; i < columns.size(); i++) {
        ColumnDescriptor column = columns.get(i);
        String colName = column.columnName();
        Object value = null;

        OneField oneField = new OneField();
        oneField.type = column.columnTypeCode();

        switch (DataType.get(oneField.type)) {
        case INTEGER:
            value = result.getInt(colName);
            break;
        case FLOAT8:
            value = result.getDouble(colName);
            break;
        case REAL:
            value = result.getFloat(colName);
            break;
        case BIGINT:
            value = result.getLong(colName);
            break;
        case SMALLINT:
            value = result.getShort(colName);
            break;
        case BOOLEAN:
            value = result.getBoolean(colName);
            break;
        case BYTEA:
            value = result.getBytes(colName);
            break;
        case VARCHAR:
        case BPCHAR:
        case TEXT:
        case NUMERIC:
            value = result.getString(colName);
            break;
        case TIMESTAMP:
        case DATE:
            value = result.getDate(colName);
            break;
        default:
            throw new UnsupportedOperationException("Unknown Field Type : "
                    + DataType.get(oneField.type).toString() + ", Column : " + column.toString());
        }
        oneField.val = value;
        fields.add(oneField);
    }
    return fields;
}

From source file:biblivre3.administration.ReportsDAO.java

public DelayedLendingsDto getLateReturnLendingsReportData() {
    DelayedLendingsDto dto = new DelayedLendingsDto();
    Connection con = null;
    try {
        con = getDataSource().getConnection();
        final String sql = "SELECT u.userid, u.username, l.return_date, b.record "
                + "FROM lending l, users u, cataloging_biblio b, cataloging_holdings h "
                + "WHERE l.return_date < to_date(?, 'DD-MM-YYYY') " + "AND l.user_serial = u.userid "
                + "AND l.holding_serial = h.holding_serial " + "AND h.record_serial = b.record_serial ";
        final PreparedStatement st = con.prepareStatement(sql);
        st.setString(1, DateUtils.dd_MM_yyyy.format(new Date()));

        final ResultSet rs = st.executeQuery();
        List<String[]> data = new ArrayList<String[]>();

        while (rs.next()) {
            String[] lending = new String[4];
            lending[0] = String.valueOf(rs.getInt("userid")); // user registration number
            lending[1] = rs.getString("username"); // user name
            lending[2] = Indexer
                    .listOneTitle(MarcUtils.iso2709ToRecord(new String(rs.getBytes("record"), "UTF-8"))); // title
            lending[3] = DateUtils.dd_MM_yyyy.format(rs.getDate("return_date"));
            data.add(lending);
        }
        dto.setData(data);
    } catch (Exception e) {
        log.error(e.getMessage(), e);
        throw new ExceptionUser("ERROR_REPORT_DAO_EXCEPTION");
    } finally {
        closeConnection(con);
    }

    return dto;
}

From source file:com.mirth.connect.donkey.test.util.TestUtils.java

public static void assertAttachmentExists(String channelId, long messageId, Attachment attachment)
        throws SQLException {
    long localChannelId = ChannelController.getInstance().getLocalChannelId(channelId);
    Connection connection = null;
    PreparedStatement statement = null;
    ResultSet result = null;

    try {
        connection = getConnection();
        statement = connection
                .prepareStatement("SELECT * FROM d_ma" + localChannelId + " WHERE message_id = ? AND id = ?");
        statement.setLong(1, messageId);
        statement.setString(2, attachment.getId());
        result = statement.executeQuery();
        assertTrue(result.next());
        byte[] content = result.getBytes("content");
        String type = result.getString("type");
        assertTrue(Arrays.equals(content, attachment.getContent()));
        assertTrue(testEquality(type, attachment.getType()));
    } finally {
        close(result);
        close(statement);
        close(connection);
    }
}

From source file:org.fao.geonet.arcgis.ArcSDEJdbcConnection.java

@Override
public Map<String, String> retrieveMetadata(AtomicBoolean cancelMonitor, String arcSDEVersion)
        throws Exception {
    Map<String, String> results = new HashMap<>();

    ArcSDEVersionFactory arcSDEVersionFactory = new ArcSDEVersionFactory();
    String metadataTable = arcSDEVersionFactory.getTableName(arcSDEVersion);
    String columnName = arcSDEVersionFactory.getMetadataColumnName(arcSDEVersion);

    String sqlQuery = "SELECT " + columnName + ", UUID FROM " + metadataTable;

    getJdbcTemplate().query(sqlQuery, new RowCallbackHandler() {
        @Override
        public void processRow(ResultSet rs) throws SQLException {
            // Cancel processing
            if (cancelMonitor.get()) {
                Log.warning(ARCSDE_LOG_MODULE_NAME,
                        "Cancelling metadata retrieve using " + "ArcSDE connection (via JDBC)");
                rs.getStatement().cancel();
                results.clear();
            }

            String document = "";
            int colId = rs.findColumn(columnName);
            int colIdUuid = rs.findColumn("UUID");
            // very simple type check:
            if (rs.getObject(colId) != null) {
                if (rs.getMetaData().getColumnType(colId) == Types.BLOB) {
                    Blob blob = rs.getBlob(columnName);
                    byte[] bdata = blob.getBytes(1, (int) blob.length());
                    document = new String(bdata);

                } else if (rs.getMetaData().getColumnType(colId) == Types.LONGVARBINARY) {
                    byte[] byteData = rs.getBytes(colId);
                    document = new String(byteData);

                } else if (rs.getMetaData().getColumnType(colId) == Types.LONGNVARCHAR
                        || rs.getMetaData().getColumnType(colId) == Types.LONGVARCHAR
                        || rs.getMetaData().getColumnType(colId) == Types.VARCHAR
                        || rs.getMetaData().getColumnType(colId) == Types.SQLXML) {
                    document = rs.getString(colId);

                } else {
                    throw new SQLException("Trying to harvest from a column with an invalid datatype: "
                            + rs.getMetaData().getColumnTypeName(colId));
                }

                String uuid = rs.getString(colIdUuid);
                results.put(uuid, document);
            }

        }
    });

    Log.info(ARCSDE_LOG_MODULE_NAME,
            "Finished retrieving metadata, found: #" + results.size() + " metadata records");

    return results;
}

From source file:org.sakaiproject.content.impl.serialize.impl.conversion.Type1BlobCollectionConversionHandler.java

public Object getValidateSource(String id, ResultSet rs) throws SQLException {
    ResultSetMetaData metadata = rs.getMetaData();
    byte[] rv = null;
    switch (metadata.getColumnType(1)) {
    case Types.BLOB:
        Blob blob = rs.getBlob(1);
        if (blob != null) {
            //System.out.println("getValidateSource(" + id + ") blob == " + blob + " blob.length == " + blob.length());
            rv = blob.getBytes(1L, (int) blob.length());
        } else {
            System.out.println("getValidateSource(" + id + ") blob is null");
        }
        break;
    case Types.CLOB:
        Clob clob = rs.getClob(1);
        if (clob != null) {
            rv = clob.getSubString(1L, (int) clob.length()).getBytes();
        }
        break;
    case Types.CHAR:
    case Types.LONGVARCHAR:
    case Types.VARCHAR:
        rv = rs.getString(1).getBytes();
        break;
    case Types.BINARY:
    case Types.VARBINARY:
    case Types.LONGVARBINARY:
        rv = rs.getBytes(1);
        break;
    }
    //System.out.println("getValidateSource(" + id + ") \n" + rv + "\n");
    return rv;
}

From source file:org.sakaiproject.content.impl.serialize.impl.conversion.Type1BlobResourcesConversionHandler.java

public Object getValidateSource(String id, ResultSet rs) throws SQLException {
    ResultSetMetaData metadata = rs.getMetaData();
    byte[] rv = null;
    switch (metadata.getColumnType(1)) {
    case Types.BLOB:
        Blob blob = rs.getBlob(1);
        if (blob != null) {
            //System.out.println("getValidateSource(" + id + ") blob == " + blob + " blob.length == " + blob.length());
            rv = blob.getBytes(1L, (int) blob.length());
        } else {
            System.out.println("getValidateSource(" + id + ") blob is null");
        }
        break;
    case Types.CLOB:
        Clob clob = rs.getClob(1);
        if (clob != null) {
            rv = clob.getSubString(1L, (int) clob.length()).getBytes();
        }
        break;
    case Types.CHAR:
    case Types.LONGVARCHAR:
    case Types.VARCHAR:
        rv = rs.getString(1).getBytes();
        break;
    case Types.BINARY:
    case Types.VARBINARY:
    case Types.LONGVARBINARY:
        rv = rs.getBytes(1);
        break;
    }
    // System.out.println("getValidateSource(" + id + ") \n" + rv + "\n");
    return rv;

    //return rs.getBytes(1);
}

From source file:org.apache.gora.sql.store.SqlStore.java

protected byte[] getBytes(ResultSet resultSet, int columnIndex, Schema schema, Column column)
        throws SQLException, IOException {
    switch (column.getJdbcType()) {
    case BLOB:
        Blob blob = resultSet.getBlob(columnIndex);
        return IOUtils.readFully(blob.getBinaryStream());
    case BINARY:
    case VARBINARY:
        return resultSet.getBytes(columnIndex);
    case LONGVARBINARY:
        return IOUtils.readFully(resultSet.getBinaryStream(columnIndex));
    }
    return null;
}

From source file:org.ojbc.adapters.rapbackdatastore.dao.RapbackDAOImpl.java

private CivilInitialResults buildCivilIntialResult(ResultSet rs) throws SQLException {
    CivilInitialResults civilInitialResults = new CivilInitialResults();
    civilInitialResults.setId(rs.getInt("civil_initial_result_id"));
    civilInitialResults.setTransactionNumber(rs.getString("transaction_number"));
    civilInitialResults.setResultsSender(ResultSender.values()[rs.getInt("results_sender_id") - 1]);
    try {
        civilInitialResults.setSearchResultFile(ZipUtils.unzip(rs.getBytes("search_result_file")));
    } catch (Exception e) {
        log.error("Got exception extracting the search result file for "
                + civilInitialResults.getTransactionNumber(), e);
    }
    civilInitialResults.setTimestamp(toDateTime(rs.getTimestamp("report_timestamp")));
    return civilInitialResults;
}

From source file:net.antidot.semantic.rdf.rdb2rdf.dm.core.DirectMappingEngineWD20120529.java

private Row extractRow(DriverType driver, StdHeader header, String tableName, ResultSet valueSet,
        String timeZone, int index) throws UnsupportedEncodingException {
    TreeMap<String, byte[]> values = new TreeMap<String, byte[]>();
    for (String columnName : header.getColumnNames()) {
        try {
            byte[] value = null;
            // SQLType type =
            // SQLType.toSQLType(Integer.valueOf(header.getDatatypes().get(columnName)));
            value = valueSet.getBytes(columnName);

            // http://bugs.mysql.com/bug.php?id=65943
            if (value != null && driver.equals(DriverType.MysqlDriver) && SQLType.toSQLType(
                    valueSet.getMetaData().getColumnType(valueSet.findColumn(columnName))) == SQLType.CHAR) {
                value = valueSet.getString(columnName).getBytes();
            }

            values.put(columnName, value);
        } catch (SQLException e) {
            log.error("[DirectMappingEngine:extractRow] SQL Error during row extraction");
            e.printStackTrace();
        }
    }
    Row row = new Row(values, null, index);
    return row;
}