List of usage examples for java.sql ResultSetMetaData getColumnType
int getColumnType(int column) throws SQLException;
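Before the examples from real projects below, here is a minimal, self-contained sketch of the call itself (the in-memory H2 database and table are made up for illustration and assume the H2 driver on the classpath; any JDBC driver behaves the same way). getColumnType returns one of the integer constants defined in java.sql.Types:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.Statement;
import java.sql.Types;

public class ColumnTypeDemo {
    public static void main(String[] args) throws Exception {
        // Hypothetical in-memory H2 database; substitute any JDBC URL.
        try (Connection c = DriverManager.getConnection("jdbc:h2:mem:demo");
             Statement st = c.createStatement()) {
            st.execute("CREATE TABLE t (id INT, name VARCHAR(20), price DECIMAL(8,2))");
            try (ResultSet rs = st.executeQuery("SELECT * FROM t")) {
                ResultSetMetaData meta = rs.getMetaData();
                for (int i = 1; i <= meta.getColumnCount(); i++) {
                    int type = meta.getColumnType(i); // a java.sql.Types constant
                    switch (type) {
                    case Types.INTEGER:
                        System.out.println(meta.getColumnName(i) + ": INTEGER");
                        break;
                    case Types.VARCHAR:
                        System.out.println(meta.getColumnName(i) + ": VARCHAR");
                        break;
                    case Types.DECIMAL:
                    case Types.NUMERIC:
                        System.out.println(meta.getColumnName(i) + ": DECIMAL/NUMERIC");
                        break;
                    default:
                        System.out.println(meta.getColumnName(i) + ": type code " + type);
                    }
                }
            }
        }
    }
}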
From source file: org.dashbuilder.dataprovider.backend.sql.JDBCUtils.java

public static List<Column> getColumns(ResultSet resultSet, String[] exclude) throws SQLException {
    List<Column> columnList = new ArrayList<Column>();
    List<String> columnExcluded = exclude == null ? new ArrayList<String>() : Arrays.asList(exclude);
    ResultSetMetaData meta = resultSet.getMetaData();
    for (int i = 1; i <= meta.getColumnCount(); i++) {
        String name = meta.getColumnName(i);
        String alias = meta.getColumnLabel(i);
        if (!columnExcluded.contains(name) && !columnExcluded.contains(alias)) {
            ColumnType type = JDBCUtils.calculateType(meta.getColumnType(i));
            int size = meta.getColumnDisplaySize(i);
            Column column = column(name, type, size).as(alias);
            columnList.add(column);
        }
    }
    return columnList;
}
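Two details worth noting in this helper: exclusion is checked against both the physical column name (getColumnName) and the query alias (getColumnLabel), so a column is skipped whichever way the caller refers to it, and the raw java.sql.Types code from getColumnType(i) is translated into the provider's own ColumnType by JDBCUtils.calculateType.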
From source file: org.apache.jackrabbit.oak.plugins.document.rdb.RDBExport.java

private static void dumpJDBC(String url, String user, String pw, String table, String query, Format format,
        PrintStream out, List<String> fieldNames, RDBDocumentSerializer ser) throws SQLException {
    String driver = RDBJDBCTools.driverForDBType(RDBJDBCTools.jdbctype(url));
    try {
        Class.forName(driver);
    } catch (ClassNotFoundException ex) {
        System.err.println(
                RDBExport.class.getName() + ":attempt to load class " + driver + " failed:" + ex.getMessage());
    }
    Connection c = DriverManager.getConnection(url, user, pw);
    c.setReadOnly(true);
    Statement stmt = c.createStatement();
    String sql = "select ID, MODIFIED, MODCOUNT, CMODCOUNT, HASBINARY, DELETEDONCE, DATA, BDATA from " + table;
    if (query != null) {
        sql += " where " + query;
    }
    sql += " order by id";
    ResultSet rs = stmt.executeQuery(sql);
    if (format == Format.JSONARRAY) {
        out.println("[");
    } else if (format == Format.CSV) {
        out.println(dumpFieldNames(fieldNames));
    }
    boolean needComma = format == Format.JSONARRAY;
    ResultSetMetaData rsm = null;
    boolean idIsAscii = true;
    while (rs.next()) {
        if (rsm == null) {
            rsm = rs.getMetaData();
            idIsAscii = !isBinaryType(rsm.getColumnType(1));
        }
        String id = idIsAscii ? rs.getString("ID") : new String(rs.getBytes("ID"), UTF8);
        long modified = rs.getLong("MODIFIED");
        long modcount = rs.getLong("MODCOUNT");
        long cmodcount = rs.getLong("CMODCOUNT");
        long hasBinary = rs.getLong("HASBINARY");
        long deletedOnce = rs.getLong("DELETEDONCE");
        String data = rs.getString("DATA");
        byte[] bdata = rs.getBytes("BDATA");
        RDBRow row = new RDBRow(id, hasBinary == 1, deletedOnce == 1, modified, modcount, cmodcount, data, bdata);
        StringBuilder fulljson = dumpRow(ser, id, row);
        if (format == Format.CSV) {
            out.println(asCSV(fieldNames, fulljson));
        } else {
            fulljson = asJSON(fieldNames, fulljson);
            if (format == Format.JSONARRAY && needComma && !rs.isLast()) {
                fulljson.append(",");
            }
            out.println(fulljson);
            needComma = true;
        }
    }
    if (format == Format.JSONARRAY) {
        out.println("]");
    }
    out.close();
    rs.close();
    stmt.close();
    c.close();
}
From source file: org.dashbuilder.dataprovider.sql.JDBCUtils.java

public static List<Column> getColumns(ResultSet resultSet, String[] exclude) throws SQLException {
    List<Column> columnList = new ArrayList<Column>();
    List<String> columnExcluded = exclude == null ? new ArrayList<String>() : Arrays.asList(exclude);
    ResultSetMetaData meta = resultSet.getMetaData();
    for (int i = 1; i <= meta.getColumnCount(); i++) {
        String name = meta.getColumnName(i);
        String alias = meta.getColumnLabel(i);
        if (alias != null && !alias.trim().isEmpty()) {
            name = alias.trim();
        }
        if (!columnExcluded.contains(name) && !columnExcluded.contains(alias)) {
            ColumnType type = JDBCUtils.calculateType(meta.getColumnType(i));
            if (type != null) {
                int size = meta.getColumnDisplaySize(i);
                Column column = SQLFactory.column(name, type, size);
                columnList.add(column);
            }
        }
    }
    return columnList;
}
From source file: funcoes.funcoes.java

@SuppressWarnings("rawtypes")
public static Vector<Comparable> proximaLinha(ResultSet rs, ResultSetMetaData rsmd) throws SQLException {
    Vector<Comparable> LinhaAtual = new Vector<Comparable>();
    for (int i = 1; i <= rsmd.getColumnCount(); ++i) {
        switch (rsmd.getColumnType(i)) {
        case Types.VARCHAR:
            LinhaAtual.addElement(rs.getString(i));
            break;
        case Types.TIMESTAMP:
            // java.sql.Date.toString() renders the date portion as yyyy-MM-dd
            LinhaAtual.addElement(rs.getDate(i).toString());
            break;
        case Types.INTEGER:
            LinhaAtual.addElement(rs.getInt(i));
            break;
        case Types.DECIMAL:
        case Types.DOUBLE:
            LinhaAtual.addElement(funcoes.paraFormatoDinheiro(rs.getDouble(i)));
            break;
        }
    }
    return LinhaAtual;
}
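Note that the switch silently skips column types outside the handled cases (VARCHAR, TIMESTAMP, INTEGER, DECIMAL, DOUBLE): for such a column nothing is appended, so the returned Vector has fewer elements than the row has columns and later values shift left relative to their headers.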
From source file: com.streamsets.pipeline.stage.BaseHiveIT.java

/**
 * Validate structure of the result set (column names and types).
 */
public static void assertResultSetStructure(ResultSet rs, Pair<String, Integer>... columns) throws Exception {
    ResultSetMetaData metaData = rs.getMetaData();
    Assert.assertEquals(Utils.format("Unexpected number of columns"), columns.length, metaData.getColumnCount());
    int i = 1;
    for (Pair<String, Integer> column : columns) {
        Assert.assertEquals(Utils.format("Unexpected name for column {}", i), column.getLeft(),
                metaData.getColumnName(i));
        Assert.assertEquals(Utils.format("Unexpected type for column {}", i), (int) column.getRight(),
                metaData.getColumnType(i));
        i++;
    }
}
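A hypothetical call site for the helper above (the connection accessor, table, and column names are invented stand-ins, and Pair is assumed to be Apache Commons Lang's org.apache.commons.lang3.tuple.Pair) passes the expected name/type pairs in column order:

// Hypothetical JUnit test sketch; getHiveConnection() and the table are placeholders.
@Test
public void testTableStructure() throws Exception {
    try (Statement st = getHiveConnection().createStatement();
         ResultSet rs = st.executeQuery("SELECT * FROM tbl")) {
        assertResultSetStructure(rs,
                Pair.of("id", Types.INTEGER),
                Pair.of("name", Types.VARCHAR));
    }
}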
From source file: org.apache.nifi.util.hive.HiveJdbcCommon.java

public static long convertToAvroStream(final ResultSet rs, final OutputStream outStream, String recordName,
        ResultSetRowCallback callback) throws SQLException, IOException {
    final Schema schema = createSchema(rs, recordName);
    final GenericRecord rec = new GenericData.Record(schema);
    final DatumWriter<GenericRecord> datumWriter = new GenericDatumWriter<>(schema);
    try (final DataFileWriter<GenericRecord> dataFileWriter = new DataFileWriter<>(datumWriter)) {
        dataFileWriter.create(schema, outStream);
        final ResultSetMetaData meta = rs.getMetaData();
        final int nrOfColumns = meta.getColumnCount();
        long nrOfRows = 0;
        while (rs.next()) {
            if (callback != null) {
                callback.processRow(rs);
            }
            for (int i = 1; i <= nrOfColumns; i++) {
                final int javaSqlType = meta.getColumnType(i);
                Object value = rs.getObject(i);
                if (value == null) {
                    rec.put(i - 1, null);
                } else if (javaSqlType == BINARY || javaSqlType == VARBINARY || javaSqlType == LONGVARBINARY
                        || javaSqlType == BLOB || javaSqlType == CLOB) {
                    // bytes require slightly different handling
                    ByteBuffer bb = null;
                    if (value instanceof byte[]) {
                        bb = ByteBuffer.wrap((byte[]) value);
                    } else if (value instanceof ByteBuffer) {
                        bb = (ByteBuffer) value;
                    }
                    if (bb != null) {
                        rec.put(i - 1, bb);
                    } else {
                        throw new IOException(
                                "Could not process binary object of type " + value.getClass().getName());
                    }
                } else if (value instanceof Byte) {
                    // tinyint(1) is reported by the JDBC driver as java.sql.Types.TINYINT,
                    // but the value comes back as java.lang.Byte (at least with the H2 driver).
                    // Putting the Byte directly into the Avro record fails with:
                    // org.apache.avro.AvroRuntimeException: Unknown datum type java.lang.Byte
                    rec.put(i - 1, ((Byte) value).intValue());
                } else if (value instanceof BigDecimal || value instanceof BigInteger) {
                    // Avro can't handle BigDecimal and BigInteger as numbers - it will throw an
                    // AvroRuntimeException such as: "Unknown datum type: java.math.BigDecimal: 38"
                    rec.put(i - 1, value.toString());
                } else if (value instanceof Number) {
                    // Need to call the right getXYZ() method (instead of the getObject() call above),
                    // since Doubles are sometimes returned when the JDBC type is 6 (Float), for example.
                    if (javaSqlType == FLOAT) {
                        value = rs.getFloat(i);
                    } else if (javaSqlType == DOUBLE) {
                        value = rs.getDouble(i);
                    } else if (javaSqlType == INTEGER || javaSqlType == TINYINT || javaSqlType == SMALLINT) {
                        value = rs.getInt(i);
                    }
                    rec.put(i - 1, value);
                } else if (value instanceof Boolean) {
                    rec.put(i - 1, value);
                } else {
                    // The supported types are numbers (int, long, double, float), booleans and Strings.
                    // Since Avro doesn't provide timestamp types, anything other than numbers or
                    // booleans is converted to a String via toString().
                    rec.put(i - 1, value.toString());
                }
            }
            dataFileWriter.append(rec);
            nrOfRows += 1;
        }
        return nrOfRows;
    }
}
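Note the contrast with the variant below: here CLOB is grouped with the binary types and is expected to surface from getObject() as a byte[] or ByteBuffer (otherwise an IOException is thrown), whereas the next example reads CLOBs and BLOBs through their streams before getObject() is ever called.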
From source file: org.apache.nifi.processors.standard.util.JdbcCommon.java

public static long convertToAvroStream(final ResultSet rs, final OutputStream outStream, String recordName,
        ResultSetRowCallback callback, final int maxRows, boolean convertNames) throws SQLException, IOException {
    final Schema schema = createSchema(rs, recordName, convertNames);
    final GenericRecord rec = new GenericData.Record(schema);
    final DatumWriter<GenericRecord> datumWriter = new GenericDatumWriter<>(schema);
    try (final DataFileWriter<GenericRecord> dataFileWriter = new DataFileWriter<>(datumWriter)) {
        dataFileWriter.create(schema, outStream);
        final ResultSetMetaData meta = rs.getMetaData();
        final int nrOfColumns = meta.getColumnCount();
        long nrOfRows = 0;
        while (rs.next()) {
            if (callback != null) {
                callback.processRow(rs);
            }
            for (int i = 1; i <= nrOfColumns; i++) {
                final int javaSqlType = meta.getColumnType(i);

                // CLOB and BLOB must be handled before getObject() is called, due to
                // ResultSet's maximum-portability statement.
                if (javaSqlType == CLOB) {
                    Clob clob = rs.getClob(i);
                    if (clob != null) {
                        long numChars = clob.length();
                        char[] buffer = new char[(int) numChars];
                        InputStream is = clob.getAsciiStream();
                        int index = 0;
                        int c = is.read();
                        while (c > 0) {
                            buffer[index++] = (char) c;
                            c = is.read();
                        }
                        rec.put(i - 1, new String(buffer));
                        clob.free();
                    } else {
                        rec.put(i - 1, null);
                    }
                    continue;
                }
                if (javaSqlType == BLOB) {
                    Blob blob = rs.getBlob(i);
                    if (blob != null) {
                        long numChars = blob.length();
                        byte[] buffer = new byte[(int) numChars];
                        InputStream is = blob.getBinaryStream();
                        int index = 0;
                        int c = is.read();
                        while (c > 0) {
                            buffer[index++] = (byte) c;
                            c = is.read();
                        }
                        ByteBuffer bb = ByteBuffer.wrap(buffer);
                        rec.put(i - 1, bb);
                        blob.free();
                    } else {
                        rec.put(i - 1, null);
                    }
                    continue;
                }
                final Object value = rs.getObject(i);
                if (value == null) {
                    rec.put(i - 1, null);
                } else if (javaSqlType == BINARY || javaSqlType == VARBINARY || javaSqlType == LONGVARBINARY
                        || javaSqlType == ARRAY) {
                    // bytes require slightly different handling
                    byte[] bytes = rs.getBytes(i);
                    ByteBuffer bb = ByteBuffer.wrap(bytes);
                    rec.put(i - 1, bb);
                } else if (value instanceof Byte) {
                    // tinyint(1) is reported by the JDBC driver as java.sql.Types.TINYINT,
                    // but the value comes back as java.lang.Byte (at least with the H2 driver).
                    // Putting the Byte directly into the Avro record fails with:
                    // org.apache.avro.AvroRuntimeException: Unknown datum type java.lang.Byte
                    rec.put(i - 1, ((Byte) value).intValue());
                } else if (value instanceof Short) {
                    // MS SQL returns TINYINT as a Java Short, which Avro doesn't understand.
                    rec.put(i - 1, ((Short) value).intValue());
                } else if (value instanceof BigDecimal) {
                    // Avro can't handle BigDecimal as a number - it will throw an
                    // AvroRuntimeException such as: "Unknown datum type: java.math.BigDecimal: 38"
                    rec.put(i - 1, value.toString());
                } else if (value instanceof BigInteger) {
                    // Check the precision of the BIGINT. Some databases allow arbitrary precision (> 19),
                    // but Avro won't handle that. If the SQL type is BIGINT and the precision is between
                    // 0 and 19 (inclusive), the BigInteger is likely a long (and the schema says it will
                    // be), so try to get its value as a long. Otherwise, Avro can't handle BigInteger as
                    // a number - it will throw an AvroRuntimeException such as:
                    // "Unknown datum type: java.math.BigInteger: 38". In this case the schema expects a string.
                    if (javaSqlType == BIGINT) {
                        int precision = meta.getPrecision(i);
                        if (precision < 0 || precision > MAX_DIGITS_IN_BIGINT) {
                            rec.put(i - 1, value.toString());
                        } else {
                            try {
                                rec.put(i - 1, ((BigInteger) value).longValueExact());
                            } catch (ArithmeticException ae) {
                                // The value won't fit in a long, so convert it to a string
                                rec.put(i - 1, value.toString());
                            }
                        }
                    } else {
                        rec.put(i - 1, value.toString());
                    }
                } else if (value instanceof Number || value instanceof Boolean) {
                    if (javaSqlType == BIGINT) {
                        int precision = meta.getPrecision(i);
                        if (precision < 0 || precision > MAX_DIGITS_IN_BIGINT) {
                            rec.put(i - 1, value.toString());
                        } else {
                            rec.put(i - 1, value);
                        }
                    } else {
                        rec.put(i - 1, value);
                    }
                } else {
                    // The supported types are numbers (int, long, double, float), booleans and Strings.
                    // Since Avro doesn't provide timestamp types, anything other than numbers or
                    // booleans is converted to a String via toString().
                    rec.put(i - 1, value.toString());
                }
            }
            dataFileWriter.append(rec);
            nrOfRows += 1;
            if (maxRows > 0 && nrOfRows == maxRows) {
                break;
            }
        }
        return nrOfRows;
    }
}
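The precision guard exists because a signed 64-bit long holds at most 19 decimal digits: when getPrecision reports a width beyond MAX_DIGITS_IN_BIGINT, or a negative (unknown) value, the value is written as a string, which is also what the generated Avro schema expects in that case.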
From source file: be.dataminded.nifi.plugins.util.JdbcCommon.java

public static long convertToAvroStream(final ResultSet rs, final OutputStream outStream, String recordName,
        ResultSetRowCallback callback, final int maxRows, boolean convertNames) throws SQLException, IOException {
    final Schema schema = createSchema(rs, recordName, convertNames);
    final GenericRecord rec = new GenericData.Record(schema);
    final DatumWriter<GenericRecord> datumWriter = new GenericDatumWriter<>(schema);
    try (final DataFileWriter<GenericRecord> dataFileWriter = new DataFileWriter<>(datumWriter)) {
        dataFileWriter.create(schema, outStream);
        final ResultSetMetaData meta = rs.getMetaData();
        final int nrOfColumns = meta.getColumnCount();
        long nrOfRows = 0;
        while (rs.next()) {
            if (callback != null) {
                callback.processRow(rs);
            }
            for (int i = 1; i <= nrOfColumns; i++) {
                final int javaSqlType = meta.getColumnType(i);

                // CLOB and BLOB must be handled before getObject() is called, due to
                // ResultSet's maximum-portability statement.
                if (javaSqlType == CLOB) {
                    Clob clob = rs.getClob(i);
                    if (clob != null) {
                        long numChars = clob.length();
                        char[] buffer = new char[(int) numChars];
                        InputStream is = clob.getAsciiStream();
                        int index = 0;
                        int c = is.read();
                        while (c > 0) {
                            buffer[index++] = (char) c;
                            c = is.read();
                        }
                        rec.put(i - 1, new String(buffer));
                        clob.free();
                    } else {
                        rec.put(i - 1, null);
                    }
                    continue;
                }
                if (javaSqlType == BLOB) {
                    Blob blob = rs.getBlob(i);
                    if (blob != null) {
                        long numChars = blob.length();
                        byte[] buffer = new byte[(int) numChars];
                        InputStream is = blob.getBinaryStream();
                        int index = 0;
                        int c = is.read();
                        while (c > 0) {
                            buffer[index++] = (byte) c;
                            c = is.read();
                        }
                        ByteBuffer bb = ByteBuffer.wrap(buffer);
                        rec.put(i - 1, bb);
                        blob.free();
                    } else {
                        rec.put(i - 1, null);
                    }
                    continue;
                }
                final Object value = rs.getObject(i);
                if (value == null) {
                    rec.put(i - 1, null);
                } else if (javaSqlType == BINARY || javaSqlType == VARBINARY || javaSqlType == LONGVARBINARY
                        || javaSqlType == ARRAY) {
                    // bytes require slightly different handling
                    byte[] bytes = rs.getBytes(i);
                    ByteBuffer bb = ByteBuffer.wrap(bytes);
                    rec.put(i - 1, bb);
                } else if (value instanceof Byte) {
                    // tinyint(1) is reported by the JDBC driver as java.sql.Types.TINYINT,
                    // but the value comes back as java.lang.Byte (at least with the H2 driver).
                    // Putting the Byte directly into the Avro record fails with:
                    // org.apache.avro.AvroRuntimeException: Unknown datum type java.lang.Byte
                    rec.put(i - 1, ((Byte) value).intValue());
                } else if (value instanceof Short) {
                    // MS SQL returns TINYINT as a Java Short, which Avro doesn't understand.
                    rec.put(i - 1, ((Short) value).intValue());
                } else if (value instanceof BigDecimal) {
                    // Avro can't handle BigDecimal as a number - it will throw an
                    // AvroRuntimeException such as: "Unknown datum type: java.math.BigDecimal: 38"
                    try {
                        int scale = meta.getScale(i);
                        BigDecimal bigDecimal = ((BigDecimal) value);
                        if (scale == 0) {
                            if (meta.getPrecision(i) < 10) {
                                rec.put(i - 1, bigDecimal.intValue());
                            } else {
                                rec.put(i - 1, bigDecimal.longValue());
                            }
                        } else {
                            rec.put(i - 1, bigDecimal.doubleValue());
                        }
                    } catch (Exception e) {
                        rec.put(i - 1, value.toString());
                    }
                } else if (value instanceof BigInteger) {
                    // Check the precision of the BIGINT. Some databases allow arbitrary precision (> 19),
                    // but Avro won't handle that. If the SQL type is BIGINT and the precision is between
                    // 0 and 19 (inclusive), the BigInteger is likely a long (and the schema says it will
                    // be), so try to get its value as a long. Otherwise, Avro can't handle BigInteger as
                    // a number - it will throw an AvroRuntimeException such as:
                    // "Unknown datum type: java.math.BigInteger: 38". In this case the schema expects a string.
                    if (javaSqlType == BIGINT) {
                        int precision = meta.getPrecision(i);
                        if (precision < 0 || precision > MAX_DIGITS_IN_BIGINT) {
                            rec.put(i - 1, value.toString());
                        } else {
                            try {
                                rec.put(i - 1, ((BigInteger) value).longValueExact());
                            } catch (ArithmeticException ae) {
                                // The value won't fit in a long, so convert it to a string
                                rec.put(i - 1, value.toString());
                            }
                        }
                    } else {
                        rec.put(i - 1, value.toString());
                    }
                } else if (value instanceof Number || value instanceof Boolean) {
                    if (javaSqlType == BIGINT) {
                        int precision = meta.getPrecision(i);
                        if (precision < 0 || precision > MAX_DIGITS_IN_BIGINT) {
                            rec.put(i - 1, value.toString());
                        } else {
                            rec.put(i - 1, value);
                        }
                    } else {
                        rec.put(i - 1, value);
                    }
                } else {
                    // The supported types are numbers (int, long, double, float), booleans and Strings.
                    // Since Avro doesn't provide timestamp types, anything other than numbers or
                    // booleans is converted to a String via toString().
                    rec.put(i - 1, value.toString());
                }
            }
            dataFileWriter.append(rec);
            nrOfRows += 1;
            if (maxRows > 0 && nrOfRows == maxRows) {
                break;
            }
        }
        return nrOfRows;
    }
}
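The notable departure from the upstream NiFi version above is the BigDecimal branch: instead of always falling back to toString(), it narrows by scale and precision (an int for scale 0 and precision below 10, a long for wider scale-0 values, a double otherwise), with toString() kept only as the catch-all on error.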
From source file: it.greenvulcano.gvesb.utils.ResultSetUtils.java

/**
 * Returns all values from the ResultSet as an XML document.
 * For instance, if the ResultSet has 3 columns, the returned XML will have the following structure:
 *
 *   <RowSet>
 *     <data>
 *       <row>
 *         <col>value1</col>
 *         <col>value2</col>
 *         <col>value3</col>
 *       </row>
 *       <row>
 *         <col>value4</col>
 *         <col>value5</col>
 *         <col>value6</col>
 *       </row>
 *       ...
 *       <row>
 *         <col>valuex</col>
 *         <col>valuey</col>
 *         <col>valuez</col>
 *       </row>
 *     </data>
 *   </RowSet>
 *
 * @param rs
 * @return
 * @throws Exception
 */
public static Document getResultSetAsDOM(ResultSet rs) throws Exception {
    XMLUtils xml = XMLUtils.getParserInstance();
    try {
        Document doc = xml.newDocument("RowSet");
        Element docRoot = doc.getDocumentElement();
        if (rs != null) {
            try {
                ResultSetMetaData metadata = rs.getMetaData();
                Element data = null;
                Element row = null;
                Element col = null;
                Text text = null;
                String textVal = null;
                while (rs.next()) {
                    boolean restartResultset = false;
                    for (int j = 1; j <= metadata.getColumnCount() && !restartResultset; j++) {
                        col = xml.createElement(doc, "col");
                        restartResultset = false;
                        switch (metadata.getColumnType(j)) {
                        case Types.CLOB: {
                            Clob clob = rs.getClob(j);
                            if (clob != null) {
                                Reader is = clob.getCharacterStream();
                                StringWriter strW = new StringWriter();
                                IOUtils.copy(is, strW);
                                is.close();
                                textVal = strW.toString();
                            } else {
                                textVal = "";
                            }
                        }
                            break;
                        case Types.BLOB: {
                            Blob blob = rs.getBlob(j);
                            if (blob != null) {
                                InputStream is = blob.getBinaryStream();
                                ByteArrayOutputStream baos = new ByteArrayOutputStream();
                                IOUtils.copy(is, baos);
                                is.close();
                                try {
                                    byte[] buffer = Arrays.copyOf(baos.toByteArray(), (int) blob.length());
                                    textVal = new String(Base64.getEncoder().encode(buffer));
                                } catch (SQLFeatureNotSupportedException exc) {
                                    textVal = new String(Base64.getEncoder().encode(baos.toByteArray()));
                                }
                            } else {
                                textVal = "";
                            }
                        }
                            break;
                        case -10: { // OracleTypes.CURSOR
                            Object obj = rs.getObject(j);
                            if (obj instanceof ResultSet) {
                                rs = (ResultSet) obj;
                                metadata = rs.getMetaData();
                            }
                            restartResultset = true;
                        }
                            break;
                        default: {
                            textVal = rs.getString(j);
                            if (textVal == null) {
                                textVal = "";
                            }
                        }
                        }
                        if (restartResultset) {
                            continue;
                        }
                        if (row == null || j == 1) {
                            row = xml.createElement(doc, "row");
                        }
                        if (textVal != null) {
                            text = doc.createTextNode(textVal);
                            col.appendChild(text);
                        }
                        row.appendChild(col);
                    }
                    if (row != null) {
                        if (data == null) {
                            data = xml.createElement(doc, "data");
                        }
                        data.appendChild(row);
                    }
                }
                if (data != null) {
                    docRoot.appendChild(data);
                }
            } finally {
                if (rs != null) {
                    try {
                        rs.close();
                    } catch (Exception exc) {
                        // ignore failures on close
                    }
                    rs = null;
                }
            }
        }
        return doc;
    } finally {
        XMLUtils.releaseParserInstance(xml);
    }
}
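The magic number -10 in the switch is the value of OracleTypes.CURSOR, as the inline comment notes: when a column is itself a nested cursor, the code replaces rs with the nested ResultSet, refreshes the metadata, and restarts the column loop against the new cursor.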
From source file: org.georepublic.db.utils.ResultSetConverter.java

public static JSONArray convertGeoJson(ResultSet rs) throws SQLException, JSONException {
    JSONArray json = new JSONArray();
    ResultSetMetaData rsmd = rs.getMetaData();
    while (rs.next()) {
        int numColumns = rsmd.getColumnCount();
        JSONObject obj = new JSONObject();
        JSONObject feat = new JSONObject();
        feat.put("type", "Feature");
        for (int i = 1; i <= numColumns; i++) {
            String column_name = rsmd.getColumnName(i);
            // geometry columns are emitted separately below
            if (StringUtils.equals(column_name, "the_geom")) {
                continue;
            }
            if (StringUtils.equals(column_name, "geojson")) {
                continue;
            }
            switch (rsmd.getColumnType(i)) {
            case java.sql.Types.ARRAY:
                obj.put(column_name, rs.getArray(column_name));
                break;
            case java.sql.Types.BIGINT:
            case java.sql.Types.INTEGER:
            case java.sql.Types.TINYINT:
            case java.sql.Types.SMALLINT:
                obj.put(column_name, rs.getInt(column_name));
                break;
            case java.sql.Types.BOOLEAN:
                obj.put(column_name, rs.getBoolean(column_name));
                break;
            case java.sql.Types.BLOB:
                obj.put(column_name, rs.getBlob(column_name));
                break;
            case java.sql.Types.DOUBLE:
                obj.put(column_name, rs.getDouble(column_name));
                break;
            case java.sql.Types.FLOAT:
                obj.put(column_name, rs.getFloat(column_name));
                break;
            case java.sql.Types.NVARCHAR:
                obj.put(column_name, rs.getNString(column_name));
                break;
            case java.sql.Types.VARCHAR:
                obj.put(column_name, rs.getString(column_name));
                break;
            case java.sql.Types.DATE:
                obj.put(column_name, rs.getDate(column_name));
                break;
            case java.sql.Types.TIMESTAMP:
                obj.put(column_name, rs.getTimestamp(column_name));
                break;
            default:
                obj.put(column_name, rs.getObject(column_name));
                break;
            }
        }
        feat.put("properties", obj);
        try {
            // only emit a geometry when both lon and lat columns exist
            rs.findColumn("lon");
            rs.findColumn("lat");
            JSONObject geo = new JSONObject();
            JSONArray coord = new JSONArray();
            coord.put(rs.getDouble("lon"));
            coord.put(rs.getDouble("lat"));
            geo.put("type", "point");
            geo.put("coordinates", coord);
            feat.put("geometry", geo);
        } catch (Exception ex1) {
            // no lon/lat columns: leave the feature without a geometry
        }
        json.put(feat);
    }
    return json;
}
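A hedged usage sketch for the converter (the connection URL, table, and column names are placeholders; the helper looks for optional lon/lat columns to build the geometry):

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;
import org.json.JSONArray;

public class ConvertGeoJsonDemo {
    public static void main(String[] args) throws Exception {
        // Placeholder URL; in practice this would point at a spatial database such as PostGIS.
        try (Connection c = DriverManager.getConnection("jdbc:postgresql://localhost/demo", "user", "pw");
             Statement st = c.createStatement();
             ResultSet rs = st.executeQuery("SELECT id, name, lon, lat FROM places")) {
            JSONArray features = ResultSetConverter.convertGeoJson(rs);
            System.out.println(features.toString());
        }
    }
}

One caveat: the GeoJSON specification spells the geometry type "Point" with a capital P, while this helper emits "point", so strict consumers may reject the output.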