List of usage examples for java.sql.ResultSetMetaData.getColumnType
int getColumnType(int column) throws SQLException;
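Before the examples from real projects, here is a minimal, self-contained sketch of the pattern they all share: obtain the ResultSetMetaData, loop over the 1-based column indexes, and branch on the java.sql.Types constant that getColumnType returns. The in-memory H2 URL and the people table are placeholders for this sketch, not taken from any example below.

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.JDBCType;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;

public class GetColumnTypeDemo {
    public static void main(String[] args) throws SQLException {
        // Placeholder database and table for illustration only (requires an H2 driver)
        try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:demo");
                Statement stmt = conn.createStatement()) {
            stmt.execute("CREATE TABLE people (id INT, name VARCHAR(50), born DATE)");
            try (ResultSet rs = stmt.executeQuery("SELECT * FROM people")) {
                ResultSetMetaData md = rs.getMetaData();
                // Column indexes are 1-based; getColumnType returns an int from java.sql.Types
                for (int i = 1; i <= md.getColumnCount(); i++) {
                    int type = md.getColumnType(i);
                    System.out.println(md.getColumnName(i) + " -> " + type
                            + " (" + JDBCType.valueOf(type).getName() + ")");
                }
            }
        }
    }
}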
From source file:org.moqui.impl.entity.EntityJavaUtil.java
public static Object getResultSetValue(ResultSet rs, int index, FieldInfo fi, EntityFacade efi) throws EntityException {
    if (fi.typeValue == -1) throw new EntityException("No typeValue found for " + fi.entityName + "." + fi.name);
    Object value = null;
    try {
        switch (fi.typeValue) {
        case 1:
            // getMetaData and the column type are somewhat slow (based on profiling), and String values are VERY
            // common, so only do this for text-very-long
            if (fi.isTextVeryLong) {
                ResultSetMetaData rsmd = rs.getMetaData();
                if (Types.CLOB == rsmd.getColumnType(index)) {
                    // if the String is empty, try to get a text input stream; this is required for some databases
                    // for larger fields, like CLOBs
                    Clob valueClob = rs.getClob(index);
                    Reader valueReader = null;
                    if (valueClob != null) valueReader = valueClob.getCharacterStream();
                    if (valueReader != null) {
                        // read up to 4096 chars at a time
                        char[] inCharBuffer = new char[4096];
                        StringBuilder strBuf = new StringBuilder();
                        try {
                            int charsRead;
                            while ((charsRead = valueReader.read(inCharBuffer, 0, 4096)) > 0) {
                                strBuf.append(inCharBuffer, 0, charsRead);
                            }
                            valueReader.close();
                        } catch (IOException e) {
                            throw new EntityException("Error reading long character stream for field [" + fi.name + "] of entity [" + fi.entityName + "]", e);
                        }
                        value = strBuf.toString();
                    }
                } else {
                    value = rs.getString(index);
                }
            } else {
                value = rs.getString(index);
            }
            break;
        case 2:
            try {
                value = rs.getTimestamp(index, efi.getCalendarForTzLc());
            } catch (SQLException e) {
                if (logger.isTraceEnabled()) logger.trace(
                        "Ignoring SQLException for getTimestamp(), leaving null (found this in MySQL with a date/time value of [0000-00-00 00:00:00]): " + e.toString());
            }
            break;
        case 3: value = rs.getTime(index, efi.getCalendarForTzLc()); break;
        case 4: value = rs.getDate(index, efi.getCalendarForTzLc()); break;
        case 5:
            int intValue = rs.getInt(index);
            if (!rs.wasNull()) value = intValue;
            break;
        case 6:
            long longValue = rs.getLong(index);
            if (!rs.wasNull()) value = longValue;
            break;
        case 7:
            float floatValue = rs.getFloat(index);
            if (!rs.wasNull()) value = floatValue;
            break;
        case 8:
            double doubleValue = rs.getDouble(index);
            if (!rs.wasNull()) value = doubleValue;
            break;
        case 9:
            BigDecimal bigDecimalValue = rs.getBigDecimal(index);
            if (!rs.wasNull()) value = bigDecimalValue != null ? bigDecimalValue.stripTrailingZeros() : null;
            break;
        case 10:
            boolean booleanValue = rs.getBoolean(index);
            if (!rs.wasNull()) value = booleanValue;
            break;
        case 11:
            Object obj = null;
            byte[] originalBytes = rs.getBytes(index);
            InputStream binaryInput = null;
            if (originalBytes != null && originalBytes.length > 0) {
                binaryInput = new ByteArrayInputStream(originalBytes);
            }
            if (originalBytes != null && originalBytes.length <= 0) {
                logger.warn("Got byte array back empty for serialized Object with length [" + originalBytes.length + "] for field [" + fi.name + "] (" + index + ")");
            }
            if (binaryInput != null) {
                ObjectInputStream inStream = null;
                try {
                    inStream = new ObjectInputStream(binaryInput);
                    obj = inStream.readObject();
                } catch (IOException ex) {
                    if (logger.isTraceEnabled()) logger.trace("Unable to read BLOB from input stream for field [" + fi.name + "] (" + index + "): " + ex.toString());
                } catch (ClassNotFoundException ex) {
                    if (logger.isTraceEnabled()) logger.trace("Class not found: Unable to cast BLOB data to a Java object for field [" + fi.name + "] (" + index + "); most likely because it is a straight byte[], so just using the raw bytes: " + ex.toString());
                } finally {
                    if (inStream != null) {
                        try {
                            inStream.close();
                        } catch (IOException e) {
                            throw new EntityException("Unable to close binary input stream for field [" + fi.name + "] (" + index + "): " + e.toString(), e);
                        }
                    }
                }
            }
            if (obj != null) {
                value = obj;
            } else {
                value = originalBytes;
            }
            break;
        case 12:
            SerialBlob sblob = null;
            try {
                // NOTE: changed to try getBytes first because Derby blows up on getBlob and on then calling
                // getBytes for the same field; it complains about getting the value twice
                byte[] fieldBytes = rs.getBytes(index);
                if (!rs.wasNull()) sblob = new SerialBlob(fieldBytes);
                // fieldBytes = theBlob != null ? theBlob.getBytes(1, (int) theBlob.length()) : null
            } catch (SQLException e) {
                if (logger.isTraceEnabled()) logger.trace("Ignoring exception trying getBytes(), trying getBlob(): " + e.toString());
                Blob theBlob = rs.getBlob(index);
                if (!rs.wasNull()) sblob = new SerialBlob(theBlob);
            }
            value = sblob;
            break;
        case 13: value = new SerialClob(rs.getClob(index)); break;
        case 14:
        case 15: value = rs.getObject(index); break;
        }
    } catch (SQLException sqle) {
        logger.error("SQL Exception while getting value for field: [" + fi.name + "] (" + index + ")", sqle);
        throw new EntityException("SQL Exception while getting value for field: [" + fi.name + "] (" + index + ")", sqle);
    }
    return value;
}
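The only getColumnType call in the Moqui method is the CLOB check in case 1, guarded so the relatively slow metadata lookup only happens for very-long text fields. The same idea in isolation looks roughly like this; ClobReader and readLongText are names invented for this sketch, not part of the Moqui source:

import java.io.IOException;
import java.io.Reader;
import java.sql.Clob;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Types;

public final class ClobReader {
    /**
     * Consults getColumnType only when a very long text value is expected,
     * and streams CLOBs in 4096-char chunks instead of relying on getString.
     */
    public static String readLongText(ResultSet rs, int index) throws SQLException, IOException {
        ResultSetMetaData rsmd = rs.getMetaData();
        if (rsmd.getColumnType(index) != Types.CLOB) {
            return rs.getString(index);
        }
        Clob clob = rs.getClob(index);
        if (clob == null) return null;
        StringBuilder sb = new StringBuilder();
        try (Reader reader = clob.getCharacterStream()) {
            char[] buf = new char[4096];
            int n;
            while ((n = reader.read(buf)) > 0) {
                sb.append(buf, 0, n);
            }
        }
        return sb.toString();
    }
}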
From source file:com.bigdata.etl.util.DwUtil.java
public static void bulkInsert(String tableName, List<Map<String, String>> lst) {
    ResultSet rs = null;
    java.sql.Statement stmt = null;
    try (java.sql.Connection conn = DataSource.getConnection()) {
        stmt = conn.createStatement();
        rs = stmt.executeQuery("select top 0 * from " + tableName);
        try (SQLServerBulkCopy bulk = new SQLServerBulkCopy(url + "user=" + user + ";password=" + password)) {
            SQLServerBulkCopyOptions sqlsbc = new SQLServerBulkCopyOptions();
            sqlsbc.setBulkCopyTimeout(60 * 60 * 1000);
            bulk.setBulkCopyOptions(sqlsbc);
            bulk.setDestinationTableName(tableName);
            ResultSetMetaData rsmd = rs.getMetaData();
            if (lst == null) {
                return;
            }
            try (CachedRowSetImpl x = new CachedRowSetImpl()) {
                x.populate(rs);
                for (int k = 0; k < lst.size(); k++) {
                    Map<String, String> map = lst.get(k);
                    x.last();
                    x.moveToInsertRow();
                    for (int i = 1; i <= rsmd.getColumnCount(); i++) {
                        String name = rsmd.getColumnName(i).toUpperCase();
                        int type = rsmd.getColumnType(i); // a constant from java.sql.Types
                        try {
                            switch (type) {
                            case Types.VARCHAR:
                            case Types.NVARCHAR:
                                int len = rsmd.getColumnDisplaySize(i);
                                String v = map.get(name);
                                if (map.containsKey(name)) {
                                    x.updateString(i, v.length() > len ? v.substring(0, len) : v);
                                } else {
                                    x.updateNull(i);
                                }
                                break;
                            case Types.BIGINT:
                                if (map.containsKey(name) && map.get(name).matches("\\d{1,}")) {
                                    x.updateLong(i, Long.valueOf(map.get(name)));
                                } else {
                                    x.updateNull(i);
                                }
                                break;
                            case Types.FLOAT:
                                if (map.containsKey(name) && map.get(name).matches("([+-]?)\\d*\\.\\d+$")) {
                                    x.updateFloat(i, Float.valueOf(map.get(name)));
                                } else {
                                    x.updateNull(i);
                                }
                                break;
                            case Types.DOUBLE:
                                if (map.containsKey(name) && map.get(name).trim().length() > 0
                                        && StringUtils.isNumeric(map.get(name))) {
                                    x.updateDouble(i, Double.valueOf(map.get(name)));
                                } else {
                                    x.updateNull(i);
                                }
                                break;
                            case Types.INTEGER:
                                if (map.containsKey(name) && map.get(name).matches("\\d{1,}")) {
                                    x.updateInt(i, Integer.valueOf(map.get(name)));
                                } else {
                                    x.updateNull(i);
                                }
                                break;
                            default:
                                throw new RuntimeException("Unhandled column type: " + type);
                            }
                        } catch (RuntimeException | SQLException e) {
                            Logger.getLogger(DwUtil.class.getName()).log(Level.SEVERE,
                                    "Failed to set value for name=" + name + " value=" + map.get(name), e);
                        }
                    }
                    x.insertRow();
                    x.moveToCurrentRow();
                }
                long start = System.currentTimeMillis();
                bulk.writeToServer(x);
                long end = System.currentTimeMillis();
                System.out.println(LocalTime.now() + " " + Thread.currentThread().getId() + " "
                        + (end - start) + "ms " + x.size());
            }
        }
    } catch (SQLException e) {
        Logger.getLogger(DwUtil.class.getName()).log(Level.SEVERE, null, e);
    } finally {
        try {
            if (rs != null) {
                rs.close();
            }
            if (stmt != null) {
                stmt.close();
            }
        } catch (SQLException ex) {
            Logger.getLogger(DwUtil.class.getName()).log(Level.SEVERE, null, ex);
        }
    }
}
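Assuming the class's static url, user, and password fields are configured for the target SQL Server, a call might look like the following sketch; the table and column names are placeholders:

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class BulkInsertDemo {
    public static void main(String[] args) {
        // Map keys must match destination column names: bulkInsert() upper-cases
        // the metadata column names before looking them up in each map.
        List<Map<String, String>> rows = new ArrayList<>();
        Map<String, String> row = new HashMap<>();
        row.put("ID", "42");
        row.put("NAME", "example");
        rows.add(row);
        DwUtil.bulkInsert("dbo.MyTable", rows); // placeholder table name
    }
}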
From source file:com.thinkbiganalytics.util.JdbcCommon.java
/**
 * Examines the result set of a JDBC query and creates an Avro schema with appropriately mapped types to accept rows from the JDBC result.
 *
 * @param rs A result set used to obtain data type information
 * @return an instance of Avro Schema
 * @throws SQLException if errors occur while reading data from the database
 */
public static Schema createSchema(final ResultSet rs) throws SQLException {
    final ResultSetMetaData meta = rs.getMetaData();
    final int nrOfColumns = meta.getColumnCount();
    String tableName = "";
    try {
        tableName = meta.getTableName(1);
    } catch (SQLException e) {
        // ignore; fall back to the default record name below
    }
    if (StringUtils.isBlank(tableName)) {
        tableName = "NiFi_ExecuteSQL_Record";
    }
    final FieldAssembler<Schema> builder = SchemaBuilder.record(tableName).namespace("any.data").fields();
    /*
     * Some missing Avro types - Decimal, Date types. May need some additional work.
     */
    for (int i = 1; i <= nrOfColumns; i++) {
        switch (meta.getColumnType(i)) { // constants statically imported from java.sql.Types
        case CHAR:
        case LONGNVARCHAR:
        case LONGVARCHAR:
        case NCHAR:
        case NVARCHAR:
        case VARCHAR:
            builder.name(meta.getColumnName(i)).type().unionOf().nullBuilder().endNull().and().stringType()
                    .endUnion().noDefault();
            break;
        case BIT:
        case BOOLEAN:
            builder.name(meta.getColumnName(i)).type().unionOf().nullBuilder().endNull().and().booleanType()
                    .endUnion().noDefault();
            break;
        case INTEGER:
            if (meta.isSigned(i)) {
                builder.name(meta.getColumnName(i)).type().unionOf().nullBuilder().endNull().and().intType()
                        .endUnion().noDefault();
            } else {
                builder.name(meta.getColumnName(i)).type().unionOf().nullBuilder().endNull().and().longType()
                        .endUnion().noDefault();
            }
            break;
        case SMALLINT:
        case TINYINT:
            builder.name(meta.getColumnName(i)).type().unionOf().nullBuilder().endNull().and().intType()
                    .endUnion().noDefault();
            break;
        case BIGINT:
            builder.name(meta.getColumnName(i)).type().unionOf().nullBuilder().endNull().and().longType()
                    .endUnion().noDefault();
            break;
        // java.sql.RowId is an interface and seems to be database implementation specific,
        // so convert it to String
        case ROWID:
            builder.name(meta.getColumnName(i)).type().unionOf().nullBuilder().endNull().and().stringType()
                    .endUnion().noDefault();
            break;
        case FLOAT:
        case REAL:
            builder.name(meta.getColumnName(i)).type().unionOf().nullBuilder().endNull().and().floatType()
                    .endUnion().noDefault();
            break;
        case DOUBLE:
            builder.name(meta.getColumnName(i)).type().unionOf().nullBuilder().endNull().and().doubleType()
                    .endUnion().noDefault();
            break;
        // Did not find a directly suitable type, needs to be clarified!!!!
        case DECIMAL:
        case NUMERIC:
            builder.name(meta.getColumnName(i)).type().unionOf().nullBuilder().endNull().and().stringType()
                    .endUnion().noDefault();
            break;
        // Did not find a directly suitable type, needs to be clarified!!!!
        case DATE:
        case TIME:
        case TIMESTAMP:
            builder.name(meta.getColumnName(i)).type().unionOf().nullBuilder().endNull().and().stringType()
                    .endUnion().noDefault();
            break;
        case BINARY:
        case VARBINARY:
        case LONGVARBINARY:
        case ARRAY:
        case BLOB:
        case CLOB:
            builder.name(meta.getColumnName(i)).type().unionOf().nullBuilder().endNull().and().bytesType()
                    .endUnion().noDefault();
            break;
        default:
            throw new IllegalArgumentException("createSchema: Unknown SQL type " + meta.getColumnType(i)
                    + " cannot be converted to Avro type");
        }
    }
    return builder.endRecord();
}
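A hedged usage sketch: assuming JdbcCommon plus the Avro and H2 libraries are on the classpath, the generated schema can be inspected like this (the table is a placeholder):

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import org.apache.avro.Schema;

public class CreateSchemaDemo {
    public static void main(String[] args) throws SQLException {
        try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:demo");
                Statement stmt = conn.createStatement()) {
            stmt.execute("CREATE TABLE t (id INT, name VARCHAR(20), score DOUBLE)");
            try (ResultSet rs = stmt.executeQuery("SELECT * FROM t")) {
                Schema schema = JdbcCommon.createSchema(rs);
                // Each column becomes a union of null and the mapped Avro type
                System.out.println(schema.toString(true));
            }
        }
    }
}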
From source file:com.healthmarketscience.jackcess.util.ImportUtil.java
/**
 * Returns a List of Column instances converted from the given
 * ResultSetMetaData (this is the same method used by the various
 * {@code importResultSet()} methods).
 *
 * @return a List of Columns
 */
public static List<ColumnBuilder> toColumns(ResultSetMetaData md) throws SQLException {
    List<ColumnBuilder> columns = new LinkedList<ColumnBuilder>();
    for (int i = 1; i <= md.getColumnCount(); i++) {
        ColumnBuilder column = new ColumnBuilder(md.getColumnName(i)).escapeName();
        int lengthInUnits = md.getColumnDisplaySize(i);
        column.setSQLType(md.getColumnType(i), lengthInUnits);
        DataType type = column.getType();
        // we check for isTrueVariableLength here to avoid setting the length
        // for a NUMERIC column, which pretends to be var-len, even though it isn't
        if (type.isTrueVariableLength() && !type.isLongValue()) {
            column.setLengthInUnits((short) lengthInUnits);
        }
        if (type.getHasScalePrecision()) {
            int scale = md.getScale(i);
            int precision = md.getPrecision(i);
            if (type.isValidScale(scale)) {
                column.setScale((byte) scale);
            }
            if (type.isValidPrecision(precision)) {
                column.setPrecision((byte) precision);
            }
        }
        columns.add(column);
    }
    return columns;
}
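A sketch of how the returned builders might feed into Jackcess's TableBuilder API; the helper class, method, and table name below are hypothetical, assuming an already-open Jackcess Database:

import java.io.IOException;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.util.List;
import com.healthmarketscience.jackcess.ColumnBuilder;
import com.healthmarketscience.jackcess.Database;
import com.healthmarketscience.jackcess.Table;
import com.healthmarketscience.jackcess.TableBuilder;
import com.healthmarketscience.jackcess.util.ImportUtil;

public class ToColumnsDemo {
    /** Creates an Access table mirroring the JDBC result layout. */
    public static Table mirrorTable(Database db, ResultSetMetaData md)
            throws SQLException, IOException {
        List<ColumnBuilder> columns = ImportUtil.toColumns(md);
        TableBuilder tb = new TableBuilder("ImportedTable"); // placeholder name
        for (ColumnBuilder cb : columns) {
            tb.addColumn(cb);
        }
        return tb.toTable(db);
    }
}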
From source file:org.jabsorb.ext.DataList.java
/**
 * Build an array of ColumnMetaData objects from a ResultSetMetaData object.
 *
 * @param rmd ResultSetMetaData to build ColumnMetaData from.
 * @return ColumnMetaData array, or null if ResultSetMetaData is null.
 * @throws SQLException if there is a problem processing the ResultSetMetaData object.
 */
public static ColumnMetaData[] buildColumnMetaDataFromResultSetMetaData(ResultSetMetaData rmd)
        throws SQLException {
    if (rmd == null) {
        return null;
    }
    int j = rmd.getColumnCount();
    ColumnMetaData[] cmd = new ColumnMetaData[j];
    for (int i = 1; i <= j; i++) {
        ColumnMetaData c = new ColumnMetaData();
        c.setColumnName(rmd.getColumnName(i));
        c.setCatalogName(rmd.getCatalogName(i));
        c.setColumnClassName(rmd.getColumnClassName(i));
        c.setColumnDisplaySize(rmd.getColumnDisplaySize(i));
        c.setColumnLabel(rmd.getColumnLabel(i));
        c.setColumnType(rmd.getColumnType(i));
        c.setColumnTypeName(rmd.getColumnTypeName(i));
        c.setPrecision(rmd.getPrecision(i));
        c.setScale(rmd.getScale(i));
        c.setSchemaName(rmd.getSchemaName(i));
        c.setTableName(rmd.getTableName(i));
        c.setAutoIncrement(rmd.isAutoIncrement(i));
        c.setCaseSensitive(rmd.isCaseSensitive(i));
        c.setCurrency(rmd.isCurrency(i));
        c.setNullable(rmd.isNullable(i));
        c.setReadOnly(rmd.isReadOnly(i));
        c.setSearchable(rmd.isSearchable(i));
        c.setSigned(rmd.isSigned(i));
        c.setWritable(rmd.isWritable(i));
        c.setDefinitelyWritable(rmd.isDefinitelyWritable(i));
        cmd[i - 1] = c;
    }
    return cmd;
}
From source file:com.espertech.esper.epl.db.DatabasePollingViewableFactory.java
private static Map<String, DBOutputTypeDesc> compileResultMetaData(ResultSetMetaData resultMetaData,
        ColumnSettings columnSettings) throws SQLException {
    Map<String, DBOutputTypeDesc> outputProperties = new HashMap<String, DBOutputTypeDesc>();
    for (int i = 0; i < resultMetaData.getColumnCount(); i++) {
        String columnName = resultMetaData.getColumnLabel(i + 1);
        if (columnName == null) {
            columnName = resultMetaData.getColumnName(i + 1);
        }
        int columnType = resultMetaData.getColumnType(i + 1);
        String javaClass = resultMetaData.getColumnTypeName(i + 1);
        ConfigurationDBRef.ColumnChangeCaseEnum caseEnum = columnSettings.getColumnCaseConversionEnum();
        if ((caseEnum != null) && (caseEnum == ConfigurationDBRef.ColumnChangeCaseEnum.LOWERCASE)) {
            columnName = columnName.toLowerCase();
        }
        if ((caseEnum != null) && (caseEnum == ConfigurationDBRef.ColumnChangeCaseEnum.UPPERCASE)) {
            columnName = columnName.toUpperCase();
        }
        DatabaseTypeBinding binding = null;
        String javaTypeBinding = null;
        if (columnSettings.getJavaSqlTypeBinding() != null) {
            javaTypeBinding = columnSettings.getJavaSqlTypeBinding().get(columnType);
        }
        if (javaTypeBinding != null) {
            binding = DatabaseTypeEnum.getEnum(javaTypeBinding).getBinding();
        }
        DBOutputTypeDesc outputType = new DBOutputTypeDesc(columnType, javaClass, binding);
        outputProperties.put(columnName, outputType);
    }
    return outputProperties;
}
From source file:org.apache.calcite.avatica.jdbc.JdbcMeta.java
/**
 * Convert from JDBC metadata to Avatica columns.
 */
protected static List<ColumnMetaData> columns(ResultSetMetaData metaData) throws SQLException {
    if (metaData == null) {
        return Collections.emptyList();
    }
    final List<ColumnMetaData> columns = new ArrayList<>();
    for (int i = 1; i <= metaData.getColumnCount(); i++) {
        final SqlType sqlType = SqlType.valueOf(metaData.getColumnType(i));
        final ColumnMetaData.Rep rep = ColumnMetaData.Rep.of(sqlType.internal);
        ColumnMetaData.AvaticaType t = ColumnMetaData.scalar(metaData.getColumnType(i),
                metaData.getColumnTypeName(i), rep);
        ColumnMetaData md = new ColumnMetaData(i - 1, metaData.isAutoIncrement(i),
                metaData.isCaseSensitive(i), metaData.isSearchable(i), metaData.isCurrency(i),
                metaData.isNullable(i), metaData.isSigned(i), metaData.getColumnDisplaySize(i),
                metaData.getColumnLabel(i), metaData.getColumnName(i), metaData.getSchemaName(i),
                metaData.getPrecision(i), metaData.getScale(i), metaData.getTableName(i),
                metaData.getCatalogName(i), t, metaData.isReadOnly(i), metaData.isWritable(i),
                metaData.isDefinitelyWritable(i), metaData.getColumnClassName(i));
        columns.add(md);
    }
    return columns;
}
From source file:org.openmrs.contrib.databaseexporter.DatabaseExporter.java
public static void export(final Configuration configuration) throws Exception {
    FileOutputStream fos = null;
    Connection connection = null;
    try {
        connection = DbUtil.openConnection(configuration);
        if (configuration.getTargetDirectory() != null) {
            File f = new File(configuration.getTargetDirectory());
            if (!f.exists()) {
                f.mkdirs();
            }
        }
        File outputFile = configuration.getOutputFile();
        fos = new FileOutputStream(outputFile);
        OutputStreamWriter osWriter = new OutputStreamWriter(fos, "UTF-8");
        PrintWriter out = new PrintWriter(osWriter);
        final ExportContext context = new ExportContext(configuration, out);
        context.log("Context initialized");
        DbUtil.writeExportHeader(context);
        try {
            List<RowFilter> rowFilters = new ArrayList<RowFilter>();
            rowFilters.add(Util.nvl(configuration.getPatientFilter(), new PatientFilter()));
            rowFilters.add(Util.nvl(configuration.getUserFilter(), new UserFilter()));
            rowFilters.add(Util.nvl(configuration.getProviderFilter(), new ProviderFilter()));
            for (RowFilter filter : rowFilters) {
                context.log("Applying filter: " + filter.getClass().getSimpleName());
                filter.filter(context);
            }
            for (RowFilter filter : rowFilters) {
                for (DependencyFilter df : filter.getDependencyFilters()) {
                    df.filter(context);
                }
            }
            ListMap<String, RowTransform> tableTransforms = new ListMap<String, RowTransform>(true);
            for (final String table : context.getTableData().keySet()) {
                TableConfig tableConfig = context.getTableData().get(table);
                if (tableConfig.isExportSchema()) {
                    DbUtil.writeTableSchema(table, context);
                    context.log(table + " schema exported");
                }
                if (tableConfig.isExportData()) {
                    context.log("Starting " + table + " data export");
                    DbUtil.writeTableExportHeader(table, context);
                    context.log("Constructing query");
                    String query = context.buildQuery(table, context);
                    context.log("Determining applicable transforms for table");
                    for (RowFilter filter : rowFilters) {
                        tableTransforms.putAll(table, filter.getTransforms());
                    }
                    for (RowTransform transform : configuration.getRowTransforms()) {
                        if (transform.canTransform(table, context)) {
                            tableTransforms.putInList(table, transform);
                        }
                    }
                    final List<RowTransform> transforms = Util.nvl(tableTransforms.get(table),
                            new ArrayList<RowTransform>());
                    context.log("Determining number of rows for table");
                    String rowNumQuery = query.replace(table + ".*", "count(*)");
                    final Long totalRows = context.executeQuery(rowNumQuery, new ScalarHandler<Long>());
                    final int batchSize = context.getConfiguration().getBatchSize();
                    context.log("***************************** Executing query **************************");
                    context.log("Query: " + query);
                    context.log("Transforms: " + transforms);
                    context.log("Total Rows: " + totalRows);
                    Integer rowsAdded = context.executeQuery(query, new ResultSetHandler<Integer>() {
                        public Integer handle(ResultSet rs) throws SQLException {
                            ResultSetMetaData md = rs.getMetaData();
                            int numColumns = md.getColumnCount();
                            int rowsChecked = 0;
                            int rowsAdded = 0;
                            int rowIndex = 0;
                            while (rs.next()) {
                                rowsChecked++;
                                TableRow row = new TableRow(table);
                                for (int i = 1; i <= numColumns; i++) {
                                    String columnName = md.getColumnName(i);
                                    ColumnValue value = new ColumnValue(table, columnName,
                                            md.getColumnType(i), rs.getObject(i));
                                    row.addColumnValue(columnName, value);
                                }
                                boolean includeRow = true;
                                for (RowTransform transform : transforms) {
                                    includeRow = includeRow && transform.transformRow(row, context);
                                }
                                if (includeRow) {
                                    rowsAdded++;
                                    rowIndex = (rowIndex >= batchSize ? 0 : rowIndex) + 1;
                                    DbUtil.writeInsertRow(row, rowIndex, rowsAdded, context);
                                }
                                if (rowsChecked % 1000 == 0) {
                                    context.log("Processed " + table + " rows " + (rowsChecked - 1000)
                                            + " to " + rowsChecked + " ("
                                            + Util.toPercent(rowsChecked, totalRows, 0) + "%)");
                                }
                            }
                            return rowsAdded;
                        }
                    });
                    if (rowsAdded % batchSize != 0) {
                        context.write(";");
                        context.write("");
                    }
                    context.log(rowsAdded + " rows retrieved and transformed from initial queries");
                    context.log("********************************************************************");
                    DbUtil.writeTableExportFooter(table, context);
                }
            }
            // Handle any post-processing transforms that have been defined
            for (String table : tableTransforms.keySet()) {
                List<TableRow> rows = new ArrayList<TableRow>();
                for (RowTransform transform : tableTransforms.get(table)) {
                    rows.addAll(transform.postProcess(table, context));
                }
                if (rows != null && !rows.isEmpty()) {
                    DbUtil.writeTableExportHeader(table, context);
                    for (int i = 1; i <= rows.size(); i++) {
                        TableRow row = rows.get(i - 1);
                        DbUtil.writeInsertRow(row, i, i, context);
                    }
                    context.write(";");
                    context.write("");
                    DbUtil.writeTableExportFooter(table, context);
                }
            }
        } catch (Exception e) {
            context.log("An error occurred during export: " + e.getMessage());
            e.printStackTrace(System.out);
        } finally {
            context.log("Cleaning up temporary tables");
            context.cleanupTemporaryTables();
        }
        DbUtil.writeExportFooter(context);
        context.log("Exporting Database Completed");
        context.log("***** Summary Data *****");
        for (final String table : context.getTableData().keySet()) {
            TableConfig tableConfig = context.getTableData().get(table);
            context.log(tableConfig.getTableMetadata().getTableName() + ": " + tableConfig.getNumRowsExported());
        }
        context.log("**************************");
        context.log("Export completed in: " + Util.formatTimeDifference(context.getEventLog().getTotalTime()));
        out.flush();
    } finally {
        IOUtils.closeQuietly(fos);
        DbUtil.closeConnection(connection);
    }
}
From source file:com.webbfontaine.valuewebb.model.util.Utils.java
public static List<Object[]> transformToList(ResultSet rs) throws SQLException {
    ResultSetMetaData rsMetaData = rs.getMetaData();
    int numberOfColumns = rsMetaData.getColumnCount();
    List<Object[]> result = new ArrayList<>(numberOfColumns);
    while (rs.next()) {
        Object[] row = new Object[numberOfColumns];
        for (int i = 0; i < numberOfColumns; i++) {
            if (rsMetaData.getColumnType(i + 1) == Types.TIMESTAMP) {
                row[i] = rs.getDate(i + 1);
            } else {
                row[i] = rs.getObject(i + 1);
            }
        }
        result.add(row);
    }
    return result;
}
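One quirk worth noting: mapping TIMESTAMP columns through rs.getDate() discards the time-of-day portion, since java.sql.Date carries only the date. A hedged usage sketch (the H2 URL and the events table are placeholders):

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;
import java.util.Arrays;

public class TransformDemo {
    public static void main(String[] args) throws Exception {
        try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:demo");
                Statement stmt = conn.createStatement()) {
            stmt.execute("CREATE TABLE events (id INT, created_at TIMESTAMP)");
            stmt.execute("INSERT INTO events VALUES (1, CURRENT_TIMESTAMP)");
            try (ResultSet rs = stmt.executeQuery("SELECT id, created_at FROM events")) {
                for (Object[] row : Utils.transformToList(rs)) {
                    // TIMESTAMP column prints as a date only, e.g. [1, 2024-01-01]
                    System.out.println(Arrays.toString(row));
                }
            }
        }
    }
}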
From source file:org.georepublic.db.utils.ResultSetConverter.java
public static StringBuffer convertCsv(ResultSet rs) throws SQLException {
    String column_name;
    StringBuffer retval = new StringBuffer();
    ResultSetMetaData rsmd = rs.getMetaData();
    int numColumns = rsmd.getColumnCount();
    for (int h = 1; h < numColumns + 1; h++) {
        column_name = rsmd.getColumnName(h);
        // skip the same geometry columns here that the data loop skips below,
        // so the header stays aligned with the rows
        if (StringUtils.equals(column_name, "the_geom") || StringUtils.equals(column_name, "geojson")) {
            continue;
        }
        if (h > 1) {
            retval.append(",");
        }
        retval.append(column_name);
    }
    retval.append("\n");
    while (rs.next()) {
        for (int i = 1; i < numColumns + 1; i++) {
            column_name = rsmd.getColumnName(i);
            if (StringUtils.equals(column_name, "the_geom")) {
                continue;
            }
            if (StringUtils.equals(column_name, "geojson")) {
                continue;
            }
            if (i > 1) {
                retval.append(",");
            }
            if (rsmd.getColumnType(i) == java.sql.Types.ARRAY) {
                retval.append(rs.getArray(column_name));
            } else if (rsmd.getColumnType(i) == java.sql.Types.BIGINT) {
                retval.append(rs.getInt(column_name));
            } else if (rsmd.getColumnType(i) == java.sql.Types.BOOLEAN) {
                retval.append(rs.getBoolean(column_name));
            } else if (rsmd.getColumnType(i) == java.sql.Types.BLOB) {
                retval.append(rs.getBlob(column_name));
            } else if (rsmd.getColumnType(i) == java.sql.Types.DOUBLE) {
                retval.append(rs.getDouble(column_name));
            } else if (rsmd.getColumnType(i) == java.sql.Types.FLOAT) {
                retval.append(rs.getFloat(column_name));
            } else if (rsmd.getColumnType(i) == java.sql.Types.INTEGER) {
                retval.append(rs.getInt(column_name));
            } else if (rsmd.getColumnType(i) == java.sql.Types.NVARCHAR) {
                retval.append(rs.getNString(column_name));
            } else if (rsmd.getColumnType(i) == java.sql.Types.VARCHAR) {
                retval.append(rs.getString(column_name));
            } else if (rsmd.getColumnType(i) == java.sql.Types.TINYINT) {
                retval.append(rs.getInt(column_name));
            } else if (rsmd.getColumnType(i) == java.sql.Types.SMALLINT) {
                retval.append(rs.getInt(column_name));
            } else if (rsmd.getColumnType(i) == java.sql.Types.DATE) {
                retval.append(rs.getDate(column_name));
            } else if (rsmd.getColumnType(i) == java.sql.Types.TIMESTAMP) {
                retval.append(rs.getTimestamp(column_name));
            } else {
                retval.append(rs.getObject(column_name));
            }
        }
        retval.append("\n");
    }
    return retval;
}
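Both this example and the Moqui one above note that metadata lookups can be slow, yet the row loop here calls rsmd.getColumnType(i) up to a dozen times per column on every row. A small refinement, suggested here rather than taken from the original source, is to read the types into an array once before iterating; the per-row chain can then compare against types[i] without touching the metadata again:

import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;

public final class ColumnTypeCache {
    /** Reads all column types once, so row loops can branch on a plain int[]. */
    public static int[] columnTypes(ResultSet rs) throws SQLException {
        ResultSetMetaData rsmd = rs.getMetaData();
        int[] types = new int[rsmd.getColumnCount() + 1]; // 1-based; slot 0 unused
        for (int i = 1; i < types.length; i++) {
            types[i] = rsmd.getColumnType(i); // one metadata call per column, total
        }
        return types;
    }
}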