Example usage for java.sql ResultSetMetaData getSchemaName

List of usage examples for java.sql ResultSetMetaData getSchemaName

Introduction

This page shows example usages of java.sql.ResultSetMetaData#getSchemaName, collected from open source projects.

Prototype

String getSchemaName(int column) throws SQLException;

Document

Get the designated column's table's schema.
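
A minimal, self-contained sketch of the basic call pattern (illustrative only; the in-memory H2 database, its credentials and the DEMO table are assumptions, not taken from the examples below):

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;

public class GetSchemaNameSketch {
    public static void main(String[] args) throws SQLException {
        // Assumes the H2 driver is on the classpath; substitute your own JDBC URL.
        try (Connection conn = DriverManager.getConnection("jdbc:h2:mem:demo", "sa", "");
                Statement stmt = conn.createStatement()) {
            stmt.execute("CREATE TABLE DEMO (ID INT, NAME VARCHAR(32))");
            try (ResultSet rs = stmt.executeQuery("SELECT * FROM DEMO")) {
                ResultSetMetaData meta = rs.getMetaData();
                for (int i = 1; i <= meta.getColumnCount(); i++) {
                    // Column indexes are 1-based; a driver may return "" when the
                    // schema is not applicable or cannot be determined.
                    System.out.println(meta.getColumnName(i) + " -> " + meta.getSchemaName(i));
                }
            }
        }
    }
}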

Usage

From source file:org.apache.hadoop.hive.jdbc.storagehandler.AtsdDBRecordReader.java

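// Copies the ResultSet into a CachedRowSet whose metadata has '.' in column names
// replaced with '$'; label, catalog, type, schema and table name are carried over unchanged.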
private ResultSet replaceDotsInColumnNames(ResultSet resultSet) throws SQLException {
    ResultSetMetaData metaData = resultSet.getMetaData();
    int columnCount = metaData.getColumnCount();

    if (columnCount > 0) {
        CachedRowSetImpl crs = new CachedRowSetImpl();
        crs.populate(resultSet);

        RowSetMetaDataImpl rwsm = new RowSetMetaDataImpl();

        rwsm.setColumnCount(columnCount);

        for (int i = 1; i <= columnCount; i++) {
            String columnName = metaData.getColumnName(i);
            if (columnName.contains(".")) {
                columnName = columnName.replaceAll("\\.", "\\$");
            }
            rwsm.setColumnName(i, columnName);
            rwsm.setColumnLabel(i, metaData.getColumnLabel(i));
            rwsm.setCatalogName(i, metaData.getCatalogName(i));
            rwsm.setColumnType(i, metaData.getColumnType(i));
            rwsm.setColumnTypeName(i, metaData.getColumnTypeName(i));
            rwsm.setSchemaName(i, metaData.getSchemaName(i));
            rwsm.setTableName(i, metaData.getTableName(i));
        }
        crs.setMetaData(rwsm);
        return crs;
    }
    return resultSet;
}

From source file:com.kylinolap.rest.service.QueryService.java

/**
 * Executes the given SQL against the project's OLAP datasource and collects
 * the result rows and column metadata.
 *
 * @param sql
 * @param sqlRequest
 * @return
 * @throws Exception
 */
private SQLResponse execute(String sql, SQLRequest sqlRequest) throws Exception {
    Connection conn = null;
    Statement stat = null;
    ResultSet resultSet = null;
    List<List<String>> results = new LinkedList<List<String>>();
    List<SelectedColumnMeta> columnMetas = new LinkedList<SelectedColumnMeta>();

    try {
        conn = getOLAPDataSource(sqlRequest.getProject()).getConnection();

        if (sqlRequest instanceof PrepareSqlRequest) {
            PreparedStatement preparedState = conn.prepareStatement(sql);

            for (int i = 0; i < ((PrepareSqlRequest) sqlRequest).getParams().length; i++) {
                setParam(preparedState, i + 1, ((PrepareSqlRequest) sqlRequest).getParams()[i]);
            }

            resultSet = preparedState.executeQuery();
        } else {
            stat = conn.createStatement();
            resultSet = stat.executeQuery(sql);
        }

        ResultSetMetaData metaData = resultSet.getMetaData();
        int columnCount = metaData.getColumnCount();

        // Fill in selected column meta
        for (int i = 1; i <= columnCount; ++i) {
            columnMetas.add(new SelectedColumnMeta(metaData.isAutoIncrement(i), metaData.isCaseSensitive(i),
                    metaData.isSearchable(i), metaData.isCurrency(i), metaData.isNullable(i),
                    metaData.isSigned(i), metaData.getColumnDisplaySize(i), metaData.getColumnLabel(i),
                    metaData.getColumnName(i), metaData.getSchemaName(i), metaData.getCatalogName(i),
                    metaData.getTableName(i), metaData.getPrecision(i), metaData.getScale(i),
                    metaData.getColumnType(i), metaData.getColumnTypeName(i), metaData.isReadOnly(i),
                    metaData.isWritable(i), metaData.isDefinitelyWritable(i)));
        }

        List<String> oneRow = new LinkedList<String>();

        // fill in results
        while (resultSet.next()) {
            for (int i = 0; i < columnCount; i++) {
                oneRow.add((resultSet.getString(i + 1)));
            }

            results.add(new LinkedList<String>(oneRow));
            oneRow.clear();
        }
    } catch (Exception e) {
        logger.error(e.getLocalizedMessage(), e);
        throw e;
    } finally {
        close(resultSet, stat, conn);
    }

    boolean isPartialResult = false;
    String cube = "";
    long totalScanCount = 0;
    for (OLAPContext ctx : OLAPContext.getThreadLocalContexts()) {
        isPartialResult |= ctx.storageContext.isPartialResultReturned();
        cube = ctx.cubeInstance.getName();
        totalScanCount += ctx.storageContext.getTotalScanCount();
    }

    SQLResponse response = new SQLResponse(columnMetas, results, cube, 0, false, null, isPartialResult);
    response.setTotalScanCount(totalScanCount);

    return response;
}

From source file:de.innovationgate.webgate.api.jdbc.custom.JDBCSource.java

/**
 * Extracts the key values of the current row, resolving the table via the
 * catalog, schema and table name reported by the result set metadata.
 *
 * @param resultSet
 * @param tableHint fallback table name when the metadata reports none
 * @return a map of key column names to values
 */
private Map extractRowKey(ResultSet resultSet, String tableHint) throws SQLException {
    ResultSetMetaData rsMeta = resultSet.getMetaData();
    TableName tableName = new TableName(rsMeta.getCatalogName(1), rsMeta.getSchemaName(1),
            rsMeta.getTableName(1));
    String completeTableName = tableName.getCompleteName();
    if (completeTableName == null || completeTableName.trim().equals("")) {
        completeTableName = tableHint;
    }

    Map keys = new KeyMap();
    List keyColumns = (List) _tables.get(String.valueOf(completeTableName).toLowerCase());

    // If key columns are not retrievable, just return the empty map as key
    if (keyColumns == null) {
        return keys;
    }

    Iterator keyColumnsIt = keyColumns.iterator();
    while (keyColumnsIt.hasNext()) {
        String keyColumn = (String) keyColumnsIt.next();
        Object keyValue = resultSet.getObject(keyColumn);

        // Since the key columns from KeyMap originate from the _tables list, we should use the column names unmodified
        // keys.put(keyColumn.toLowerCase(), keyValue);
        keys.put(keyColumn, keyValue);
    }

    return keys;

}

From source file:kenh.xscript.database.beans.ResultSetBean.java

/**
 * Uses a result set to initialize the bean.
 * 
 * @param rs
 * @param includeFieldName
 * @throws SQLException
 * @throws IllegalAccessException
 * @throws InstantiationException
 */
public ResultSetBean(ResultSet rs, boolean includeFieldName)
        throws SQLException, IllegalAccessException, InstantiationException {
    include_field_name = includeFieldName;

    LazyDynaClass beanClass = new LazyDynaClass();

    ResultSetMetaData m = rs.getMetaData();
    for (int i = 1; i <= m.getColumnCount(); i++) {
        Column c = new Column();

        try {
            c.catalogName = m.getCatalogName(i);
        } catch (SQLException e) {
        }
        try {
            c.className = m.getColumnClassName(i);
        } catch (SQLException e) {
        }
        try {
            c.displaySize = m.getColumnDisplaySize(i);
        } catch (SQLException e) {
        }
        try {
            c.label = m.getColumnLabel(i);
        } catch (SQLException e) {
        }
        try {
            c.name = m.getColumnName(i);
        } catch (SQLException e) {
        }
        try {
            c.type = m.getColumnType(i);
        } catch (SQLException e) {
        }
        try {
            c.typeName = m.getColumnTypeName(i);
        } catch (SQLException e) {
        }
        try {
            c.precision = m.getPrecision(i);
        } catch (SQLException e) {
        }
        try {
            c.scale = m.getScale(i);
        } catch (SQLException e) {
        }
        try {
            c.schemaName = m.getSchemaName(i);
        } catch (SQLException e) {
        }
        try {
            c.tableName = m.getTableName(i);
        } catch (SQLException e) {
        }

        beanClass.add(m.getColumnLabel(i).toLowerCase());
        beanClass.add("" + i);

        cols.add(c);
    }

    DynaBean colBean = beanClass.newInstance();
    int i = 1;
    for (Column col : cols) {
        String field = col.getLabel().toLowerCase();
        colBean.set(field, col.getLabel());
        colBean.set("" + i, col.getLabel());
        i++;
    }

    if (include_field_name)
        rows.add(colBean);

    while (rs.next()) {
        DynaBean bean = beanClass.newInstance();
        i = 1;
        for (Column c : cols) {
            String field = c.getLabel().toLowerCase();
            Object obj = rs.getObject(field);
            bean.set(field, obj);
            bean.set("" + i, obj);
            i++;
        }
        rows.add(bean);
    }

}

From source file:org.apache.kylin.rest.service.QueryService.java

/**
 * @param correctedSql
 * @param sqlRequest
 * @return
 * @throws Exception
 */
private SQLResponse execute(String correctedSql, SQLRequest sqlRequest) throws Exception {
    Connection conn = null;
    Statement stat = null;
    ResultSet resultSet = null;
    Boolean isPushDown = false;

    List<List<String>> results = Lists.newArrayList();
    List<SelectedColumnMeta> columnMetas = Lists.newArrayList();

    try {
        conn = cacheService.getOLAPDataSource(sqlRequest.getProject()).getConnection();

        // special case for prepare query. 
        if (BackdoorToggles.getPrepareOnly()) {
            return getPrepareOnlySqlResponse(correctedSql, conn, isPushDown, results, columnMetas);
        }

        stat = conn.createStatement();
        processStatementAttr(stat, sqlRequest);
        resultSet = stat.executeQuery(correctedSql);

        ResultSetMetaData metaData = resultSet.getMetaData();
        int columnCount = metaData.getColumnCount();

        // Fill in selected column meta
        for (int i = 1; i <= columnCount; ++i) {
            columnMetas.add(new SelectedColumnMeta(metaData.isAutoIncrement(i), metaData.isCaseSensitive(i),
                    metaData.isSearchable(i), metaData.isCurrency(i), metaData.isNullable(i),
                    metaData.isSigned(i), metaData.getColumnDisplaySize(i), metaData.getColumnLabel(i),
                    metaData.getColumnName(i), metaData.getSchemaName(i), metaData.getCatalogName(i),
                    metaData.getTableName(i), metaData.getPrecision(i), metaData.getScale(i),
                    metaData.getColumnType(i), metaData.getColumnTypeName(i), metaData.isReadOnly(i),
                    metaData.isWritable(i), metaData.isDefinitelyWritable(i)));
        }

        // fill in results
        while (resultSet.next()) {
            List<String> oneRow = Lists.newArrayListWithCapacity(columnCount);
            for (int i = 0; i < columnCount; i++) {
                oneRow.add((resultSet.getString(i + 1)));
            }

            results.add(oneRow);
        }
    } catch (SQLException sqlException) {
        isPushDown = PushDownUtil.doPushDownQuery(sqlRequest.getProject(), correctedSql, results, columnMetas,
                sqlException);
    } finally {
        close(resultSet, stat, conn);
    }

    return getSqlResponse(isPushDown, results, columnMetas);
}

From source file:org.pentaho.reporting.engine.classic.core.modules.misc.datafactory.sql.ResultSetTableModelFactory.java

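// Collects per-column metadata (type, currency/signed flags, table, schema, catalog,
// label, display size, precision and scale) into an AttributeMap, tolerating drivers
// that throw SQLException on individual ResultSetMetaData calls.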
public static AttributeMap<Object> collectData(final ResultSetMetaData rsmd, final int column,
        final String name) throws SQLException {
    AttributeMap<Object> metaData = new AttributeMap<Object>();
    metaData.setAttribute(MetaAttributeNames.Core.NAMESPACE, MetaAttributeNames.Core.TYPE,
            TypeMapper.mapForColumn(rsmd, column));
    metaData.setAttribute(MetaAttributeNames.Core.NAMESPACE, MetaAttributeNames.Core.NAME, name);
    try {
        if (rsmd.isCurrency(column + 1)) {
            metaData.setAttribute(MetaAttributeNames.Numeric.NAMESPACE, MetaAttributeNames.Numeric.CURRENCY,
                    Boolean.TRUE);
        } else {
            metaData.setAttribute(MetaAttributeNames.Numeric.NAMESPACE, MetaAttributeNames.Numeric.CURRENCY,
                    Boolean.FALSE);
        }
    } catch (SQLException e) {
        logger.debug(
                "Error on ResultSetMetaData#isCurrency. Driver does not implement the JDBC specs correctly. ",
                e);
    }
    try {

        if (rsmd.isSigned(column + 1)) {
            metaData.setAttribute(MetaAttributeNames.Numeric.NAMESPACE, MetaAttributeNames.Numeric.SIGNED,
                    Boolean.TRUE);
        } else {
            metaData.setAttribute(MetaAttributeNames.Numeric.NAMESPACE, MetaAttributeNames.Numeric.SIGNED,
                    Boolean.FALSE);
        }
    } catch (SQLException e) {
        logger.debug(
                "Error on ResultSetMetaData#isSigned. Driver does not implement the JDBC specs correctly. ", e);
    }

    try {
        final String tableName = rsmd.getTableName(column + 1);
        if (tableName != null) {
            metaData.setAttribute(MetaAttributeNames.Database.NAMESPACE, MetaAttributeNames.Database.TABLE,
                    tableName);
        }
    } catch (SQLException e) {
        logger.debug(
                "Error on ResultSetMetaData#getTableName. Driver does not implement the JDBC specs correctly. ",
                e);
    }

    try {
        final String schemaName = rsmd.getSchemaName(column + 1);
        if (schemaName != null) {
            metaData.setAttribute(MetaAttributeNames.Database.NAMESPACE, MetaAttributeNames.Database.SCHEMA,
                    schemaName);
        }
    } catch (SQLException e) {
        logger.debug(
                "Error on ResultSetMetaData#getSchemaName. Driver does not implement the JDBC specs correctly. ",
                e);
    }

    try {
        final String catalogName = rsmd.getCatalogName(column + 1);
        if (catalogName != null) {
            metaData.setAttribute(MetaAttributeNames.Database.NAMESPACE, MetaAttributeNames.Database.CATALOG,
                    catalogName);
        }
    } catch (SQLException e) {
        logger.debug(
                "Error on ResultSetMetaData#getTableName. Driver does not implement the JDBC specs correctly. ",
                e);
    }

    try {
        final String label = rsmd.getColumnLabel(column + 1);
        if (label != null) {
            metaData.setAttribute(MetaAttributeNames.Formatting.NAMESPACE, MetaAttributeNames.Formatting.LABEL,
                    label);
        }
    } catch (SQLException e) {
        logger.debug(
                "Error on ResultSetMetaData#getTableName. Driver does not implement the JDBC specs correctly. ",
                e);
    }

    try {
        final int displaySize = rsmd.getColumnDisplaySize(column + 1);
        metaData.setAttribute(MetaAttributeNames.Formatting.NAMESPACE,
                MetaAttributeNames.Formatting.DISPLAY_SIZE, IntegerCache.getInteger(displaySize));
    } catch (SQLException e) {
        logger.debug(
                "Error on ResultSetMetaData#getTableName. Driver does not implement the JDBC specs correctly. ",
                e);
    }

    try {
        final int precision = rsmd.getPrecision(column + 1);
        metaData.setAttribute(MetaAttributeNames.Numeric.NAMESPACE, MetaAttributeNames.Numeric.PRECISION,
                IntegerCache.getInteger(precision));
    } catch (SQLException e) {
        logger.debug(
                "Error on ResultSetMetaData#getTableName. Driver does not implement the JDBC specs correctly. ",
                e);
    }

    try {
        final int scale = rsmd.getScale(column + 1);
        metaData.setAttribute(MetaAttributeNames.Numeric.NAMESPACE, MetaAttributeNames.Numeric.SCALE,
                IntegerCache.getInteger(scale));
    } catch (SQLException e) {
        logger.debug(
                "Error on ResultSetMetaData#getTableName. Driver does not implement the JDBC specs correctly. ",
                e);
    }
    return metaData;
}

From source file:solidbase.core.plugins.DumpJSON.java

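// Handles the DUMP JSON command: executes the given query, writes a header describing each
// column (name, type, schema and table name taken from the ResultSetMetaData), then streams
// every row as a JSON array, optionally redirecting binary or large text columns to separate files.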
public boolean execute(CommandProcessor processor, Command command, boolean skip) throws SQLException {
    if (!triggerPattern.matcher(command.getCommand()).matches())
        return false;

    if (command.isTransient()) {
        /* DUMP JSON DATE_CREATED ON | OFF */

        SQLTokenizer tokenizer = new SQLTokenizer(
                SourceReaders.forString(command.getCommand(), command.getLocation()));

        // TODO Maybe DUMP JSON CONFIG or DUMP JSON SET
        // TODO What about other configuration settings?
        tokenizer.get("DUMP");
        tokenizer.get("JSON");
        tokenizer.get("DATE_CREATED"); // FIXME This should be CREATED_DATE
        Token t = tokenizer.get("ON", "OFF");
        tokenizer.get((String) null);

        // TODO I think we should have a scope that is restricted to the current file and a scope that gets inherited when running or including another file.
        AbstractScope scope = processor.getContext().getScope();
        scope.set("solidbase.dump_json.dateCreated", t.eq("ON")); // TODO Make this a constant

        return true;
    }

    if (skip)
        return true;

    Parsed parsed = parse(command);

    AbstractScope scope = processor.getContext().getScope();
    Object object = scope.get("solidbase.dump_json.dateCreated");
    boolean dateCreated = object == null || object instanceof Boolean && (Boolean) object;

    Resource jsvResource = new FileResource(new File(parsed.fileName)); // Relative to current folder

    try {
        OutputStream out = jsvResource.getOutputStream();
        if (parsed.gzip)
            out = new BufferedOutputStream(new GZIPOutputStream(out, 65536), 65536); // TODO Ctrl-C, close the outputstream?

        JSONWriter jsonWriter = new JSONWriter(out);
        try {
            Statement statement = processor.createStatement();
            try {
                ResultSet result = statement.executeQuery(parsed.query);
                ResultSetMetaData metaData = result.getMetaData();

                // Define locals

                int columns = metaData.getColumnCount();
                int[] types = new int[columns];
                String[] names = new String[columns];
                boolean[] ignore = new boolean[columns];
                FileSpec[] fileSpecs = new FileSpec[columns];
                String[] schemaNames = new String[columns];
                String[] tableNames = new String[columns];

                // Analyze metadata

                for (int i = 0; i < columns; i++) {
                    int col = i + 1;
                    String name = metaData.getColumnName(col).toUpperCase();
                    types[i] = metaData.getColumnType(col);
                    if (types[i] == Types.DATE && parsed.dateAsTimestamp)
                        types[i] = Types.TIMESTAMP;
                    names[i] = name;
                    if (parsed.columns != null) {
                        ColumnSpec columnSpec = parsed.columns.get(name);
                        if (columnSpec != null)
                            if (columnSpec.skip)
                                ignore[i] = true;
                            else
                                fileSpecs[i] = columnSpec.toFile;
                    }
                    if (parsed.coalesce != null && parsed.coalesce.notFirst(name))
                        ignore[i] = true;
                    // TODO STRUCT serialize
                    // TODO This must be optional and not the default
                    else if (types[i] == Types.STRUCT || JDBCSupport.toTypeName(types[i]) == null)
                        ignore[i] = true;
                    tableNames[i] = StringUtils
                            .upperCase(StringUtils.defaultIfEmpty(metaData.getTableName(col), null));
                    schemaNames[i] = StringUtils
                            .upperCase(StringUtils.defaultIfEmpty(metaData.getSchemaName(col), null));
                }

                if (parsed.coalesce != null)
                    parsed.coalesce.bind(names);

                // Write header

                JSONObject properties = new JSONObject();
                properties.set("version", "1.0");
                properties.set("format", "record-stream");
                properties.set("description", "SolidBase JSON Data Dump File");
                properties.set("createdBy", new JSONObject("product", "SolidBase", "version", "2.0.0"));

                if (dateCreated) {
                    SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
                    properties.set("createdDate", format.format(new Date()));
                }

                if (parsed.binaryFileName != null) {
                    // TODO FIXME Should be wrapped in a SourceException: solidbase.solidstack.io.FatalURISyntaxException: java.net.URISyntaxException: Illegal character in path at index 1: &{folder}/JIADHOCCH
                    Resource binResource = Resources.getResource(parsed.binaryFileName);
                    Resource resource = Resources.getResource(parsed.fileName);
                    properties.set("binaryFile", binResource.getPathFrom(resource).toString());
                }

                JSONArray fields = new JSONArray();
                properties.set("fields", fields);
                for (int i = 0; i < columns; i++)
                    if (!ignore[i]) {
                        JSONObject field = new JSONObject();
                        field.set("schemaName", schemaNames[i]);
                        field.set("tableName", tableNames[i]);
                        field.set("name", names[i]);
                        field.set("type", JDBCSupport.toTypeName(types[i])); // TODO Better error message when type is not recognized, for example Oracle's 2007 for a user type
                        FileSpec spec = fileSpecs[i];
                        if (spec != null && !spec.generator.isDynamic()) {
                            Resource fileResource = new FileResource(spec.generator.fileName);
                            field.set("file", fileResource.getPathFrom(jsvResource).toString());
                        }
                        fields.add(field);
                    }

                FileSpec binaryFile = parsed.binaryFileName != null
                        ? new FileSpec(true, parsed.binaryFileName, 0)
                        : null;

                jsonWriter.writeFormatted(properties, 120);
                jsonWriter.getWriter().write('\n');

                Counter counter = null;
                if (parsed.logRecords > 0)
                    counter = new FixedCounter(parsed.logRecords);
                else if (parsed.logSeconds > 0)
                    counter = new TimedCounter(parsed.logSeconds);

                try {
                    while (result.next()) {
                        Object[] values = new Object[columns];
                        for (int i = 0; i < values.length; i++)
                            values[i] = JDBCSupport.getValue(result, types, i);

                        if (parsed.coalesce != null)
                            parsed.coalesce.coalesce(values);

                        JSONArray array = new JSONArray();
                        for (int i = 0; i < columns; i++)
                            if (!ignore[i]) {
                                Object value = values[i];
                                if (value == null) {
                                    array.add(null);
                                    continue;
                                }

                                // TODO 2 columns can't be written to the same dynamic filename

                                FileSpec spec = fileSpecs[i];
                                if (spec != null) // The column is redirected to its own file
                                {
                                    String relFileName = null;
                                    int startIndex;
                                    if (spec.binary) {
                                        if (spec.generator.isDynamic()) {
                                            String fileName = spec.generator.generateFileName(result);
                                            Resource fileResource = new FileResource(fileName);
                                            spec.out = fileResource.getOutputStream();
                                            spec.index = 0;
                                            relFileName = fileResource.getPathFrom(jsvResource).toString();
                                        } else if (spec.out == null) {
                                            String fileName = spec.generator.generateFileName(result);
                                            Resource fileResource = new FileResource(fileName);
                                            spec.out = fileResource.getOutputStream();
                                        }
                                        if (value instanceof Blob) {
                                            InputStream in = ((Blob) value).getBinaryStream();
                                            startIndex = spec.index;
                                            byte[] buf = new byte[4096];
                                            for (int read = in.read(buf); read >= 0; read = in.read(buf)) {
                                                spec.out.write(buf, 0, read);
                                                spec.index += read;
                                            }
                                            in.close();
                                        } else if (value instanceof byte[]) {
                                            startIndex = spec.index;
                                            spec.out.write((byte[]) value);
                                            spec.index += ((byte[]) value).length;
                                        } else
                                            throw new SourceException(names[i] + " ("
                                                    + value.getClass().getName()
                                                    + ") is not a binary column. Only binary columns like BLOB, RAW, BINARY VARYING can be written to a binary file",
                                                    command.getLocation());
                                        if (spec.generator.isDynamic()) {
                                            spec.out.close();
                                            JSONObject ref = new JSONObject();
                                            ref.set("file", relFileName);
                                            ref.set("size", spec.index - startIndex);
                                            array.add(ref);
                                        } else {
                                            JSONObject ref = new JSONObject();
                                            ref.set("index", startIndex);
                                            ref.set("length", spec.index - startIndex);
                                            array.add(ref);
                                        }
                                    } else {
                                        if (spec.generator.isDynamic()) {
                                            String fileName = spec.generator.generateFileName(result);
                                            Resource fileResource = new FileResource(fileName);
                                            spec.writer = new DeferringWriter(spec.threshold, fileResource,
                                                    jsonWriter.getEncoding());
                                            spec.index = 0;
                                            relFileName = fileResource.getPathFrom(jsvResource).toString();
                                        } else if (spec.writer == null) {
                                            String fileName = spec.generator.generateFileName(result);
                                            Resource fileResource = new FileResource(fileName);
                                            spec.writer = new OutputStreamWriter(fileResource.getOutputStream(),
                                                    jsonWriter.getEncoding());
                                        }
                                        if (value instanceof Blob || value instanceof byte[])
                                            throw new SourceException(names[i]
                                                    + " is a binary column. Binary columns like BLOB, RAW, BINARY VARYING cannot be written to a text file",
                                                    command.getLocation());
                                        if (value instanceof Clob) {
                                            Reader in = ((Clob) value).getCharacterStream();
                                            startIndex = spec.index;
                                            char[] buf = new char[4096];
                                            for (int read = in.read(buf); read >= 0; read = in.read(buf)) {
                                                spec.writer.write(buf, 0, read);
                                                spec.index += read;
                                            }
                                            in.close();
                                        } else {
                                            String val = value.toString();
                                            startIndex = spec.index;
                                            spec.writer.write(val);
                                            spec.index += val.length();
                                        }
                                        if (spec.generator.isDynamic()) {
                                            DeferringWriter writer = (DeferringWriter) spec.writer;
                                            if (writer.isBuffered())
                                                array.add(writer.clearBuffer());
                                            else {
                                                JSONObject ref = new JSONObject();
                                                ref.set("file", relFileName);
                                                ref.set("size", spec.index - startIndex);
                                                array.add(ref);
                                            }
                                            writer.close();
                                        } else {
                                            JSONObject ref = new JSONObject();
                                            ref.set("index", startIndex);
                                            ref.set("length", spec.index - startIndex);
                                            array.add(ref);
                                        }
                                    }
                                } else if (value instanceof Clob)
                                    array.add(((Clob) value).getCharacterStream());
                                else if (binaryFile != null
                                        && (value instanceof Blob || value instanceof byte[])) {
                                    if (binaryFile.out == null) {
                                        String fileName = binaryFile.generator.generateFileName(null);
                                        Resource fileResource = new FileResource(fileName);
                                        binaryFile.out = fileResource.getOutputStream();
                                        if (parsed.binaryGzip)
                                            binaryFile.out = new BufferedOutputStream(
                                                    new GZIPOutputStream(binaryFile.out, 65536), 65536); // TODO Ctrl-C, close the outputstream?
                                    }
                                    int startIndex = binaryFile.index;
                                    if (value instanceof Blob) {
                                        InputStream in = ((Blob) value).getBinaryStream();
                                        byte[] buf = new byte[4096];
                                        for (int read = in.read(buf); read >= 0; read = in.read(buf)) {
                                            binaryFile.out.write(buf, 0, read);
                                            binaryFile.index += read;
                                        }
                                        in.close();
                                    } else {
                                        binaryFile.out.write((byte[]) value);
                                        binaryFile.index += ((byte[]) value).length;
                                    }
                                    JSONObject ref = new JSONObject();
                                    ref.set("index", startIndex);
                                    ref.set("length", binaryFile.index - startIndex);
                                    array.add(ref);
                                } else
                                    array.add(value);
                            }

                        for (ListIterator<Object> i = array.iterator(); i.hasNext();) {
                            Object value = i.next();
                            if (value instanceof java.sql.Date || value instanceof java.sql.Time
                                    || value instanceof java.sql.Timestamp || value instanceof java.sql.RowId)
                                i.set(value.toString());
                        }
                        jsonWriter.write(array);
                        jsonWriter.getWriter().write('\n');

                        if (counter != null && counter.next())
                            processor.getProgressListener()
                                    .println("Exported " + counter.total() + " records.");
                    }
                    if (counter != null && counter.needFinal())
                        processor.getProgressListener().println("Exported " + counter.total() + " records.");
                } finally {
                    // Close files that have been left open
                    for (FileSpec fileSpec : fileSpecs)
                        if (fileSpec != null) {
                            if (fileSpec.out != null)
                                fileSpec.out.close();
                            if (fileSpec.writer != null)
                                fileSpec.writer.close();
                        }
                    if (binaryFile != null && binaryFile.out != null)
                        binaryFile.out.close();
                }
            } finally {
                processor.closeStatement(statement, true);
            }
        } finally {
            jsonWriter.close();
        }
    } catch (IOException e) {
        throw new SystemException(e);
    }

    return true;
}