Example usage for org.apache.commons.csv CSVPrinter flush

List of usage examples for org.apache.commons.csv CSVPrinter flush

Introduction

On this page you can find example usages of the org.apache.commons.csv CSVPrinter flush method.

Prototype

@Override
public void flush() throws IOException 

Source Link

Document

Flushes the underlying stream.

Usage

From source file:canreg.client.gui.analysis.FrequenciesByYearInternalFrame.java

/**
 * Saves the current result table to a user-chosen CSV file. When the table
 * has exactly three columns, a pivoted companion file ("&lt;name&gt;-pivot.csv")
 * is written as well via {@link #createPivot(String)}. Every file created is
 * opened afterwards with Tools.openFile.
 */
@Action
public void saveTableAction() {
    LinkedList<String> filesCreated = new LinkedList<String>();
    if (!resultTable.isVisible()) {
        refresh();
    }
    resultScrollPane.setVisible(false);
    Writer writer = null;
    try {
        String fileName = null;
        String pivotFileName = null;
        if (chooser == null) {
            // Start the chooser in the last-used tables directory when one is remembered.
            String path = localSettings.getProperty(LocalSettings.TABLES_PATH_KEY);
            if (path == null) {
                chooser = new JFileChooser();
            } else {
                chooser = new JFileChooser(path);
            }
        }
        int returnVal = chooser.showSaveDialog(this);
        if (returnVal == JFileChooser.APPROVE_OPTION) {
            try {
                localSettings.setProperty(LocalSettings.TABLES_PATH_KEY,
                        chooser.getSelectedFile().getParentFile().getCanonicalPath());
                fileName = chooser.getSelectedFile().getAbsolutePath();
                // NOTE: the pivot name is derived from the raw selection, before the
                // .csv suffix is forced below (preserves the historical naming).
                pivotFileName = fileName + "-pivot.csv";
                // we force the .csv ending to the file
                if (!(fileName.endsWith(".csv") || fileName.endsWith(".CSV"))) {
                    fileName += ".csv";
                }
            } catch (IOException ex) {
                Logger.getLogger(TableBuilderInternalFrame.class.getName()).log(Level.SEVERE, null, ex);
            }
        } else {
            // cancelled
            return;
        }
        if (fileName == null) {
            // Resolving the selected file failed (already logged above). Bail out:
            // previously execution fell through to new FileWriter(null), which
            // throws an unhandled NullPointerException.
            return;
        }
        writer = new FileWriter(fileName);

        // Write the column names as the CSV header.
        String[] headers = new String[resultTable.getColumnCount()];
        for (int j = 0; j < headers.length; j++) {
            headers[j] = resultTable.getColumnName(j);
        }
        CSVFormat format = CSVFormat.DEFAULT.withDelimiter(',').withHeader(headers);

        CSVPrinter csvPrinter = new CSVPrinter(writer, format);

        Object[] nextLine = new String[resultTable.getColumnCount()];

        // Write one CSV record per table row.
        for (int i = 0; i < resultTable.getRowCount(); i++) {
            for (int j = 0; j < nextLine.length; j++) {
                nextLine[j] = resultTable.getValueAt(i, j).toString();
            }
            csvPrinter.printRecord(nextLine);
        }
        csvPrinter.flush();
        csvPrinter.close();

        // The pivot table is only produced for exactly 3 columns.
        if (headers.length == 3) {
            createPivot(pivotFileName);
            filesCreated.add(pivotFileName);
            JOptionPane.showMessageDialog(this,
                    "Table written to file: " + fileName + "\nPivot table written to:" + pivotFileName, "OK",
                    JOptionPane.INFORMATION_MESSAGE);
        } else {
            JOptionPane.showMessageDialog(this, "Table written to file: " + fileName, "OK",
                    JOptionPane.INFORMATION_MESSAGE);
        }
        filesCreated.add(fileName);

    } catch (IOException ex) {
        JOptionPane.showMessageDialog(this, "File NOT written.\n" + ex.getLocalizedMessage(), "ERROR",
                JOptionPane.ERROR_MESSAGE);
        Logger.getLogger(FrequenciesByYearInternalFrame.class.getName()).log(Level.SEVERE, null, ex);
    } finally {
        try {
            if (writer != null) {
                writer.close();
            }
            // Open every file that was successfully written.
            for (String fn : filesCreated) {
                Tools.openFile(fn);
            }
        } catch (IOException ex) {
            Logger.getLogger(FrequenciesByYearInternalFrame.class.getName()).log(Level.SEVERE, null, ex);
        }
        resultScrollPane.setVisible(true);
    }
}

From source file:javalibs.CSVExtractor.java

/**
 * Writes the CSV to the path specified in the c'tor. Only the columns listed
 * in {@code extractionCols} are written, in the order given by
 * {@code headersInOrder}, with {@code orderedExtractionCols} as the header row.
 *
 * @return The absolute path to the output CSV file
 */
public String writeCSV() {
    // try-with-resources guarantees the writer and printer are closed; the
    // previous version only flushed them, leaking the underlying file handle.
    try (BufferedWriter bw = Files.newBufferedWriter(Paths.get(this.outCSV));
            CSVPrinter printer = new CSVPrinter(bw,
                    CSVFormat.DEFAULT.withHeader(this.orderedExtractionCols))) {

        for (CSVRecord rec : this.inRecords) {
            List<String> writerCells = new ArrayList<>();
            for (String col : this.headersInOrder) {
                // Skip columns the caller did not ask to extract.
                if (!this.extractionCols.contains(col))
                    continue;

                String colVal = null;
                try {
                    colVal = rec.get(col);
                } catch (IllegalArgumentException e) {
                    // The record does not contain the requested column; fatal.
                    log_.err("Could not find column: " + col);
                    log_.die(e);
                }
                writerCells.add(colVal);
            }
            printer.printRecord(writerCells.toArray());
        }
        printer.flush();
    } catch (IOException e) {
        log_.die(e);
    }

    return new File(this.outCSV).getAbsolutePath();
}

From source file:canreg.client.gui.analysis.FrequenciesByYearInternalFrame.java

/**
 * Writes a pivoted version of the 3-column result table to the given file:
 * one row per distinct code (column 1), one column per distinct year
 * (column 0), with the case counts (column 2) as cell values. The code's
 * dictionary description, when available, is prepended as the first column.
 *
 * Silently returns when the table does not have exactly 3 columns, or when
 * the locked variable can no longer be matched against the chosen variables
 * (e.g. after the user changed the variable selection).
 *
 * @param fileName path of the pivot CSV file to create
 */
public void createPivot(String fileName) {
    String[] columnNames = new String[resultTable.getColumnCount()];

    // We need 3 columns to work with
    if (columnNames.length != 3) {
        return;
    }
    // TODO Extend this to at least 4
    String[] nextLine = new String[resultTable.getColumnCount()];

    // Find the column names
    for (int j = 0; j < columnNames.length; j++) {
        columnNames[j] = resultTable.getColumnName(j);
    }

    // variable we lock: the middle column becomes the pivot's row key
    String lockVariable = columnNames[1];

    // code -> (year -> case count); TreeSet keeps the year columns sorted
    HashMap<String, HashMap> data = new HashMap<String, HashMap>();
    HashMap<String, String> years;
    Set<String> allYears = new TreeSet<String>();

    // load up the data from the visible table
    for (int i = 0; i < resultTable.getRowCount(); i++) {
        String year, cases, code;

        for (int j = 0; j < nextLine.length; j++) {
            nextLine[j] = resultTable.getValueAt(i, j).toString();
        }
        year = nextLine[0];
        // Blank years/codes are bucketed under "MISSING" rather than dropped.
        if (year.trim().length() == 0) {
            year = "MISSING";
        }

        allYears.add(year);

        code = nextLine[1];

        if (code.trim().length() == 0) {
            code = "MISSING";
        }

        cases = nextLine[2];

        years = data.get(code);
        if (years == null) {
            years = new HashMap<String, String>();
            data.put(code, years);
        }
        years.put(year, cases);
    }

    // find variables element for the lockedvariable (case-insensitive match)
    DatabaseVariablesListElement lockedDatabaseVariablesListElement = null;
    for (DatabaseVariablesListElement vle : chosenVariables) {
        if (vle.getShortName().compareToIgnoreCase(lockVariable) == 0) {
            lockedDatabaseVariablesListElement = vle;
        }
    }

    if (lockedDatabaseVariablesListElement == null) {
        return;
    } // this happens if user has updated the selection of variables

    // Look up the dictionary used to translate codes into descriptions.
    int dictionaryID = lockedDatabaseVariablesListElement.getDictionaryID();
    Dictionary dict = null;
    if (dictionaryID >= 0) {
        dict = dictionary.get(dictionaryID);
    }

    String[] allYearsArray = allYears.toArray(new String[0]);
    Writer writer = null;
    try {
        writer = new FileWriter(fileName);

        // Header: description column, code column, then one column per year.
        String[] codeArray = { lockedDatabaseVariablesListElement.getFullName(), lockVariable };

        String[] headers = (String[]) ArrayUtils.addAll(codeArray, allYearsArray);

        String[] codes = data.keySet().toArray(new String[0]);
        Arrays.sort(codes);

        CSVFormat format = CSVFormat.DEFAULT.withDelimiter(',').withHeader(headers);

        CSVPrinter csvPrinter = new CSVPrinter(writer, format);

        // write the rows, one per code, in sorted order
        for (String code : codes) {
            LinkedList<String> row = new LinkedList<String>();
            if (dict != null) {
                DictionaryEntry dictionaryEntry = dict.getDictionaryEntry(code);
                if (dictionaryEntry != null) {
                    row.add(dictionaryEntry.getDescription());
                } else {
                    row.add("");
                }
            } else {
                row.add("");
            }
            row.add(code);

            years = data.get(code);
            for (String year : allYears) {
                String cell = years.get(year);
                // Years with no recorded cases for this code are written as "0".
                if (cell == null) {
                    row.add("0");
                } else {
                    row.add(cell);
                }
            }
            csvPrinter.printRecord(row);
        }
        csvPrinter.flush();

    } catch (IOException ex) {
        // JOptionPane.showMessageDialog(this, "File NOT written.\n" + ex.getLocalizedMessage(), "ERROR", JOptionPane.ERROR_MESSAGE);
        Logger.getLogger(FrequenciesByYearInternalFrame.class.getName()).log(Level.SEVERE, null, ex);
    } finally {
        // Closing the writer also flushes anything buffered by the printer.
        try {
            if (writer != null) {
                writer.close();
            }
        } catch (IOException ex) {
            Logger.getLogger(FrequenciesByYearInternalFrame.class.getName()).log(Level.SEVERE, null, ex);
        }
    }
}

From source file:de.speexx.jira.jan.command.transition.IssueTransitionFetcher.java

/**
 * Emits one CSV record (RFC 4180, UTF-8, stdout) per stage transition of each
 * issue. The header row is printed once per process: only when {@code doHeader}
 * is still false, after which it is flipped to true.
 */
void exportAsCsv(final List<IssueInfo> issues, final AtomicBoolean doHeader) {
    try {
        final CSVPrinter printer = new CSVPrinter(new OutputStreamWriter(System.out, StandardCharsets.UTF_8),
                RFC4180);

        final String[] columns = new String[] { "issue-key", "type", "issue-creation-datetime", "priority",
                "resolution", "from-stage", "stage", "stage-enter-datetime", "stage-duration" };

        if (!doHeader.get()) {
            printer.printRecord((Object[]) columns);
            doHeader.set(true);
        }

        issues.forEach(issue -> issue.stageInfoAsDuration().forEach(stage -> {
            final String[] record = new String[columns.length];
            record[0] = issue.key;
            record[1] = issue.issueType;
            record[2] = DateTimeFormatter.ISO_DATE_TIME.format(issue.created);
            record[3] = issue.priority;
            record[4] = resolutionAdjustment(issue);

            // A missing from-stage is rendered as the empty string.
            record[5] = stage.fromStageName == null ? "" : "" + stage.fromStageName;
            record[6] = "" + stage.stageName;
            record[7] = DateTimeFormatter.ISO_DATE_TIME.format(stage.stageStart);
            record[8] = "" + stage.getDurationSeconds();

            try {
                printer.printRecord((Object[]) record);
            } catch (final IOException e) {
                // Lambdas cannot throw checked exceptions; wrap and rethrow.
                throw new JiraAnalyzeException(e);
            }
        }));
        printer.flush();

    } catch (final IOException e) {
        throw new JiraAnalyzeException(e);
    }
}

From source file:javalibs.CSVDataNormalizer.java

/**
 * Writes all records to {@code savePath}, replacing the values of every
 * column listed in {@code columnsToNormalize} with its value normalized to
 * [0, 1] (clamped, with a warning, when the computed normal falls outside
 * that range). All other columns are copied through unchanged.
 */
public void normalize() {
    // try-with-resources guarantees the writer and printer are closed; the
    // previous version only flushed them, leaking the underlying file handle.
    try (BufferedWriter bw = Files.newBufferedWriter(Paths.get(this.savePath));
            CSVPrinter printer = new CSVPrinter(bw,
                    CSVFormat.DEFAULT.withHeader(this.headersInOrder))) {

        for (CSVRecord rec : this.allRecords) {
            List<String> writerCells = new ArrayList<>();
            for (int i = 0; i < this.numCols; ++i) {
                String colName = this.colNumToName.get(i);
                if (columnsToNormalize.contains(colName)) {
                    double curVal = NumUtils.getDoubleFromStr(rec.get(colName));
                    // NOTE(review): assumes right() holds the min and left() the
                    // max of the column — confirm against NumUtils' signature.
                    Pair<Double, Double> maxMin = this.colsToMaxMinPairs.get(colName);
                    double normal = NumUtils.normalizeBetweenZeroOne(maxMin.right(), maxMin.left(), curVal);
                    // Clamp out-of-range normals instead of writing invalid values.
                    if (normal > 1.0) {
                        log_.warn("Normalized value greater than 1.0: " + normal + " from curVal: " + curVal
                                + " setting normal to 1.");
                        normal = 1.0;
                    } else if (normal < 0.0) {
                        log_.warn("Normalized value less than 0.0: " + normal + " from curVal : " + curVal
                                + " setting normal to 0.");
                        normal = 0.0;
                    }

                    writerCells.add(Double.toString(normal));
                } else
                    writerCells.add(rec.get(i));
            }
            printer.printRecord(writerCells.toArray());
        }
        printer.flush();
    } catch (IOException e) {
        log_.die(e);
    }
}

From source file:de.speexx.jira.jan.command.issuequery.CsvCreator.java

/**
 * Prints one issue's data as RFC 4180 CSV records to standard output (UTF-8).
 *
 * With no change history, a single record is emitted: the current field
 * values padded with empty change columns. Otherwise one record is emitted
 * per historical change entry; within each record, only the column group
 * belonging to the changed field carries data and every other history field's
 * group is left empty.
 *
 * @param issueData         the issue whose current and historical values are printed
 * @param currentFieldNames paths of the current-value fields to emit first
 * @param historyFieldNames the history fields that define the change column groups
 * @param temporalOutput    controls how many columns each change entry occupies
 */
void printIssueData(final IssueData issueData, final List<FieldNamePath> currentFieldNames,
        final List<FieldName> historyFieldNames, final TemporalChangeOutput temporalOutput) {
    assert !Objects.isNull(issueData);
    assert !Objects.isNull(currentFieldNames);
    assert !Objects.isNull(historyFieldNames);
    assert !Objects.isNull(temporalOutput);

    final List<String> currentFieldEntries = fetchCurrentFieldEntries(issueData, currentFieldNames);

    try {
        final CSVPrinter csvPrinter = new CSVPrinter(new OutputStreamWriter(System.out, StandardCharsets.UTF_8),
                RFC4180);

        if (issueData.getHistoricalCount() == 0) {
            // No history: one record, change columns all empty.
            final int fieldsPerChangeEntry = calculateHistoricalFieldSize(temporalOutput);
            final int max = historyFieldNames.size() * fieldsPerChangeEntry;
            final List<String> out = new ArrayList(currentFieldEntries);
            addEmptyChangeData(out, max);
            csvPrinter.printRecord(out);

        } else {
            final int fieldsPerChangeEntry = calculateHistoricalFieldSize(temporalOutput);
            final int historyFieldNamesSize = historyFieldNames.size();

            for (int idx = 0; idx < historyFieldNamesSize; idx++) {
                final FieldName fieldName = historyFieldNames.get(idx);

                final List<HistoricalDataEntry> historicalData = issueData.getHistoricalIssueData(fieldName);
                // Durations are measured from the issue's creation date onward.
                LocalDateTime lastChangeDate = issueData.getCreatedDate()
                        .orElseThrow(() -> new IllegalStateException("No createdDate available"));

                for (final HistoricalDataEntry entry : historicalData) {
                    final List<String> out = new ArrayList();
                    // Fill every field's column group: empty unless it is the
                    // field this change entry belongs to.
                    for (int i = 0; i < historyFieldNamesSize; i++) {
                        if (i != idx) {
                            addEmptyChangeData(out, fieldsPerChangeEntry);
                        } else {
                            lastChangeDate = addChangeData(out, entry, temporalOutput, lastChangeDate);
                        }
                    }
                    final List<String> outList = new ArrayList<>(currentFieldEntries);
                    outList.addAll(out);
                    csvPrinter.printRecord(outList.toArray());
                }
            }
        }

        csvPrinter.flush();
    } catch (final IOException e) {
        throw new JiraAnalyzeException(e);
    }
}

From source file:com.siemens.sw360.portal.portlets.admin.UserPortlet.java

/**
 * Exports all Liferay users as a CSV download named "Users.csv". Users
 * without an email address or department are skipped. Failure to fetch the
 * user list is logged and results in an empty export (header row only).
 */
public void backUpUsers(ResourceRequest request, ResourceResponse response)
        throws PortletException, IOException, SystemException, PortalException {
    List<User> liferayUsers;
    try {
        liferayUsers = UserLocalServiceUtil.getUsers(QueryUtil.ALL_POS, QueryUtil.ALL_POS);
    } catch (SystemException e) {
        log.error("Could not get user List from liferay", e);
        liferayUsers = Collections.emptyList();
    }

    final ByteArrayOutputStream buffer = new ByteArrayOutputStream();
    Writer bufferedOut = new BufferedWriter(new OutputStreamWriter(buffer));

    CSVPrinter printer = new CSVPrinter(bufferedOut, CommonUtils.sw360CsvFormat);

    // Header row.
    printer.printRecord("GivenName", "Lastname", "Email", "Department", "UserGroup", "GID", "isMale",
            "PasswdHash");
    for (User liferayUser : liferayUsers) {

        String givenName = liferayUser.getFirstName();
        String surname = liferayUser.getLastName();
        String email = liferayUser.getEmailAddress();
        List<Organization> organizations = liferayUser.getOrganizations();

        // Department is taken from the user's first organization, if any.
        String department = (organizations != null && organizations.size() > 0)
                ? organizations.get(0).getName()
                : "";

        List<String> roleNames = new ArrayList<>();
        for (Role role : liferayUser.getRoles()) {
            roleNames.add(role.getName());
        }

        // Map the first matching role (in UserGroup declaration order) back
        // to its UserGroup name.
        String userGroup = "";
        for (UserGroup group : UserGroup.values()) {
            if (roleNames.contains(getRoleConstantFromUserGroup(group))) {
                userGroup = group.toString();
                break;
            }
        }

        String gid = liferayUser.getOpenId();
        boolean isMale = liferayUser.isMale();
        String passwordHash = liferayUser.getPassword();
        // Skip accounts we cannot identify or place in a department.
        if (isNullOrEmpty(email) || isNullOrEmpty(department))
            continue;
        printer.printRecord(givenName, surname, email, department, userGroup, gid, isMale, passwordHash);
    }

    printer.flush();
    printer.close();

    PortletResponseUtil.sendFile(request, response, "Users.csv",
            new ByteArrayInputStream(buffer.toByteArray()), "text/csv");
}

From source file:edu.harvard.mcz.imagecapture.RunnableJobReportDialog.java

/**
 * Serializes the current table model to a timestamped CSV report file
 * ("jobreport_&lt;yyyyMMdd_HHmmss&gt;.csv") in the working directory, embedding
 * the text area's contents as CSV header comments. Recognized column counts
 * (9, 6, 5, 4) get a proper header row; any other count falls back to a
 * headerless format.
 */
protected void serializeTableModel() {
    PrintWriter out = null;
    CSVPrinter writer = null;
    try {
        int cols = jTable.getModel().getColumnCount();
        // Fallback format for unrecognized column counts. NOTE(review): this
        // fallback sets no comment marker, so the header comments are silently
        // dropped in that case — confirm that is intended.
        CSVFormat csvFormat = CSVFormat.DEFAULT.withQuoteMode(QuoteMode.ALL)
                .withHeaderComments(jTextArea.getText());
        TableModel model = jTable.getModel();
        switch (cols) {
        case 9:
            csvFormat = CSVFormat.DEFAULT.withQuoteMode(QuoteMode.ALL)
                    .withHeader(model.getColumnName(0), model.getColumnName(1), model.getColumnName(2),
                            model.getColumnName(3), model.getColumnName(4), model.getColumnName(5),
                            model.getColumnName(6), model.getColumnName(7), model.getColumnName(8))
                    .withCommentMarker('*').withHeaderComments(jTextArea.getText());
            break;
        case 6:
            csvFormat = CSVFormat.DEFAULT.withQuoteMode(QuoteMode.ALL)
                    .withHeader(model.getColumnName(0), model.getColumnName(1), model.getColumnName(2),
                            model.getColumnName(3), model.getColumnName(4), model.getColumnName(5))
                    .withCommentMarker('*').withHeaderComments(jTextArea.getText());
            break;
        case 5:
            csvFormat = CSVFormat.DEFAULT.withQuoteMode(QuoteMode.ALL)
                    .withHeader(model.getColumnName(0), model.getColumnName(1), model.getColumnName(2),
                            model.getColumnName(3), model.getColumnName(4))
                    .withCommentMarker('*').withHeaderComments(jTextArea.getText());
            break;
        case 4:
            csvFormat = CSVFormat.DEFAULT.withQuoteMode(QuoteMode.ALL)
                    .withHeader(model.getColumnName(0), model.getColumnName(1), model.getColumnName(2),
                            model.getColumnName(3))
                    .withCommentMarker('*').withHeaderComments(jTextArea.getText());
            break;
        }

        log.debug(jTextArea.getText());
        log.debug(csvFormat.getHeaderComments());

        Date now = new Date();
        // BUG FIX: the pattern previously read "yyyymmdd_HHmmss" — in
        // SimpleDateFormat lower-case 'mm' is MINUTES; 'MM' is the month.
        SimpleDateFormat dateFormat = new SimpleDateFormat("yyyyMMdd_HHmmss");
        String time = dateFormat.format(now);
        String filename = "jobreport_" + time + ".csv";
        out = new PrintWriter(filename);

        writer = new CSVPrinter(out, csvFormat);
        // Flush now so the header/comment block reaches disk even if writing
        // a later row fails.
        writer.flush();

        int rows = jTable.getModel().getRowCount();
        for (int i = 0; i < rows; i++) {
            ArrayList<String> values = new ArrayList<String>();
            for (int col = 0; col < cols; col++) {
                values.add((String) jTable.getModel().getValueAt(i, col));
            }

            writer.printRecord(values);
        }
        writer.flush();
        writer.close();
        // BUG FIX: JOptionPane.OK_OPTION is a return value, not a message
        // type; passing it here rendered the success dialog with the error
        // icon. Use INFORMATION_MESSAGE.
        JOptionPane.showMessageDialog(Singleton.getSingletonInstance().getMainFrame(),
                "Saved report to file: " + filename, "Report to CSV file", JOptionPane.INFORMATION_MESSAGE);
    } catch (IOException e) {
        // Log with the class logger instead of printStackTrace().
        log.error(e.getMessage(), e);
    } finally {
        // Best-effort cleanup: close failures here are deliberately ignored.
        try {
            if (out != null) {
                out.close();
            }
        } catch (Exception ignored) {
        }
        try {
            if (writer != null) {
                writer.close();
            }
        } catch (Exception ignored) {
        }

    }
}

From source file:ai.grakn.test.graql.analytics.ScalingTestIT.java

/**
 * Scaling benchmark: measures count-query latency while growing the graph in
 * steps, for each configured worker count, and appends the timings to
 * "countIT.txt" as CSV (one row per graph size: concept count followed by
 * one averaged time per worker count). Ignored by default — it is a
 * long-running, environment-dependent benchmark, not a unit test.
 */
@Ignore
@Test
public void countIT() throws InterruptedException, ExecutionException, InvalidGraphException, IOException {
    CSVPrinter printer = createCSVPrinter("countIT.txt");

    // Insert super nodes into graph
    simpleOntology(keyspace);

    // get a count before adding any data, so expected totals can be offset
    Long emptyCount = Grakn.session(Grakn.DEFAULT_URI, keyspace).open(GraknTxType.WRITE).admin()
            .getTinkerTraversal().count().next();
    LOGGER.info("gremlin count before data is: " + emptyCount);

    Set<String> superNodes = makeSuperNodes(keyspace);

    int previousGraphSize = 0;
    for (int graphSize : graphSizes) {
        LOGGER.info("current scale - super " + NUM_SUPER_NODES + " - nodes " + graphSize);
        Long conceptCount = (long) (NUM_SUPER_NODES * (graphSize + 1) + graphSize);
        // First cell of the row: the expected concept count at this scale.
        printer.print(String.valueOf(conceptCount));

        LOGGER.info("start generate graph " + System.currentTimeMillis() / 1000L + "s");
        // Only add the delta since the previous iteration's graph size.
        addNodesToSuperNodes(keyspace, superNodes, previousGraphSize, graphSize);
        previousGraphSize = graphSize;
        LOGGER.info("stop generate graph " + System.currentTimeMillis() / 1000L + "s");

        Long gremlinCount = (long) (NUM_SUPER_NODES * (3 * graphSize + 1) + graphSize);
        LOGGER.info("gremlin count is: " + Grakn.session(Grakn.DEFAULT_URI, keyspace).open(GraknTxType.WRITE)
                .admin().getTinkerTraversal().count().next());
        gremlinCount += emptyCount;
        LOGGER.info("expected gremlin count is: " + gremlinCount);

        for (int workerNumber : workerNumbers) {
            LOGGER.info("Setting number of workers to: " + workerNumber);

            Long countTime = 0L;

            // Repeat the measurement and accumulate total elapsed time.
            for (int i = 0; i < REPEAT; i++) {
                LOGGER.info("repeat number: " + i);
                Long startTime = System.currentTimeMillis();
                Long count = getCountQuery(Grakn.DEFAULT_URI, keyspace, workerNumber).execute();
                assertEquals(conceptCount, count);
                LOGGER.info("count: " + count);
                Long stopTime = System.currentTimeMillis();
                countTime += stopTime - startTime;
                LOGGER.info("count time: " + countTime / ((i + 1) * 1000));
            }

            // Average over repeats and convert milliseconds to seconds.
            countTime /= REPEAT * 1000;
            LOGGER.info("time to count: " + countTime);
            printer.print(String.valueOf(countTime));
        }
        // End the CSV row for this graph size and persist it immediately.
        printer.println();
        printer.flush();
    }

    printer.flush();
    printer.close();
}

From source file:com.gs.obevo.db.apps.reveng.CsvStaticDataWriter.java

/**
 * Dumps one database table to "&lt;directory&gt;/&lt;tableName&gt;.csv". Columns are
 * selected from table metadata; an update-time column (if one matches) is
 * excluded from the data and recorded instead as a METADATA directive on the
 * first line. Date/timestamp values are formatted, and the CSV escape
 * character is doubled inside string values so the file can be re-read with
 * the same format. Files that end up empty (header only) are deleted.
 *
 * @param dbtype            platform used to build the schema-qualified table name
 * @param schema            physical schema the table lives in
 * @param tableName         table to export
 * @param directory         output directory (created if missing)
 * @param updateTimeColumns candidate update-time column names, may be null
 * @param csvFormat         CSV format used for writing
 */
private void writeTable(DbPlatform dbtype, PhysicalSchema schema, String tableName, File directory,
        MutableSet<String> updateTimeColumns, final CSVFormat csvFormat) {
    directory.mkdirs();
    DaTable table = this.metadataManager.getTableInfo(schema.getPhysicalName(), tableName,
            new DaSchemaInfoLevel().setRetrieveTableColumns(true));
    if (table == null) {
        System.out.println("No data found for table " + tableName);
        return;
    }
    MutableList<String> columnNames = table.getColumns().collect(DaNamedObject.TO_NAME).toList();
    // Detect the first configured update-time column present on this table.
    final String updateTimeColumnForTable = updateTimeColumns == null ? null
            : updateTimeColumns.detect(Predicates.in(columnNames));
    if (updateTimeColumnForTable != null) {
        // Excluded from the SELECT; recorded via the METADATA line instead.
        columnNames.remove(updateTimeColumnForTable);
        System.out.println("Will mark " + updateTimeColumnForTable + " as an updateTimeColumn on this table");
    }

    final File tableFile = new File(directory, tableName + ".csv");
    final String selectSql = String.format("SELECT %s FROM %s%s", columnNames.makeString(", "),
            dbtype.getSchemaPrefix(schema), tableName);

    // using the jdbcTempate and ResultSetHandler to avoid sql-injection warnings in findbugs
    sqlExecutor.executeWithinContext(schema, new Procedure<Connection>() {
        @Override
        public void value(Connection conn) {
            sqlExecutor.getJdbcTemplate().query(conn, selectSql, new ResultSetHandler<Void>() {
                @Override
                public Void handle(ResultSet rs) throws SQLException {
                    CSVPrinter writer = null;
                    try {
                        FileWriter fw = new FileWriter(tableFile);
                        writer = new CSVPrinter(fw, csvFormat);

                        if (updateTimeColumnForTable != null) {
                            String metadataLine = String.format("//// METADATA %s=\"%s\"",
                                    TextMarkupDocumentReader.ATTR_UPDATE_TIME_COLUMN, updateTimeColumnForTable);
                            fw.write(metadataLine + "\n"); // writing using the FileWriter directly to avoid having the quotes
                            // delimited
                        }

                        DateFormat dateFormat = new SimpleDateFormat("yyyy-MM-dd");
                        DateFormat dateTimeFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.SSS");

                        int columnCount = rs.getMetaData().getColumnCount();

                        // print headers (JDBC columns are 1-indexed)
                        for (int i = 1; i <= columnCount; ++i) {
                            writer.print(rs.getMetaData().getColumnName(i));
                        }
                        writer.println();

                        while (rs.next()) {
                            for (int i = 1; i <= columnCount; ++i) {
                                Object object = rs.getObject(i);
                                if (object != null) {
                                    switch (rs.getMetaData().getColumnType(i)) {
                                    case Types.DATE:
                                        object = dateFormat.format(object);
                                        break;
                                    case Types.TIMESTAMP:
                                        object = dateTimeFormat.format(object);
                                        break;
                                    case Types.LONGVARCHAR:
                                    case Types.VARCHAR:
                                    case Types.CHAR:
                                        // escape the string text if declared so that the input CSV can also handle the escapes
                                        if (csvFormat.getEscapeCharacter() != null
                                                && object instanceof String) {
                                            object = ((String) object).replace(
                                                    "" + csvFormat.getEscapeCharacter(),
                                                    "" + csvFormat.getEscapeCharacter()
                                                            + csvFormat.getEscapeCharacter());
                                        }
                                        break;
                                    }
                                }
                                writer.print(object);
                            }

                            writer.println();
                        }

                        writer.flush();
                    } catch (IOException e) {
                        throw new RuntimeException(e);
                    } finally {
                        IOUtils.closeQuietly(writer);
                    }

                    return null;
                }
            });
        }
    });

    // A "blank" file has just the header, plus the METADATA line when present.
    int blankFileSize = updateTimeColumnForTable == null ? 1 : 2;

    if (!tableFile.canRead() || FileUtilsCobra.readLines(tableFile).size() <= blankFileSize) {
        System.out.println("No data found for table " + tableName + "; will clean up file");
        FileUtils.deleteQuietly(tableFile);
    }
}