Example usage for org.apache.commons.csv CSVPrinter CSVPrinter

Introduction

On this page you can find example usage for the org.apache.commons.csv CSVPrinter constructor CSVPrinter(Appendable, CSVFormat).

Prototype

public CSVPrinter(final Appendable out, final CSVFormat format) throws IOException 

Document

Creates a printer that will print values to the given stream following the CSVFormat.
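
Before the examples from real projects below, here is a minimal, self-contained sketch of the constructor in action. It is illustrative only: the class name, header names, and sample records are invented for the demo, and any Appendable (StringWriter, FileWriter, a plain StringBuilder, ...) can serve as the output target.

import java.io.IOException;
import java.io.StringWriter;

import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVPrinter;

public class CSVPrinterSketch {
    public static void main(String[] args) throws IOException {
        StringWriter out = new StringWriter();

        // The constructor throws IOException because it may write the header
        // record to the output immediately.
        try (CSVPrinter printer = new CSVPrinter(out, CSVFormat.DEFAULT.withHeader("id", "name"))) {
            printer.printRecord(1, "Alice"); // values are quoted/escaped per the format
            printer.printRecord(2, "Bob");
        }

        System.out.print(out); // id,name / 1,Alice / 2,Bob (CRLF-separated)
    }
}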

Usage

From source file:com.rodaxsoft.mailgun.CampaignManager.java

/**
 * Saves campaign events to a CSV file with the following format:
 * <code>&lt;campaign name&gt;_(&lt;campaign id&gt;)_&lt;timestamp&gt;.csv</code>
 * @param campaignId The campaign ID
 * @throws ContextedException if a processing error occurs
 * @throws IOException if an I/O error occurs
 */
void saveCampaignEventsToCSV(String campaignId) throws ContextedException, IOException {

    Campaign campaign = getCampaign(campaignId);

    SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss");

    String dateTime = format.format(new Date());
    String fileName;

    if (campaign != null) {

        String name = StringUtils.replace(campaign.getName(), " ", "_");
        fileName = new StringBuilder(name).append("_(").append(campaignId).append(")_").append(dateTime)
                .append(".csv").toString();
    } else {
        fileName = campaignId + "_" + dateTime + ".csv";
    }

    CSVPrinter csvPrinter = null;
    PrintWriter pw = null;
    CSVFormat csvFormat = null;
    try {

        pw = new PrintWriter(fileName);
        final List<Map<String, Object>> events = getEvents(campaignId);

        for (Map<String, Object> map : events) {

            if (null == csvPrinter) {
                final Set<String> keySet = map.keySet();
                int size = keySet.size();
                String[] keys = keySet.toArray(new String[size]);
                csvFormat = CSVFormat.DEFAULT.withHeader(keys);
                csvPrinter = new CSVPrinter(pw, csvFormat);
            }
            // Expected keys: city, domain, tags, timestamp, region, ip, country, recipient, event, user_vars

            String[] headers = csvFormat.getHeader();
            for (String key : headers) {
                csvPrinter.print(map.get(key));
            }

            csvPrinter.println();
        }

    } finally {

        if (csvPrinter != null) {
            csvPrinter.flush();
        }

        // Closing the printer also closes the underlying writer; close pw as well
        // in case no events were returned and the printer was never created.
        IOUtils.closeQuietly(csvPrinter);
        IOUtils.closeQuietly(pw);
    }

}

From source file:br.edimarmanica.weir2.integration.ScoredPairs.java

private void persiste(Site site1, String rule1, Site site2, String rule2, double score) {
    List<String> dataRecord = new ArrayList<>();
    dataRecord.add(site1.toString());
    dataRecord.add(rule1);
    dataRecord.add(site2.toString());
    dataRecord.add(rule2);
    dataRecord.add(String.valueOf(score));

    File dirOutput = new File(Paths.PATH_WEIR_V2 + "/" + domain.getPath());
    dirOutput.mkdirs();

    File file = new File(dirOutput.getAbsolutePath() + "/scores.csv");

    CSVFormat format;
    if (append) {
        format = CSVFormat.EXCEL;
    } else {
        String[] HEADER = { "SITE1", "RULE1", "SITE2", "RULE2", "SCORE" };
        format = CSVFormat.EXCEL.withHeader(HEADER);
    }

    try (Writer out = new FileWriter(file, append)) {
        try (CSVPrinter csvFilePrinter = new CSVPrinter(out, format)) {
            csvFilePrinter.printRecord(dataRecord);
        }
    } catch (IOException ex) {
        Logger.getLogger(RulesFilter.class.getName()).log(Level.SEVERE, null, ex);
    }
    append = true;
}

From source file:com.itemanalysis.jmetrik.file.JmetrikFileWriter.java

/**
 * Opens a connection to the file by instantiating an OutputStreamWriter and a CSVPrinter.
 * Assumes the output file is permanent rather than a temporary file that should be
 * deleted.
 *
 * @throws IOException
 */
public void openConnection() throws IOException {
    writer = new BufferedWriter(new OutputStreamWriter(Files.newOutputStream(file)));
    printer = new CSVPrinter(writer, CSVFormat.DEFAULT.withCommentMarker('#'));
}

From source file:br.edimarmanica.trinity.intrasitemapping.manual.OffsetToRule.java

private void print(String page, List<String> values) {
    File dir = new File(Paths.PATH_TRINITY + "/" + site.getPath() + "/extracted_values/");

    if (!append) {
        FileUtils.deleteDir(dir);
        dir.mkdirs();
    }

    for (int ruleID = 0; ruleID < values.size(); ruleID++) {

        File file = new File(dir.getAbsolutePath() + "/rule_" + ruleID + ".csv");
        CSVFormat format;
        if (append) {
            format = CSVFormat.EXCEL;
        } else {
            format = CSVFormat.EXCEL.withHeader(header);
        }

        try (Writer out = new FileWriter(file, append)) {
            try (CSVPrinter csvFilePrinter = new CSVPrinter(out, format)) {
                List<String> dataRecord = new ArrayList<>();
                dataRecord.add(page);
                dataRecord.add(values.get(ruleID));
                csvFilePrinter.printRecord(dataRecord);
            }
        } catch (IOException ex) {
            Logger.getLogger(Printer.class.getName()).log(Level.SEVERE, null, ex);
        }
    }
    append = true;
}

From source file:ddf.catalog.transformer.csv.CsvQueryResponseTransformer.java

private Appendable writeSearchResultsToCsv(final SourceResponse upstreamResponse,
        Map<String, String> columnAliasMap, List<AttributeDescriptor> sortedAttributeDescriptors)
        throws CatalogTransformerException {
    StringBuilder stringBuilder = new StringBuilder();

    try {
        CSVPrinter csvPrinter = new CSVPrinter(stringBuilder, CSVFormat.RFC4180);
        printColumnHeaders(csvPrinter, sortedAttributeDescriptors, columnAliasMap);

        upstreamResponse.getResults().stream().map(Result::getMetacard)
                .forEach(mc -> printMetacard(csvPrinter, mc, sortedAttributeDescriptors));

        return csvPrinter.getOut();
    } catch (IOException ioe) {
        throw new CatalogTransformerException(ioe.getMessage(), ioe);
    }
}

From source file:com.apkcategorychecker.writer.WriterCSV.java

@Override
public void createHeader(String _csvPath) throws IOException {
    /*--Create the CSVFormat object--*/

    CSVFormat format = CSVFormat.EXCEL.withHeader().withDelimiter(',');

    /*--Writing in a CSV file--*/

    File _fileCSV = new File(_csvPath);
    FileWriter _out = new FileWriter(_fileCSV);
    // Note: withDelimiter('#') here overrides the ',' delimiter configured above.
    CSVPrinter printer = new CSVPrinter(_out, format.withDelimiter('#'));
    System.out.println("Creating the CSV file....");
    try {
        printer.printRecord("App_ID", "APK_File_Name", "APK_File_Path", "APK_Package", "Main_Framework",
                "Base_Framework", "HTML", "JS", "CSS", "Android_Debuggable", "Android_Permissions",
                "Android_MinSdkVersion", "Android_MaxSdkVersion", "Android_TargetSdkVersion",
                "File_Size(Bytes)", "Start_Analysis_Time(milliseconds)", "Duration_Analysis_Time(milliseconds)",
                "Decode_Success");
    } catch (IOException ex) {
        Logger.getLogger(WriterCSV.class.getName()).log(Level.SEVERE, null, ex);
    }
    printer.close();

}

From source file:com.streamsets.pipeline.lib.jdbc.JdbcLoadRecordWriter.java

@Override
public List<OnRecordErrorException> writeBatch(Iterator<Record> recordIterator) throws StageException {
    final List<OnRecordErrorException> errorRecords = new LinkedList<>();
    if (!recordIterator.hasNext()) {
        return errorRecords;
    }

    // Assume all records have the same columns.
    final Record first = recordIterator.next();
    SortedMap<String, String> columnsToParameters = recordReader.getColumnsToParameters(first,
            OperationType.LOAD_CODE, getColumnsToParameters(), getColumnsToFields());
    if (columnsToParameters.isEmpty()) {
        throw new StageException(JdbcErrors.JDBC_22);
    }

    final Set<String> columnNames = columnsToParameters.keySet();
    final String loadSql = "LOAD DATA LOCAL INFILE '' " + duplicateKeyAction.getKeyword() + " INTO TABLE "
            + getTableName() + " (" + Joiner.on(", ").join(columnNames) + ")";
    try (Connection connection = getDataSource().getConnection()) {
        Connection conn = connection.unwrap(Connection.class);
        try (PreparedStatement statement = conn.prepareStatement(loadSql)) {
            PipedInputStream is = new PipedInputStream();
            PipedOutputStream os = new PipedOutputStream(is);
            statement.getClass().getMethod("setLocalInfileInputStream", InputStream.class).invoke(statement,
                    is);

            Future<?> future = loadOutputExecutor.submit(() -> {
                try (OutputStreamWriter writer = new OutputStreamWriter(os)) {
                    CSVPrinter printer = new CSVPrinter(writer, CSVFormat.MYSQL);
                    Record record = first;
                    while (record != null) {
                        int opCode = getOperationCode(record, errorRecords);
                        if (opCode == OperationType.LOAD_CODE) {
                            for (String column : columnNames) {
                                Field field = record.get(getColumnsToFields().get(column));
                                printer.print(field.getValue());
                            }
                            printer.println();
                        } else if (opCode > 0) {
                            LOG.debug("Sending record to error due to unsupported operation {}", opCode);
                            errorRecords.add(new OnRecordErrorException(record, JdbcErrors.JDBC_70, opCode));
                        } else {
                            // It should be added to the error records.
                        }
                        record = recordIterator.hasNext() ? recordIterator.next() : null;
                    }
                } catch (IOException e) {
                    throw new RuntimeException(e);
                }
            });

            if (LOG.isDebugEnabled()) {
                LOG.debug("Executing query: {}", statement.toString());
            }
            statement.execute();
            future.get();
        }
        connection.commit();
    } catch (SQLException e) {
        handleSqlException(e);
    } catch (Exception e) {
        throw new StageException(JdbcErrors.JDBC_14, e.getMessage(), e);
    }
    return errorRecords;
}

From source file:com.px100systems.util.CsvParser.java

/**
 * Export - the opposite of parseHeaderlessAsMap().
 *
 * @param lines the lines to write, each a list of String field values
 * @return the CSV text
 */
public String write(List<List<String>> lines) {
    StringBuilder result = new StringBuilder();

    Writer writer = new StringBuilderWriter(result);
    try {
        // The CSVPrinter constructor declares IOException, so it must sit inside the try.
        CSVPrinter printer = new CSVPrinter(writer, csvStrategy);
        for (List<String> line : lines)
            printer.printRecord(line);
    } catch (IOException e) {
        throw new RuntimeException(e);
    } finally {
        IOUtils.closeQuietly(writer);
    }

    return result.toString();
}

From source file:com.coverity.report.analysis.ProtecodeSCToolProcessor.java

@Override
public void readData() throws ToolProcessorException {
    componentsDataList = restService.getComponentsInfo(config.productId);

    vulnerabilitiesDataList = restService.getVulnerabilitiesInfo(config.productId);

    systemInfo = restService.getSystemInfo();

    productScan = restService.getProductScanResult(config.productId);
    try {
        File componentsTempFile = File.createTempFile(Personality.getInstance().tempFileNameBase(), ".csv");
        File vulnerabilitiesTempFile = File.createTempFile(Personality.getInstance().tempFileNameBase(),
                ".csv");

        String[] componentsHeadings = new String[] { "component name", "version", "license", "license type",
                "vulnerability count", "object" };
        try (FileWriter writer = new FileWriter(componentsTempFile)) {
            CSVPrinter printer = new CSVPrinter(writer, CSVFormat.EXCEL);
            printer.printRecord(componentsHeadings);
            componentsDataList.stream().forEach(info -> {
                String[] componentsRecord = new String[componentsHeadings.length];
                componentsRecord[0] = info.getComponent();
                componentsRecord[1] = info.getVersion();
                componentsRecord[2] = info.getLicense();
                componentsRecord[3] = info.getLicenseType();
                componentsRecord[4] = String.valueOf(info.getVulnerabilityCount());
                componentsRecord[5] = info.getObject();
                try {
                    printer.printRecord(componentsRecord);
                } catch (Exception ignored) {
                }

            });
            dataFiles.add(new DataFile("protecode-components.csv", componentsTempFile));
        } catch (IOException e) {
            throw makeException("Could not write protecode components data to CSV file "
                    + componentsTempFile.getAbsolutePath(), e);
        }

        String[] vulnerabilitiesHeadings = new String[] { "component", "version", "CVE", "CVSS" };
        try (FileWriter writer = new FileWriter(vulnerabilitiesTempFile)) {
            CSVPrinter printer = new CSVPrinter(writer, CSVFormat.EXCEL);
            printer.printRecord(vulnerabilitiesHeadings);
            vulnerabilitiesDataList.stream().forEach(info -> {
                String[] vulnerabilitiesRecord = new String[vulnerabilitiesHeadings.length];
                vulnerabilitiesRecord[0] = info.getComponent();
                vulnerabilitiesRecord[1] = info.getVersion();
                vulnerabilitiesRecord[2] = info.getCve();
                vulnerabilitiesRecord[3] = String.valueOf(info.getCvss());
                try {
                    printer.printRecord(vulnerabilitiesRecord);
                } catch (Exception ignored) {
                }

            });
            dataFiles.add(new DataFile("protecode-vulnerabilities.csv", vulnerabilitiesTempFile));
        } catch (IOException e) {
            throw makeException("Could not write protecode vulnerabilities to CSV file "
                    + vulnerabilitiesTempFile.getAbsolutePath(), e);
        }
    } catch (IOException e) {
        throw makeException("Cannot create temporary file", e);
    }
}

From source file:co.cask.hydrator.sinks.KafkaProducer.java

@Override
public int write(Iterable<StructuredRecord> objects, final DataWriter dataWriter) throws Exception {
    int count = 0;

    List<Schema.Field> fields = Lists.newArrayList();

    // For each object
    for (StructuredRecord object : objects) {

        // Extract the field names from the object passed in. This is required
        // because this information is not available during the initialize or configure phase.
        if (!fieldsExtracted) {
            fields = object.getSchema().getFields();
            fieldsExtracted = true;
        }

        // Depending on the configuration, build the message body that will be
        // pushed to Kafka.
        String body = "";
        if (sconfig.format.equalsIgnoreCase("JSON")) {
            body = StructuredRecordStringConverter.toJsonString(object);
        } else {
            // Extract all values from the structured record
            List<Object> objs = Lists.newArrayList();
            for (Schema.Field field : fields) {
                objs.add(object.get(field.getName()));
            }

            StringWriter writer = new StringWriter();
            CSVPrinter printer = null;
            CSVFormat csvFileFormat;
            switch (sconfig.format.toLowerCase()) {
            case "csv":
                csvFileFormat = CSVFormat.Predefined.Default.getFormat();
                printer = new CSVPrinter(writer, csvFileFormat);
                break;

            case "excel":
                csvFileFormat = CSVFormat.Predefined.Excel.getFormat();
                printer = new CSVPrinter(writer, csvFileFormat);
                break;

            case "mysql":
                csvFileFormat = CSVFormat.Predefined.MySQL.getFormat();
                printer = new CSVPrinter(writer, csvFileFormat);
                break;

            case "tdf":
                csvFileFormat = CSVFormat.Predefined.TDF.getFormat();
                printer = new CSVPrinter(writer, csvFileFormat);
                break;

            case "rfc4180":
                csvFileFormat = CSVFormat.Predefined.TDF.getFormat();
                printer = new CSVPrinter(writer, csvFileFormat);
                break;
            }

            if (printer != null) {
                printer.printRecord(objs);
                body = writer.toString();
            }
        }

        // Message key.
        String key = "no_key";
        if (sconfig.key != null) {
            key = object.get(sconfig.key);
        }

        // Extract the partition key from the record
        Integer partitionKey = 0;
        if (sconfig.partitionField != null) {
            if (object.get(sconfig.partitionField) instanceof Integer) {
                partitionKey = object.get(sconfig.partitionField);
            } else {
                partitionKey = object.get(sconfig.partitionField).hashCode();
            }
        }

        // Write to all the configured topics
        for (String topic : topics) {
            partitionKey = partitionKey % producer.partitionsFor(topic).size();
            if (isAsync) {
                producer.send(new ProducerRecord<String, String>(topic, partitionKey, key, body),
                        new Callback() {
                            @Override
                            public void onCompletion(RecordMetadata meta, Exception e) {
                                if (meta != null) {
                                    context.getMetrics().count("kafka.async.success", 1);
                                }

                                if (e != null) {
                                    context.getMetrics().count("kafka.async.error", 1);
                                }
                            }
                        });
            } else {
                // Blocks until the message has been pushed through.
                producer.send(new ProducerRecord<String, String>(topic, partitionKey, key, body)).get();
            }
            context.getMetrics().count("kafka.producer.count", 1);
        }

        count++; // tally processed records so the returned count is meaningful
    }
    return count;
}