Example usage for org.apache.commons.csv CSVPrinter printRecord

Introduction

On this page you can find example usages of org.apache.commons.csv CSVPrinter.printRecord, collected from open-source projects.

Prototype

public void printRecord(final Object... values) throws IOException 

Document

Prints the given values as a single record of delimiter-separated values, followed by the record separator.
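
A minimal, self-contained sketch of the call (values and output are illustrative):

import java.io.IOException;
import java.io.StringWriter;

import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVPrinter;

public class PrintRecordSketch {
    public static void main(String[] args) throws IOException {
        StringWriter out = new StringWriter();
        CSVPrinter printer = new CSVPrinter(out, CSVFormat.DEFAULT);
        // varargs: each argument becomes one field; the record separator is appended
        printer.printRecord("id", "name", "score");
        printer.printRecord(1, "alice", 9.5);
        printer.close();
        System.out.print(out); // id,name,score  then  1,alice,9.5
    }
}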

Usage

From source file: org.apache.ambari.server.api.services.serializers.CsvSerializer.java

/**
 * Generate a CSV record by processing the resource embedded in the specified node. The fields
 * are ordered as specified.
 *
 * @param csvPrinter     the CSVPrinter used to create the record
 * @param node           the relevant node in the collection
 * @param fieldNameOrder a list of field names indicating order
 * @throws IOException if an error occurs creating the CSV record
 */
private void processRecord(CSVPrinter csvPrinter, TreeNode<Resource> node, List<String> fieldNameOrder)
        throws IOException {

    if (node != null) {
        Resource recordResource = node.getObject();
        if (recordResource != null) {
            List<Object> values = new ArrayList<Object>();

            if (fieldNameOrder != null) {
                for (String fieldName : fieldNameOrder) {
                    values.add(recordResource.getPropertyValue(fieldName));
                }
            } else {
                Map<String, Map<String, Object>> properties = recordResource.getPropertiesMap();
                if (properties != null) {

                    for (Map.Entry<String, Map<String, Object>> outer : properties.entrySet()) {
                        Map<String, Object> innerProperties = outer.getValue();

                        if (innerProperties != null) {
                            for (Map.Entry<String, Object> inner : innerProperties.entrySet()) {
                                values.add(inner.getValue());
                            }
                        }
                    }
                }
            }

            if (!values.isEmpty()) {
                csvPrinter.printRecord(values);
            }
        }
    }
}
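
Note that values is a List, so this call resolves to the printRecord(Iterable<?>) overload and each element becomes its own field. A quick sketch of the distinction (values are illustrative):

List<Object> values = Arrays.asList("a", "b");
printer.printRecord(values);          // Iterable overload: two fields, a,b
printer.printRecord((Object) values); // varargs with one value: a single quoted field "[a, b]"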

From source file: org.apache.ambari.server.api.services.serializers.CsvSerializer.java

/**
 * Optionally generate the CSV header record and establish the field order by processing the
 * csv_column_map and csv_column_order node properties.
 *
 * @param csvPrinter the CSVPrinter used to create the record
 * @param node       a node containing header and ordering information
 * @return a list indicating the field order for the CSV records
 * @throws IOException if an error occurs creating the CSV header
 */
private List<String> processHeader(CSVPrinter csvPrinter, TreeNode<Resource> node) throws IOException {
    Map<String, String> header;
    List<String> fieldNameOrder;
    Object object;

    // Get the explicitly set header property for the current tree node. This may be null if no
    // header needs to be written out. The header map is expected to be a map of field names to
    // descriptive header values.
    object = node.getProperty(PROPERTY_COLUMN_MAP);
    if (object instanceof Map) {
        header = (Map<String, String>) object;
    } else {
        header = null;
    }

    // Determine the field name order.  If explicitly set, use it, else grab it from the header map
    // (if available).
    object = node.getProperty(PROPERTY_COLUMN_ORDER);
    if (object instanceof List) {
        // Use the explicitly set ordering
        fieldNameOrder = (List<String>) object;
    } else if (header != null) {
        // Use the ordering specified by the map.
        fieldNameOrder = new ArrayList<String>(header.keySet());
    } else {
        fieldNameOrder = null;
    }

    if (header != null) {
        // build the header record
        List<String> headerNames = new ArrayList<String>();
        for (String fieldName : fieldNameOrder) {
            headerNames.add(header.get(fieldName));
        }

        // write out the header...
        csvPrinter.printRecord(headerNames);
    }

    return fieldNameOrder;
}
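
For the simple case, recent Commons CSV releases can also emit the header row automatically when the printer is created, instead of hand-building a header record as above (column names are illustrative):

CSVFormat format = CSVFormat.DEFAULT.withHeader("Name", "Email");
CSVPrinter printer = new CSVPrinter(writer, format); // header record is written on creation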

From source file: org.apache.ambari.view.hive.resources.jobs.JobService.java

/**
 * Get job results in CSV format
 */
@GET
@Path("{jobId}/results/csv")
@Produces("text/csv")
public Response getResultsCSV(@PathParam("jobId") String jobId, @Context HttpServletResponse response,
        @QueryParam("columns") final String requestedColumns) {
    try {
        JobController jobController = getResourceManager().readController(jobId);
        final Cursor resultSet = jobController.getResults();
        resultSet.selectColumns(requestedColumns);

        StreamingOutput stream = new StreamingOutput() {
            @Override
            public void write(OutputStream os) throws IOException, WebApplicationException {
                Writer writer = new BufferedWriter(new OutputStreamWriter(os));
                CSVPrinter csvPrinter = new CSVPrinter(writer, CSVFormat.DEFAULT);
                try {
                    while (resultSet.hasNext()) {
                        csvPrinter.printRecord(resultSet.next().getRow());
                        writer.flush();
                    }
                } finally {
                    writer.close();
                }
            }
        };

        return Response.ok(stream).build();
    } catch (WebApplicationException ex) {
        throw ex;
    } catch (ItemNotFound itemNotFound) {
        throw new NotFoundFormattedException(itemNotFound.getMessage(), itemNotFound);
    } catch (Exception ex) {
        throw new ServiceFormattedException(ex.getMessage(), ex);
    }
}
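
Since CSVPrinter is Closeable (and closing it closes the wrapped writer), the write body above can be condensed with try-with-resources; a sketch under the same assumptions (resultSet as in the example):

try (CSVPrinter csvPrinter = new CSVPrinter(new BufferedWriter(new OutputStreamWriter(os)), CSVFormat.DEFAULT)) {
    while (resultSet.hasNext()) {
        csvPrinter.printRecord(resultSet.next().getRow());
    }
}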

From source file: org.apache.ambari.view.hive.resources.jobs.JobService.java

/**
 * Save job results in CSV format to HDFS
 */
@GET
@Path("{jobId}/results/csv/saveToHDFS")
@Produces(MediaType.APPLICATION_JSON)
public Response getResultsToHDFS(@PathParam("jobId") String jobId, @QueryParam("commence") String commence,
        @QueryParam("file") final String targetFile, @QueryParam("stop") final String stop,
        @QueryParam("columns") final String requestedColumns, @Context HttpServletResponse response) {
    try {
        final JobController jobController = getResourceManager().readController(jobId);

        String backgroundJobId = "csv" + String.valueOf(jobController.getJob().getId());
        if (commence != null && commence.equals("true")) {
            if (targetFile == null)
                throw new MisconfigurationFormattedException("targetFile should not be empty");
            BackgroundJobController.getInstance(context).startJob(String.valueOf(backgroundJobId),
                    new Runnable() {
                        @Override
                        public void run() {

                            try {
                                Cursor resultSet = jobController.getResults();
                                resultSet.selectColumns(requestedColumns);

                                FSDataOutputStream stream = getSharedObjectsFactory().getHdfsApi()
                                        .create(targetFile, true);
                                Writer writer = new BufferedWriter(new OutputStreamWriter(stream));
                                CSVPrinter csvPrinter = new CSVPrinter(writer, CSVFormat.DEFAULT);
                                try {
                                    while (resultSet.hasNext() && !Thread.currentThread().isInterrupted()) {
                                        csvPrinter.printRecord(resultSet.next().getRow());
                                        writer.flush();
                                    }
                                } finally {
                                    writer.close();
                                }
                                stream.close();

                            } catch (IOException e) {
                                throw new ServiceFormattedException(
                                        "Could not write CSV to HDFS for job#" + jobController.getJob().getId(),
                                        e);
                            } catch (InterruptedException e) {
                                throw new ServiceFormattedException(
                                        "Could not write CSV to HDFS for job#" + jobController.getJob().getId(),
                                        e);
                            } catch (ItemNotFound itemNotFound) {
                                throw new NotFoundFormattedException("Job results are expired", itemNotFound);
                            }

                        }
                    });
        }

        if (stop != null && stop.equals("true")) {
            BackgroundJobController.getInstance(context).interrupt(backgroundJobId);
        }

        JSONObject object = new JSONObject();
        object.put("stopped", BackgroundJobController.getInstance(context).isInterrupted(backgroundJobId));
        object.put("jobId", jobController.getJob().getId());
        object.put("backgroundJobId", backgroundJobId);
        object.put("operationType", "CSV2HDFS");
        object.put("status", BackgroundJobController.getInstance(context).state(backgroundJobId).toString());

        return Response.ok(object).build();
    } catch (WebApplicationException ex) {
        throw ex;
    } catch (ItemNotFound itemNotFound) {
        throw new NotFoundFormattedException(itemNotFound.getMessage(), itemNotFound);
    } catch (Exception ex) {
        throw new ServiceFormattedException(ex.getMessage(), ex);
    }
}

From source file: org.apache.ambari.view.hive.resources.uploads.TableDataReader.java

@Override
public int read(char[] cbuf, int off, int len) throws IOException {

    int totalLen = len;
    int count = 0;
    do {
        int n = stringReader.read(cbuf, off, len);

        if (n != -1) {
            // n chars were read
            len = len - n; // len chars remain to be read
            off = off + n; // advance the offset past the chars just read
            count += n;
        }

        if (count == totalLen)
            return count; // all totalLen characters were read

        if (iterator.hasNext()) { // keep reading as long as we keep getting rows
            StringWriter stringWriter = new StringWriter(CAPACITY);
            CSVPrinter csvPrinter = new CSVPrinter(stringWriter, CSV_FORMAT);
            Row row = iterator.next();
            csvPrinter.printRecord(row.getRow());
            stringReader.close(); // close the old string reader
            stringReader = new StringReader(stringWriter.getBuffer().toString());
            csvPrinter.close();
            stringWriter.close();
        } else {
            return count == 0 ? -1 : count;
        }
    } while (count < totalLen);

    return count;
}
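
The pattern above materializes one row at a time into a CSV string. Extracted as a helper, it looks roughly like this (a sketch; the field array comes from the example's own Row type, and CSV_FORMAT is the example's format constant):

private static String toCsvLine(Object[] fields) throws IOException {
    StringWriter sw = new StringWriter();
    try (CSVPrinter printer = new CSVPrinter(sw, CSV_FORMAT)) {
        printer.printRecord(fields); // includes the trailing record separator
    }
    return sw.toString();
}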

From source file: org.apache.camel.dataformat.csv.CsvMarshaller.java

/**
 * Marshals the given object into the given stream.
 *
 * @param exchange     Exchange (used for access to type conversion)
 * @param object       Body to marshal
 * @param outputStream Output stream of the CSV
 * @throws NoTypeConversionAvailableException if the body cannot be converted
 * @throws IOException                        if we cannot write into the given stream
 */
public void marshal(Exchange exchange, Object object, OutputStream outputStream)
        throws NoTypeConversionAvailableException, IOException {
    CSVPrinter printer = new CSVPrinter(new OutputStreamWriter(outputStream), format);
    try {
        List<?> list = ExchangeHelper.convertToType(exchange, List.class, object);
        if (list != null) {
            for (Object child : list) {
                printer.printRecord(getRecordValues(exchange, child));
            }
        } else {
            printer.printRecord(getRecordValues(exchange, object));
        }
    } finally {
        IOHelper.close(printer);
    }
}
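
Closing the printer in the finally block (via Camel's IOHelper here) is what guarantees buffered output reaches the stream. The equivalent try-with-resources form for the list path, as a sketch:

try (CSVPrinter printer = new CSVPrinter(new OutputStreamWriter(outputStream), format)) {
    for (Object child : list) {
        printer.printRecord(getRecordValues(exchange, child));
    }
}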

From source file: org.apache.nifi.processors.ParseCSV.ParseCSV.java

@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {

    final Charset charset = Charset.defaultCharset();
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }
    // TODO implement
    final Map<String, String> attributes = new LinkedHashMap<>();
    final String format = context.getProperty(FORMAT).getValue();
    final boolean create_attributes = Boolean.parseBoolean(context.getProperty(CREATE_ATTRIBUTES).getValue());
    final char delimiter = context.getProperty(DELIMITER).getValue().charAt(0);
    final boolean with_header = Boolean.parseBoolean(context.getProperty(WITH_HEADER).getValue());
    final String output_format = context.getProperty(OUTPUT_FORMAT).getValue();
    final String custom_header = context.getProperty(CUSTOM_HEADER).getValue();
    final String column_mask = context.getProperty(COLUMN_MASK).getValue();
    final String column_encrypt = context.getProperty(COLUMN_ENCRYPT).getValue();
    final String column_tokenize = context.getProperty(COLUMN_TOKENIZE).getValue();
    final String tokenize_unique_identifier = context.getProperty(TOKENIZE_UNQIUE_IDENTIFIER).getValue();
    final String tokenized_ouput = context.getProperty(TOKENIZED_OUTPUT).getValue();
    final String encryptionKey = "Bar12345Bar12345";
    final String static_schema = context.getProperty(STATIC_SCHEMA).getValue();

    // new flowfile here
    final org.apache.nifi.util.ObjectHolder<FlowFile> holder = new org.apache.nifi.util.ObjectHolder<>(null);

    flowFile = session.write(flowFile, new StreamCallback() {
        @Override
        public void process(InputStream inputStream, OutputStream outputStream) throws IOException {

            CSVFormat csvFormat = buildFormat(format, delimiter, with_header, custom_header);
            CSVParser csvParser = new CSVParser(new InputStreamReader(inputStream, charset), csvFormat);
            CSVPrinter csvPrinter = new CSVPrinter(new OutputStreamWriter(outputStream, charset), csvFormat);
            String[] headerArray;

            ArrayList<String> columnMaskList = new ArrayList<>();
            ArrayList<String> columnEncryptList = new ArrayList<>();
            ArrayList<String> columnTokenizeList = new ArrayList<>();

            List<String> maskValueHolder = new LinkedList<>();
            FlowFile tokenized = session.create();

            // print header if needed; split first so each column name becomes its own field
            if (custom_header != null && output_format.equals("CSV") && static_schema == null) {
                headerArray = custom_header.split(",");
                csvPrinter.printRecord(headerArray);
            } else if (static_schema != null && custom_header == null) {
                headerArray = static_schema.replace("\"", "").split(",");
                csvPrinter.printRecord(headerArray);
            } else {
                headerArray = csvParser.getHeaderMap().keySet().toArray(new String[0]);
                csvPrinter.printRecord(headerArray);
            }

            if (column_mask != null) {
                columnMaskList = new ArrayList<>(Arrays.asList(column_mask.replace("\"", "").split(",")));
            }

            if (column_encrypt != null) {
                columnEncryptList = new ArrayList<>(Arrays.asList(column_encrypt.split(",")));
            }

            if (column_tokenize != null) {
                columnTokenizeList = new ArrayList<>(Arrays.asList(column_tokenize.split(",")));
            }

            // loop through records and print
            for (final CSVRecord record : csvParser) {

                // generate attributes if required per record
                if (create_attributes) {
                    for (int i = 0; i < headerArray.length; i++) {
                        attributes.put(headerArray[i] + "." + record.getRecordNumber(), record.get(i));
                    }
                }
                // check masked columns
                if (column_mask != null || column_encrypt != null) {
                    // we have to loop through the header array and match user requested mask columns
                    for (int i = 0; i < headerArray.length; i++) {
                        if (columnMaskList.contains(headerArray[i])) {
                            // set mask
                            maskValueHolder.add(mask(record.get(i)));

                            // construct tokenization row for external DB store
                            if (columnTokenizeList.contains(headerArray[i])) {
                                final String tokenizedRow;
                                tokenizedRow = tokenizationOut(tokenized_ouput, headerArray[i],
                                        tokenize_unique_identifier, mask(record.get(i)), record.get(i),
                                        Long.toString(record.getRecordNumber()));

                                tokenized = session.append(tokenized, new OutputStreamCallback() {
                                    @Override
                                    public void process(OutputStream outputStream) throws IOException {
                                        outputStream.write(tokenizedRow.getBytes());
                                    }
                                });
                            }
                        } else if (columnEncryptList.contains(headerArray[i])) {
                            // encrypt
                            maskValueHolder.add(new String(Encrypt(record.get(i), encryptionKey), "UTF-8"));
                        } else {
                            // no mask
                            maskValueHolder.add(record.get(i));
                        }
                    }
                    csvPrinter.printRecord(maskValueHolder);
                    // clear mask column holder
                    maskValueHolder.clear();
                } else {
                    // no masking or encryption required, print record
                    switch (output_format) {
                    case "CSV":
                        // print each requested column as its own field
                        List<String> items = Arrays.asList(static_schema.split(","));
                        List<String> rowValues = new ArrayList<>();
                        for (String item : items) {
                            rowValues.add(record.get(item));
                        }
                        csvPrinter.printRecord(rowValues);
                        break;
                    case "JSON":
                        String json = new ObjectMapper().writer().withDefaultPrettyPrinter()
                                .writeValueAsString(record.toMap()) + "\n";
                        if (json.length() > 0) {
                            outputStream.write(json.getBytes());
                        }

                        break;
                    case "XML":
                        outputStream.write(new XmlMapper().writeValueAsString(record.toMap()).getBytes());
                        break;
                    }
                }
            }
            csvPrinter.flush();
            csvPrinter.close();
            holder.set(tokenized);
        }
    });

    flowFile = session.putAllAttributes(flowFile, attributes);
    session.transfer(flowFile, RELATIONSHIP_SUCCESS);
    session.transfer(holder.get(), RELATIONSHIP_TOKENIZED);
}
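
One subtlety this processor must get right: handing printRecord a whole comma-joined String yields a single (quoted) field, because the string is one varargs value. Splitting first is what produces one field per column:

csvPrinter.printRecord("a,b,c");            // one field: "a,b,c"
csvPrinter.printRecord("a,b,c".split(",")); // three fields: a,b,c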

From source file: org.bedework.eventreg.bus.CSVOutputter.java

@Override
public String next() {
    if (!regit.hasNext()) {
        return null;
    }

    final List<Object> flds = new ArrayList<>();

    final Registration reg = regit.next();

    final StringBuilder out = new StringBuilder();

    try {
        final CSVPrinter csv = new CSVPrinter(out, CSVFormat.EXCEL);
        flds.add(reg.getEvSummary());
        flds.add(reg.getEvDate());
        flds.add(reg.getEvTime());
        flds.add(reg.getEvLocation());
        flds.add(reg.getRegistrationId());
        flds.add(reg.getAuthid());
        flds.add(reg.getTicketsRequested());
        flds.add(reg.getNumTickets());
        flds.add(reg.getType());
        flds.add(reg.getComment());
        flds.add(reg.getCreated());
        flds.add(reg.getLastmod());

        if (form == null) {
            csv.printRecord(flds.toArray());
            csv.flush();
            csv.close();
            return out.toString();
        }

        final FormFields ff = new FormFields(form.getFields());

        try {
            final Map vals = reg.restoreFormValues();

            for (final FieldDef fd : ff) {
                final Object val = vals.get(fd.getName());

                if (val == null) {
                    flds.add("");
                } else {
                    flds.add(val);
                }
            }
        } catch (final Throwable t) {
            out.append("Exception restoring form values");
        }

        csv.printRecord(flds.toArray());
        csv.flush();
        csv.close();
    } catch (final Throwable t) {
        return "Exception " + t.getLocalizedMessage();
    }

    return out.toString();
}

From source file: org.chanthing.csvtool.CSVTrans.java

public static void main(String[] args) throws IOException {
    CSVParser reader = null;
    CSVPrinter writer = null;
    CSVXform xFormer = null;

    if (args.length < 2) {
        System.out.println("Usage: java CSVTrans <src_csv_file> <dest_csv_file>");
        return;
    }

    try {
        reader = new CSVParser(new FileReader(args[0]), srcFormat);
        writer = new CSVPrinter(new FileWriter(args[1]), destFormat);
        xFormer = new CSVXform(destHeaders.size(), xforms);

        writer.printRecord(destHeaders); // Write out headers to destination file

        /*
         *  For each record in the input file
         */
        for (CSVRecord record : reader) {
            List<String> destRecord = xFormer.xform(record);
            writer.printRecord(destRecord);
        }

    } finally {
        if (reader != null) {
            reader.close();
        }
        if (writer != null) {
            writer.close();
        }
    }
}
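
Both CSVParser and CSVPrinter are Closeable, so on Java 7+ the explicit finally block can be replaced with try-with-resources; a sketch (srcFormat, destFormat, destHeaders, and xforms as in the example):

try (CSVParser reader = new CSVParser(new FileReader(args[0]), srcFormat);
        CSVPrinter writer = new CSVPrinter(new FileWriter(args[1]), destFormat)) {
    CSVXform xFormer = new CSVXform(destHeaders.size(), xforms);
    writer.printRecord(destHeaders); // write out headers to destination file
    for (CSVRecord record : reader) {
        writer.printRecord(xFormer.xform(record));
    }
}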

From source file: org.cloudsimulator.controller.PlacementSimulatorTestRunner.java

protected void exportSimulationResults(String dcName, String testCase,
        ArrayList<SimulationResultWrapper> simulationResultWrappers) {

    Path resultPath = Paths.get(this.autoExportResultPath, dcName);

    if (!Files.exists(resultPath)) {
        File resultFile = new File(resultPath.toString());
        resultFile.mkdirs();
    }

    Path resultFilePath = Paths.get(resultPath.toString(), testCase + ".csv");

    CSVFormat csvFormat = CSVFormat.EXCEL.withDelimiter(';');

    Object[] header = { "Id", "Reserved OS resource", "Max risk", "Max overprov time", "N runs for GRASP",
            "N simulation", "N VM", "N VM used for keep test consistency", "Iteration time", "Euristic method",
            "Euristic coefficient", "N Used host", "CPU Avg", "Mem Avg", "Overall Avg", "Cpu in range",
            "Mem in range", "Overall in range" };

    FileWriter buffer;
    try {
        buffer = new FileWriter(resultFilePath.toString());
        CSVPrinter csvPrinter = new CSVPrinter(buffer, csvFormat);

        csvPrinter.printRecord(header);
        for (SimulationResultWrapper simulationResultWrapper : simulationResultWrappers) {

            for (PlacementResult result : simulationResultWrapper.getPlacementResults()) {
                List<Object> record = new ArrayList<>();
                record.add(result.getId());
                record.add(simulationResultWrapper.getReservedOsHostResource());
                record.add(simulationResultWrapper.getHostMaxRisk());
                record.add(simulationResultWrapper.getMaxOverprovisioningTime());
                record.add(simulationResultWrapper.getN_runs());
                record.add(simulationResultWrapper.getN_simulation());
                record.add(simulationResultWrapper.getOriginalMachines().size());
                record.add(simulationResultWrapper.getUsedForSimulationMachines().size());
                record.add(result.getIterationTime());
                record.add(simulationResultWrapper.getEuristicMethod());
                record.add(simulationResultWrapper.getEuristicCoeffBuilderMethod());
                record.add(result.getUsedHost().size());
                record.add((float) result.getDataCenterMachinePlacement().getCpu_avg_usage());
                record.add((float) result.getDataCenterMachinePlacement().getMemory_avg_usage());
                record.add((float) result.getDataCenterMachinePlacement().getOverall_avg_usage());
                record.add((float) result.getDataCenterMachinePlacement().getCpu_in_range());
                record.add((float) result.getDataCenterMachinePlacement().getMemory_in_range());
                record.add((float) result.getDataCenterMachinePlacement().getOverall_in_range());

                csvPrinter.printRecord(record);
                csvPrinter.flush();

            }
        }
        csvPrinter.close();

    } catch (IOException e) {
        e.printStackTrace();
    }

}
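
Note the header record: header is an Object[], and an array passed to the Object... overload spreads so each title becomes its own column. Forcing it to be treated as a single value changes the output:

csvPrinter.printRecord(header);          // one field per header title
csvPrinter.printRecord((Object) header); // one field containing the array's toString()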