Example usage for org.apache.commons.csv CSVPrinter close

List of usage examples for org.apache.commons.csv CSVPrinter close

Introduction

On this page you can find an example usage for org.apache.commons.csv CSVPrinter close.

Prototype

@Override
    public void close() throws IOException 

Source Link

Usage

From source file:org.apache.ambari.server.api.services.serializers.CsvSerializer.java

/**
 * Serialize the result into a CSV-formatted text document.
 * <p/>/*from   w ww  .ja  v a2s  .  c o m*/
 * It is expected that the result set is a collection of flat resources - no sub-resources will be
 * included in the output.  The root of the tree structure may have a column map (csv_column_map)
 * and a column order (csv_column_order) property set to indicate the header record and ordering
 * of the columns.
 * <p/>
 * The csv_column_map is a map of resource property names to header descriptive names.  If not
 * specified, a header record will not be serialized.
 * <p/>
 * The csv_column_order is a list of resource property names declaring the order of the columns.
 * If not specified, the order will be taken from the key order of csv_column_map or the "natural"
 * ordering of the resource property names, both may be unpredictable.
 *
 * @param result internal result
 * @return a String containing the CSV-formatted document
 */
@Override
public Object serialize(Result result) {
    if (result.getStatus().isErrorState()) {
        return serializeError(result.getStatus());
    } else {
        CSVPrinter csvPrinter = null;
        try {
            // A StringBuffer to store the CSV-formatted document while building it.  It may be
            // necessary to use file-based storage if the data set is expected to be really large.
            StringBuffer buffer = new StringBuffer();

            TreeNode<Resource> root = result.getResultTree();

            if (root != null) {
                csvPrinter = new CSVPrinter(buffer, CSVFormat.DEFAULT);

                // TODO: recursively handle tree structure, for now only handle single level of detail
                if ("true".equalsIgnoreCase(root.getStringProperty("isCollection"))) {
                    List<String> fieldNameOrder = processHeader(csvPrinter, root);

                    Collection<TreeNode<Resource>> children = root.getChildren();
                    if (children != null) {
                        // Iterate over the child nodes of the collection an add each as a new record in the
                        // CSV document.
                        for (TreeNode<Resource> child : children) {
                            processRecord(csvPrinter, child, fieldNameOrder);
                        }
                    }
                }
            }

            return buffer.toString();
        } catch (IOException e) {
            //todo: exception handling.  Create ResultStatus 500 and call serializeError
            throw new RuntimeException("Unable to serialize to csv: " + e, e);
        } finally {
            if (csvPrinter != null) {
                try {
                    csvPrinter.close();
                } catch (IOException ex) {
                }
            }
        }
    }
}

From source file:org.apache.ambari.server.api.services.serializers.CsvSerializer.java

/**
 * Serialize an error result as a two-record CSV document: a ("status", "message")
 * header record followed by the error's status code and message.
 *
 * @param error the error result status to serialize
 * @return a String containing the CSV-formatted error document
 */
@Override
public Object serializeError(ResultStatus error) {
    StringBuilder buffer = new StringBuilder();

    // try-with-resources replaces the manual null-checked finally block and the
    // silently-swallowed close() exception of the previous version.
    try (CSVPrinter csvPrinter = new CSVPrinter(buffer, CSVFormat.DEFAULT)) {
        csvPrinter.printRecord(Arrays.asList("status", "message"));
        csvPrinter.printRecord(Arrays.asList(error.getStatus().getStatus(), error.getMessage()));
    } catch (IOException e) {
        //todo: exception handling.  Create ResultStatus 500 and call serializeError
        throw new RuntimeException("Unable to serialize to csv: " + e, e);
    }

    return buffer.toString();
}

From source file:org.apache.ambari.view.hive.resources.uploads.TableDataReader.java

@Override
public int read(char[] cbuf, int off, int len) throws IOException {
    // Reads up to len chars into cbuf starting at off.  Drains the current
    // stringReader first; whenever it runs dry, the next Row from 'iterator'
    // is CSV-serialized into a fresh in-memory reader and reading continues.
    // Returns the number of chars actually read, or -1 when no rows remain
    // and nothing was read.

    int totalLen = len;
    int count = 0;
    do {
        int n = stringReader.read(cbuf, off, len);

        if (n != -1) {
            // n chars were read
            len = len - n; // len more to be read
            off = off + n; // off now shifted to n more
            count += n;
        }

        if (count == totalLen)
            return count; // all totalLen characters were read

        if (iterator.hasNext()) { // keep reading as long as we keep getting rows
            // Serialize the next row and swap it in as the new character source.
            // CSVPrinter flushes into stringWriter; closing both is a no-op for
            // in-memory writers but releases the printer.
            StringWriter stringWriter = new StringWriter(CAPACITY);
            CSVPrinter csvPrinter = new CSVPrinter(stringWriter, CSV_FORMAT);
            Row row = iterator.next();
            csvPrinter.printRecord(row.getRow());
            stringReader.close(); // close the old string reader
            stringReader = new StringReader(stringWriter.getBuffer().toString());
            csvPrinter.close();
            stringWriter.close();
        } else {
            return count == 0 ? -1 : count;
        }
    } while (count < totalLen);

    return count;
}

From source file:org.apache.nifi.processors.ParseCSV.ParseCSV.java

// Parses an incoming CSV FlowFile and re-emits it as CSV, JSON, or XML, optionally
// masking, encrypting, or tokenizing selected columns.  Tokenization rows are
// appended to a second FlowFile transferred to RELATIONSHIP_TOKENIZED.
@Override
public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {

    // NOTE(review): platform default charset — output depends on the JVM locale;
    // consider making the charset an explicit processor property.
    final Charset charset = Charset.defaultCharset();
    FlowFile flowFile = session.get();
    if (flowFile == null) {
        return;
    }
    // TODO implement
    final Map<String, String> attributes = new LinkedHashMap<>();
    final String format = context.getProperty(FORMAT).getValue();
    final boolean create_attributes = Boolean.parseBoolean(context.getProperty(CREATE_ATTRIBUTES).getValue());
    final char delimiter = context.getProperty(DELIMITER).getValue().charAt(0);
    final boolean with_header = Boolean.parseBoolean(context.getProperty(WITH_HEADER).getValue());
    final String output_format = context.getProperty(OUTPUT_FORMAT).getValue();
    final String custom_header = context.getProperty(CUSTOM_HEADER).getValue();
    final String column_mask = context.getProperty(COLUMN_MASK).getValue();
    final String column_encrypt = context.getProperty(COLUMN_ENCRYPT).getValue();
    final String column_tokenize = context.getProperty(COLUMN_TOKENIZE).getValue();
    final String tokenize_unique_identifier = context.getProperty(TOKENIZE_UNQIUE_IDENTIFIER).getValue();
    final String tokenized_ouput = context.getProperty(TOKENIZED_OUTPUT).getValue();
    // NOTE(review): hard-coded encryption key — must come from a secure property/keystore.
    final String encryptionKey = "Bar12345Bar12345";
    final String static_schema = context.getProperty(STATIC_SCHEMA).getValue();

    // Holder to carry the tokenized FlowFile out of the stream callback below.
    final org.apache.nifi.util.ObjectHolder<FlowFile> holder = new org.apache.nifi.util.ObjectHolder<>(null);

    flowFile = session.write(flowFile, new StreamCallback() {
        @Override
        public void process(InputStream inputStream, OutputStream outputStream) throws IOException {

            CSVFormat csvFormat = buildFormat(format, delimiter, with_header, custom_header);
            // NOTE(review): csvParser is never closed; only csvPrinter is closed below.
            CSVParser csvParser = new CSVParser(new InputStreamReader(inputStream, charset), csvFormat);
            CSVPrinter csvPrinter = new CSVPrinter(new OutputStreamWriter(outputStream, charset), csvFormat);
            String headerArray[];

            ArrayList<String> columnMaskList = new ArrayList<>();
            ArrayList<String> columnEncryptList = new ArrayList<String>();
            ArrayList<String> columnTokenizeList = new ArrayList<String>();

            List<String> maskValueHolder = new LinkedList<>();
            FlowFile tokenized = session.create();

            // Decide the header row: custom header, static schema, or the parsed header map.
            if (custom_header != null && output_format.equals("CSV") && static_schema == null) {
                csvPrinter.printRecord(custom_header);
                headerArray = custom_header.split(",");
            } else if (static_schema != null && custom_header == null) {
                csvPrinter.printRecord(static_schema.replace("\"", ""));
                headerArray = static_schema.split(",");
            } else {
                headerArray = csvParser.getHeaderMap().keySet().toArray(new String[0]);
                csvPrinter.printRecord(headerArray);
            }

            if (column_mask != null) {
                columnMaskList = new ArrayList<>(Arrays.asList(column_mask.replace("\"", "").split(",")));
            }

            if (column_encrypt != null) {
                columnEncryptList = new ArrayList<>(Arrays.asList(column_encrypt.split(",")));
            }

            if (column_tokenize != null) {
                columnTokenizeList = new ArrayList<>(Arrays.asList(column_tokenize.split(",")));
            }

            // loop through records and print
            for (final CSVRecord record : csvParser) {

                Map<String, String> k = record.toMap();

                // NOTE(review): dead loop — its body is entirely commented out; remove it.
                for (Map.Entry<String, String> konj : k.entrySet()) {
                    //System.out.println(konj.getValue());
                }
                // generate attributes if required per record
                if (create_attributes) {
                    for (int i = 0; i < headerArray.length; i++) {
                        //attributes.put(headerArray[i], record.get(i));
                        attributes.put(headerArray[i] + "." + record.getRecordNumber(), record.get(i));
                    }
                }
                // check masked columns
                if (column_mask != null || column_encrypt != null) {
                    // we have to loop through the header array and match user requested mask columns
                    for (int i = 0; i < headerArray.length; i++) {

                        if (columnMaskList.contains(headerArray[i])) {
                            // set mask
                            maskValueHolder.add(mask(record.get(i)));

                            // construct tokenization row for external DB store
                            if (columnTokenizeList.contains(headerArray[i])) {
                                final String tokenizedRow;
                                tokenizedRow = tokenizationOut(tokenized_ouput, headerArray[i],
                                        tokenize_unique_identifier, mask(record.get(i)), record.get(i),
                                        Long.toString(record.getRecordNumber()));

                                tokenized = session.append(tokenized, new OutputStreamCallback() {
                                    @Override
                                    public void process(OutputStream outputStream) throws IOException {
                                        outputStream.write(tokenizedRow.getBytes());
                                    }
                                });
                            }
                        } else if (columnEncryptList.contains(headerArray[i])) {
                            // encrypt
                            maskValueHolder.add(new String(Encrypt(record.get(i), encryptionKey), "UTF-8"));
                        } else {
                            // no mask
                            maskValueHolder.add(record.get(i));
                        }
                    }
                    csvPrinter.printRecord(maskValueHolder);
                    // clear mask column holder
                    maskValueHolder.clear();
                } else {
                    // no masking or encryption required, print record
                    switch (output_format) {
                    case "CSV":
                        // NOTE(review): this branch throws NPE when static_schema is null
                        // (reachable when only custom_header is set) — guard before use.
                        List<String> items = Arrays.asList(static_schema.split(","));
                        String lastColumn = items.get(items.size() - 1);
                        String test = "";
                        for (String item : items) {
                            // NOTE(review): '!=' is a reference comparison; it only works here
                            // because 'item' and 'lastColumn' can be the same object from
                            // 'items' — use an index check or equals() instead.
                            if (item != lastColumn) {
                                test += record.get(item) + ",";
                            } else {
                                test += record.get(item);
                            }
                        }

                        // NOTE(review): String.replace is literal, not regex — this pattern
                        // never matches, so the call is a no-op; replaceAll was probably
                        // intended to strip surrounding quotes.
                        csvPrinter.printRecord(test.replace("^\"|\"$", ""));
                        break;
                    case "JSON":
                        String json = new ObjectMapper().writer().withDefaultPrettyPrinter()
                                .writeValueAsString(record.toMap()) + "\n";
                        if (json.length() > 0) {
                            outputStream.write(json.getBytes());
                        }

                        break;
                    case "XML":
                        outputStream.write(new XmlMapper().writeValueAsString(record.toMap()).getBytes());
                        break;
                    }
                }
            }
            csvPrinter.flush();
            csvPrinter.close();
            holder.set(tokenized);
        }
    });

    flowFile = session.putAllAttributes(flowFile, attributes);
    session.transfer(flowFile, RELATIONSHIP_SUCCESS);
    session.transfer(holder.get(), RELATIONSHIP_TOKENIZED);
}

From source file:org.bedework.eventreg.bus.CSVOutputter.java

/**
 * Produce the next registration as a single CSV (Excel-dialect) record string.
 * Fixed columns are followed by the values of the configured form fields, when
 * a form is present.
 *
 * @return the CSV record, an error string if serialization fails, or null when
 *         there are no more registrations
 */
@Override
public String next() {
    if (!regit.hasNext()) {
        return null;
    }

    final List<Object> flds = new ArrayList<>();

    final Registration reg = regit.next();

    final StringBuilder out = new StringBuilder();

    // try-with-resources: the previous version leaked the printer when an
    // exception was thrown before the explicit close() calls.
    try (CSVPrinter csv = new CSVPrinter(out, CSVFormat.EXCEL)) {
        flds.add(reg.getEvSummary());
        flds.add(reg.getEvDate());
        flds.add(reg.getEvTime());
        flds.add(reg.getEvLocation());
        flds.add(reg.getRegistrationId());
        flds.add(reg.getAuthid());
        flds.add(reg.getTicketsRequested());
        flds.add(reg.getNumTickets());
        flds.add(reg.getType());
        flds.add(reg.getComment());
        flds.add(reg.getCreated());
        flds.add(reg.getLastmod());

        if (form == null) {
            // No form configured: emit just the fixed columns.
            csv.printRecord(flds.toArray());
            csv.flush();
            return out.toString();
        }

        final FormFields ff = new FormFields(form.getFields());

        try {
            final Map vals = reg.restoreFormValues();

            for (final FieldDef fd : ff) {
                final Object val = vals.get(fd.getName());

                if (val == null) {
                    flds.add("");
                } else {
                    flds.add(val);
                }
            }
        } catch (final Throwable t) {
            // Best-effort: note the failure in the output rather than aborting.
            out.append("Exception restoring form values");
        }

        csv.printRecord(flds.toArray());
        csv.flush();
    } catch (final Throwable t) {
        return "Exception " + t.getLocalizedMessage();
    }

    return out.toString();
}

From source file:org.cast.cwm.admin.CSVDownload.java

/**
 * creates a new resource response based on the request attributes
 *
 * @param attributes current request attributes from client
 * @return resource response for answering request
 */
@Override
protected ResourceResponse newResourceResponse(Attributes attributes) {
    ResourceResponse rr = new ResourceResponse();
    rr.disableCaching();
    rr.setFileName("log.csv");
    rr.setContentDisposition(ContentDisposition.ATTACHMENT);
    rr.setContentType("text/csv");

    if (rr.dataNeedsToBeWritten(attributes)) {
        rr.setWriteCallback(new WriteCallback() {
            @Override
            public void writeData(Attributes attributes) {
                Response response = attributes.getResponse();

                // try-with-resources: the previous version skipped writer.close()
                // when an IOException was thrown mid-write, leaking the stream.
                try (CSVPrinter writer = new CSVPrinter(
                        new OutputStreamWriter(response.getOutputStream(), "UTF-8"), CSVFormat.EXCEL)) {

                    // Write header row
                    for (IDataColumn<E> col : columns) {
                        writer.print(col.getHeaderString());
                    }
                    writer.println();

                    // Write documentation row, if requested
                    if (includeDocumentationRow) {
                        for (IDataColumn<E> col : columns) {
                            if (col instanceof IDocumentedColumn
                                    && ((IDocumentedColumn) col).getDocumentationModel() != null) {
                                writer.print(((IDocumentedColumn) col).getDocumentationModel().getObject());
                            } else {
                                writer.print("");
                            }
                        }
                        writer.println();
                    }

                    // Write Data
                    Iterator<? extends E> it = iteratorProvider.getIterator();
                    while (it.hasNext()) {
                        E e = it.next();
                        for (IDataColumn<E> col : columns) {
                            String columnValue = col.getItemString(new Model<E>(e));
                            if (columnValue == null) {
                                log.warn("Got a null value for {} of item {}", col.getHeaderString(), e);
                                columnValue = "null";
                            }
                            // Clean up text -- CSV file cannot have newlines in it
                            writer.print(columnValue.replaceAll("[\r\n]", " "));
                        }
                        writer.println();
                    }

                } catch (UnsupportedEncodingException e) {
                    throw new StringValueConversionException("UTF-8 translation not supported?!", e);
                } catch (IOException e) {
                    throw new WicketRuntimeException("Couldn't write to resource", e);
                }
            }
        });
    }

    return rr;
}

From source file:org.chanthing.csvtool.CSVTrans.java

/**
 * Transform a source CSV file into a destination CSV file: writes the
 * destination headers, then applies the configured CSVXform to every input
 * record.
 *
 * @param args args[0] is the source CSV path, args[1] the destination CSV path
 * @throws IOException if either file cannot be read or written
 */
public static void main(String[] args) throws IOException {
    if (args.length < 2) {
        System.out.println("Usage: java CSVTrans <src_csv_file> <dest_csv_file>");
        return;
    }

    // try-with-resources replaces the manual null-checked finally block;
    // both resources are closed in reverse order even on failure.
    try (CSVParser reader = new CSVParser(new FileReader(args[0]), srcFormat);
            CSVPrinter writer = new CSVPrinter(new FileWriter(args[1]), destFormat)) {

        CSVXform xFormer = new CSVXform(destHeaders.size(), xforms);

        writer.printRecord(destHeaders); // Write out headers to destination file

        // Transform and emit each record from the input file.
        for (CSVRecord record : reader) {
            List<String> destRecord = xFormer.xform(record);
            writer.printRecord(destRecord);
        }
    }
}

From source file:org.cloudsimulator.controller.PlacementSimulatorTestRunner.java

/**
 * Export all placement results of the given simulation runs to
 * {autoExportResultPath}/{dcName}/{testCase}.csv (semicolon-delimited,
 * Excel dialect), one record per PlacementResult, preceded by a header row.
 *
 * @param dcName                   data-center name, used as the sub-directory
 * @param testCase                 test-case name, used as the file name
 * @param simulationResultWrappers the simulation runs to export
 */
protected void exportSimulationResults(String dcName, String testCase,
        ArrayList<SimulationResultWrapper> simulationResultWrappers) {

    Path resultPath = Paths.get(this.autoExportResultPath, dcName);

    // Create the per-datacenter result directory on first use.
    if (!Files.exists(resultPath)) {
        new File(resultPath.toString()).mkdirs();
    }

    Path resultFilePath = Paths.get(resultPath.toString(), testCase + ".csv");

    CSVFormat csvFormat = CSVFormat.EXCEL.withDelimiter(';');

    Object[] header = { "Id", "Reserved OS resource", "Max risk", "Max overprov time", "N runs for GRASP",
            "N simulation", "N VM", "N VM used for keep test consistency", "Iteration time", "Euristic method",
            "Euristic coefficient", "N Used host", "CPU Avg", "Mem Avg", "Overall Avg", "Cpu in range",
            "Mem in range", "Overall in range" };

    // try-with-resources: the previous version leaked the FileWriter whenever an
    // exception escaped before csvPrinter.close(); close() also flushes, so the
    // per-record flush() is no longer needed.
    try (FileWriter buffer = new FileWriter(resultFilePath.toString());
            CSVPrinter csvPrinter = new CSVPrinter(buffer, csvFormat)) {

        csvPrinter.printRecord(header);
        for (SimulationResultWrapper simulationResultWrapper : simulationResultWrappers) {

            for (PlacementResult result : simulationResultWrapper.getPlacementResults()) {
                // One output record per placement result; column order matches 'header'.
                List<Object> record = new ArrayList<>();
                record.add(result.getId());
                record.add(simulationResultWrapper.getReservedOsHostResource());
                record.add(simulationResultWrapper.getHostMaxRisk());
                record.add(simulationResultWrapper.getMaxOverprovisioningTime());
                record.add(simulationResultWrapper.getN_runs());
                record.add(simulationResultWrapper.getN_simulation());
                record.add(simulationResultWrapper.getOriginalMachines().size());
                record.add(simulationResultWrapper.getUsedForSimulationMachines().size());
                record.add(result.getIterationTime());
                record.add(simulationResultWrapper.getEuristicMethod());
                record.add(simulationResultWrapper.getEuristicCoeffBuilderMethod());
                record.add(result.getUsedHost().size());
                record.add((float) result.getDataCenterMachinePlacement().getCpu_avg_usage());
                record.add((float) result.getDataCenterMachinePlacement().getMemory_avg_usage());
                record.add((float) result.getDataCenterMachinePlacement().getOverall_avg_usage());
                record.add((float) result.getDataCenterMachinePlacement().getCpu_in_range());
                record.add((float) result.getDataCenterMachinePlacement().getMemory_in_range());
                record.add((float) result.getDataCenterMachinePlacement().getOverall_in_range());

                csvPrinter.printRecord(record);
            }
        }

    } catch (IOException e) {
        // TODO: surface the failure to the caller instead of only printing it.
        e.printStackTrace();
    }
}

From source file:org.eclipse.sw360.portal.portlets.admin.UserPortlet.java

/**
 * Export all Liferay users as a CSV backup ("Users.csv") and send it to the
 * client. Users without an email address or department are skipped.
 *
 * @param request  the portlet resource request
 * @param response the portlet resource response the file is written to
 */
public void backUpUsers(ResourceRequest request, ResourceResponse response)
        throws PortletException, IOException, SystemException, PortalException {
    List<User> liferayUsers;
    try {
        liferayUsers = UserLocalServiceUtil.getUsers(QueryUtil.ALL_POS, QueryUtil.ALL_POS);
    } catch (SystemException e) {
        log.error("Could not get user List from liferay", e);
        liferayUsers = Collections.emptyList();
    }

    final ByteArrayOutputStream outB = new ByteArrayOutputStream();

    // try-with-resources: the previous version leaked the writer and printer when
    // an exception was thrown before the explicit close(); closing the printer
    // also flushes the buffered writer into outB.
    try (Writer out = new BufferedWriter(new OutputStreamWriter(outB));
            CSVPrinter csvPrinter = new CSVPrinter(out, CommonUtils.sw360CsvFormat)) {

        csvPrinter.printRecord("GivenName", "Lastname", "Email", "Department", "UserGroup", "GID", "isMale",
                "PasswdHash", "wantsMailNotification");
        for (User liferayUser : liferayUsers) {

            String firstName = liferayUser.getFirstName();
            String lastName = liferayUser.getLastName();
            String emailAddress = liferayUser.getEmailAddress();
            List<Organization> organizations = liferayUser.getOrganizations();

            String department = "";

            if (organizations != null && organizations.size() > 0) {
                department = organizations.get(0).getName();
            }

            String gid = liferayUser.getOpenId();
            boolean isMale = liferayUser.isMale();
            String passwordHash = liferayUser.getPassword();
            // Skip users that cannot be mapped back to an sw360 account.
            if (isNullOrEmpty(emailAddress) || isNullOrEmpty(department)) {
                continue;
            }
            org.eclipse.sw360.datahandler.thrift.users.User sw360user = UserCacheHolder
                    .getUserFromEmail(emailAddress);
            // Mail notification defaults to true when the flag was never set.
            boolean wantsMailNotification = sw360user.isSetWantsMailNotification() ? sw360user.wantsMailNotification
                    : true;
            String userGroup = sw360user.getUserGroup().toString();

            csvPrinter.printRecord(firstName, lastName, emailAddress, department, userGroup, gid, isMale,
                    passwordHash, wantsMailNotification);
        }
    }

    ByteArrayInputStream byteArrayInputStream = new ByteArrayInputStream(outB.toByteArray());
    PortletResponseUtil.sendFile(request, response, "Users.csv", byteArrayInputStream, "text/csv");
}

From source file:org.gitia.jdataanalysis.JDataAnalysis.java

/**
 * Write a 2-D string array to {folder}/{fileName} as a CSV file with the given
 * header row, using "\n" as the record separator.
 *
 * @param data     rows to write, one record per String[]
 * @param headers  the header record written first
 * @param folder   destination directory (must exist)
 * @param fileName destination file name
 */
public void save(String[][] data, String[] headers, String folder, String fileName) {
    String NEW_LINE_SEPARATOR = "\n";

    //Create the CSVFormat object with "\n" as a record delimiter
    CSVFormat csvFileFormat = CSVFormat.DEFAULT.withRecordSeparator(NEW_LINE_SEPARATOR);

    File file = new File(folder + "/" + fileName);

    // try-with-resources: the previous finally block could throw NPE when the
    // FileWriter constructor failed (fileWriter still null), and it closed the
    // printer after its underlying writer; this closes both safely in order.
    try (FileWriter fileWriter = new FileWriter(file);
            CSVPrinter csvFilePrinter = new CSVPrinter(fileWriter, csvFileFormat)) {

        //Create CSV file header
        csvFilePrinter.printRecord(headers);

        // Write each data row as its own CSV record.
        for (String[] row : data) {
            csvFilePrinter.printRecord((Object[]) row);
        }
        System.out.println("CSV file was created successfully !!!");
        System.out.println(folder + "/" + fileName);

    } catch (IOException e) {
        System.out.println("Error in CsvFileWriter !!!");
        e.printStackTrace();
    }
}