Example usage for org.apache.commons.csv CSVRecord size

Introduction

On this page you can find example usage for org.apache.commons.csv CSVRecord.size().

Prototype

public int size() 

Document

Returns the number of values in this record.
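
Before the collected examples, here is a minimal, self-contained sketch (assuming Commons CSV is on the classpath; the class name CsvRecordSizeDemo and the sample data are illustrative, not taken from the examples below) that parses an in-memory string and prints how many values each record holds:

import java.io.IOException;

import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVParser;
import org.apache.commons.csv.CSVRecord;

public class CsvRecordSizeDemo {
    public static void main(String[] args) throws IOException {
        String csv = "a,b,c\n1,2,3";
        // Parse the in-memory CSV text and report the width of each record.
        try (CSVParser parser = CSVParser.parse(csv, CSVFormat.DEFAULT)) {
            for (CSVRecord record : parser) {
                System.out.println("Record " + record.getRecordNumber()
                        + " has " + record.size() + " values");
            }
        }
    }
}

The real-world snippets below use size() in the same way: to bound index-based access via get(int) or to compare a record's width against an expected schema.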

Usage

From source file:com.github.jferard.pgloaderutils.sniffer.csd.CSDValidatorHelper.java

/**
 * @param result the result that will hold the errors
 * @param fields the schema / schema pattern
 * @param firstRecord the first record of the stream
 * @return the error count, or -1 if the schema has too many fields
 */
public int validateHeader(CSDValidationResult<F> result, SizedIterable<F> fields, CSVRecord firstRecord) {
    int headerErrorCount = 0;
    if (firstRecord.size() < fields.size()) {
        result.schemaHasTooManyFieldsForHeader(firstRecord);
        return -1;
    }

    int j = 0;
    for (F field : fields) {
        String value = firstRecord.get(j++);
        if (!matcher.match(field, value)) {
            result.incorrectColumnName(field, value);
            headerErrorCount++;
        }
    }
    return headerErrorCount;
}

From source file:biz.ganttproject.impex.csv.RecordGroup.java

boolean process(CSVRecord record) {
    assert record.size() > 0;
    boolean allEmpty = true;
    for (Iterator<String> it = record.iterator(); it.hasNext();) {
        if (!Strings.isNullOrEmpty(it.next())) {
            allEmpty = false;
            break;
        }
    }
    if (allEmpty) {
        return false;
    }
    try {
        return doProcess(record);
    } catch (Throwable e) {
        GPLogger.getLogger(GanttCSVOpen.class).log(Level.WARNING,
                String.format("Failed to process record:\n%s", record), e);
        return false;
    }
}

From source file:com.awesheet.managers.CSVManager.java

/**
 * Imports a Sheet from a CSV file in the specified path.
 * @param path a CSV file path
 * @return a new Sheet or null if parsing failed
 */
public Sheet importSheet(String path) {
    File csvData = new File(path);

    // Parse the CSV file.
    CSVParser parser;

    try {
        parser = CSVParser.parse(csvData, Charset.defaultCharset(), CSVFormat.RFC4180);
    } catch (IOException e) {
        return null;
    }

    // Create our new sheet.
    Sheet sheet = new Sheet("Imported Sheet");

    // Populate its cells.
    for (CSVRecord record : parser) {
        for (int x = 0; x < record.size(); ++x) {
            sheet.setCellValue(x, (int) record.getRecordNumber() - 1, record.get(x), true);
        }
    }

    return sheet;
}

From source file:com.github.jferard.pgloaderutils.sniffer.csd.CSDSchemaPattern.java

private void addFields(CSDFieldFactory<F> factory, List<F> newFields, F field, CSVRecord firstRecord,
        int begin) {
    if (firstRecord == null)
        return;

    for (int i = begin; i < firstRecord.size(); i++) {
        String name = firstRecord.get(i);
        newFields.add(factory.create(field.getType(), name, name, true));
    }
}

From source file:MasterRoomControllerFx.rooms.charts.RoomChartController.java

public void getHumData() {
    try {
        File csvData = new File("humHistory" + roomRow + roomColumn + ".csv");
        if (csvData.exists()) {
            CSVParser parser = CSVParser.parse(csvData, StandardCharsets.UTF_8,
                    CSVFormat.EXCEL.withDelimiter(';'));
            for (CSVRecord csvRecord : parser) {
                for (int i = 0; i < csvRecord.size() - 1; i++) {
                    hum.add(Float.parseFloat(csvRecord.get(i)));
                }
            }
        }
    } catch (IOException ex) {
        Logger.getLogger(RoomChartController.class.getName()).log(Level.SEVERE, null, ex);
    }
}

From source file:MasterRoomControllerFx.rooms.charts.RoomChartController.java

public void getTempData() {
    try {
        File csvData = new File("tempHistory" + roomRow + roomColumn + ".csv");
        if (csvData.exists()) {
            CSVParser parser = CSVParser.parse(csvData, StandardCharsets.UTF_8,
                    CSVFormat.EXCEL.withDelimiter(';'));
            for (CSVRecord csvRecord : parser) {
                for (int i = 0; i < csvRecord.size(); i++) {
                    if (i == 0)
                        temp.add(Float.parseFloat(csvRecord.get(i)));
                    if (i == 1)
                        time.add(csvRecord.get(i));
                }
            }
        }
    } catch (IOException ex) {
        Logger.getLogger(RoomChartController.class.getName()).log(Level.SEVERE, null, ex);
    }
}

From source file:br.edimarmanica.trinity.intrasitemapping.manual.Mapping.java

private List<Map<String, String>> readOffset(File offsetFile) {
    List<Map<String, String>> offset = new ArrayList<>(); // each file is an offset

    try (Reader in = new FileReader(offsetFile)) {
        try (CSVParser parser = new CSVParser(in, CSVFormat.EXCEL)) {
            int nrRegistro = 0;
            for (CSVRecord record : parser) {

                for (int nrRegra = 0; nrRegra < record.size(); nrRegra++) {
                    String value;
                    try {
                        value = Formatter.formatValue(Preprocessing.filter(record.get(nrRegra)));
                    } catch (InvalidValue ex) {
                        value = "";
                    }

                    if (nrRegistro == 0) {
                        Map<String, String> regra = new HashMap<>();
                        regra.put(Formatter.formatURL(record.get(0)), value);
                        offset.add(regra);
                    } else {
                        offset.get(nrRegra).put(Formatter.formatURL(record.get(0)), value);
                    }
                }
                nrRegistro++;
            }
        }
    } catch (FileNotFoundException ex) {
        Logger.getLogger(Mapping.class.getName()).log(Level.SEVERE, null, ex);
    } catch (IOException ex) {
        Logger.getLogger(Mapping.class.getName()).log(Level.SEVERE, null, ex);
    }

    return offset;
}

From source file:com.github.jferard.pgloaderutils.sniffer.csv.CSVOptionalHeaderSniffer.java

@Override
public void sniff(final InputStream inputStream, final int size) throws IOException {
    final Reader streamReader = new InputStreamReader(inputStream, this.charset);

    final CSVParser parser = new CSVParser(streamReader, this.csvFormat);
    try {
        final Iterator<CSVRecord> iterator = parser.iterator();

        if (iterator.hasNext()) {
            final CSVRecord firstRowRecord = iterator.next();
            final int firstRowSize = firstRowRecord.size();

            final char[] firstRowSignature = this.rowSignaturesAnalyzer.getSignature(firstRowRecord,
                    firstRowSize);

            if (this.containsAtLeastOneOnlyDigitsValue(firstRowSignature)) {
                this.header = null;
            } else {
                final char[] remainingRowsSignature = this.rowSignaturesAnalyzer
                        .getRemainingRowsSignature(iterator, firstRowSize);
                if (this.containsAtLeastOneColumnWithLetterHeaderAndDigitValues(firstRowSignature,
                        remainingRowsSignature, firstRowSize)) {
                    // copy firstRow in header
                    for (final String s : firstRowRecord)
                        this.header.add(s);
                }
            }
        } else
            this.header = null;
    } finally {
        parser.close();
    }
}

From source file:com.streamsets.pipeline.lib.csv.CsvParser.java

private String[] toArray(CSVRecord record) {
    String[] array = (record == null) ? null : new String[record.size()];
    if (array != null) {
        for (int i = 0; i < record.size(); i++) {
            array[i] = record.get(i);
        }
    }
    return array;
}

From source file:co.cask.hydrator.transforms.ParseCSV.java

@Override
public void transform(StructuredRecord in, Emitter<StructuredRecord> emitter) throws Exception {
    // The field has to be a string to be parsed correctly; for other types, throw an exception.
    String body = in.get(config.field);

    // Parse the text as CSV and emit it as structured record.
    try {
        CSVParser parser = CSVParser.parse(body, csvFormat);
        List<CSVRecord> records = parser.getRecords();
        for (CSVRecord record : records) {
            if (fields.size() == record.size()) {
                StructuredRecord sRecord = createStructuredRecord(record);
                emitter.emit(sRecord);
            } else {
                LOG.warn("Skipping record as ouput schema specified has '{}' fields, while CSV record has '{}'",
                        fields.size(), record.size());
                // Write the record to error Dataset.
            }
        }
    } catch (IOException e) {
        LOG.error("There was a issue parsing the record. ", e.getLocalizedMessage());
    }
}