Example usage for org.apache.commons.csv CSVParser parse

Introduction

On this page you can find example usages of org.apache.commons.csv CSVParser.parse.

Prototype

public static CSVParser parse(final String string, final CSVFormat format) throws IOException 

Document

Creates a parser for the given String.
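
As a quick orientation before the real-world examples below, here is a minimal sketch of this overload. It assumes Commons CSV is on the classpath; the class name, CSV content, and column names are invented for illustration.

import java.io.IOException;

import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVParser;
import org.apache.commons.csv.CSVRecord;

public class CsvParseExample {
    public static void main(String[] args) throws IOException {
        // Illustrative in-memory CSV; any String source works with this overload.
        String csvData = "name,score\nalice,42\nbob,17";
        // parse(String, CSVFormat) returns a CSVParser; it is Closeable, so
        // try-with-resources releases it once iteration is done.
        try (CSVParser parser = CSVParser.parse(csvData, CSVFormat.DEFAULT.withHeader())) {
            for (CSVRecord record : parser) {
                System.out.println(record.get("name") + " -> " + record.get("score"));
            }
        }
    }
}

Several of the examples below follow the same pattern of reading an entire file or response into a String first, which is convenient but keeps the whole input in memory; for large inputs the overloads that take a File or Reader may be a better fit.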

Usage

From source file:com.team3637.service.TeamServiceMySQLImpl.java
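
Reads a whole CSV file into a String, strips carriage returns, and parses it with a "\n" record separator. Each record is mapped to a Team, the bracketed tag column is split into a trimmed list, and the team is updated or created depending on whether it already exists.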

@Override
public void importCSV(String inputFile) {
    try {
        String csvData = new String(Files.readAllBytes(FileSystems.getDefault().getPath(inputFile)));
        csvData = csvData.replaceAll("\\r", "");
        CSVParser parser = CSVParser.parse(csvData, CSVFormat.DEFAULT.withRecordSeparator("\n"));
        for (CSVRecord record : parser) {
            Team team = new Team();
            team.setId(Integer.parseInt(record.get(0)));
            team.setTeam(Integer.parseInt(record.get(1)));
            team.setAvgscore(Double.parseDouble(record.get(2)));
            team.setMatches(Integer.parseInt(record.get(3)));
            String[] tags = record.get(4).substring(1, record.get(4).length() - 1).split(",");
            for (int i = 0; i < tags.length; i++)
                tags[i] = tags[i].trim();
            if (tags.length > 0 && !tags[0].equals(""))
                team.setTags(Arrays.asList(tags));
            else
                team.setTags(new ArrayList<String>());
            if (checkForTeam(team.getTeam()))
                update(team);
            else
                create(team);
        }
    } catch (IOException e) {
        e.printStackTrace();
    }
}

From source file:com.team3637.service.MatchServiceMySQLImpl.java
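
The same import pattern applied to match data: each record becomes a Match, the bracketed tag column is parsed into a list, and existing matches are updated rather than duplicated.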

@Override
public void importCSV(String inputFile) {
    try {
        String csvData = new String(Files.readAllBytes(FileSystems.getDefault().getPath(inputFile)));
        csvData = csvData.replaceAll("\\r", "");
        CSVParser parser = CSVParser.parse(csvData, CSVFormat.DEFAULT.withRecordSeparator("\n"));
        for (CSVRecord record : parser) {
            Match match = new Match();
            match.setId(Integer.parseInt(record.get(0)));
            match.setMatchNum(Integer.parseInt(record.get(1)));
            match.setTeam(Integer.parseInt(record.get(2)));
            match.setScore(Integer.parseInt(record.get(3)));
            String[] tags = record.get(4).substring(1, record.get(4).length() - 1).split(",");
            for (int i = 0; i < tags.length; i++)
                tags[i] = tags[i].trim();
            if (tags.length > 0 && !tags[0].equals(""))
                match.setTags(Arrays.asList(tags));
            else
                match.setTags(new ArrayList<String>());
            if (checkForMatch(match.getMatchNum(), match.getTeam()))
                update(match);
            else
                create(match);
        }
    } catch (IOException e) {
        e.printStackTrace();
    }
}

From source file:it.newfammulfin.api.EntryResource.java
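
A JAX-RS endpoint that accepts a text/csv body and imports it as accounting entries. The body is parsed with a header row, the per-user columns are validated, missing chapters are created inside a transaction, and each entry's "by" and "for" shares are computed from those columns.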

@POST
@Consumes("text/csv")
@Produces(MediaType.TEXT_PLAIN)
public Response importFromCsv(String csvData,
        @DefaultValue("false") @QueryParam("invertSign") final boolean invertSign) {
    final Group group = (Group) requestContext.getProperty(GroupRetrieverRequestFilter.GROUP);
    final Map<String, Key<Chapter>> chapterStringsMap = new HashMap<>();
    final List<CSVRecord> records;
    try {
        records = CSVParser.parse(csvData, CSVFormat.DEFAULT.withHeader()).getRecords();
    } catch (IOException e) {
        return Response.status(Response.Status.INTERNAL_SERVER_ERROR)
                .entity(String.format("Unexpected %s: %s.", e.getClass().getSimpleName(), e.getMessage()))
                .build();
    }
    //check users
    final Set<String> userIds = new HashSet<>();
    for (String columnName : records.get(0).toMap().keySet()) {
        if (columnName.startsWith("by:")) {
            String userId = columnName.replaceFirst("by:", "");
            if (!group.getUsersMap().keySet().contains(Key.create(RegisteredUser.class, userId))) {
                return Response.status(Response.Status.INTERNAL_SERVER_ERROR)
                        .entity(String.format("User %s not found in this group.", userId)).build();
            }
            userIds.add(userId);
        }
    }
    //build chapters
    final Set<String> chapterStringsSet = new HashSet<>();
    for (CSVRecord record : records) {
        chapterStringsSet.add(record.get("chapters"));
    }
    final List<Key<?>> createdKeys = new ArrayList<>();
    try {
        OfyService.ofy().transact(new Work<List<Key<?>>>() {
            @Override
            public List<Key<?>> run() {
                for (String chapterStrings : chapterStringsSet) {
                    List<String> pieces = Arrays.asList(chapterStrings.split(CSV_CHAPTERS_SEPARATOR));
                    Key<Chapter> parentChapterKey = null;
                    for (int i = 0; i < pieces.size(); i++) {
                        String partialChapterString = Joiner.on(CSV_CHAPTERS_SEPARATOR)
                                .join(pieces.subList(0, i + 1));
                        Key<Chapter> chapterKey = chapterStringsMap.get(partialChapterString);
                        if (chapterKey == null) {
                            chapterKey = OfyService.ofy().load().type(Chapter.class).ancestor(group)
                                    .filter("name", pieces.get(i)).filter("parentChapterKey", parentChapterKey)
                                    .keys().first().now();
                            chapterStringsMap.put(partialChapterString, chapterKey);
                        }
                        if (chapterKey == null) {
                            Chapter chapter = new Chapter(pieces.get(i), Key.create(group), parentChapterKey);
                            OfyService.ofy().save().entity(chapter).now();
                            chapterKey = Key.create(chapter);
                            createdKeys.add(chapterKey);
                            LOG.info(String.format("%s created.", chapter));
                        }
                        chapterStringsMap.put(partialChapterString, chapterKey);
                        parentChapterKey = chapterKey;
                    }
                }
                //build entries
                DateTimeFormatter formatter = DateTimeFormat.forPattern("dd/MM/YY");
                Key<Group> groupKey = Key.create(group);
                for (CSVRecord record : records) {
                    Entry entry = new Entry();
                    entry.setGroupKey(groupKey);
                    entry.setDate(LocalDate.parse(record.get("date"), formatter));
                    entry.setAmount(Money.of(CurrencyUnit.of(record.get("currency").toUpperCase()),
                            (invertSign ? -1 : 1) * Double.parseDouble(record.get("value"))));
                    if (!record.get("chapters").isEmpty()) {
                        entry.setChapterKey(chapterStringsMap.get(record.get("chapters")));
                    }
                    entry.setPayee(record.get("payee"));
                    for (String tag : record.get("tags").split(CSV_TAGS_SEPARATOR)) {
                        if (!tag.trim().isEmpty()) {
                            entry.getTags().add(tag);
                        }
                    }
                    entry.setDescription(record.get("description"));
                    entry.setNote(record.get("notes"));
                    int scale = Math.max(DEFAULT_SHARE_SCALE, entry.getAmount().getScale());
                    //by shares
                    for (String userId : userIds) {
                        String share = record.get("by:" + userId);
                        double value;
                        if (share.contains("%")) {
                            entry.setByPercentage(true);
                            value = Double.parseDouble(share.replace("%", ""));
                            value = entry.getAmount().getAmount().doubleValue() * value / 100d;
                        } else {
                            value = (invertSign ? -1 : 1) * Double.parseDouble(share);
                        }
                        entry.getByShares().put(Key.create(RegisteredUser.class, userId),
                                BigDecimal.valueOf(value).setScale(scale, RoundingMode.DOWN));
                    }
                    boolean equalByShares = checkAndBalanceZeroShares(entry.getByShares(),
                            entry.getAmount().getAmount());
                    entry.setByPercentage(entry.isByPercentage() || equalByShares);
                    //for shares
                    for (String userId : userIds) {
                        String share = record.get("for:" + userId);
                        double value;
                        if (share.contains("%")) {
                            entry.setForPercentage(true);
                            value = Double.parseDouble(share.replace("%", ""));
                            value = entry.getAmount().getAmount().doubleValue() * value / 100d;
                        } else {
                            value = (invertSign ? -1 : 1) * Double.parseDouble(share);
                        }
                        entry.getForShares().put(Key.create(RegisteredUser.class, userId),
                                BigDecimal.valueOf(value).setScale(scale, RoundingMode.DOWN));
                    }
                    boolean equalForShares = checkAndBalanceZeroShares(entry.getForShares(),
                            entry.getAmount().getAmount());
                    entry.setForPercentage(entry.isForPercentage() || equalForShares);
                    OfyService.ofy().save().entity(entry).now();
                    createdKeys.add(Key.create(entry));
                    EntryOperation operation = new EntryOperation(Key.create(group), Key.create(entry),
                            new Date(),
                            Key.create(RegisteredUser.class, securityContext.getUserPrincipal().getName()),
                            EntryOperation.Type.IMPORT);
                    OfyService.ofy().save().entity(operation).now();
                    LOG.info(String.format("%s created.", entry));
                }
                return createdKeys;
            }
        });
        //count keys
        int numberOfCreatedChapters = 0;
        int numberOfCreatedEntries = 0;
        for (Key<?> key : createdKeys) {
            if (key.getKind().equals(Entry.class.getSimpleName())) {
                numberOfCreatedEntries = numberOfCreatedEntries + 1;
            } else if (key.getKind().equals(Chapter.class.getSimpleName())) {
                numberOfCreatedChapters = numberOfCreatedChapters + 1;
            }
        }
        return Response.ok(String.format("Done: %d chapters and %d entries created.", numberOfCreatedChapters,
                numberOfCreatedEntries)).build();
    } catch (RuntimeException e) {
        LOG.warning(String.format("Unexpected %s: %s.", e.getClass().getSimpleName(), e.getMessage()));
        return Response.status(Response.Status.INTERNAL_SERVER_ERROR)
                .entity(String.format("Unexpected %s: %s.", e.getClass().getSimpleName(), e.getMessage()))
                .build();
    }
}

From source file:com.team3637.service.TagServiceMySQLImpl.java
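
Imports tag definitions from a CSV file, interpreting the boolean-like columns ("1" or "true") and stripping newlines from the expression column before each tag is created or updated.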

@Override
public void importCSV(String inputFile) {
    try {
        String csvData = new String(Files.readAllBytes(FileSystems.getDefault().getPath(inputFile)));
        csvData = csvData.replaceAll("\\r", "");
        CSVParser parser = CSVParser.parse(csvData, CSVFormat.DEFAULT.withRecordSeparator("\n"));
        for (CSVRecord record : parser) {
            Tag tag = new Tag();
            tag.setId(Integer.parseInt(record.get(0)));
            tag.setTag(record.get(1));
            tag.setType(record.get(2));
            tag.setCounter(record.get(3));
            tag.setInTable(record.get(4).equals("1") || record.get(4).toLowerCase().equals("true"));
            tag.setRequiesEval(record.get(5).equals("1") || record.get(5).toLowerCase().equals("true"));
            tag.setExpression(record.get(6).replace("\n", ""));
            if (checkForTag(tag))
                update(tag);
            else
                create(tag);
        }
    } catch (IOException e) {
        e.printStackTrace();
    }
}

From source file:com.datascience.cascading.scheme.CsvScheme.java
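
Parses just the first line of the source tap to recover the header record, forcing skipHeaderRecord to false for that single parse so the header line itself is returned.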

/**
 * Reads the header record from the source file.
 */
@SuppressWarnings("unchecked")
private CSVRecord getHeaderRecord(FlowProcess<JobConf> flowProcess, Tap tap) {
    Tap textLine = new Hfs(new TextLine(new Fields("line")),
            tap.getFullIdentifier(flowProcess.getConfigCopy()));

    try (TupleEntryIterator iterator = textLine.openForRead(flowProcess)) {
        String line = iterator.next().getTuple().getString(0);
        boolean skipHeaderRecord = format.getSkipHeaderRecord();
        CSVRecord headerRecord = CSVParser.parse(line, format.withSkipHeaderRecord(false)).iterator().next();
        format.withSkipHeaderRecord(skipHeaderRecord);
        return headerRecord;
    } catch (IOException e) {
        throw new TapException(e);
    }
}

From source file:org.ag.es.etl.IndexObjBuilder.java
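
Builds an index action from a single CSV line: the line is parsed with an explicit header and a semicolon delimiter, converted to a map, and its time fields are normalized before the document is assembled.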

public static Index buildIndexFromCSVEntry(String csv_str, String[] time_fields, String[] event_attributes,
        String index, String type, Optional<String> id_field) {
    try {
        CSVFormat csvFileFormat = CSVFormat.DEFAULT.withHeader(event_attributes).withDelimiter(';');
        Map<String, String> csv_as_map = ((CSVRecord) CSVParser.parse(csv_str, csvFileFormat).getRecords()
                .get(0)).toMap();
        for (String time_field : time_fields) {
            csv_as_map.put(time_field,
                    DateFormatExtractor.buildDateTime(csv_as_map.get(time_field)).toString());
        }
        if (id_field.isPresent()) {
            return new Index.Builder(csv_as_map).index(index).type(type).id(id_field.get()).build();
        } else {
            return new Index.Builder(csv_as_map).index(index).type(type).build();
        }
    } catch (Exception e) {
        return null;
    }
}

From source file:org.ag.es.etl.IndexObjBuilder.java
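
An overload that takes the index, type, and optional id from the CSV record itself, optionally removing those meta columns from the indexed document.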

public static Index buildIndexFromCSVEntry(String csv_str, String[] event_attributes, String index_field,
        String type_field, Optional<String> id_field, boolean drop_meta) {
    try {
        CSVFormat csvFileFormat = CSVFormat.DEFAULT.withHeader(event_attributes).withDelimiter(';');
        Map<String, String> csv_as_map = ((CSVRecord) CSVParser.parse(csv_str, csvFileFormat).getRecords()
                .get(0)).toMap();
        String index = csv_as_map.get(index_field);
        String type = csv_as_map.get(type_field);
        String id = id_field.isPresent() ? csv_as_map.get(id_field.get()) : null;
        if (drop_meta) {
            csv_as_map.remove(index_field);
            csv_as_map.remove(type_field);
            if (id_field.isPresent()) {
                csv_as_map.remove(id_field.get());
            }
        }
        if (id_field.isPresent()) {
            return new Index.Builder(csv_as_map).index(index).type(type).id(id).build();
        } else {
            return new Index.Builder(csv_as_map).index(index).type(type).build();
        }
    } catch (Exception e) {
        return null;
    }
}

From source file:org.apache.phoenix.util.csv.CsvUpsertExecutorTest.java
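
A test helper that joins the given column values into one CSV line and returns the first parsed CSVRecord.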

private CSVRecord createCsvRecord(String... columnValues) throws IOException {
    String inputRecord = Joiner.on(',').join(columnValues);
    return Iterables.getFirst(CSVParser.parse(inputRecord, CSVFormat.DEFAULT), null);
}

From source file:org.apache.storm.sql.runtime.serde.csv.CsvScheme.java
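
Deserializes a ByteBuffer holding one RFC 4180 CSV line into a list of field values, checking that the record width matches the configured field names.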

@Override
public List<Object> deserialize(ByteBuffer ser) {
    try {
        String data = new String(Utils.toByteArray(ser), StandardCharsets.UTF_8);
        CSVParser parser = CSVParser.parse(data, CSVFormat.RFC4180);
        CSVRecord record = parser.getRecords().get(0);
        Preconditions.checkArgument(record.size() == fieldNames.size(), "Invalid schema");

        ArrayList<Object> list = new ArrayList<>(fieldNames.size());
        for (int i = 0; i < record.size(); i++) {
            list.add(record.get(i));
        }
        return list;
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}

From source file:org.ecloudmanager.monitoring.HaproxyStatsCollector.java
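
Asynchronously fetches the HAProxy stats CSV endpoint, strips the leading "# " from the header line, and parses the response with a header-aware format before collecting the records.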

private void collectStats(String haproxyStatsAddr) {
    client.target("http://" + haproxyStatsAddr + ":22002" + "/;csv").request().async()
            .get(new InvocationCallback<Response>() {
                @Override
                public void completed(Response response) {
                    if (response.getStatus() == 200) {
                        String csv = response.readEntity(String.class);
                        csv = csv.replaceFirst("# ", "");
                        List<CSVRecord> records = null;
                        try {
                            records = CSVParser.parse(csv, CSVFormat.DEFAULT.withHeader()).getRecords();
                            collectRecords(haproxyStatsAddr, records);
                        } catch (IOException e) {
                            log.error(e);
                        }
                    } else {
                        log.error("Cannot connect to haproxy stats endpoint " + haproxyStatsAddr
                                + " response code " + response.getStatus());
                    }
                }

                @Override
                public void failed(Throwable throwable) {
                    log.trace("Can't get haproxy stats from " + haproxyStatsAddr, throwable);
                }
            });
}