List of usage examples for org.apache.commons.csv CSVParser CSVParser
public CSVParser(final Reader reader, final CSVFormat format) throws IOException
If you do not read all records from the given reader, you should call #close() on the parser, unless you close the reader.
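For example, a minimal sketch of that pattern (the file name "people.csv" and the column name "name" are placeholders, not taken from the examples below): opening both the reader and the parser in a try-with-resources block satisfies the rule above, because the parser is closed even when iteration stops before the last record.

import java.io.FileReader;
import java.io.Reader;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVParser;
import org.apache.commons.csv.CSVRecord;

public class CsvParserCloseExample {
    public static void main(String[] args) throws Exception {
        // try-with-resources closes the parser and the reader automatically,
        // so the close() rule above is satisfied even if we stop reading early
        try (Reader reader = new FileReader("people.csv");
                CSVParser parser = new CSVParser(reader, CSVFormat.DEFAULT.withHeader())) {
            for (CSVRecord record : parser) {
                System.out.println(record.get("name")); // "name" is a placeholder column
            }
        }
    }
}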
From source file:org.opennms.netmgt.jasper.analytics.HWForecastReportTest.java
private void verify() throws Exception {
    Table<Integer, String, Double> forecasts = TreeBasedTable.create();
    try (FileReader reader = new FileReader(m_csvFile);
            CSVParser parser = new CSVParser(reader, CSVFormat.RFC4180.withHeader())) {
        int k = 0;
        for (CSVRecord record : parser) {
            try {
                Double fit = Double.parseDouble(record.get("HWFit"));
                Double lwr = Double.parseDouble(record.get("HWLwr"));
                Double upr = Double.parseDouble(record.get("HWUpr"));
                if (Double.isNaN(fit)) {
                    continue;
                }
                forecasts.put(k, "fit", fit);
                forecasts.put(k, "lwr", lwr);
                forecasts.put(k, "upr", upr);
                k++;
            } catch (NumberFormatException e) {
                // pass
            }
        }
    }

    assertEquals(340, forecasts.rowKeySet().size());
    // First fitted value
    assertEquals(432.526086422424, forecasts.get(0, "fit"), 0.00001);
    // Last fitted value for which there is a known datapoint
    assertEquals(24079.4692522087, forecasts.get(327, "fit"), 0.00001);
    // First forecasted value
    assertEquals(22245.5417010936, forecasts.get(328, "fit"), 0.00001);
}
From source file:org.opennms.netmgt.jasper.measurement.MeasurementQueryExecutorRemoteIT.java
@Test
public void testReportHwForecast() throws IOException, JRException {
    createReport("Forecast", new ReportFiller() {
        @Override
        public void fill(Map<String, Object> params) throws Exception {
            params.put(JRParameter.IS_IGNORE_PAGINATION, true);
            params.put("MEASUREMENT_URL", "http://localhost:9999/opennms/rest/measurements");
            params.put("dsName", "ifInOctets");
            params.put("startDate", "1414602000000");
            params.put("endDate", "1417046400000");
        }
    });

    // Verify the results of the generated report
    Table<Integer, String, Double> forecasts = TreeBasedTable.create();
    FileReader reader = new FileReader(createFileName("Forecast", "csv"));
    CSVParser parser = new CSVParser(reader, CSVFormat.RFC4180.withHeader());
    int k = 0;
    for (CSVRecord record : parser) {
        try {
            Double fit = Double.parseDouble(record.get("HWFit"));
            Double lwr = Double.parseDouble(record.get("HWLwr"));
            Double upr = Double.parseDouble(record.get("HWUpr"));
            if (Double.isNaN(fit)) {
                continue;
            }
            forecasts.put(k, "fit", fit);
            forecasts.put(k, "lwr", lwr);
            forecasts.put(k, "upr", upr);
            k++;
        } catch (NumberFormatException e) {
            // pass
        }
    }
    Assert.assertEquals(340, forecasts.rowKeySet().size());
    // First fitted value
    Assert.assertEquals(432.526086422424, forecasts.get(0, "fit"), 0.00001);
    // Last fitted value for which there is a known data point
    Assert.assertEquals(24079.4692522087, forecasts.get(327, "fit"), 0.00001);
    // First forecasted value
    Assert.assertEquals(22245.5417010936, forecasts.get(328, "fit"), 0.00001);
}
From source file:org.pad.pgsql.loadmovies.LoadFiles.java
/**
 * Load movies from the CSV file and save them in the DB.
 *
 * @throws Exception
 */
private static void loadMoviesAndLinks() throws Exception {
    MovieDao movieDao = new MovieDao(DS);
    Map<Integer, Integer[]> moviesLinks = new HashMap<>();
    // Load all link information in memory to enrich the movies afterwards
    CSVParser parser = new CSVParser(new FileReader("C:\\PRIVE\\SRC\\ml-20m\\links.csv"),
            CSVFormat.EXCEL.withHeader());
    for (CSVRecord link : parser) {
        Integer movieId = Integer.parseInt(link.get("movieId"));
        if (keepId(movieId)) {
            System.out.println("Parsing line " + link.toString());
            Integer[] otherIds = new Integer[2];
            otherIds[0] = Integer.parseInt(link.get("imdbId"));
            if (StringUtils.isNoneEmpty(link.get("tmdbId"))) {
                otherIds[1] = Integer.parseInt(link.get("tmdbId"));
            }
            moviesLinks.put(movieId, otherIds);
        }
    }
    // Read the movie file
    final Reader reader = new FileReader("C:\\PRIVE\\SRC\\ml-20m\\movies.csv");
    parser = new CSVParser(reader, CSVFormat.EXCEL.withHeader());
    for (CSVRecord record : parser) {
        // Build a movie object from the record
        Integer movieId = Integer.parseInt(record.get("movieId"));
        if (keepId(movieId)) {
            String title = record.get("title");
            String genres = record.get("genres");
            // Split the title to extract the date
            String movieDate = StringUtils.substringBeforeLast(StringUtils.substringAfterLast(title, "("), ")");
            String movieName = null;
            if (StringUtils.isNumeric(movieDate)) {
                movieName = StringUtils.substringBeforeLast(title, "(");
            } else {
                movieName = title;
                movieDate = null;
            }
            System.out.println(movieName + " - " + movieDate);
            Movie movieToAdd = new Movie(movieId, movieName, movieDate);
            // Enrich the movie with links
            Integer[] additionalIds = moviesLinks.get(movieId);
            if (additionalIds != null) {
                movieToAdd.setImdbId(additionalIds[0]);
                movieToAdd.setTmdbId(additionalIds[1]);
            }
            // Save in the database
            movieDao.save(movieToAdd);
        }
    }
}
From source file:org.pad.pgsql.loadmovies.LoadFiles.java
/**
 * Read tags and load them into a multi-value map.
 *
 * @return MultiValueMap with the movieId as key and all of its tags as values.
 * @throws Exception it is only a demo.
 */
private static LinkedMultiValueMap<Integer, Tag> readTags() throws Exception {
    TagDao tagDao = new TagDao(DS);
    LinkedMultiValueMap<Integer, Tag> tags = new LinkedMultiValueMap();
    final Reader reader = new FileReader("C:\\PRIVE\\SRC\\ml-20m\\tags.csv");
    CSVParser parser = new CSVParser(reader, CSVFormat.EXCEL.withHeader());
    for (CSVRecord record : parser) {
        Integer movieId = Integer.parseInt(record.get("movieId"));
        Integer userId = Integer.parseInt(record.get("userId"));
        if (keepId(movieId) && keepId(userId)) {
            // CSV header: userId,movieId,tag,timestamp
            Tag newTag = new Tag();
            newTag.setUserId(userId);
            newTag.setMovieId(movieId);
            newTag.setTag(record.get("tag"));
            newTag.setDate(new Date(Long.parseLong(record.get("timestamp")) * 1000));
            // Add to the map for JSON loading
            tags.add(newTag.getMovieId(), newTag);
            // Saving in the tag table
            //tagDao.save(newTag);
        }
    }
    return tags;
}
From source file:org.pad.pgsql.loadmovies.LoadFiles.java
/**
 * Load ratings and enrich movies with tag information before updating the related movie.
 *
 * @throws Exception
 */
private static void loadRatings() throws Exception {
    // MultiValueMap with the movieId as key and all of its tags as values
    LinkedMultiValueMap<Integer, Tag> tags = readTags();
    // MultiValueMap with the movieId as key and all of its ratings as values
    LinkedMultiValueMap<Integer, Rating> ratings = new LinkedMultiValueMap();
    // CSV header: userId,movieId,rating,timestamp
    final Reader reader = new FileReader("C:\\PRIVE\\SRC\\ml-20m\\ratings.csv");
    CSVParser parser = new CSVParser(reader, CSVFormat.EXCEL.withHeader());
    RatingDao ratingDao = new RatingDao(DS);
    for (CSVRecord record : parser) {
        Integer movieId = Integer.parseInt(record.get("movieId"));
        Integer userId = Integer.parseInt(record.get("userId"));
        if (keepId(movieId) && keepId(userId)) {
            // Build a rating object
            Rating rating = new Rating();
            rating.setUserId(userId);
            rating.setMovieId(movieId);
            rating.setRating(Float.parseFloat(record.get("rating")));
            rating.setDate(new Date(Long.parseLong(record.get("timestamp")) * 1000));
            // Add for JSON saving
            ratings.add(rating.getMovieId(), rating);
            // Traditional saving
            //ratingDao.save(rating);
        }
    }
    MovieDaoJSON movieDaoJSON = new MovieDaoJSON(DS);
    ratings.entrySet().stream().forEach((integerListEntry -> {
        // Build the other-information object
        OtherInformations otherInformations = new OtherInformations();
        List ratingList = integerListEntry.getValue();
        otherInformations.setRatings(ratingList.subList(0, Math.min(10, ratingList.size())));
        otherInformations.computeMean();
        // Retrieve the tags for the movieId
        otherInformations.setTags(tags.get(integerListEntry.getKey()));
        try {
            movieDaoJSON.addOtherInformationsToMovie(integerListEntry.getKey(), otherInformations);
        } catch (JsonProcessingException e) {
            e.printStackTrace();
        }
    }));
}
From source file:org.qamatic.mintleaf.readers.CsvReader.java
protected CSVParser getCSVParser() throws IOException {
    return new CSVParser(afileReader, CSVFormat.EXCEL.withHeader().withIgnoreEmptyLines());
}
From source file:org.qamatic.mintleaf.readers.CsvRowListWrapper.java
protected CSVParser getCSVParser() throws MintleafException {
    if (parser == null) {
        try {
            parser = new CSVParser(afileReader, CSVFormat.EXCEL.withHeader().withIgnoreEmptyLines());
        } catch (IOException e) {
            throw new MintleafException(e);
        }
    }
    return parser;
}
From source file:org.shareok.data.documentProcessor.CsvHandler.java
/**
 * Reads out the data in a CSV file and stores the data in a hash map.
 * <p>Also sets the total record count and the file heading.</p>
 */
@Override
public void readData() {
    FileReader fileReader = null;
    CSVParser csvFileParser = null;
    String[] headMapping = null;
    //CSVFormat csvFileFormat = CSVFormat.DEFAULT.withHeader(FILE_HEADER_MAPPING);
    try {
        // Initialize the FileReader object
        fileReader = new FileReader(fileName);
        // Initialize the CSVParser object
        if (null == csvFormat) {
            csvFormat = CSVFormat.DEFAULT;
        }
        csvFileParser = new CSVParser(fileReader, csvFormat);
        // Get a list of CSV file records
        List csvRecords = csvFileParser.getRecords();
        int size = csvRecords.size();
        setRecordCount(size);
        data = new HashMap();
        // The first record holds the header; the remaining records hold the data
        for (int i = 0; i < size; i++) {
            CSVRecord record = (CSVRecord) csvRecords.get(i);
            if (null != record) {
                if (i == 0) {
                    List headMappingList = new ArrayList();
                    Iterator it = record.iterator();
                    while (it.hasNext()) {
                        String value = (String) it.next();
                        headMappingList.add(value);
                    }
                    headMapping = new String[headMappingList.size()];
                    headMapping = (String[]) headMappingList.toArray(headMapping);
                    setFileHeadMapping(headMapping);
                } else {
                    for (int j = 0; j < fileHeadMapping.length; j++) {
                        String colName = fileHeadMapping[j].trim();
                        String key = colName + "-" + i;
                        data.put(key, record.get(j));
                    }
                }
            }
        }
    } catch (Exception e) {
        System.out.println("Error in CsvFileReader !!!");
        e.printStackTrace();
    } finally {
        try {
            fileReader.close();
            csvFileParser.close();
        } catch (IOException e) {
            System.out.println("Error while closing fileReader/csvFileParser !!!");
            e.printStackTrace();
        }
    }
}
From source file:org.slc.sli.sample.transform.CcsCsvReader.java
void load() throws IOException {
    File file = new File(fileLocation);
    file = removeEmptyLinesFromCsv(file);
    if (containsCopyright) {
        copyright = removeTrailingCharacters(tail(file), ',');
        file = removeLastLine(file);
    }
    InputStreamReader isReader = new InputStreamReader(new FileInputStream(file), "UTF-8");
    csvParser = new CSVParser(isReader, CSVStrategy.EXCEL_STRATEGY);
    firstLine = csvParser.getLine();
    getNextRecord();
}
From source file:org.softinica.maven.jmeter.report.parser.CSVReportParser.java
@Override
public Input parseInput(InputDefinition definition) {
    CSVParser parser = null;
    Input input = new Input();
    try {
        Reader reader = new InputStreamReader(new FileInputStream(definition.getInputFile()));
        parser = new CSVParser(reader, CSVFormat.DEFAULT);
        Iterator<CSVRecord> it = parser.iterator();
        while (it.hasNext()) {
            Sample sample = new Sample();
            CSVRecord record = it.next();
            sample.setTimestamp(Long.valueOf(record.get(0)));
            sample.setLabel(record.get(2));
            sample.setValue(Double.valueOf(record.get(4)) * definition.getScale());
            sample.setSuccess(Boolean.parseBoolean(record.get(7)));
            input.getSamples().add(sample);
        }
    } catch (IOException e) {
        throw new RuntimeException(e);
    } finally {
        Utils.close(parser);
    }
    return input;
}