List of usage examples for org.apache.commons.csv CSVRecord get
public String get(final String name)
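Before the project examples below, here is a minimal sketch of the typical get(String) usage pattern: parse with a header-aware format, then read each column by name. The file name "users.csv" and the column names "name" and "email" are invented for illustration.

import java.io.Reader;
import java.nio.file.Files;
import java.nio.file.Paths;

import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVParser;
import org.apache.commons.csv.CSVRecord;

public class CsvRecordGetExample {
    public static void main(String[] args) throws Exception {
        // "users.csv" and its column names are hypothetical.
        try (Reader in = Files.newBufferedReader(Paths.get("users.csv"));
                CSVParser parser = new CSVParser(in, CSVFormat.RFC4180.withHeader())) {
            for (CSVRecord record : parser) {
                // get(String) only works when the format declares a header;
                // without one it throws IllegalStateException.
                String name = record.get("name");
                String email = record.get("email");
                System.out.println(name + " <" + email + ">");
            }
        }
    }
}

Note that withHeader() with no arguments treats the first record as the header, so the names passed to get(String) must match that first line exactly.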
From source file:org.opennms.netmgt.jasper.analytics.HWForecastReportTest.java
private void verify() throws Exception {
    Table<Integer, String, Double> forecasts = TreeBasedTable.create();
    try (FileReader reader = new FileReader(m_csvFile);
            CSVParser parser = new CSVParser(reader, CSVFormat.RFC4180.withHeader())) {
        int k = 0;
        for (CSVRecord record : parser) {
            try {
                Double fit = Double.parseDouble(record.get("HWFit"));
                Double lwr = Double.parseDouble(record.get("HWLwr"));
                Double upr = Double.parseDouble(record.get("HWUpr"));
                if (Double.isNaN(fit)) {
                    continue;
                }
                forecasts.put(k, "fit", fit);
                forecasts.put(k, "lwr", lwr);
                forecasts.put(k, "upr", upr);
                k++;
            } catch (NumberFormatException e) {
                // pass
            }
        }
    }
    assertEquals(340, forecasts.rowKeySet().size());
    // First fitted value
    assertEquals(432.526086422424, forecasts.get(0, "fit"), 0.00001);
    // Last fitted value for which there is a known data point
    assertEquals(24079.4692522087, forecasts.get(327, "fit"), 0.00001);
    // First forecasted value
    assertEquals(22245.5417010936, forecasts.get(328, "fit"), 0.00001);
}
From source file:org.opennms.netmgt.jasper.measurement.MeasurementQueryExecutorRemoteIT.java
@Test
public void testReportHwForecast() throws IOException, JRException {
    createReport("Forecast", new ReportFiller() {
        @Override
        public void fill(Map<String, Object> params) throws Exception {
            params.put(JRParameter.IS_IGNORE_PAGINATION, true);
            params.put("MEASUREMENT_URL", "http://localhost:9999/opennms/rest/measurements");
            params.put("dsName", "ifInOctets");
            params.put("startDate", "1414602000000");
            params.put("endDate", "1417046400000");
        }
    });

    // Verify the results of the generated report
    Table<Integer, String, Double> forecasts = TreeBasedTable.create();
    FileReader reader = new FileReader(createFileName("Forecast", "csv"));
    CSVParser parser = new CSVParser(reader, CSVFormat.RFC4180.withHeader());
    int k = 0;
    for (CSVRecord record : parser) {
        try {
            Double fit = Double.parseDouble(record.get("HWFit"));
            Double lwr = Double.parseDouble(record.get("HWLwr"));
            Double upr = Double.parseDouble(record.get("HWUpr"));
            if (Double.isNaN(fit)) {
                continue;
            }
            forecasts.put(k, "fit", fit);
            forecasts.put(k, "lwr", lwr);
            forecasts.put(k, "upr", upr);
            k++;
        } catch (NumberFormatException e) {
            // pass
        }
    }

    Assert.assertEquals(340, forecasts.rowKeySet().size());
    // First fitted value
    Assert.assertEquals(432.526086422424, forecasts.get(0, "fit"), 0.00001);
    // Last fitted value for which there is a known data point
    Assert.assertEquals(24079.4692522087, forecasts.get(327, "fit"), 0.00001);
    // First forecasted value
    Assert.assertEquals(22245.5417010936, forecasts.get(328, "fit"), 0.00001);
}
From source file:org.opens.referentiel.creator.CodeGeneratorMojo.java
private void writeToI18NFile(FileGenerator fg, CSVRecord record, String lang)
        throws IOException, InvalidParameterException {
    Integer themeIndex = Integer.valueOf(record.get(THEME_CODE_COLUMN_NAME));
    String theme = record.get(THEME_LABEL_COLUMN_NAME + lang);
    String critere;
    String critereCode;
    String test = record.get(TEST_LABEL_COLUMN_NAME + langSet.first());
    String testCode = record.get(TEST_CODE_COLUMN_NAME);
    if (isCriterionPresent) {
        critere = record.get(CRITERION_LABEL_COLUMN_NAME + lang);
        critereCode = record.get(CRITERION_CODE_COLUMN_NAME);
    } else {
        critere = test;
        critereCode = testCode;
    }
    if (StringUtils.isBlank(theme) || StringUtils.isBlank(critere) || StringUtils.isBlank(critereCode)) {
        throw new InvalidParameterException("Your csv file has an empty column");
    }
    Map themeMap = Collections.singletonMap(themeIndex, theme);
    Map critereMap = Collections.singletonMap(critereCode, critere);
    Map testMap = Collections.singletonMap(testCode, test);
    if (StringUtils.isNotBlank(theme) && StringUtils.isNotBlank(String.valueOf(themeIndex))) {
        fg.writei18NFile(themeMap, lang, langSet.first(), "theme");
    }
    if (StringUtils.isNotBlank(critere) && StringUtils.isNotBlank(critereCode)) {
        fg.writei18NFile(critereMap, lang, langSet.first(), "criterion");
    }
    if (StringUtils.isNotBlank(test) && StringUtils.isNotBlank(testCode)) {
        fg.writei18NFile(testMap, lang, langSet.first(), "rule");
    }
    if (!IS_I18N_REFERENTIAL_CREATED) {
        fg.writei18NFile(null, lang, langSet.first(), "referential");
    }
}
From source file:org.pad.pgsql.loadmovies.LoadFiles.java
/**
 * Load movies from the csv file and save them in the DB.
 *
 * @throws Exception
 */
private static void loadMoviesAndLinks() throws Exception {
    MovieDao movieDao = new MovieDao(DS);
    Map<Integer, Integer[]> moviesLinks = new HashMap<>();
    // Load all link information in memory to enrich the movies afterwards
    CSVParser parser = new CSVParser(new FileReader("C:\\PRIVE\\SRC\\ml-20m\\links.csv"),
            CSVFormat.EXCEL.withHeader());
    for (CSVRecord link : parser) {
        Integer movieId = Integer.parseInt(link.get("movieId"));
        if (keepId(movieId)) {
            System.out.println("Parsing line " + link.toString());
            Integer[] otherIds = new Integer[2];
            otherIds[0] = Integer.parseInt(link.get("imdbId"));
            if (StringUtils.isNoneEmpty(link.get("tmdbId"))) {
                otherIds[1] = Integer.parseInt(link.get("tmdbId"));
            }
            moviesLinks.put(movieId, otherIds);
        }
    }
    // Read the movie file
    final Reader reader = new FileReader("C:\\PRIVE\\SRC\\ml-20m\\movies.csv");
    parser = new CSVParser(reader, CSVFormat.EXCEL.withHeader());
    for (CSVRecord record : parser) {
        // Build a movie object from the record
        Integer movieId = Integer.parseInt(record.get("movieId"));
        if (keepId(movieId)) {
            String title = record.get("title");
            String genres = record.get("genres");
            // Split the title to extract the date
            String movieDate = StringUtils.substringBeforeLast(StringUtils.substringAfterLast(title, "("), ")");
            String movieName = null;
            if (StringUtils.isNumeric(movieDate)) {
                movieName = StringUtils.substringBeforeLast(title, "(");
            } else {
                movieName = title;
                movieDate = null;
            }
            System.out.println(movieName + " - " + movieDate);
            Movie movieToAdd = new Movie(movieId, movieName, movieDate);
            // Enrich the movie with its links
            Integer[] additionalIds = moviesLinks.get(movieId);
            if (additionalIds != null) {
                movieToAdd.setImdbId(additionalIds[0]);
                movieToAdd.setTmdbId(additionalIds[1]);
            }
            // Save in the database
            movieDao.save(movieToAdd);
        }
    }
}
From source file:org.pad.pgsql.loadmovies.LoadFiles.java
/**
 * Read tags and load them into a multivalue map.
 *
 * @return MultiValueMap with key movieId and values all the tags for that movie.
 * @throws Exception as this is only a demo.
 */
private static LinkedMultiValueMap<Integer, Tag> readTags() throws Exception {
    TagDao tagDao = new TagDao(DS);
    LinkedMultiValueMap<Integer, Tag> tags = new LinkedMultiValueMap<>();
    final Reader reader = new FileReader("C:\\PRIVE\\SRC\\ml-20m\\tags.csv");
    CSVParser parser = new CSVParser(reader, CSVFormat.EXCEL.withHeader());
    for (CSVRecord record : parser) {
        Integer movieId = Integer.parseInt(record.get("movieId"));
        Integer userId = Integer.parseInt(record.get("userId"));
        if (keepId(movieId) && keepId(userId)) {
            // CSV header: userId,movieId,tag,timestamp
            Tag newTag = new Tag();
            newTag.setUserId(userId);
            newTag.setMovieId(movieId);
            newTag.setTag(record.get("tag"));
            newTag.setDate(new Date(Long.parseLong(record.get("timestamp")) * 1000));
            // Add to the map for JSON loading
            tags.add(newTag.getMovieId(), newTag);
            // Traditional saving in the tag table
            // tagDao.save(newTag);
        }
    }
    return tags;
}
From source file:org.pad.pgsql.loadmovies.LoadFiles.java
/**
 * Load ratings and enrich movies with tag information before updating the related movie.
 *
 * @throws Exception
 */
private static void loadRatings() throws Exception {
    // MultiValueMap with key movieId and values all the tags for that movie
    LinkedMultiValueMap<Integer, Tag> tags = readTags();
    // MultiValueMap with key movieId and values all the ratings for that movie
    LinkedMultiValueMap<Integer, Rating> ratings = new LinkedMultiValueMap<>();
    // CSV header: userId,movieId,rating,timestamp
    final Reader reader = new FileReader("C:\\PRIVE\\SRC\\ml-20m\\ratings.csv");
    CSVParser parser = new CSVParser(reader, CSVFormat.EXCEL.withHeader());
    RatingDao ratingDao = new RatingDao(DS);
    for (CSVRecord record : parser) {
        Integer movieId = Integer.parseInt(record.get("movieId"));
        Integer userId = Integer.parseInt(record.get("userId"));
        if (keepId(movieId) && keepId(userId)) {
            // Build a rating object
            Rating rating = new Rating();
            rating.setUserId(userId);
            rating.setMovieId(movieId);
            rating.setRating(Float.parseFloat(record.get("rating")));
            rating.setDate(new Date(Long.parseLong(record.get("timestamp")) * 1000));
            // Add for JSON saving
            ratings.add(rating.getMovieId(), rating);
            // Traditional saving
            // ratingDao.save(rating);
        }
    }
    MovieDaoJSON movieDaoJSON = new MovieDaoJSON(DS);
    ratings.entrySet().stream().forEach(integerListEntry -> {
        // Build the other-information object
        OtherInformations otherInformations = new OtherInformations();
        List ratingList = integerListEntry.getValue();
        otherInformations.setRatings(ratingList.subList(0, Math.min(10, ratingList.size())));
        otherInformations.computeMean();
        // Retrieve the tags from the movieId
        otherInformations.setTags(tags.get(integerListEntry.getKey()));
        try {
            movieDaoJSON.addOtherInformationsToMovie(integerListEntry.getKey(), otherInformations);
        } catch (JsonProcessingException e) {
            e.printStackTrace();
        }
    });
}
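All three LoadFiles examples convert the raw "timestamp" column the same way: the ml-20m files store seconds since the Unix epoch, while java.util.Date expects milliseconds, hence the multiplication by 1000. A self-contained sketch of just that conversion, with an invented sample value:

import java.util.Date;

public class TimestampColumnExample {
    public static void main(String[] args) {
        // A raw value as it would come back from record.get("timestamp"):
        // seconds since the epoch, so scale to milliseconds for Date.
        String raw = "1112484727";
        Date date = new Date(Long.parseLong(raw) * 1000);
        System.out.println(date);
    }
}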
From source file:org.phenotips.oo.OmimSourceParser.java
private void loadSymptoms(boolean positive) {
    try (BufferedReader in = new BufferedReader(
            new InputStreamReader(new URL(positive ? POSITIVE_ANNOTATIONS_URL : NEGATIVE_ANNOTATIONS_URL)
                    .openConnection().getInputStream(), ENCODING))) {
        for (CSVRecord row : CSVFormat.TDF.parse(in)) {
            if ("OMIM".equals(row.get(0))) {
                SolrInputDocument term = this.data.get(row.get(1));
                if (term != null) {
                    term.addField(positive ? "actual_symptom" : "actual_not_symptom", row.get(4));
                }
            }
        }
    } catch (IOException ex) {
        this.logger.error("Failed to load OMIM-HPO links: {}", ex.getMessage(), ex);
    }
}
From source file:org.phenotips.oo.OmimSourceParser.java
private void loadGenes() {
    final String missing = "-";
    try (BufferedReader in = new BufferedReader(
            new InputStreamReader(new URL(GENE_ANNOTATIONS_URL).openConnection().getInputStream(), ENCODING))) {
        for (CSVRecord row : CSVFormat.TDF.withHeader().parse(in)) {
            if (!row.get("Type").contains("gene")) {
                continue;
            }
            SolrInputDocument term = this.data.get(row.get(2));
            if (term != null) {
                String gs = row.get("Approved Gene Symbol");
                if (!missing.equals(gs)) {
                    term.addField(GENE_FIELD, gs);
                }
                String eid = row.get("Ensembl Gene ID");
                if (!missing.equals(eid)) {
                    term.addField(GENE_FIELD, eid);
                }
            }
        }
    } catch (IOException ex) {
        this.logger.error("Failed to load OMIM-Gene links: {}", ex.getMessage(), ex);
    }
}
From source file:org.phenotips.oo.OmimSourceParser.java
private void loadGeneReviews() {
    try (BufferedReader in = new BufferedReader(new InputStreamReader(
            new URL(GENEREVIEWS_MAPPING_URL).openConnection().getInputStream(), ENCODING))) {
        for (CSVRecord row : CSVFormat.TDF.withHeader().parse(in)) {
            SolrInputDocument term = this.data.get(row.get(2));
            if (term != null) {
                term.addField("gene_reviews_link", "https://www.ncbi.nlm.nih.gov/books/" + row.get(0));
            }
        }
    } catch (IOException ex) {
        this.logger.error("Failed to load OMIM-GeneReviews links: {}", ex.getMessage(), ex);
    }
}
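The three OmimSourceParser examples above mix name-based access with positional access (row.get(0), row.get(2)); CSVRecord also defines a get(int) overload, which is the only option when the format declares no header. A minimal sketch, with invented TSV content:

import java.io.StringReader;

import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVRecord;

public class PositionalAccessExample {
    public static void main(String[] args) throws Exception {
        // Headerless tab-separated input, so columns can only be read by index.
        String tsv = "OMIM\t100100\tsome disorder";
        for (CSVRecord row : CSVFormat.TDF.parse(new StringReader(tsv))) {
            System.out.println(row.get(0) + " -> " + row.get(1));
        }
    }
}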
From source file:org.phenotips.vocabulary.AbstractCSVAnnotationsExtension.java
/**
 * Processes and caches the row data. By default, it simply copies every mapped value from the row. Override if
 * further processing of the data is needed.
 *
 * @param row the {@link CSVRecord data row} to process
 * @param vocabulary the vocabulary being indexed
 */
protected void processCSVRecordRow(final CSVRecord row, final Vocabulary vocabulary) {
    Map<String, String> csvData = row.toMap();
    MultiValuedMap<String, String> termData = this.data.get(row.get(ID_KEY));
    if (termData == null) {
        termData = new ArrayListValuedHashMap<>();
        this.data.put(row.get(ID_KEY), termData);
    }
    for (Map.Entry<String, String> item : csvData.entrySet()) {
        if (!ID_KEY.equals(item.getKey()) && StringUtils.isNoneBlank(item.getKey(), item.getValue())) {
            termData.put(item.getKey(), item.getValue());
        }
    }
}
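One caveat that applies to all of the loops above: get(String) fails fast rather than returning null. It throws IllegalStateException when the parser was created without any header mapping, and IllegalArgumentException when the name is not in the mapping. Where a column may legitimately be absent, a small helper along these lines can keep the parsing loops clean; the class name and defaulting policy below are my own, not taken from any of the sources above:

import org.apache.commons.csv.CSVRecord;

public final class CsvRecords {
    private CsvRecords() {
    }

    /**
     * Returns the named column's value, or the given default when the column
     * is not present for this record.
     */
    public static String getOrDefault(CSVRecord record, String name, String defaultValue) {
        // isSet(name) is true only when the name is mapped in the header and
        // this particular (possibly short) record actually contains a value.
        return record.isSet(name) ? record.get(name) : defaultValue;
    }
}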