Usage examples for `org.apache.commons.csv.CSVRecord#get`
public String get(final String name)
From source file:com.nuevebit.miroculus.mrna.cli.DatabasePopulator.java
private void parseCSV(String csv) throws IOException { CSVParser csvParser = CSVParser.parse(csv, CSVFormat.EXCEL); Iterator<CSVRecord> records = csvParser.iterator(); // ignore headers records.next();//from w ww.java 2 s.c o m // read line by line while (records.hasNext()) { CSVRecord record = records.next(); // normalize the name (remove *) String miRNAName = MiRNA.normalizeName(record.get(0)); MiRNA miRNA = miRNARepository.findByName(miRNAName); if (miRNA == null) { // primera vez que se agrega miRNA = miRNARepository.save(new MiRNA(miRNAName)); } String diseaseName = record.get(1).toLowerCase().trim(); Disease disease = diseaseRepository.findByName(diseaseName); if (disease == null) { disease = diseaseRepository.save(new Disease(diseaseName)); disease.setMortalityRate(0d); } String authorName = record.get(4).trim(); Author author = authorRepository.findByName(authorName); if (author == null) { author = authorRepository.save(new Author(authorName)); } String publicationTitle = record.get(6).trim(); String publicationJournal = record.get(5).trim(); Publication pub = publicationRepository.findByNameAndJournal(publicationTitle, publicationJournal); if (pub == null) { pub = new Publication(publicationTitle, publicationJournal); pub.setAuthor(author); String year = record.get(7); pub.setYear(Integer.valueOf(year)); pub.setDescription(record.get(9).trim()); pub = publicationRepository.save(pub); } String methodName = record.get(8).trim(); DiscoveryMethod method = discoveryMethodRepository.findByName(methodName); if (method == null) { method = discoveryMethodRepository.save(new DiscoveryMethod(methodName)); } CorrelationDiscovery correlation = new CorrelationDiscovery(miRNA, disease, Integer.valueOf(record.get(2))); correlation.setPublication(pub); correlation.setMethod(method); // save the found correlation correlationDiscoveryRepository.save(correlation); } }
From source file:ColdestWeather.ColdestWeather.java
public CSVRecord checkColdest(CSVRecord currentRow, CSVRecord coldestSoFar) { if (coldestSoFar == null) { coldestSoFar = currentRow;/*from ww w .ja v a 2 s. c o m*/ } else { double currentTemp = Double.parseDouble(currentRow.get("TemperatureF")); double coldestTemp = Double.parseDouble(coldestSoFar.get("TemperatureF")); //Check if currentTemp < coldestTemp if (currentTemp < coldestTemp && currentTemp > -9000) { //Update coldesSoFar to currentRow coldestSoFar = currentRow; } } return coldestSoFar; }
From source file:com.cotrino.langnet.GenerateVisualization.java
private double[] calculateSimilarity(File logFile) throws IOException { int i = 0;//from ww w . j a va 2s .com double total = 0.0; Reader reader = new FileReader(logFile); CSVParser parser = new CSVParser(reader, csvFormat); for (CSVRecord record : parser) { try { double similarity = Double.parseDouble(record.get("Similarity")); total += similarity; i++; } catch (NumberFormatException e) { logger.error("At " + logFile.getName() + ", failed line: " + record.toString()); } } parser.close(); reader.close(); return new double[] { (total / i), i }; }
From source file:com.team3637.service.ScheduleServiceMySQLImpl.java
@Override public void importCSV(String inputFile) { try {//from w w w .j a v a 2s .c om String csvData = new String(Files.readAllBytes(FileSystems.getDefault().getPath(inputFile))); csvData = csvData.replaceAll("\\r", ""); CSVParser parser = CSVParser.parse(csvData, CSVFormat.DEFAULT.withRecordSeparator("\n")); for (CSVRecord record : parser) { Schedule schedule = new Schedule(); schedule.setId(Integer.parseInt(record.get(0))); schedule.setMatchNum(Integer.parseInt(record.get(1))); schedule.setB1(Integer.parseInt(record.get(2))); schedule.setB2(Integer.parseInt(record.get(3))); schedule.setB3(Integer.parseInt(record.get(4))); schedule.setR1(Integer.parseInt(record.get(5))); schedule.setR2(Integer.parseInt(record.get(6))); schedule.setR3(Integer.parseInt(record.get(7))); if (checkForMatch(schedule)) update(schedule); else create(schedule); } } catch (IOException e) { e.printStackTrace(); } }
From source file:assignment.CSVFileReader.java
/**
 * Reads a CSV file into a list of maps, one map per data row, keyed by the
 * header names taken from the first record. Best-effort contract preserved
 * from the original: any failure returns the rows read so far, and cells
 * missing from a short row are stored as "".
 *
 * @param filePath path of the UTF-8 encoded CSV file
 * @return the parsed rows; possibly empty, never null
 */
@Override
public List<Map<String, String>> readFile(String filePath) {
    List<Map<String, String>> rows = new ArrayList<Map<String, String>>();
    // try-with-resources: the original never closed the reader or the parser
    try (Reader reader = new BufferedReader(
            new InputStreamReader(new FileInputStream(filePath), "utf-8"));
            CSVParser csvParser = new CSVParser(reader, CSVFormat.DEFAULT)) {
        Iterator<CSVRecord> csvRecord = csvParser.iterator();
        // Empty file: no header record, nothing to read.
        if (!csvRecord.hasNext()) {
            return rows;
        }
        CSVRecord headers = csvRecord.next();
        for (CSVRecord row : csvParser) {
            Map<String, String> item = new HashMap<String, String>();
            int colNr = 0;
            for (String header : headers) {
                String value = "";
                try {
                    value = row.get(colNr);
                } catch (Exception ex) {
                    // row shorter than the header: leave the cell empty
                }
                item.put(header, value);
                colNr++;
            }
            rows.add(item);
        }
    } catch (Exception ex) {
        // deliberate best-effort: return whatever was successfully read
    }
    return rows;
}
From source file:eu.fthevenet.binjr.data.codec.CsvDecoder.java
/**
 * Decodes CSV data from the given stream into one time series per requested
 * binding. The first CSV record supplies column headers and is skipped;
 * column 0 of each data row is parsed as the shared timestamp, and each
 * series reads its own column, looked up by the binding's label.
 *
 * @param in         the CSV input stream, decoded with {@code encoding}
 * @param seriesInfo the series to extract, one per CSV column of interest
 * @return a map from each series' info to the processor holding its samples
 * @throws IOException                      if the stream cannot be read
 * @throws DecodingDataFromAdapterException declared; parsing is delegated to
 *         dateParser/numberParser — confirm which failures surface as this type
 */
@Override
public Map<TimeSeriesInfo<T>, TimeSeriesProcessor<T>> decode(InputStream in, List<TimeSeriesInfo<T>> seriesInfo)
        throws IOException, DecodingDataFromAdapterException {
    // Profiler logs elapsed time for the whole decode at trace level
    try (Profiler ignored = Profiler.start("Building time series from csv data", logger::trace)) {
        try (BufferedReader reader = new BufferedReader(new InputStreamReader(in, encoding))) {
            CSVFormat csvFormat = CSVFormat.DEFAULT.withAllowMissingColumnNames(false).withFirstRecordAsHeader()
                    .withSkipHeaderRecord().withDelimiter(delimiter);
            Iterable<CSVRecord> records = csvFormat.parse(reader);
            Map<TimeSeriesInfo<T>, TimeSeriesProcessor<T>> series = new HashMap<>();
            // AtomicLong so the row count can be read from the trace lambda below
            final AtomicLong nbpoints = new AtomicLong(0);
            for (CSVRecord csvRecord : records) {
                nbpoints.incrementAndGet();
                // column 0 holds the timestamp shared by every series in the row
                ZonedDateTime timeStamp = dateParser.apply(csvRecord.get(0));
                for (TimeSeriesInfo<T> info : seriesInfo) {
                    T val = numberParser.apply(csvRecord.get(info.getBinding().getLabel()));
                    XYChart.Data<ZonedDateTime, T> point = new XYChart.Data<>(timeStamp, val);
                    // lazily create the processor the first time a series is seen
                    TimeSeriesProcessor<T> l = series.computeIfAbsent(info, k -> timeSeriesFactory.create());
                    l.addSample(point);
                }
            }
            logger.trace(() -> String.format("Built %d series with %d samples each (%d total samples)",
                    seriesInfo.size(), nbpoints.get(), seriesInfo.size() * nbpoints.get()));
            return series;
        }
    }
}
From source file:ColdestWeather.ColdestWeather.java
public String lowestHumidityInManyFiles() { CSVRecord lowestSoFar = null;//from ww w . j a v a2 s . c o m String lowestHumFile = ""; DirectoryResource dr = new DirectoryResource(); //Iterate over all the files for (File file : dr.selectedFiles()) { FileResource fr = new FileResource(file); //call lowestHumidityInFile to get the file with lowest Humidity CSVRecord currentRow = lowestHumidityInFile(fr.getCSVParser()); if (lowestSoFar == null) { lowestSoFar = currentRow; } else { if (!currentRow.get("Humidity").contains("N/A")) { int currentHumidity = Integer.parseInt(currentRow.get("Humidity")); int lowestHumidity = Integer.parseInt(lowestSoFar.get("Humidity")); if (currentHumidity < lowestHumidity) { lowestSoFar = currentRow; lowestHumFile = file.getAbsolutePath(); } } } } return lowestHumFile; }
From source file:edu.washington.gs.skyline.model.quantification.QuantificationTest.java
/**
 * Reads the given classpath CSV resource and builds one {@link ReplicateData}
 * per distinct "FileName" value, preserving first-seen order.
 *
 * NOTE: this is still a stub — it groups the records but populates nothing,
 * and always ends by throwing {@link NotImplementedException}.
 *
 * @param filename classpath resource name, resolved against QuantificationTest
 * @throws Exception on read/parse failure, or NotImplementedException always
 */
private List<ReplicateData> readReplicates(String filename) throws Exception {
    Map<String, ReplicateData> replicates = new LinkedHashMap<>();
    // try-with-resources closes both parser and reader even on failure;
    // the original only closed the reader and leaked the parser
    try (Reader reader = new InputStreamReader(QuantificationTest.class.getResourceAsStream(filename));
            CSVParser parser = new CSVParser(reader, CSVFormat.EXCEL.withHeader())) {
        for (CSVRecord record : parser.getRecords()) {
            String fileName = record.get("FileName");
            ReplicateData replicate = replicates.get(fileName);
            if (replicate == null) {
                replicate = new ReplicateData();
                replicates.put(fileName, replicate);
            }
        }
    }
    throw new NotImplementedException();
}
From source file:gov.ornl.stucco.stix_extractors.MetasploitExtractor.java
/**
 * Converts Metasploit module CSV data into a STIX package containing TTPs
 * (one exploit per row) and ExploitTargets (one per CVE id found in the
 * row's reference names), with cross-references between them.
 *
 * @param metasploitInfo raw Metasploit CSV content, parsed against HEADERS
 * @return the assembled STIX package, or null when parsing fails or the
 *         input contains no usable records
 */
private STIXPackage extract(String metasploitInfo) {
    List<CSVRecord> records;
    try {
        records = getCSVRecordsList(HEADERS, metasploitInfo);
    } catch (IOException e) {
        e.printStackTrace();
        return null;
    }
    if (records.isEmpty()) {
        return null;
    }
    // If the first record is the header row (its first cell equals ID),
    // start from index 1; a header-only file yields nothing.
    CSVRecord record = records.get(0);
    int start;
    if (record.get(0).equals(ID)) {
        if (records.size() == 1) {
            return null;
        } else {
            start = 1;
        }
    } else {
        start = 0;
    }
    ExploitTargetsType ets = new ExploitTargetsType();
    TTPsType ttps = new TTPsType();
    for (int i = start; i < records.size(); i++) {
        // Each row is processed independently; a bad row is logged and skipped.
        try {
            record = records.get(i);
            /* exploit: only emitted if at least one descriptive field is non-empty */
            ExploitType exploit = new ExploitType();
            BehaviorType behavior = new BehaviorType();
            boolean withExploit = false;
            if (!record.get(FULLNAME).isEmpty()) {
                exploit.withTitle(record.get(FULLNAME));
                withExploit = true;
            }
            if (!record.get(MTYPE).isEmpty()) {
                // random UUID keeps generated ids unique across runs
                exploit.withId(new QName("gov.ornl.stucco",
                        record.get(MTYPE) + "-" + UUID.randomUUID().toString(), "stucco"));
                withExploit = true;
            }
            if (!record.get(NAME).isEmpty()) {
                exploit.withShortDescriptions(new StructuredTextType() //list
                        .withValue(record.get(NAME)));
                withExploit = true;
            }
            if (!record.get(DESCRIPTION).isEmpty()) {
                exploit.withDescriptions(new StructuredTextType() //list
                        .withValue(record.get(DESCRIPTION)));
                withExploit = true;
            }
            if (withExploit) {
                behavior.withExploits(new ExploitsType().withExploits(exploit));
            }
            /* vulnerability: one ExploitTarget per CVE id in the reference names */
            List<RelatedExploitTargetType> relatedEt = new ArrayList<RelatedExploitTargetType>();
            Pattern pattern = Pattern.compile("CVE-\\d{4}-\\d{4,7}");
            Matcher matcher = pattern.matcher(record.get(REF_NAMES));
            while (matcher.find()) {
                ExploitTarget et = new ExploitTarget().withId(
                        new QName("gov.ornl.stucco", "vulnerability-" + UUID.randomUUID().toString(),
                                "stucco"))
                        .withTitle("Vulnerability").withVulnerabilities(new VulnerabilityType() //list
                                .withCVEID(matcher.group()).withTitle(matcher.group())
                                .withDescriptions(new StructuredTextType() //list
                                        .withValue(matcher.group()))
                                .withSource("Metasploit"));
                ets.withExploitTargets(et);
                // cross-reference the TTP to this target by idref
                relatedEt.add(new RelatedExploitTargetType()
                        .withExploitTarget(new ExploitTarget().withIdref(et.getId()))
                        .withRelationship(new ControlledVocabularyStringType().withValue("exploit")));
            }
            // if an exploit was built, pack it as a TTP referencing its vulnerabilities
            if (withExploit) {
                TTP ttp = initTTP("Exploit", "Metasploit").withBehavior(behavior);
                if (!relatedEt.isEmpty()) {
                    ttp.withExploitTargets(
                            new org.mitre.stix.ttp_1.ExploitTargetsType().withExploitTargets(relatedEt));
                }
                ttps.withTTPS(ttp);
            }
        } catch (RuntimeException e) {
            e.printStackTrace();
        }
    }
    // Assemble the package from whichever sections are non-empty.
    if (!ets.getExploitTargets().isEmpty()) {
        try {
            stixPackage = initStixPackage("Vulnerability and Malware Description", "Metasploit")
                    .withExploitTargets(ets);
        } catch (DatatypeConfigurationException e) {
            e.printStackTrace();
        }
    }
    if (!ttps.getTTPS().isEmpty()) {
        if (stixPackage == null) {
            try {
                stixPackage = initStixPackage("Vulnerability and Malware Description", "Metasploit")
                        .withTTPs(ttps);
            } catch (DatatypeConfigurationException e) {
                e.printStackTrace();
            }
        } else {
            stixPackage.withTTPs(ttps);
        }
    }
    return stixPackage;
}
From source file:com.mycompany.couchdb.CsvManage.java
public boolean hello() throws FileNotFoundException, IOException { int count = 0; String filename = "src/main/resources/GeoLiteCity-Location.csv"; City city;//from www.j a v a 2s . c o m DataDealer cityDao; cityDao = new DataDealer(); Reader in = new FileReader(filename); //read file CSVParser parser = new CSVParser(in, CSVFormat.EXCEL.withHeader()); //create parser city = new City(); for (CSVRecord record : parser) { //parse file and create objects //city.setLocId(Integer.parseInt(record.get("locId"))); //city.setCountry(record.get("country")); city.setCity(record.get("city")); //city.setRegion(record.get("region")); city.setLatitude(Float.parseFloat(record.get("latitude"))); city.setLongitude(Float.parseFloat(record.get("longitude"))); // try { cityDao.save(city); } finally { cityDao.finalize(); } } return true; }