List of usage examples for the org.apache.commons.csv CSVRecord.get method
public String get(final String name)
From source file:org.ohdsi.rabbitInAHat.dataModel.Database.java
/**
 * Builds a {@link Database} model from a CSV description of tables, read from the
 * given stream. The CSV is expected to have a header row with at least the columns
 * TABLE_NAME, COLUMN_NAME, IS_NULLABLE, DATA_TYPE and DESCRIPTION; one data row
 * per column, grouped into tables by TABLE_NAME (case-insensitive).
 *
 * @param stream CSV input; read fully, not closed by this method
 * @param dbName display name of the database, typically a file name whose
 *               extension (text after the last '.') is stripped
 * @return the populated model
 * @throws RuntimeException wrapping any {@link IOException} raised while parsing
 */
public static Database generateModelFromCSV(InputStream stream, String dbName) {
    Database database = new Database();
    // Strip the file extension if present. The original code called
    // substring(0, lastIndexOf(".")) unconditionally, which threw
    // StringIndexOutOfBoundsException for names without a dot.
    int dotIndex = dbName.lastIndexOf(".");
    database.dbName = (dotIndex < 0) ? dbName : dbName.substring(0, dotIndex);
    Map<String, Table> nameToTable = new HashMap<>();
    try {
        for (CSVRecord row : CSVFormat.RFC4180.withHeader().parse(new InputStreamReader(stream))) {
            // Table names are matched case-insensitively; compute the key once.
            String tableName = row.get("TABLE_NAME").toLowerCase();
            Table table = nameToTable.get(tableName);
            if (table == null) {
                table = new Table();
                table.setDb(database);
                table.setName(tableName);
                nameToTable.put(tableName, table);
                database.tables.add(table);
            }
            Field field = new Field(row.get("COLUMN_NAME").toLowerCase(), table);
            field.setNullable(row.get("IS_NULLABLE").equals("YES"));
            field.setType(row.get("DATA_TYPE"));
            field.setDescription(row.get("DESCRIPTION"));
            table.getFields().add(field);
        }
    } catch (IOException e) {
        // Preserve the cause instead of forwarding only the message, so the
        // full stack trace of the underlying I/O failure is not lost.
        throw new RuntimeException("Error parsing model CSV: " + e.getMessage(), e);
    }
    return database;
}
From source file:org.onebusaway.admin.service.bundle.impl.FixedRouteParserServiceImpl.java
/**
 * Parses a csv record representing one line of a FixedRouteDataValidation
 * report. If the line is part of the mode currently being processed, it
 * is added to that DataValidationMode object. If it is for a new mode, the
 * current mode is added to the parsedModes list and the record being
 * parsed becomes the new current mode.
 *
 * Expected positional columns: 0=mode name, 1=route name (optionally
 * "num-name" prefixed), 2=headsign, 3=direction, 4=stop count,
 * 5..7=trip counts. Columns 0-3 are blank on continuation lines; which of
 * them is non-empty decides the nesting level the stop count attaches to.
 *
 * NOTE(review): this method mutates the instance fields currentRoute,
 * currentHeadsign and currentDirection as parsing state across calls.
 *
 * @param record the record to be parsed
 * @param currentMode the DataValidationMode currently being built
 * @param parsedModes the list of modes already created.
 * @return the DataValidationMode currently being built
 */
private DataValidationMode parseRecord(CSVRecord record, DataValidationMode currentMode,
        List<DataValidationMode> parsedModes) {
    // Skip lines that are too short or whose stop count is not numeric.
    if (record.size() < 8 || record.get(4).isEmpty() || !record.get(4).matches("^\\d+$")) { //Stop count should be numeric
        return currentMode;
    }
    // Create the StopCt for this line (every line should have a stop count)
    DataValidationStopCt currentStopCt = new DataValidationStopCt();
    currentStopCt.setStopCt(Integer.parseInt(record.get(4)));
    // Columns 5-7 hold up to three trip counts; unparseable cells stay 0.
    int[] stopCtTrips = { 0, 0, 0 };
    for (int i = 0; i < 3; i++) {
        try {
            int tripCt = Integer.parseInt(record.get(5 + i));
            stopCtTrips[i] = tripCt;
        } catch (NumberFormatException ex) {
            // Do nothing, leave array value at 0.
        }
    }
    currentStopCt.setTripCts(stopCtTrips);
    String modeName = record.get(0);
    String routeName = record.get(1);
    String headsign = record.get(2);
    String dirName = record.get(3);
    // If routeName is prefixed with the route number, extract the route number.
    // Only a '-' within the first 5 characters is treated as a number prefix.
    String routeNum = "";
    if (routeName.length() > 0) {
        int idx = routeName.substring(0, Math.min(5, routeName.length())).indexOf("-");
        if (idx > 0) {
            routeNum = routeName.substring(0, idx).trim();
            routeName = routeName.substring(idx + 1);
        }
    }
    if (modeName.length() > 0) { // new mode
        if (routeName.isEmpty()) {
            return currentMode; // this shouldn't happen. Any line with a mode
                                // name should also have a route name.
        }
        // Finish the previous mode before starting a new one.
        if (currentMode != null) {
            parsedModes.add(currentMode);
        }
        currentMode = new DataValidationMode(modeName, routeNum, routeName, headsign, dirName);
        // The new mode's constructor seeds one route/headsign/direction; adopt
        // them as the current parsing state and attach this line's stop count.
        currentRoute = currentMode.getRoutes().first();
        currentHeadsign = currentRoute.getHeadsignCounts().first();
        currentDirection = currentHeadsign.getDirCounts().first();
        SortedSet<DataValidationStopCt> stopCountsList = currentDirection.getStopCounts();
        stopCountsList.add(currentStopCt);
    } else if (routeName.length() > 0) { // New route for current mode
        currentRoute = new DataValidationRouteCounts(routeNum, routeName, headsign, dirName);
        currentMode.getRoutes().add(currentRoute);
        currentHeadsign = currentRoute.getHeadsignCounts().first();
        currentDirection = currentHeadsign.getDirCounts().first();
        SortedSet<DataValidationStopCt> stopCountsList = currentDirection.getStopCounts();
        stopCountsList.add(currentStopCt);
    } else if (headsign.length() > 0) {
        // New headsign under the current route.
        currentHeadsign = new DataValidationHeadsignCts(headsign, dirName);
        currentRoute.getHeadsignCounts().add(currentHeadsign);
        currentDirection = currentHeadsign.getDirCounts().first();
        SortedSet<DataValidationStopCt> stopCountsList = currentDirection.getStopCounts();
        stopCountsList.add(currentStopCt);
    } else if (dirName.length() > 0) {
        // New direction under the current headsign.
        currentDirection = new DataValidationDirectionCts(dirName);
        currentHeadsign.getDirCounts().add(currentDirection);
        SortedSet<DataValidationStopCt> stopCountsList = currentDirection.getStopCounts();
        stopCountsList.add(currentStopCt);
    } else if (dirName.isEmpty()) {
        // Continuation line: another stop count for the current direction.
        SortedSet<DataValidationStopCt> stopCountsList = currentDirection.getStopCounts();
        stopCountsList.add(currentStopCt);
    }
    return currentMode;
}
From source file:org.onebusaway.admin.service.impl.BundleCheckParserServiceImpl.java
private BundleValidationParseResults parseRecord(CSVRecord record, BundleValidationParseResults parseResults) { // Verify that second field contains a valid test. if (record.size() < 2 || !validTests.contains(record.get(1).toLowerCase())) { BundleValidationParseError parseError = new BundleValidationParseError(); parseError.setLinenum((int) record.getRecordNumber()); parseError.setErrorMessage(PARSE_ERROR); parseError.setOffendingLine(record.toString()); parseResults.getParseErrors().add(parseError); return parseResults; }//from w ww .j av a 2 s . c o m ParsedBundleValidationCheck parsedCheck = new ParsedBundleValidationCheck(); parsedCheck.setLinenum((int) record.getRecordNumber()); parsedCheck.setAgencyId(record.get(0)); parsedCheck.setSpecificTest(record.get(1)); if (record.get(2) != null) { parsedCheck.setRouteName(record.get(2)); } if (record.get(3) != null) { parsedCheck.setRouteId(record.get(3)); } if (record.get(4) != null) { parsedCheck.setStopName(record.get(4)); } if (record.get(5) != null) { parsedCheck.setStopId(record.get(5)); } if (record.get(6) != null) { parsedCheck.setDate(record.get(6)); } if (record.get(7) != null) { parsedCheck.setDepartureTime(record.get(7)); } parseResults.getParsedBundleChecks().add(parsedCheck); return parseResults; }
From source file:org.onehippo.forge.content.pojo.model.CsvConvertToContentNodesTest.java
/**
 * Reads a news CSV file and converts each row into a {@code ContentNode}
 * representing a news article document, demonstrating property setting,
 * child-node creation, and Hippo translation/meta properties.
 */
@Test
public void testReadCsvAndConvertToContentNodes() throws Exception {
    InputStream input = null;
    InputStreamReader reader = null;
    try {
        // 1. Open a reader from a CSV file.
        input = NEWS_CSV_URL.openStream();
        reader = new InputStreamReader(input, "UTF-8");
        // 2. Create CSV parser to parse the CSV data with column headers.
        CSVParser parser = CSVFormat.DEFAULT.withHeader("Title", "Introduction", "Date", "Content")
                .withSkipHeaderRecord().parse(reader);
        CSVRecord record;
        // 3. StringCodec to generate a JCR node name from the title column,
        //    and ObjectMapper instance to log a ContentNode to JSON.
        final StringCodec codec = new StringCodecFactory.UriEncoding();
        final ObjectMapper objectMapper = new ObjectMapper();
        String name;
        String title;
        String introduction;
        String date;
        String content;
        String translationId;
        String translationLocale = "en";
        String targetDocumentLocation;
        // 4. Iterate each data record and create a ContentNode for a news article
        //    with setting properties and child nodes.
        for (Iterator<CSVRecord> it = parser.iterator(); it.hasNext();) {
            record = it.next();
            // 4.1. Read each column from a CSV record.
            title = record.get("Title");
            name = codec.encode(title);
            introduction = record.get("Introduction");
            date = record.get("Date");
            content = record.get("Content");
            // 4.2. Create a ContentNode for a news article and set primitive property values.
            ContentNode newsNode = new ContentNode(name, "ns1:newsdocument");
            newsNode.setProperty("ns1:title", title);
            newsNode.setProperty("ns1:introduction", introduction);
            newsNode.setProperty("ns1:date", ContentPropertyType.DATE, date);
            // 4.3. Create/add a child hippostd:html content node and set the content.
            ContentNode htmlNode = new ContentNode("ns1:content", HippoStdNodeType.NT_HTML);
            newsNode.addNode(htmlNode);
            htmlNode.setProperty(HippoStdNodeType.HIPPOSTD_CONTENT, content);
            // 4.4. In Hippo CMS, the internal translation UUID and locale string are
            //      important in most cases. So, let's generate a translation UUID and
            //      use 'en' for simplicity for now.
            translationId = UUID.randomUUID().toString();
            newsNode.setProperty(HippoTranslationNodeType.ID, translationId);
            newsNode.setProperty(HippoTranslationNodeType.LOCALE, translationLocale);
            // 4.5. (Optional) Set kind of meta property for localized document name which
            //      is displayed in folder view later. This meta property is not used by
            //      Hippo CMS, but can be read/used by a higher level content importing
            //      application to set a localized (translated) name of the document
            //      (e.g, using Hippo TranslationWorkflow).
            newsNode.setProperty("jcr:localizedName", title);
            // 4.6. (Optional) Determine the target document location where this content
            //      should be generated and store it in a meta property, jcr:path.
            //      This meta property cannot be used in JCR repository in importing
            //      process, but can be read/used by a higher level content importing
            //      application to create a document using Hippo DocumentWorkflow
            //      for instance.
            targetDocumentLocation = "/content/documents/ns1/news/" + name;
            newsNode.setProperty("jcr:path", targetDocumentLocation);
            // 4.7. (Optional) Log the JSON-ized string of the news ContentNode instance.
            StringWriter stringWriter = new StringWriter(256);
            objectMapper.writerWithDefaultPrettyPrinter().writeValue(stringWriter, newsNode);
            log.debug("newsNode: \n{}\n", stringWriter.toString());
        }
    } finally {
        // Close reader before stream; both are quiet no-ops when null.
        IOUtils.closeQuietly(reader);
        IOUtils.closeQuietly(input);
    }
}
From source file:org.opencastproject.dataloader.EventsLoader.java
/**
 * Converts the rows of the given CSV parser into {@link EventEntry} objects.
 * Positional columns: 0=title, 1=description, 2=series id, 3=series name,
 * 4=start offset in days relative to now (may be negative), 5=duration,
 * 6=archive flag, 7=capture agent, 8=source, 9=contributor,
 * 10=comma-separated presenter list.
 */
private List<EventEntry> parseCSV(CSVParser csv) {
    List<EventEntry> entries = new ArrayList<EventEntry>();
    for (CSVRecord record : csv) {
        String title = record.get(0);
        String description = StringUtils.trimToNull(record.get(1));
        String series = StringUtils.trimToNull(record.get(2));
        String seriesName = StringUtils.trimToNull(record.get(3));
        Integer days = Integer.parseInt(record.get(4));
        // Shift "now" forward or backward by the requested day offset;
        // an offset of zero leaves the start time untouched.
        DateTime startTime = DateTime.now();
        if (days > 0) {
            startTime = startTime.plusDays(days);
        } else if (days < 0) {
            startTime = startTime.minusDays(-days);
        }
        Integer duration = Integer.parseInt(record.get(5));
        boolean archive = BooleanUtils.toBoolean(record.get(6));
        String agent = StringUtils.trimToNull(record.get(7));
        String source = StringUtils.trimToNull(record.get(8));
        String contributor = StringUtils.trimToNull(record.get(9));
        String presenterField = StringUtils.trimToEmpty(record.get(10));
        List<String> presenters = Arrays.asList(StringUtils.split(presenterField, ","));
        entries.add(new EventEntry(title, startTime.toDate(), duration, archive, series, agent,
                source, contributor, description, seriesName, presenters));
    }
    return entries;
}
From source file:org.openestate.io.idx.IdxRecord.java
@Override protected void parse(CSVRecord record) { String version = StringUtils.trimToNull(record.get(FIELD_VERSION)); if (version != null && !VERSION.equalsIgnoreCase(version) && !IdxFormat.VERSION.equalsIgnoreCase(version)) { LOGGER.warn("IDX version '" + version + "' is not supported. Trying to parse the record anyway."); }//from w w w .j a v a 2 s. co m super.parse(record); }
From source file:org.openestate.io.is24_csv.Is24CsvParser.java
/**
 * Creates a record, according to the object category, that is provided in
 * a {@link CSVRecord}.
 *
 * Dispatches on the record's "Immobilienart" (property type) field. A
 * switch is deliberately not used here: getImmobilienart may return null
 * for unknown values, which equals-chaining tolerates.
 *
 * @param record
 * the record to process
 *
 * @return
 * created record or null, if no matching record was found
 */
public static Is24CsvRecord createRecord(CSVRecord record) {
    Immobilienart art = Is24CsvRecord.getImmobilienart(record);
    if (Immobilienart.ANLAGE.equals(art))
        return Anlageobjekt.newRecord(record);
    else if (Immobilienart.GEWERBE_BUERO_PRAXEN.equals(art))
        return GewerbeBueroPraxis.newRecord(record);
    else if (Immobilienart.GEWERBE_EINZELHANDEL.equals(art))
        return GewerbeEinzelhandel.newRecord(record);
    else if (Immobilienart.GEWERBE_GASTRONOMIE_HOTEL.equals(art))
        return GewerbeGastronomieHotel.newRecord(record);
    else if (Immobilienart.GEWERBE_HALLE_PRODUKTION.equals(art))
        return GewerbeHalleProduktion.newRecord(record);
    else if (Immobilienart.GEWERBE_SONSTIGES.equals(art))
        return GewerbeSonstiges.newRecord(record);
    else if (Immobilienart.HAUS_KAUF.equals(art))
        return HausKauf.newRecord(record);
    else if (Immobilienart.HAUS_MIETE.equals(art))
        return HausMiete.newRecord(record);
    else if (Immobilienart.STELLPLATZ_KAUF.equals(art))
        return StellplatzKauf.newRecord(record);
    else if (Immobilienart.STELLPLATZ_MIETE.equals(art))
        return StellplatzMiete.newRecord(record);
    else if (Immobilienart.WOHNEN_AUF_ZEIT.equals(art))
        return WohnenAufZeit.newRecord(record);
    else if (Immobilienart.WOHNUNG_KAUF.equals(art))
        return WohnungKauf.newRecord(record);
    else if (Immobilienart.WOHNUNG_MIETE.equals(art))
        return WohnungMiete.newRecord(record);
    // For plots ("Grundstuecke") the record type depends on the object category
    else if (Immobilienart.GRUNDSTUECKE.equals(art)) {
        ObjektkategorieGrundstueck cat = Grundstueck.getObjektkategorie(record);
        if (ObjektkategorieGrundstueck.WOHNEN.equals(cat))
            return GrundstueckWohnen.newRecord(record);
        else
            return GrundstueckGewerbe.newRecord(record);
    }
    LOGGER.warn("Unsupported 'Immobilienart' value: " +
            record.get(Is24CsvRecord.FIELD_IMMOBILIENART));
    return null;
}
From source file:org.openestate.io.is24_csv.Is24CsvRecord.java
/**
 * Extracts the property-type ("Immobilienart") field from the given CSV
 * record and converts it into the corresponding enum value.
 */
public static Immobilienart getImmobilienart(CSVRecord record) {
    final String rawValue = record.get(FIELD_IMMOBILIENART);
    return Immobilienart.parse(rawValue);
}
From source file:org.openestate.io.is24_csv.records.Grundstueck.java
/**
 * Extracts the object-category ("Objektkategorie") field from the given CSV
 * record and converts it into the corresponding enum value.
 */
public static ObjektkategorieGrundstueck getObjektkategorie(CSVRecord record) {
    final String rawValue = record.get(FIELD_OBJEKTKATEGORIE);
    return ObjektkategorieGrundstueck.parse(rawValue);
}
From source file:org.opennms.netmgt.integrations.R.RScriptExecutor.java
/**
 * Converts a CSV string (with a header row) to an immutable table keyed by
 * (row index, column name), with every cell parsed as a double. Cells that
 * cannot be parsed as numbers are stored as {@link Double#NaN}.
 *
 * @param csv CSV text produced by the R script
 * @return table of parsed values, one row per CSV data record
 * @throws IOException if the CSV text cannot be parsed
 */
protected static ImmutableTable<Long, String, Double> fromCsv(final String csv) throws IOException {
    ImmutableTable.Builder<Long, String, Double> tableBuilder = ImmutableTable.builder();
    try (StringReader reader = new StringReader(csv);
            CSVParser parser = new CSVParser(reader, CSVFormat.RFC4180.withHeader())) {
        Map<String, Integer> headerMap = parser.getHeaderMap();
        long rowIndex = 0;
        for (CSVRecord record : parser) {
            // Look each header name up in the record; non-numeric cells become NaN.
            for (String columnName : headerMap.keySet()) {
                Double cellValue;
                try {
                    cellValue = Double.valueOf(record.get(columnName));
                } catch (NumberFormatException e) {
                    cellValue = Double.NaN;
                }
                tableBuilder.put(rowIndex, columnName, cellValue);
            }
            rowIndex++;
        }
    }
    return tableBuilder.build();
}