List of usage examples for org.apache.commons.csv.CSVParser.parse
public static CSVParser parse(final URL url, final Charset charset, final CSVFormat format) throws IOException
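Most of the examples below call the sibling overloads that take a java.io.File or an InputStream together with the same Charset and CSVFormat arguments. As a starting point, here is a minimal, hedged sketch of the File overload; the file name and the "name" column are hypothetical and not taken from any of the projects below. CSVParser is both Iterable<CSVRecord> and Closeable, so it can drive a for-each loop inside try-with-resources.

import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVParser;
import org.apache.commons.csv.CSVRecord;

public class ParseExample {
    public static void main(String[] args) throws IOException {
        File csvFile = new File("people.csv"); // hypothetical input file with a header row
        CSVFormat format = CSVFormat.DEFAULT.withFirstRecordAsHeader();
        // try-with-resources closes the parser and its underlying reader.
        try (CSVParser parser = CSVParser.parse(csvFile, StandardCharsets.UTF_8, format)) {
            for (CSVRecord record : parser) {
                // Fields can be read by header name (thanks to withFirstRecordAsHeader) or by zero-based index.
                System.out.println(record.get("name") + " / " + record.get(0));
            }
        }
    }
}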
From source file:canreg.client.dataentry.Import.java
public static boolean importFiles(Task<Object, Void> task, Document doc, List<canreg.client.dataentry.Relation> map, File[] files, CanRegServerInterface server, ImportOptions io) throws SQLException, RemoteException, SecurityException, RecordLockedException { int numberOfLinesRead = 0; Writer reportWriter = new BufferedWriter(new OutputStreamWriter(System.out)); if (io.getReportFileName() != null && io.getReportFileName().trim().length() > 0) { try {/*from w w w .ja va 2 s .c o m*/ reportWriter = new BufferedWriter(new FileWriter(io.getReportFileName())); } catch (IOException ex) { Logger.getLogger(Import.class.getName()).log(Level.WARNING, null, ex); } } boolean success = false; Set<String> noNeedToLookAtPatientVariables = new TreeSet<String>(); noNeedToLookAtPatientVariables .add(canreg.common.Tools.toLowerCaseStandardized(io.getPatientIDVariableName())); noNeedToLookAtPatientVariables .add(canreg.common.Tools.toLowerCaseStandardized(io.getPatientRecordIDVariableName())); String[] lineElements; ResultCode worstResultCodeFound; // CSVReader reader = null; CSVParser parser = null; CSVFormat format = CSVFormat.DEFAULT.withFirstRecordAsHeader().withDelimiter(io.getSeparators()[0]); int linesToRead = io.getMaxLines(); try { // first we get the patients if (task != null) { task.firePropertyChange(PROGRESS, 0, 0); task.firePropertyChange(PATIENTS, 0, 0); } if (files[0] != null) { reportWriter .write("Starting to import patients from " + files[0].getAbsolutePath() + Globals.newline); FileInputStream patientFIS = new FileInputStream(files[0]); InputStreamReader patientISR = new InputStreamReader(patientFIS, io.getFileCharsets()[0]); Logger.getLogger(Import.class.getName()).log(Level.CONFIG, "Name of the character encoding {0}", patientISR.getEncoding()); int numberOfRecordsInFile = canreg.common.Tools.numberOfLinesInFile(files[0].getAbsolutePath()); numberOfLinesRead = 0; if (linesToRead > 0) { linesToRead = Math.min(numberOfRecordsInFile, linesToRead); } else { linesToRead = numberOfRecordsInFile; } parser = CSVParser.parse(files[0], io.getFileCharsets()[0], format); for (CSVRecord csvRecord : parser) { // We allow for null tasks... boolean savePatient = true; boolean deletePatient = false; int oldPatientDatabaseRecordID = -1; if (task != null) { task.firePropertyChange(PROGRESS, ((numberOfLinesRead - 1) * 100 / linesToRead) / 3, ((numberOfLinesRead) * 100 / linesToRead) / 3); task.firePropertyChange(PATIENTS, ((numberOfLinesRead - 1) * 100 / linesToRead), ((numberOfLinesRead) * 100 / linesToRead)); } // Build patient part Patient patient = new Patient(); for (int i = 0; i < map.size(); i++) { Relation rel = map.get(i); if (rel.getDatabaseTableVariableID() >= 0 && rel.getDatabaseTableName().equalsIgnoreCase("patient")) { if (rel.getVariableType().equalsIgnoreCase("Number")) { if (csvRecord.get(rel.getFileColumnNumber()).length() > 0) { try { patient.setVariable(rel.getDatabaseVariableName(), Integer.parseInt(csvRecord.get(rel.getFileColumnNumber()))); } catch (NumberFormatException ex) { Logger.getLogger(Import.class.getName()).log(Level.SEVERE, "Number format error in line: " + (numberOfLinesRead + 1 + 1) + ". 
", ex); success = false; } } } else { patient.setVariable(rel.getDatabaseVariableName(), csvRecord.get(rel.getFileColumnNumber())); } } if (task != null) { task.firePropertyChange(RECORD, i - 1 / map.size() * 50, i / map.size() * 50); } } // debugOut(patient.toString()); // debugOut(tumour.toString()); // add patient to the database Object patientID = patient.getVariable(io.getPatientRecordIDVariableName()); Patient oldPatientRecord = null; try { oldPatientRecord = CanRegClientApp.getApplication().getPatientRecord((String) patientID, false); } catch (DistributedTableDescriptionException ex) { Logger.getLogger(Import.class.getName()).log(Level.SEVERE, null, ex); } catch (RecordLockedException ex) { Logger.getLogger(Import.class.getName()).log(Level.SEVERE, null, ex); } catch (RemoteException ex) { Logger.getLogger(Import.class.getName()).log(Level.SEVERE, null, ex); } catch (SQLException ex) { Logger.getLogger(Import.class.getName()).log(Level.SEVERE, null, ex); } catch (SecurityException ex) { Logger.getLogger(Import.class.getName()).log(Level.SEVERE, null, ex); } catch (UnknownTableException ex) { Logger.getLogger(Import.class.getName()).log(Level.SEVERE, null, ex); } if (oldPatientRecord != null) { // deal with discrepancies switch (io.getDiscrepancies()) { case ImportOptions.REJECT: savePatient = false; break; case ImportOptions.UPDATE: String updateReport = updateRecord(oldPatientRecord, patient); if (updateReport.length() > 0) { reportWriter.write(patient.getVariable(io.getTumourIDVariablename()) + Globals.newline + updateReport); } oldPatientDatabaseRecordID = (Integer) oldPatientRecord .getVariable(Globals.PATIENT_TABLE_RECORD_ID_VARIABLE_NAME); patient = oldPatientRecord; savePatient = true; break; case ImportOptions.OVERWRITE: // deleteTumour; oldPatientDatabaseRecordID = (Integer) oldPatientRecord .getVariable(Globals.PATIENT_TABLE_RECORD_ID_VARIABLE_NAME); String overWriteReport = overwriteRecord(oldPatientRecord, patient); if (overWriteReport.length() > 0) { reportWriter.write(patient.getVariable(io.getTumourIDVariablename()) + Globals.newline + overWriteReport); } patient = oldPatientRecord; savePatient = true; break; } // reportWriter.write(tumour.getVariable(io.getTumourIDVariablename()) + "Tumour already exists.\n"); } if (task != null) { task.firePropertyChange(RECORD, 50, 75); } if ((!io.isTestOnly())) { if (deletePatient) { server.deleteRecord(oldPatientDatabaseRecordID, Globals.PATIENT_TABLE_NAME); } if (savePatient) { if (patient.getVariable(Globals.PATIENT_TABLE_RECORD_ID_VARIABLE_NAME) != null) { server.editPatient(patient); } else { server.savePatient(patient); } } } if (task != null) { task.firePropertyChange(RECORD, 100, 75); } numberOfLinesRead++; if (Thread.interrupted()) { //We've been interrupted: no more importing. reportWriter.flush(); throw new InterruptedException(); } } parser.close(); reportWriter.write("Finished reading patients." 
+ Globals.newline + Globals.newline); reportWriter.flush(); } if (task != null) { task.firePropertyChange(PATIENTS, 100, 100); task.firePropertyChange("progress", 33, 34); } // then we get the tumours if (task != null) { task.firePropertyChange(TUMOURS, 0, 0); } if (files[1] != null) { reportWriter .write("Starting to import tumours from " + files[1].getAbsolutePath() + Globals.newline); FileInputStream tumourFIS = new FileInputStream(files[1]); InputStreamReader tumourISR = new InputStreamReader(tumourFIS, io.getFileCharsets()[1]); Logger.getLogger(Import.class.getName()).log(Level.CONFIG, "Name of the character encoding {0}", tumourISR.getEncoding()); numberOfLinesRead = 0; int numberOfRecordsInFile = canreg.common.Tools.numberOfLinesInFile(files[1].getAbsolutePath()); if (linesToRead > 0) { linesToRead = Math.min(numberOfRecordsInFile, linesToRead); } else { linesToRead = numberOfRecordsInFile; } format = CSVFormat.DEFAULT.withFirstRecordAsHeader().withDelimiter(io.getSeparators()[1]); parser = CSVParser.parse(files[1], io.getFileCharsets()[1], format); for (CSVRecord csvRecord : parser) { // We allow for null tasks... boolean saveTumour = true; boolean deleteTumour = false; if (task != null) { task.firePropertyChange(PROGRESS, 33 + ((numberOfLinesRead - 1) * 100 / linesToRead) / 3, 33 + ((numberOfLinesRead) * 100 / linesToRead) / 3); task.firePropertyChange(TUMOURS, ((numberOfLinesRead - 1) * 100 / linesToRead), ((numberOfLinesRead) * 100 / linesToRead)); } // Build tumour part Tumour tumour = new Tumour(); for (int i = 0; i < map.size(); i++) { Relation rel = map.get(i); if (rel.getDatabaseTableVariableID() >= 0 && rel.getDatabaseTableName().equalsIgnoreCase("tumour")) { if (rel.getVariableType().equalsIgnoreCase("Number")) { if (csvRecord.get(rel.getFileColumnNumber()).length() > 0) { try { tumour.setVariable(rel.getDatabaseVariableName(), Integer.parseInt(csvRecord.get(rel.getFileColumnNumber()))); } catch (NumberFormatException ex) { Logger.getLogger(Import.class.getName()).log(Level.SEVERE, "Number format error in line: " + (numberOfLinesRead + 1 + 1) + ". 
", ex); success = false; } } } else { tumour.setVariable(rel.getDatabaseVariableName(), csvRecord.get(rel.getFileColumnNumber())); } } if (task != null) { task.firePropertyChange(RECORD, i - 1 / map.size() * 50, i / map.size() * 50); } } // see if this tumour exists in the database already // TODO: Implement this using arrays and getTumourRexords instead Tumour tumour2 = null; try { tumour2 = CanRegClientApp.getApplication().getTumourRecordBasedOnTumourID( (String) tumour.getVariable(io.getTumourIDVariablename()), false); } catch (DistributedTableDescriptionException ex) { Logger.getLogger(Import.class.getName()).log(Level.SEVERE, null, ex); } catch (RecordLockedException ex) { Logger.getLogger(Import.class.getName()).log(Level.SEVERE, null, ex); } catch (RemoteException ex) { Logger.getLogger(Import.class.getName()).log(Level.SEVERE, null, ex); } catch (SQLException ex) { Logger.getLogger(Import.class.getName()).log(Level.SEVERE, null, ex); } catch (SecurityException ex) { Logger.getLogger(Import.class.getName()).log(Level.SEVERE, null, ex); } catch (UnknownTableException ex) { Logger.getLogger(Import.class.getName()).log(Level.SEVERE, null, ex); } if (tumour2 != null) { // deal with discrepancies switch (io.getDiscrepancies()) { case ImportOptions.REJECT: saveTumour = false; break; case ImportOptions.UPDATE: String updateReport = updateRecord(tumour2, tumour); if (updateReport.length() > 0) { reportWriter.write(tumour.getVariable(io.getTumourIDVariablename()) + Globals.newline + updateReport); } tumour = tumour2; saveTumour = true; break; case ImportOptions.OVERWRITE: // deleteTumour; deleteTumour = true; saveTumour = true; break; } // reportWriter.write(tumour.getVariable(io.getTumourIDVariablename()) + "Tumour already exists.\n"); } Patient patient = null; try { patient = CanRegClientApp.getApplication().getPatientRecord( (String) tumour.getVariable(io.getPatientRecordIDTumourTableVariableName()), false); } catch (DistributedTableDescriptionException ex) { Logger.getLogger(Import.class.getName()).log(Level.SEVERE, null, ex); } catch (RecordLockedException ex) { Logger.getLogger(Import.class.getName()).log(Level.SEVERE, null, ex); } catch (RemoteException ex) { Logger.getLogger(Import.class.getName()).log(Level.SEVERE, null, ex); } catch (SQLException ex) { Logger.getLogger(Import.class.getName()).log(Level.SEVERE, null, ex); } catch (SecurityException ex) { Logger.getLogger(Import.class.getName()).log(Level.SEVERE, null, ex); } catch (UnknownTableException ex) { Logger.getLogger(Import.class.getName()).log(Level.SEVERE, null, ex); } if (patient != null) { if (io.isDoChecks() && saveTumour) { // run the edits... 
String message = ""; LinkedList<CheckResult> checkResults = canreg.client.CanRegClientApp.getApplication() .performChecks(patient, tumour); Map<Globals.StandardVariableNames, CheckResult.ResultCode> mapOfVariablesAndWorstResultCodes = new EnumMap<Globals.StandardVariableNames, CheckResult.ResultCode>( Globals.StandardVariableNames.class); worstResultCodeFound = CheckResult.ResultCode.OK; for (CheckResult result : checkResults) { if (result.getResultCode() != CheckResult.ResultCode.OK && result.getResultCode() != CheckResult.ResultCode.NotDone) { if (!result.getResultCode().equals(CheckResult.ResultCode.Missing)) { message += result + "\t"; worstResultCodeFound = CheckResult.decideWorstResultCode( result.getResultCode(), worstResultCodeFound); for (Globals.StandardVariableNames standardVariableName : result .getVariablesInvolved()) { CheckResult.ResultCode worstResultCodeFoundForThisVariable = mapOfVariablesAndWorstResultCodes .get(standardVariableName); if (worstResultCodeFoundForThisVariable == null) { mapOfVariablesAndWorstResultCodes.put(standardVariableName, result.getResultCode()); } else if (CheckResult.compareResultSets(result.getResultCode(), worstResultCodeFoundForThisVariable) > 0) { mapOfVariablesAndWorstResultCodes.put(standardVariableName, result.getResultCode()); } } } } // Logger.getLogger(Import.class.getName()).log(Level.INFO, result.toString()); } // always generate ICD10... // ConversionResult[] conversionResult = canreg.client.CanRegClientApp.getApplication().performConversions(Converter.ConversionName.ICDO3toICD10, patient, tumour); // tumour.setVariable(io.getICD10VariableName(), conversionResult[0].getValue()); if (worstResultCodeFound != CheckResult.ResultCode.Invalid && worstResultCodeFound != CheckResult.ResultCode.Missing) { // generate ICD10 codes ConversionResult[] conversionResult = canreg.client.CanRegClientApp.getApplication() .performConversions(Converter.ConversionName.ICDO3toICD10, patient, tumour); tumour.setVariable(io.getICD10VariableName(), conversionResult[0].getValue()); // generate ICCC codes ConversionResult[] conversionResultICCC = canreg.client.CanRegClientApp .getApplication() .performConversions(Converter.ConversionName.ICDO3toICCC3, patient, tumour); tumour.setVariable(io.getICCCVariableName(), conversionResultICCC[0].getValue()); } else { tumour.setVariable(io.getTumourRecordStatus(), "0"); } if (worstResultCodeFound == CheckResult.ResultCode.OK) { // message += "Cross-check conclusion: Valid"; } else { reportWriter.write(tumour.getVariable(io.getTumourIDVariablename()) + "\t" + message + Globals.newline); // System.out.println(tumour.getVariable(io.getTumourIDVariablename()) + " " + message); } tumour.setVariable(io.getTumourCheckStatus(), CheckResult.toDatabaseVariable(worstResultCodeFound)); } else { // try to generate ICD10, if missing, anyway String icd10 = (String) tumour.getVariable(io.getICD10VariableName()); if (icd10 == null || icd10.trim().length() == 0) { ConversionResult[] conversionResult = canreg.client.CanRegClientApp.getApplication() .performConversions(Converter.ConversionName.ICDO3toICD10, patient, tumour); tumour.setVariable(io.getICD10VariableName(), conversionResult[0].getValue()); } // try to generate ICCC3, if missing, anyway String iccc = (String) tumour.getVariable(io.getICCCVariableName()); if (iccc == null || iccc.trim().length() == 0) { ConversionResult[] conversionResult = canreg.client.CanRegClientApp.getApplication() .performConversions(Converter.ConversionName.ICDO3toICCC3, patient, tumour); 
tumour.setVariable(io.getICCCVariableName(), conversionResult[0].getValue()); } } } else { reportWriter.write(tumour.getVariable(io.getTumourIDVariablename()) + "\t" + "No patient matches this Tumour." + Globals.newline); tumour.setVariable(io.getTumourRecordStatus(), "0"); tumour.setVariable(io.getTumourCheckStatus(), CheckResult.toDatabaseVariable(ResultCode.Missing)); } if (task != null) { task.firePropertyChange(RECORD, 50, 75); } if (!io.isTestOnly()) { if (deleteTumour) { server.deleteRecord( (Integer) tumour2.getVariable(Globals.TUMOUR_TABLE_RECORD_ID_VARIABLE_NAME), Globals.TUMOUR_TABLE_NAME); } if (saveTumour) { // if tumour has no record ID we save it if (tumour.getVariable(Globals.TUMOUR_TABLE_RECORD_ID_VARIABLE_NAME) == null) { server.saveTumour(tumour); } // if not we edit it else { server.editTumour(tumour); } } } if (task != null) { task.firePropertyChange(RECORD, 75, 100); } //Read next line of data numberOfLinesRead++; if (Thread.interrupted()) { //We've been interrupted: no more importing. reportWriter.flush(); throw new InterruptedException(); } } parser.close(); reportWriter.write("Finished reading tumours." + Globals.newline + Globals.newline); reportWriter.flush(); } if (task != null) { task.firePropertyChange(TUMOURS, 100, 100); } // then at last we get the sources if (task != null) { task.firePropertyChange(SOURCES, 0, 0); task.firePropertyChange(PROGRESS, 66, 66); } if (files[2] != null) { reportWriter .write("Starting to import sources from " + files[2].getAbsolutePath() + Globals.newline); FileInputStream sourceFIS = new FileInputStream(files[2]); InputStreamReader sourceISR = new InputStreamReader(sourceFIS, io.getFileCharsets()[2]); Logger.getLogger(Import.class.getName()).log(Level.CONFIG, "Name of the character encoding {0}", sourceISR.getEncoding()); numberOfLinesRead = 0; int numberOfRecordsInFile = canreg.common.Tools.numberOfLinesInFile(files[2].getAbsolutePath()); if (linesToRead > 0) { linesToRead = Math.min(numberOfRecordsInFile, linesToRead); } else { linesToRead = numberOfRecordsInFile; } format = CSVFormat.DEFAULT.withFirstRecordAsHeader().withDelimiter(io.getSeparators()[2]); parser = CSVParser.parse(files[2], io.getFileCharsets()[2], format); for (CSVRecord csvRecord : parser) { // We allow for null tasks... if (task != null) { task.firePropertyChange(PROGRESS, 67 + ((numberOfLinesRead - 1) * 100 / linesToRead) / 3, 67 + ((numberOfLinesRead) * 100 / linesToRead) / 3); task.firePropertyChange(SOURCES, ((numberOfLinesRead - 1) * 100 / linesToRead), ((numberOfLinesRead) * 100 / linesToRead)); } // Build source part Source source = new Source(); for (int i = 0; i < map.size(); i++) { Relation rel = map.get(i); if (rel.getDatabaseTableVariableID() >= 0 && rel.getDatabaseTableName().equalsIgnoreCase(Globals.SOURCE_TABLE_NAME)) { if (rel.getVariableType().equalsIgnoreCase("Number")) { if (csvRecord.get(rel.getFileColumnNumber()).length() > 0) { try { source.setVariable(rel.getDatabaseVariableName(), Integer.parseInt(csvRecord.get(rel.getFileColumnNumber()))); } catch (NumberFormatException ex) { Logger.getLogger(Import.class.getName()).log(Level.SEVERE, "Number format error in line: " + (numberOfLinesRead + 1 + 1) + ". 
", ex); success = false; } } } else { source.setVariable(rel.getDatabaseVariableName(), csvRecord.get(rel.getFileColumnNumber())); } } if (task != null) { task.firePropertyChange(RECORD, i - 1 / map.size() * 50, i / map.size() * 50); } } Tumour tumour = null; try { tumour = CanRegClientApp.getApplication().getTumourRecordBasedOnTumourID( (String) source.getVariable(io.getTumourIDSourceTableVariableName()), false); } catch (DistributedTableDescriptionException ex) { Logger.getLogger(Import.class.getName()).log(Level.SEVERE, null, ex); } catch (RecordLockedException ex) { Logger.getLogger(Import.class.getName()).log(Level.SEVERE, null, ex); } catch (RemoteException ex) { Logger.getLogger(Import.class.getName()).log(Level.SEVERE, null, ex); } catch (SQLException ex) { Logger.getLogger(Import.class.getName()).log(Level.SEVERE, null, ex); } catch (SecurityException ex) { Logger.getLogger(Import.class.getName()).log(Level.SEVERE, null, ex); } catch (UnknownTableException ex) { Logger.getLogger(Import.class.getName()).log(Level.SEVERE, null, ex); } if (task != null) { task.firePropertyChange(RECORD, 50, 75); } boolean addSource = true; if (tumour != null) { Set<Source> sources = tumour.getSources(); Object sourceRecordID = source.getVariable(io.getSourceIDVariablename()); // look for source in sources for (Source oldSource : sources) { if (oldSource.getVariable(io.getSourceIDVariablename()).equals(sourceRecordID)) { // deal with discrepancies switch (io.getDiscrepancies()) { case ImportOptions.REJECT: addSource = false; break; case ImportOptions.UPDATE: String updateReport = updateRecord(oldSource, source); if (updateReport.length() > 0) { reportWriter.write(tumour.getVariable(io.getTumourIDVariablename()) + Globals.newline + updateReport); } source = oldSource; addSource = false; break; case ImportOptions.OVERWRITE: // deleteTumour; sources.remove(oldSource); addSource = true; break; } } } if (addSource) { sources.add(source); } tumour.setSources(sources); if (!io.isTestOnly()) { server.editTumour(tumour); } } else { Logger.getLogger(Import.class.getName()).log(Level.SEVERE, null, "No tumour for source record."); } if (task != null) { task.firePropertyChange(RECORD, 75, 100); } //Read next line of data numberOfLinesRead++; if (Thread.interrupted()) { //We've been interrupted: no more importing. reportWriter.flush(); throw new InterruptedException(); } } reportWriter.write("Finished reading sources." + Globals.newline + Globals.newline); reportWriter.flush(); parser.close(); } if (task != null) { task.firePropertyChange(SOURCES, 100, 100); task.firePropertyChange(PROGRESS, 100, 100); while (!task.isProgressPropertyValid()) { // wait untill progress has been updated... } } reportWriter.write("Finished" + Globals.newline); reportWriter.flush(); success = true; } catch (IOException ex) { Logger.getLogger(Import.class.getName()).log(Level.SEVERE, "Error in line: " + (numberOfLinesRead + 1 + 1) + ". ", ex); success = false; } catch (InterruptedException ex) { Logger.getLogger(Import.class.getName()).log(Level.INFO, "Interupted on line: " + (numberOfLinesRead + 1) + ". ", ex); success = true; } catch (IndexOutOfBoundsException ex) { Logger.getLogger(Import.class.getName()).log(Level.SEVERE, "String too short error in line: " + (numberOfLinesRead + 1 + 1) + ". 
", ex); success = false; } finally { if (parser != null) { try { parser.close(); } catch (IOException ex) { Logger.getLogger(Import.class.getName()).log(Level.SEVERE, null, ex); } } try { reportWriter.flush(); reportWriter.close(); } catch (IOException ex) { Logger.getLogger(Import.class.getName()).log(Level.SEVERE, null, ex); } } if (task != null) { task.firePropertyChange(PROGRESS, 100, 100); task.firePropertyChange("finished", null, null); } return success; }
From source file:frames.MainGUI.java
public void LoadFeeDataToJTable(JTable t, File file) {
    try {
        CSVParser parser = CSVParser.parse(file, Charset.forName("UTF-8"), CSVFormat.DEFAULT);
        //t.setModel(tm);
        DefaultTableModel model = new DefaultTableModel();
        //model.setRowCount(0);
        for (CSVRecord c : parser) {
            if (c.getRecordNumber() == 1) {
                model.addColumn(c.get(datatype.GlobalVariable.TYPE));
                model.addColumn(c.get(datatype.GlobalVariable.AMOUNT));
                model.addColumn(c.get(datatype.GlobalVariable.PAID_BY));
                model.addColumn(c.get(datatype.GlobalVariable.PAYER));
                t.setModel(model);
                model = (DefaultTableModel) t.getModel();
                continue;
            }
            model.addRow(new Object[] { c.get(datatype.GlobalVariable.TYPE), c.get(datatype.GlobalVariable.AMOUNT),
                    c.get(datatype.GlobalVariable.PAID_BY), c.get(datatype.GlobalVariable.PAYER) });
        }
    } catch (Exception e) {
        System.out.println(e);
    }
}
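This example detects the header row by hand with getRecordNumber() == 1. Commons CSV can also consume the header itself; the following is a hedged sketch of that variant, reusing t and file from the method above. The column names ("Type", "Amount", "PaidBy", "Payer") are hypothetical placeholders for whatever the first row of the fee file actually contains.

CSVFormat headerAware = CSVFormat.DEFAULT.withFirstRecordAsHeader();
try (CSVParser parser = CSVParser.parse(file, Charset.forName("UTF-8"), headerAware)) {
    DefaultTableModel model = new DefaultTableModel();
    // The header map preserves file order, so it can seed the table columns directly.
    for (String column : parser.getHeaderMap().keySet()) {
        model.addColumn(column);
    }
    for (CSVRecord c : parser) {
        model.addRow(new Object[] { c.get("Type"), c.get("Amount"), c.get("PaidBy"), c.get("Payer") });
    }
    t.setModel(model);
}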
From source file:frames.MainGUI.java
public void LoadRentToJtable(JTable t, File file) {
    try {
        CSVParser parser = CSVParser.parse(file, Charset.forName("UTF-8"), CSVFormat.DEFAULT);
        //t.setModel(tm);
        DefaultTableModel model = new DefaultTableModel();
        //model.setRowCount(0);
        for (CSVRecord c : parser) {
            if (c.getRecordNumber() == 1) {
                // The original column titles are non-ASCII and were lost when this page was extracted;
                // the placeholder strings below are what survived.
                model.addColumn("??");
                model.addColumn("");
                model.addColumn("?");
                model.addColumn("?");
                model.addColumn("");
                model.addColumn("");
                t.setModel(model);
                model = (DefaultTableModel) t.getModel();
                continue;
            }
            model.addRow(new Object[] { c.get(0), c.get(1), c.get(2), c.get(3), c.get(4), c.get(5) });
        }
    } catch (Exception e) {
        System.out.println(e);
    }
}
From source file:io.dockstore.client.cli.nested.AbstractEntryClient.java
private void launchCwl(String entry, final List<String> args) throws ApiException, IOException {
    boolean isLocalEntry = false;
    if (args.contains("--local-entry")) {
        isLocalEntry = true;
    }
    final String yamlRun = optVal(args, "--yaml", null);
    String jsonRun = optVal(args, "--json", null);
    final String csvRuns = optVal(args, "--tsv", null);
    if (!(yamlRun != null ^ jsonRun != null ^ csvRuns != null)) {
        errorMessage("One of --json, --yaml, and --tsv is required", CLIENT_ERROR);
    }
    final File tempDir = Files.createTempDir();
    File tempCWL;
    if (!isLocalEntry) {
        tempCWL = File.createTempFile("temp", ".cwl", tempDir);
    } else {
        tempCWL = new File(entry);
    }
    if (!isLocalEntry) {
        final SourceFile cwlFromServer = getDescriptorFromServer(entry, "cwl");
        Files.write(cwlFromServer.getContent(), tempCWL, StandardCharsets.UTF_8);
        downloadDescriptors(entry, "cwl", tempDir);
    }
    jsonRun = convertYamlToJson(yamlRun, jsonRun);
    final Gson gson = io.cwl.avro.CWL.getTypeSafeCWLToolDocument();
    if (jsonRun != null) {
        // if the root document is an array, this indicates multiple runs
        JsonParser parser = new JsonParser();
        final JsonElement parsed = parser
                .parse(new InputStreamReader(new FileInputStream(jsonRun), StandardCharsets.UTF_8));
        if (parsed.isJsonArray()) {
            final JsonArray asJsonArray = parsed.getAsJsonArray();
            for (JsonElement element : asJsonArray) {
                final String finalString = gson.toJson(element);
                final File tempJson = File.createTempFile("parameter", ".json", Files.createTempDir());
                FileUtils.write(tempJson, finalString, StandardCharsets.UTF_8);
                final LauncherCWL cwlLauncher = new LauncherCWL(getConfigFile(), tempCWL.getAbsolutePath(),
                        tempJson.getAbsolutePath());
                if (this instanceof WorkflowClient) {
                    cwlLauncher.run(Workflow.class);
                } else {
                    cwlLauncher.run(CommandLineTool.class);
                }
            }
        } else {
            final LauncherCWL cwlLauncher = new LauncherCWL(getConfigFile(), tempCWL.getAbsolutePath(), jsonRun);
            if (this instanceof WorkflowClient) {
                cwlLauncher.run(Workflow.class);
            } else {
                cwlLauncher.run(CommandLineTool.class);
            }
        }
    } else if (csvRuns != null) {
        final File csvData = new File(csvRuns);
        try (CSVParser parser = CSVParser.parse(csvData, StandardCharsets.UTF_8,
                CSVFormat.DEFAULT.withDelimiter('\t').withEscape('\\').withQuoteMode(QuoteMode.NONE))) {
            // grab header
            final Iterator<CSVRecord> iterator = parser.iterator();
            final CSVRecord headers = iterator.next();
            // ignore row with type information
            iterator.next();
            // process rows
            while (iterator.hasNext()) {
                final CSVRecord csvRecord = iterator.next();
                final File tempJson = File.createTempFile("temp", ".json", Files.createTempDir());
                StringBuilder buffer = new StringBuilder();
                buffer.append("{");
                for (int i = 0; i < csvRecord.size(); i++) {
                    buffer.append("\"").append(headers.get(i)).append("\"");
                    buffer.append(":");
                    // if the type is an array, just pass it through
                    buffer.append(csvRecord.get(i));
                    if (i < csvRecord.size() - 1) {
                        buffer.append(",");
                    }
                }
                buffer.append("}");
                // prettify it
                JsonParser prettyParser = new JsonParser();
                JsonObject json = prettyParser.parse(buffer.toString()).getAsJsonObject();
                final String finalString = gson.toJson(json);
                // write it out
                FileUtils.write(tempJson, finalString, StandardCharsets.UTF_8);
                // final String stringMapAsString = gson.toJson(stringMap);
                // Files.write(stringMapAsString, tempJson, StandardCharsets.UTF_8);
                final LauncherCWL cwlLauncher = new LauncherCWL(this.getConfigFile(), tempCWL.getAbsolutePath(),
                        tempJson.getAbsolutePath());
                if (this instanceof WorkflowClient) {
                    cwlLauncher.run(Workflow.class);
                } else {
                    cwlLauncher.run(CommandLineTool.class);
                }
            }
        }
    } else {
        errorMessage("Missing required parameters, one of --json or --tsv is required", CLIENT_ERROR);
    }
}
From source file:nl.detoren.ijsco.io.OSBOLoader.java
public Spelers laadCSV(String csvpath) {
    File csvData = new File(csvpath);
    CSVParser parser = null;
    try {
        parser = CSVParser.parse(csvData, java.nio.charset.Charset.defaultCharset(), CSVFormat.RFC4180);
    } catch (IOException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
    for (CSVRecord csvRecord : parser) {
        //TODO
    }
    return null;
}
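As written, the loop above will throw a NullPointerException if CSVParser.parse fails, because parser stays null, and the parser is never closed. A hedged sketch of the same loader using try-with-resources follows; the record mapping is still the author's TODO and is deliberately left open.

public Spelers laadCSV(String csvpath) {
    File csvData = new File(csvpath);
    try (CSVParser parser = CSVParser.parse(csvData, java.nio.charset.Charset.defaultCharset(), CSVFormat.RFC4180)) {
        for (CSVRecord csvRecord : parser) {
            // TODO map csvRecord onto the Spelers result
        }
    } catch (IOException e) {
        e.printStackTrace();
    }
    return null;
}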
From source file:nl.utwente.ewi.caes.tactiletriana.simulation.devices.UncontrollableLoad.java
/**
 *
 * @param profileNumber - A number between 0 and 5 (inclusive) which selects
 *                        the profile data on which this instance is based
 */
public UncontrollableLoad(int profileNumber, Simulation simulation) {
    super("Uncontrollable Load", simulation);
    if (profileNumber < 0 || profileNumber > 5) {
        throw new IllegalArgumentException("profileNumber must be in the range of 0 to 5");
    }
    this.profileNumber = profileNumber;

    // Load the profile data into an array from the CSV file containing power consumptions for 6 houses.
    if (profile == null) {
        profile = new double[6][525608];
        try {
            File csvData = new File("src/main/resources/datasets/watt_house_profiles_year.csv");
            // Jan Harm: you can simply create a format :)
            CSVFormat format = CSVFormat.DEFAULT.withDelimiter(';');
            CSVParser parser = CSVParser.parse(csvData, Charset.defaultCharset(), format);
            for (CSVRecord csvRecord : parser) {
                for (int p = 0; p < 6; p++) {
                    profile[p][(int) parser.getRecordNumber()] = Double.parseDouble(csvRecord.get(p));
                }
            }
        } catch (IOException e) {
            throw new RuntimeException("Error while parsing house profile dataset", e);
        }
    }
}
From source file:nzilbb.csv.CsvDeserializer.java
/** * Loads the serialized form of the graph, using the given set of named streams. * @param streams A list of named streams that contain all the * transcription/annotation data required, and possibly (a) stream(s) for the media annotated. * @param schema The layer schema, definining layers and the way they interrelate. * @return A list of parameters that require setting before {@link IDeserializer#deserialize()} * can be invoked. This may be an empty list, and may include parameters with the value already * set to a workable default. If there are parameters, and user interaction is possible, then * the user may be presented with an interface for setting/confirming these parameters, before * they are then passed to {@link IDeserializer#setParameters(ParameterSet)}. * @throws SerializationException If the graph could not be loaded. * @throws IOException On IO error./*from w ww .j a v a 2 s . c om*/ */ @SuppressWarnings({ "rawtypes", "unchecked" }) public ParameterSet load(NamedStream[] streams, Schema schema) throws SerializationException, IOException { // take the first stream, ignore all others. NamedStream csv = Utility.FindSingleStream(streams, ".csv", "text/csv"); if (csv == null) throw new SerializationException("No CSV stream found"); setName(csv.getName()); setSchema(schema); // create a list of layers we need and possible matching layer names LinkedHashMap<Parameter, List<String>> layerToPossibilities = new LinkedHashMap<Parameter, List<String>>(); HashMap<String, LinkedHashMap<String, Layer>> layerToCandidates = new HashMap<String, LinkedHashMap<String, Layer>>(); LinkedHashMap<String, Layer> metadataLayers = new LinkedHashMap<String, Layer>(); for (Layer layer : schema.getRoot().getChildren().values()) { if (layer.getAlignment() == Constants.ALIGNMENT_NONE) { metadataLayers.put(layer.getId(), layer); } } // next turn child layer // look for person attributes for (Layer layer : schema.getParticipantLayer().getChildren().values()) { if (layer.getAlignment() == Constants.ALIGNMENT_NONE) { metadataLayers.put(layer.getId(), layer); } } // next turn child layer LinkedHashMap<String, Layer> utteranceAndMetadataLayers = new LinkedHashMap<String, Layer>(metadataLayers); utteranceAndMetadataLayers.put(getUtteranceLayer().getId(), getUtteranceLayer()); LinkedHashMap<String, Layer> whoAndMetadataLayers = new LinkedHashMap<String, Layer>(metadataLayers); whoAndMetadataLayers.put(getParticipantLayer().getId(), getParticipantLayer()); // read the header line setParser(CSVParser.parse(csv.getStream(), java.nio.charset.Charset.forName("UTF-8"), CSVFormat.EXCEL.withHeader())); setHeaderMap(parser.getHeaderMap()); Vector<String> possibleIDHeaders = new Vector<String>(); Vector<String> possibleUtteranceHeaders = new Vector<String>(); Vector<String> possibleParticipantHeaders = new Vector<String>(); for (String header : getHeaderMap().keySet()) { if (header.trim().length() == 0) continue; Vector<String> possibleMatches = new Vector<String>(); possibleMatches.add("transcript" + header); possibleMatches.add("participant" + header); possibleMatches.add("speaker" + header); possibleMatches.add(header); // special cases if (header.equalsIgnoreCase("id") || header.equalsIgnoreCase("transcript")) { possibleIDHeaders.add(header); } else if (header.equalsIgnoreCase("text") || header.equalsIgnoreCase("document")) { possibleUtteranceHeaders.add(header); } else if (header.equalsIgnoreCase("name") || header.equalsIgnoreCase("participant") || header.equalsIgnoreCase("participantid")) { 
possibleParticipantHeaders.add(header); } layerToPossibilities.put(new Parameter("header_" + getHeaderMap().get(header), Layer.class, header), possibleMatches); layerToCandidates.put("header_" + getHeaderMap().get(header), metadataLayers); } // next header ParameterSet parameters = new ParameterSet(); // add utterance/participant parameters int defaultUtterancePossibilityIndex = 0; // if there are no obvious participant column possibilities... Parameter idColumn = new Parameter("id", String.class, "ID Column", "Column containing the ID of the text.", false); if (possibleIDHeaders.size() == 0) { // ...include all columns possibleIDHeaders.addAll(getHeaderMap().keySet()); } else { idColumn.setValue(possibleIDHeaders.firstElement()); } idColumn.setPossibleValues(possibleIDHeaders); parameters.addParameter(idColumn); // if there are no obvious participant column possibilities... if (possibleParticipantHeaders.size() == 0) { // ...include all columns possibleParticipantHeaders.addAll(getHeaderMap().keySet()); // default participant column will be the first column, // so default utterance should be the second (if we didn't find obvious possible text column) if (possibleParticipantHeaders.size() > 1) // but only if there's more than one column { defaultUtterancePossibilityIndex = 1; } } Parameter participantColumn = new Parameter("who", "Participant Column", "Column containing the ID of the author of the text.", true, possibleParticipantHeaders.firstElement()); participantColumn.setPossibleValues(possibleParticipantHeaders); parameters.addParameter(participantColumn); // if there are no obvious text column possibilities... if (possibleUtteranceHeaders.size() == 0) { // ...include all columns possibleUtteranceHeaders.addAll(getHeaderMap().keySet()); } else { // we found a possible text column, so run with it regardless of whether we also found // a possible participant column defaultUtterancePossibilityIndex = 0; } Parameter utteranceColumn = new Parameter("text", "Text Column", "Column containing the transcript text.", true, possibleUtteranceHeaders.elementAt(defaultUtterancePossibilityIndex)); utteranceColumn.setPossibleValues(possibleUtteranceHeaders); parameters.addParameter(utteranceColumn); // add column-mapping parameters, and set possibile/default values for (Parameter p : layerToPossibilities.keySet()) { List<String> possibleNames = layerToPossibilities.get(p); LinkedHashMap<String, Layer> candidateLayers = layerToCandidates.get(p.getName()); parameters.addParameter(p); if (p.getValue() == null && candidateLayers != null && possibleNames != null) { p.setValue(Utility.FindLayerById(candidateLayers, possibleNames)); } if (p.getPossibleValues() == null && candidateLayers != null) { p.setPossibleValues(candidateLayers.values()); } } return parameters; }
From source file:org.apache.ambari.server.serveraction.kerberos.AbstractKerberosDataFileReader.java
/**
 * Opens the data file for reading.
 * <p/>
 * This may be called multiple times and the appropriate action will occur depending on whether
 * the file has been previously opened or closed.
 *
 * @throws java.io.IOException
 */
public void open() throws IOException {
    if (isClosed()) {
        csvParser = CSVParser.parse(file, Charset.defaultCharset(), CSVFormat.DEFAULT.withHeader());
    }
}
From source file:org.apache.jackrabbit.oak.plugins.tika.CSVFileBinaryResourceProvider.java
@Override
public FluentIterable<BinaryResource> getBinaries(final String path) throws IOException {
    CSVParser parser = CSVParser.parse(dataFile, Charsets.UTF_8, FORMAT);
    closer.register(parser);
    return FluentIterable.from(parser)
            .transform(new RecordTransformer())
            .filter(notNull())
            .filter(new Predicate<BinaryResource>() {
                @Override
                public boolean apply(BinaryResource input) {
                    return PathUtils.isAncestor(path, input.getPath());
                }
            });
}
From source file:org.apache.phoenix.pherf.result.impl.CSVFileResultHandler.java
public synchronized List<Result> read() throws IOException {
    CSVParser parser = null;
    util.ensureBaseResultDirExists();
    try {
        File file = new File(resultFileName);
        parser = CSVParser.parse(file, Charset.defaultCharset(), CSVFormat.DEFAULT);
        List<CSVRecord> records = parser.getRecords();
        List<Result> results = new ArrayList<>();
        String header = null;
        for (CSVRecord record : records) {
            // First record is the CSV Header
            if (record.getRecordNumber() == 1) {
                header = record.toString();
                continue;
            }
            List<ResultValue> resultValues = new ArrayList<>();
            for (String val : record.toString().split(PherfConstants.RESULT_FILE_DELIMETER)) {
                resultValues.add(new ResultValue(val));
            }
            Result result = new Result(resultFileDetails, header, resultValues);
            results.add(result);
        }
        return results;
    } finally {
        parser.close();
    }
}
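This reader rebuilds each row by splitting record.toString() on the Pherf result delimiter. A hedged alternative sketch, reusing the variables from the method above: read the fields straight off the CSVRecord, which is Iterable<String>, so the loop no longer depends on CSVRecord's string representation. Whether the two behave identically when a field itself contains the delimiter is a project-specific question, so this is only a sketch.

for (CSVRecord record : records) {
    // First record is the CSV header
    if (record.getRecordNumber() == 1) {
        header = record.toString();
        continue;
    }
    List<ResultValue> resultValues = new ArrayList<>();
    for (String val : record) {
        resultValues.add(new ResultValue(val));
    }
    results.add(new Result(resultFileDetails, header, resultValues));
}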