List of usage examples for org.apache.commons.csv CSVRecord get
public String get(final String name)
From source file:biz.ganttproject.impex.csv.TaskRecords.java
/**
 * Imports one CSV row as a GanttProject task.
 *
 * Builds a task from the record's named columns, then registers it in the
 * side maps (id, assignments, predecessors, WBS outline) that are resolved
 * after all rows have been read.
 *
 * @param record one CSV row with task fields
 * @return false if the superclass rejects the row or mandatory fields are
 *         missing; true once the task has been created and registered
 */
@Override
protected boolean doProcess(CSVRecord record) {
    if (!super.doProcess(record)) {
        return false;
    }
    if (!hasMandatoryFields(record)) {
        return false;
    }
    Date startDate = parseDateOrError(getOrNull(record, TaskFields.BEGIN_DATE.toString()));
    // Create the task
    TaskManager.TaskBuilder builder = taskManager.newTaskBuilder()
            .withName(getOrNull(record, TaskFields.NAME.toString())).withStartDate(startDate)
            .withWebLink(getOrNull(record, TaskFields.WEB_LINK.toString()))
            .withNotes(getOrNull(record, TaskFields.NOTES.toString()));
    if (record.isSet(TaskDefaultColumn.DURATION.getName())) {
        builder = builder
                .withDuration(taskManager.createLength(record.get(TaskDefaultColumn.DURATION.getName())));
    }
    if (record.isSet(TaskFields.END_DATE.toString())) {
        if (record.isSet(TaskDefaultColumn.DURATION.getName())) {
            // Same begin/end date with zero duration encodes a legacy milestone.
            if (Objects.equal(record.get(TaskFields.BEGIN_DATE.toString()),
                    record.get(TaskFields.END_DATE.toString()))
                    && "0".equals(record.get(TaskDefaultColumn.DURATION.getName()))) {
                builder = builder.withLegacyMilestone();
            }
        } else {
            // No explicit duration: derive it from the end date, adjusted to
            // the right edge of the default time unit.
            Date endDate = parseDateOrError(getOrNull(record, TaskFields.END_DATE.toString()));
            if (endDate != null) {
                builder = builder.withEndDate(myTimeUnitStack.getDefaultTimeUnit().adjustRight(endDate));
            }
        }
    }
    if (record.isSet(TaskFields.COMPLETION.toString())) {
        String completion = record.get(TaskFields.COMPLETION.toString());
        if (!Strings.isNullOrEmpty(completion)) {
            // NOTE(review): Integer.parseInt is not guarded here, unlike the
            // cost parsing below — a malformed completion value will propagate
            // a NumberFormatException to the caller.
            builder = builder.withCompletion(Integer.parseInt(completion));
        }
    }
    if (record.isSet(TaskDefaultColumn.COST.getName())) {
        try {
            String cost = record.get(TaskDefaultColumn.COST.getName());
            if (!Strings.isNullOrEmpty(cost)) {
                builder = builder.withCost(new BigDecimal(cost));
            }
        } catch (NumberFormatException e) {
            // A bad cost value is logged and skipped; the task is still created.
            GPLogger.logToLogger(e);
            GPLogger.log(String.format("Failed to parse %s as cost value",
                    record.get(TaskDefaultColumn.COST.getName())));
        }
    }
    Task task = builder.build();
    if (record.isSet(TaskDefaultColumn.ID.getName())) {
        myTaskIdMap.put(record.get(TaskDefaultColumn.ID.getName()), task);
    }
    // Resource assignments and predecessors are recorded as raw strings and
    // resolved later, once all tasks exist.
    myAssignmentMap.put(task, getOrNull(record, TaskFields.RESOURCES.toString()));
    myPredecessorMap.put(task, getOrNull(record, TaskDefaultColumn.PREDECESSORS.getName()));
    String outlineNumber = getOrNull(record, TaskDefaultColumn.OUTLINE_NUMBER.getName());
    if (outlineNumber != null) {
        myWbsMap.put(outlineNumber, task);
    }
    // Copy any custom-column values onto the task.
    for (String customField : getCustomFields()) {
        String value = getOrNull(record, customField);
        if (value == null) {
            continue;
        }
        CustomPropertyDefinition def = taskManager.getCustomPropertyManager()
                .getCustomPropertyDefinition(customField);
        if (def == null) {
            GPLogger.logToLogger("Can't find custom field with name=" + customField + " value=" + value);
            continue;
        }
        task.getCustomValues().addCustomProperty(def, value);
    }
    return true;
}
From source file:edu.harvard.mcz.imagecapture.loader.JobVerbatimFieldLoad.java
/**
 * Runs the verbatim field load job: prompts for a CSV (or tab-delimited)
 * file if none was given, inspects its header row, and dispatches to one of
 * three load modes depending on which columns are present:
 * case 1a/1b — barcode + verbatimUnclassifiedText (+ optional cluster id);
 * case 2 — the exact set of eight verbatim fields;
 * case 3 — an arbitrary header set validated by FieldLoader.checkHeaderList.
 * Progress, per-row errors, and user cancellations are accumulated in the
 * job's counter/errors state; report() and done() always run at the end.
 */
@Override
public void start() {
    startDateTime = new Date();
    Singleton.getSingletonInstance().getJobList().addJob((RunnableJob) this);
    runStatus = RunStatus.STATUS_RUNNING;
    String selectedFilename = "";
    if (file == null) {
        // No file supplied: ask the user, starting from the last used path.
        final JFileChooser fileChooser = new JFileChooser();
        fileChooser.setFileSelectionMode(JFileChooser.FILES_AND_DIRECTORIES);
        if (Singleton.getSingletonInstance().getProperties().getProperties()
                .getProperty(ImageCaptureProperties.KEY_LASTLOADPATH) != null) {
            fileChooser.setCurrentDirectory(new File(Singleton.getSingletonInstance().getProperties()
                    .getProperties().getProperty(ImageCaptureProperties.KEY_LASTLOADPATH)));
        }
        int returnValue = fileChooser.showOpenDialog(Singleton.getSingletonInstance().getMainFrame());
        if (returnValue == JFileChooser.APPROVE_OPTION) {
            file = fileChooser.getSelectedFile();
        }
    }
    if (file != null) {
        log.debug("Selected file to load: " + file.getName() + ".");
        if (file.exists() && file.isFile() && file.canRead()) {
            // Save location
            Singleton.getSingletonInstance().getProperties().getProperties()
                    .setProperty(ImageCaptureProperties.KEY_LASTLOADPATH, file.getPath());
            selectedFilename = file.getName();
            String[] headers = new String[] {};
            CSVFormat csvFormat = CSVFormat.DEFAULT.withHeader(headers);
            // First pass: count rows (also detects whether the default CSV
            // format can parse the file at all).
            int rows = 0;
            try {
                rows = readRows(file, csvFormat);
            } catch (FileNotFoundException e) {
                JOptionPane.showMessageDialog(Singleton.getSingletonInstance().getMainFrame(),
                        "Unable to load data, file not found: " + e.getMessage(), "Error: File Not Found",
                        JOptionPane.OK_OPTION);
                errors.append("File not found ").append(e.getMessage()).append("\n");
                log.error(e.getMessage(), e);
            } catch (IOException e) {
                errors.append("Error loading csv format, trying tab delimited: ").append(e.getMessage())
                        .append("\n");
                log.debug(e.getMessage());
                try {
                    // try reading as tab delimited format, if successful, use that format.
                    CSVFormat tabFormat = CSVFormat.newFormat('\t').withIgnoreSurroundingSpaces(true)
                            .withHeader(headers).withQuote('"');
                    rows = readRows(file, tabFormat);
                    csvFormat = tabFormat;
                } catch (IOException e1) {
                    errors.append("Error Loading data: ").append(e1.getMessage()).append("\n");
                    log.error(e.getMessage(), e1);
                }
            }
            try {
                // NOTE(review): reader/csvParser are closed only on the happy
                // path below; an exception in this try leaks both.
                Reader reader = new FileReader(file);
                CSVParser csvParser = new CSVParser(reader, csvFormat);
                Map<String, Integer> csvHeader = csvParser.getHeaderMap();
                headers = new String[csvHeader.size()];
                int i = 0;
                for (String header : csvHeader.keySet()) {
                    headers[i++] = header;
                    log.debug(header);
                }
                boolean okToRun = true;
                //TODO: Work picking/checking responsibility into a FieldLoaderWizard
                List<String> headerList = Arrays.asList(headers);
                if (!headerList.contains("barcode")) {
                    log.error("Input file " + file.getName()
                            + " header does not contain required field 'barcode'.");
                    // no barcode field, we can't match the input to specimen records.
                    errors.append("Field \"barcode\" not found in csv file headers. Unable to load data.")
                            .append("\n");
                    okToRun = false;
                }
                if (okToRun) {
                    Iterator<CSVRecord> iterator = csvParser.iterator();
                    FieldLoader fl = new FieldLoader();
                    if (headerList.size() == 3 && headerList.contains("verbatimUnclassifiedText")
                            && headerList.contains("questions") && headerList.contains("barcode")) {
                        log.debug("Input file matches case 1: Unclassified text only.");
                        // Allowed case 1a: unclassified text only
                        int confirm = JOptionPane.showConfirmDialog(
                                Singleton.getSingletonInstance().getMainFrame(),
                                "Confirm load from file " + selectedFilename + " (" + rows
                                        + " rows) with just barcode and verbatimUnclassifiedText",
                                "Verbatim unclassified Field found for load", JOptionPane.OK_CANCEL_OPTION);
                        if (confirm == JOptionPane.OK_OPTION) {
                            String barcode = "";
                            int lineNumber = 0;
                            while (iterator.hasNext()) {
                                lineNumber++;
                                counter.incrementSpecimens();
                                CSVRecord record = iterator.next();
                                try {
                                    String verbatimUnclassifiedText = record.get("verbatimUnclassifiedText");
                                    barcode = record.get("barcode");
                                    String questions = record.get("questions");
                                    fl.load(barcode, verbatimUnclassifiedText, questions, true);
                                    counter.incrementSpecimensUpdated();
                                } catch (IllegalArgumentException e) {
                                    // record.get throws this for a missing column.
                                    RunnableJobError error = new RunnableJobError(file.getName(), barcode,
                                            Integer.toString(lineNumber), e.getClass().getSimpleName(), e,
                                            RunnableJobError.TYPE_LOAD_FAILED);
                                    counter.appendError(error);
                                    log.error(e.getMessage(), e);
                                } catch (LoadException e) {
                                    RunnableJobError error = new RunnableJobError(file.getName(), barcode,
                                            Integer.toString(lineNumber), e.getClass().getSimpleName(), e,
                                            RunnableJobError.TYPE_LOAD_FAILED);
                                    counter.appendError(error);
                                    log.error(e.getMessage(), e);
                                }
                                percentComplete = (int) ((lineNumber * 100f) / rows);
                                this.setPercentComplete(percentComplete);
                            }
                        } else {
                            errors.append("Load canceled by user.").append("\n");
                        }
                    } else if (headerList.size() == 4 && headerList.contains("verbatimUnclassifiedText")
                            && headerList.contains("questions") && headerList.contains("barcode")
                            && headerList.contains("verbatimClusterIdentifier")) {
                        log.debug(
                                "Input file matches case 1: Unclassified text only (with cluster identifier).");
                        // Allowed case 1b: unclassified text only (including cluster identifier)
                        int confirm = JOptionPane.showConfirmDialog(
                                Singleton.getSingletonInstance().getMainFrame(),
                                "Confirm load from file " + selectedFilename + " (" + rows
                                        + " rows) with just barcode and verbatimUnclassifiedText",
                                "Verbatim unclassified Field found for load", JOptionPane.OK_CANCEL_OPTION);
                        if (confirm == JOptionPane.OK_OPTION) {
                            String barcode = "";
                            int lineNumber = 0;
                            while (iterator.hasNext()) {
                                lineNumber++;
                                counter.incrementSpecimens();
                                CSVRecord record = iterator.next();
                                try {
                                    String verbatimUnclassifiedText = record.get("verbatimUnclassifiedText");
                                    String verbatimClusterIdentifier = record.get("verbatimClusterIdentifier");
                                    barcode = record.get("barcode");
                                    String questions = record.get("questions");
                                    fl.load(barcode, verbatimUnclassifiedText, verbatimClusterIdentifier,
                                            questions, true);
                                    counter.incrementSpecimensUpdated();
                                } catch (IllegalArgumentException e) {
                                    RunnableJobError error = new RunnableJobError(file.getName(), barcode,
                                            Integer.toString(lineNumber), e.getClass().getSimpleName(), e,
                                            RunnableJobError.TYPE_LOAD_FAILED);
                                    counter.appendError(error);
                                    log.error(e.getMessage(), e);
                                } catch (LoadException e) {
                                    RunnableJobError error = new RunnableJobError(file.getName(), barcode,
                                            Integer.toString(lineNumber), e.getClass().getSimpleName(), e,
                                            RunnableJobError.TYPE_LOAD_FAILED);
                                    counter.appendError(error);
                                    log.error(e.getMessage(), e);
                                }
                                percentComplete = (int) ((lineNumber * 100f) / rows);
                                this.setPercentComplete(percentComplete);
                            }
                        } else {
                            errors.append("Load canceled by user.").append("\n");
                        }
                    } else if (headerList.size() == 8 && headerList.contains("verbatimUnclassifiedText")
                            && headerList.contains("questions") && headerList.contains("barcode")
                            && headerList.contains("verbatimLocality") && headerList.contains("verbatimDate")
                            && headerList.contains("verbatimNumbers") && headerList.contains("verbatimCollector")
                            && headerList.contains("verbatimCollection")) {
                        // Allowed case two, transcription into verbatim fields, must be exact list of all
                        // verbatim fields, not including cluster identifier or other metadata.
                        log.debug("Input file matches case 2: Full list of verbatim fields.");
                        int confirm = JOptionPane.showConfirmDialog(
                                Singleton.getSingletonInstance().getMainFrame(),
                                "Confirm load from file " + selectedFilename + " (" + rows
                                        + " rows) with just barcode and verbatim fields.",
                                "Verbatim Fields found for load", JOptionPane.OK_CANCEL_OPTION);
                        if (confirm == JOptionPane.OK_OPTION) {
                            String barcode = "";
                            int lineNumber = 0;
                            while (iterator.hasNext()) {
                                lineNumber++;
                                counter.incrementSpecimens();
                                CSVRecord record = iterator.next();
                                try {
                                    String verbatimLocality = record.get("verbatimLocality");
                                    String verbatimDate = record.get("verbatimDate");
                                    String verbatimCollector = record.get("verbatimCollector");
                                    String verbatimCollection = record.get("verbatimCollection");
                                    String verbatimNumbers = record.get("verbatimNumbers");
                                    // (sic) identifier name retains original misspelling
                                    String verbatimUnclasifiedText = record.get("verbatimUnclassifiedText");
                                    barcode = record.get("barcode");
                                    String questions = record.get("questions");
                                    fl.load(barcode, verbatimLocality, verbatimDate, verbatimCollector,
                                            verbatimCollection, verbatimNumbers, verbatimUnclasifiedText,
                                            questions);
                                    counter.incrementSpecimensUpdated();
                                } catch (IllegalArgumentException e) {
                                    RunnableJobError error = new RunnableJobError(file.getName(), barcode,
                                            Integer.toString(lineNumber), e.getClass().getSimpleName(), e,
                                            RunnableJobError.TYPE_LOAD_FAILED);
                                    counter.appendError(error);
                                    log.error(e.getMessage(), e);
                                } catch (LoadException e) {
                                    RunnableJobError error = new RunnableJobError(file.getName(), barcode,
                                            Integer.toString(lineNumber), e.getClass().getSimpleName(), e,
                                            RunnableJobError.TYPE_LOAD_FAILED);
                                    counter.appendError(error);
                                    log.error(e.getMessage(), e);
                                }
                                percentComplete = (int) ((lineNumber * 100f) / rows);
                                this.setPercentComplete(percentComplete);
                            }
                        } else {
                            errors.append("Load canceled by user.").append("\n");
                        }
                    } else {
                        // allowed case three, transcription into arbitrary sets verbatim or other fields
                        log.debug("Input file case 3: Arbitrary set of fields.");
                        // Check column headers before starting run.
                        boolean headersOK = false;
                        try {
                            HeaderCheckResult headerCheck = fl.checkHeaderList(headerList);
                            if (headerCheck.isResult()) {
                                int confirm = JOptionPane.showConfirmDialog(
                                        Singleton.getSingletonInstance().getMainFrame(),
                                        "Confirm load from file " + selectedFilename + " (" + rows
                                                + " rows) with headers: \n"
                                                + headerCheck.getMessage().replaceAll(":", ":\n"),
                                        "Fields found for load", JOptionPane.OK_CANCEL_OPTION);
                                if (confirm == JOptionPane.OK_OPTION) {
                                    headersOK = true;
                                } else {
                                    errors.append("Load canceled by user.").append("\n");
                                }
                            } else {
                                // Headers look wrong, but the user may override.
                                int confirm = JOptionPane.showConfirmDialog(
                                        Singleton.getSingletonInstance().getMainFrame(),
                                        "Problem found with headers in file, try to load anyway?\nHeaders: \n"
                                                + headerCheck.getMessage().replaceAll(":", ":\n"),
                                        "Problem in fields for load", JOptionPane.OK_CANCEL_OPTION);
                                if (confirm == JOptionPane.OK_OPTION) {
                                    headersOK = true;
                                } else {
                                    errors.append("Load canceled by user.").append("\n");
                                }
                            }
                        } catch (LoadException e) {
                            errors.append("Error loading data: \n").append(e.getMessage()).append("\n");
                            JOptionPane.showMessageDialog(Singleton.getSingletonInstance().getMainFrame(),
                                    e.getMessage().replaceAll(":", ":\n"), "Error Loading Data: Problem Fields",
                                    JOptionPane.ERROR_MESSAGE);
                            log.error(e.getMessage(), e);
                        }
                        if (headersOK) {
                            int lineNumber = 0;
                            while (iterator.hasNext()) {
                                lineNumber++;
                                Map<String, String> data = new HashMap<String, String>();
                                CSVRecord record = iterator.next();
                                String barcode = record.get("barcode");
                                Iterator<String> hi = headerList.iterator();
                                boolean containsNonVerbatim = false;
                                while (hi.hasNext()) {
                                    String header = hi.next();
                                    // Skip any fields prefixed by the underscore character _
                                    if (!header.equals("barcode") && !header.startsWith("_")) {
                                        data.put(header, record.get(header));
                                        if (!header.equals("questions")
                                                && MetadataRetriever.isFieldExternallyUpdatable(Specimen.class,
                                                        header)
                                                && MetadataRetriever.isFieldVerbatim(Specimen.class, header)) {
                                            containsNonVerbatim = true;
                                        }
                                    }
                                }
                                if (data.size() > 0) {
                                    try {
                                        boolean updated = false;
                                        // Pick the workflow stage based on whether any
                                        // non-verbatim field is being set.
                                        if (containsNonVerbatim) {
                                            updated = fl.loadFromMap(barcode, data,
                                                    WorkFlowStatus.STAGE_CLASSIFIED, true);
                                        } else {
                                            updated = fl.loadFromMap(barcode, data,
                                                    WorkFlowStatus.STAGE_VERBATIM, true);
                                        }
                                        counter.incrementSpecimens();
                                        if (updated) {
                                            counter.incrementSpecimensUpdated();
                                        }
                                    } catch (HibernateException e1) {
                                        // Catch (should just be development) problems with the underlying query
                                        StringBuilder message = new StringBuilder();
                                        message.append("Query Error loading row (").append(lineNumber)
                                                .append(")[").append(barcode).append("]")
                                                .append(e1.getMessage());
                                        RunnableJobError err = new RunnableJobError(selectedFilename, barcode,
                                                Integer.toString(lineNumber), e1.getMessage(), e1,
                                                RunnableJobError.TYPE_LOAD_FAILED);
                                        counter.appendError(err);
                                        log.error(e1.getMessage(), e1);
                                    } catch (LoadException e) {
                                        StringBuilder message = new StringBuilder();
                                        message.append("Error loading row (").append(lineNumber).append(")[")
                                                .append(barcode).append("]").append(e.getMessage());
                                        RunnableJobError err = new RunnableJobError(selectedFilename, barcode,
                                                Integer.toString(lineNumber), e.getMessage(), e,
                                                RunnableJobError.TYPE_LOAD_FAILED);
                                        counter.appendError(err);
                                        // errors.append(message.append("\n").toString());
                                        log.error(e.getMessage(), e);
                                    }
                                }
                                percentComplete = (int) ((lineNumber * 100f) / rows);
                                this.setPercentComplete(percentComplete);
                            }
                        } else {
                            String message = "Can't load data, problem with headers.";
                            errors.append(message).append("\n");
                            log.error(message);
                        }
                    }
                }
                csvParser.close();
                reader.close();
            } catch (FileNotFoundException e) {
                JOptionPane.showMessageDialog(Singleton.getSingletonInstance().getMainFrame(),
                        "Unable to load data, file not found: " + e.getMessage(), "Error: File Not Found",
                        JOptionPane.OK_OPTION);
                errors.append("File not found ").append(e.getMessage()).append("\n");
                log.error(e.getMessage(), e);
            } catch (IOException e) {
                errors.append("Error Loading data: ").append(e.getMessage()).append("\n");
                log.error(e.getMessage(), e);
            }
        }
    } else {
        //TODO: handle error condition
        log.error("File selection cancelled by user.");
    }
    report(selectedFilename);
    done();
}
From source file:com.datascience.cascading.scheme.CsvScheme.java
/** * Detects the header fields./*www. j a v a 2s. c o m*/ */ @SuppressWarnings("unchecked") protected Fields detectHeader(FlowProcess<JobConf> flowProcess, Tap tap, boolean genericNames) { CSVRecord record = getHeaderRecord(flowProcess, tap); String[] fields = new String[record.size()]; for (int i = 0; i < record.size(); i++) { if (genericNames) { fields[i] = String.format("col%d", i); } else { fields[i] = record.get(i); } } return new Fields(fields); }
From source file:com.datascience.cascading.scheme.CsvScheme.java
/** * Method to validate Fields passed present in the headers. *///w w w . j a v a 2 s. com protected boolean validateFields(FlowProcess<JobConf> flowProcess, Tap tap, Fields sourceFields) { CSVRecord headerRecord = getHeaderRecord(flowProcess, tap); if (sourceFields.size() > headerRecord.size()) { return false; } List<String> recordList = new ArrayList<String>(); for (int i = 0; i < headerRecord.size(); i++) { recordList.add(headerRecord.get(i)); } for (int i = 0; i < sourceFields.size(); i++) { if (!recordList.contains(sourceFields.get(i))) { return false; } } return true; }
From source file:com.kdmanalytics.toif.ui.common.AdaptorConfiguration.java
/**
 * Parse the header row of the adaptor configuration CSV.
 *
 * Side effects: resets {@code headers}, assigns the COLUMN_* index fields for
 * every recognized column name (case-insensitive match), collects unknown
 * names in {@code extraColumns}, and fills {@code columnMap} with
 * lower-cased header text mapped to column index.
 *
 * @param record the CSV record containing the header cells
 */
private void parseHeader(CSVRecord record) {
    int size = record.size();
    headers = new LinkedList<String>();
    // Import the cells
    for (int i = 0; i < size; i++) {
        String text = record.get(i);
        headers.add(text);
        if (COLUMN_SFP_STRING.equalsIgnoreCase(text))
            COLUMN_SFP = i;
        else if (COLUMN_CWE_STRING.equalsIgnoreCase(text))
            COLUMN_CWE = i;
        else if (COLUMN_SHOW_STRING.equalsIgnoreCase(text))
            COLUMN_SHOW = i;
        else if (COLUMN_SHOW_STRING_OLD.equalsIgnoreCase(text))
            COLUMN_SHOW = i;
        else if (COLUMN_CPPCHECK_STRING.equalsIgnoreCase(text))
            COLUMN_CPPCHECK = i;
        else if (COLUMN_RATS_STRING.equalsIgnoreCase(text))
            COLUMN_RATS = i;
        else if (COLUMN_SPLINT_STRING.equalsIgnoreCase(text))
            COLUMN_SPLINT = i;
        else if (COLUMN_JLINT_STRING.equalsIgnoreCase(text))
            COLUMN_JLINT = i;
        else if (COLUMN_FINDBUGS_STRING.equalsIgnoreCase(text))
            COLUMN_FINDBUGS = i;
        else if (COLUMN_COUNT_C_STRING1.equalsIgnoreCase(text)) {
            COLUMN_COUNT_C = i;
            // Convert to new name
            text = COLUMN_COUNT_C_STRING2;
        } else if (COLUMN_COUNT_JAVA_STRING1.equalsIgnoreCase(text)) {
            COLUMN_COUNT_JAVA = i;
            // Convert to new name
            text = COLUMN_COUNT_JAVA_STRING2;
        } else if (COLUMN_COUNT_C_STRING2.equalsIgnoreCase(text))
            COLUMN_COUNT_C = i;
        else if (COLUMN_COUNT_JAVA_STRING2.equalsIgnoreCase(text))
            COLUMN_COUNT_JAVA = i;
        else {
            extraColumns.add(text);
        }
        // NOTE(review): when a legacy count column name was matched above,
        // 'text' has been rewritten to the new name, so columnMap is keyed by
        // the NEW name while 'headers' keeps the original text — confirm this
        // asymmetry is intended.
        columnMap.put(text.toLowerCase(), i);
    }
}
From source file:com.kdmanalytics.toif.ui.common.AdaptorConfiguration.java
/**
 * Parse the record as a row of data and append it to the configuration table.
 *
 * Normalizes the SFP and CWE identifiers in place, then adds the row to
 * {@code data} (and the lookup maps) only when the CWE is non-empty and not
 * already present. Marks the configuration dirty when a row is added.
 *
 * @param record the CSV record to import
 * @param rcount the current row count (index of the next row to add)
 * @return the updated row count (incremented only when a row was added)
 */
private int parseData(CSVRecord record, int rcount) {
    int size = record.size();
    List<Object> row = new LinkedList<Object>();
    // Import the cells, converting each to its typed cell representation.
    for (int i = 0; i < size; i++) {
        String text = record.get(i);
        row.add(getCell(i, text));
    }
    // Only rows wide enough to contain the CWE column are usable.
    if (row.size() > COLUMN_CWE) {
        String cwe = (String) row.get(COLUMN_CWE);
        // Fix the CWE ID and replace the value (remove/add keeps the index).
        cwe = fixSfpCweIdentifier(cwe);
        row.remove(COLUMN_CWE);
        row.add(COLUMN_CWE, cwe);
        String sfp = (String) row.get(COLUMN_SFP);
        // Fix the SFP ID and replace the value the same way.
        sfp = fixSfpCweIdentifier(sfp);
        row.remove(COLUMN_SFP);
        row.add(COLUMN_SFP, sfp);
        // Only add a new row if this is a non-empty row and the CWE
        // does not exist in the map yet.
        if (!cwe.isEmpty() && !rowMap.containsKey(cwe)) {
            data.add(row);
            rowMap.put(cwe, rcount);
            sfpMap.put(cwe, (String) row.get(COLUMN_SFP));
            ShowField showState = (ShowField) row.get(COLUMN_SHOW);
            visibilityMap.put((String) row.get(COLUMN_CWE), showState.toBoolean());
            // We just added a new row
            rcount++;
            dirty = true;
        }
    }
    return rcount;
}
From source file:edu.harvard.iq.dataverse.ingest.tabulardata.impl.plugins.csv.CSVFileReader.java
/**
 * Ingests a CSV stream into a Dataverse tab-delimited file in two passes.
 *
 * Pass 1 ("learning"): copies the input to a temp CSV while inferring, per
 * column, whether values are numeric, integer, date, or time. Pass 2: re-reads
 * the temp file and writes each row to {@code finalOut} as tab-separated
 * values, normalizing missing/special numeric tokens and quoting strings.
 *
 * @param csvReader reader over the incoming CSV data (closed by this method)
 * @param dataTable metadata target; variables and case quantity are set here
 * @param finalOut destination for the tab-delimited output (closed by this method)
 * @return the number of data rows written
 * @throws IOException on invalid headers, inconsistent records, or a mismatch
 *         between first- and second-pass row counts
 */
public int readFile(BufferedReader csvReader, DataTable dataTable, PrintWriter finalOut) throws IOException {
    List<DataVariable> variableList = new ArrayList<>();
    CSVParser parser = new CSVParser(csvReader, inFormat.withHeader());
    Map<String, Integer> headers = parser.getHeaderMap();
    // Build one DataVariable per header column, initially typed as a
    // discrete character variable; pass 1 may re-type it below.
    int i = 0;
    for (String varName : headers.keySet()) {
        if (varName == null || varName.isEmpty()) {
            // TODO:
            // Add a sensible variable name validation algorithm.
            // -- L.A. 4.0 alpha 1
            throw new IOException(BundleUtil.getStringFromBundle("ingest.csv.invalidHeader"));
        }
        DataVariable dv = new DataVariable();
        dv.setName(varName);
        dv.setLabel(varName);
        dv.setInvalidRanges(new ArrayList<>());
        dv.setSummaryStatistics(new ArrayList<>());
        dv.setUnf("UNF:6:NOTCALCULATED");
        dv.setCategories(new ArrayList<>());
        variableList.add(dv);
        dv.setTypeCharacter();
        dv.setIntervalDiscrete();
        dv.setFileOrder(i);
        dv.setDataTable(dataTable);
        i++;
    }
    dataTable.setVarQuantity((long) variableList.size());
    dataTable.setDataVariables(variableList);
    boolean[] isNumericVariable = new boolean[headers.size()];
    boolean[] isIntegerVariable = new boolean[headers.size()];
    boolean[] isTimeVariable = new boolean[headers.size()];
    boolean[] isDateVariable = new boolean[headers.size()];
    for (i = 0; i < headers.size(); i++) {
        // OK, let's assume that every variable is numeric;
        // but we'll go through the file and examine every value; the
        // moment we find a value that's not a legit numeric one, we'll
        // assume that it is in fact a String.
        isNumericVariable[i] = true;
        isIntegerVariable[i] = true;
        isDateVariable[i] = true;
        isTimeVariable[i] = true;
    }
    // First, "learning" pass.
    // (we'll save the incoming stream in another temp file:)
    SimpleDateFormat[] selectedDateTimeFormat = new SimpleDateFormat[headers.size()];
    SimpleDateFormat[] selectedDateFormat = new SimpleDateFormat[headers.size()];
    File firstPassTempFile = File.createTempFile("firstpass-", ".csv");
    try (CSVPrinter csvFilePrinter = new CSVPrinter(
            // TODO allow other parsers of tabular data to use this parser by changin inFormat
            new FileWriter(firstPassTempFile.getAbsolutePath()), inFormat)) {
        //Write headers
        csvFilePrinter.printRecord(headers.keySet());
        for (CSVRecord record : parser.getRecords()) {
            // Checks if #records = #columns in header
            if (!record.isConsistent()) {
                List<String> args = Arrays.asList(new String[] { "" + (parser.getCurrentLineNumber() - 1),
                        "" + headers.size(), "" + record.size() });
                throw new IOException(BundleUtil.getStringFromBundle("ingest.csv.recordMismatch", args));
            }
            for (i = 0; i < headers.size(); i++) {
                String varString = record.get(i);
                // Integer-ness: empty, "null", or a sign/digit followed by digits.
                isIntegerVariable[i] = isIntegerVariable[i] && varString != null && (varString.isEmpty()
                        || varString.equals("null") || (firstNumCharSet.contains(varString.charAt(0))
                                && StringUtils.isNumeric(varString.substring(1))));
                if (isNumericVariable[i]) {
                    // If variable might be "numeric" test to see if this value is a parsable number:
                    if (varString != null && !varString.isEmpty()) {
                        boolean isNumeric = false;
                        boolean isInteger = false;
                        if (varString.equalsIgnoreCase("NaN") || varString.equalsIgnoreCase("NA")
                                || varString.equalsIgnoreCase("Inf") || varString.equalsIgnoreCase("+Inf")
                                || varString.equalsIgnoreCase("-Inf") || varString.equalsIgnoreCase("null")) {
                            // Special numeric tokens count as numeric; skip to next column.
                            continue;
                        } else {
                            try {
                                Double testDoubleValue = new Double(varString);
                                continue;
                            } catch (NumberFormatException ex) {
                                // the token failed to parse as a double
                                // so the column is a string variable.
                            }
                        }
                        isNumericVariable[i] = false;
                    }
                }
                // If this is not a numeric column, see if it is a date collumn
                // by parsing the cell as a date or date-time value:
                if (!isNumericVariable[i]) {
                    Date dateResult = null;
                    if (isTimeVariable[i]) {
                        if (varString != null && !varString.isEmpty()) {
                            boolean isTime = false;
                            if (selectedDateTimeFormat[i] != null) {
                                // A format was already chosen for this column; the
                                // value must match it completely.
                                ParsePosition pos = new ParsePosition(0);
                                dateResult = selectedDateTimeFormat[i].parse(varString, pos);
                                if (dateResult != null && pos.getIndex() == varString.length()) {
                                    // OK, successfully parsed a value!
                                    isTime = true;
                                }
                            } else {
                                // Try each known time format until one consumes the
                                // whole token; remember the first match.
                                for (SimpleDateFormat format : TIME_FORMATS) {
                                    ParsePosition pos = new ParsePosition(0);
                                    dateResult = format.parse(varString, pos);
                                    if (dateResult != null && pos.getIndex() == varString.length()) {
                                        // OK, successfully parsed a value!
                                        isTime = true;
                                        selectedDateTimeFormat[i] = format;
                                        break;
                                    }
                                }
                            }
                            if (!isTime) {
                                isTimeVariable[i] = false;
                                // if the token didn't parse as a time value,
                                // we will still try to parse it as a date, below.
                                // unless this column is NOT a date.
                            } else {
                                // And if it is a time value, we are going to assume it's
                                // NOT a date.
                                isDateVariable[i] = false;
                            }
                        }
                    }
                    if (isDateVariable[i]) {
                        if (varString != null && !varString.isEmpty()) {
                            boolean isDate = false;
                            // TODO:
                            // Strictly speaking, we should be doing the same thing
                            // here as with the time formats above; select the
                            // first one that works, then insist that all the
                            // other values in this column match it... but we
                            // only have one, as of now, so it should be ok.
                            // -- L.A. 4.0 beta
                            for (SimpleDateFormat format : DATE_FORMATS) {
                                // Strict parsing - it will throw an
                                // exception if it doesn't parse!
                                format.setLenient(false);
                                try {
                                    format.parse(varString);
                                    isDate = true;
                                    selectedDateFormat[i] = format;
                                    break;
                                } catch (ParseException ex) {
                                    //Do nothing
                                }
                            }
                            isDateVariable[i] = isDate;
                        }
                    }
                }
            }
            csvFilePrinter.printRecord(record);
        }
    }
    dataTable.setCaseQuantity(parser.getRecordNumber());
    parser.close();
    csvReader.close();
    // Re-type the variables that we've determined are numerics:
    for (i = 0; i < headers.size(); i++) {
        if (isNumericVariable[i]) {
            dataTable.getDataVariables().get(i).setTypeNumeric();
            if (isIntegerVariable[i]) {
                dataTable.getDataVariables().get(i).setIntervalDiscrete();
            } else {
                dataTable.getDataVariables().get(i).setIntervalContinuous();
            }
        } else if (isDateVariable[i] && selectedDateFormat[i] != null) {
            // Dates are still Strings, i.e., they are "character" and "discrete";
            // But we add special format values for them:
            dataTable.getDataVariables().get(i).setFormat(DATE_FORMATS[0].toPattern());
            dataTable.getDataVariables().get(i).setFormatCategory("date");
        } else if (isTimeVariable[i] && selectedDateTimeFormat[i] != null) {
            // Same for time values:
            dataTable.getDataVariables().get(i).setFormat(selectedDateTimeFormat[i].toPattern());
            dataTable.getDataVariables().get(i).setFormatCategory("time");
        }
    }
    // Second, final pass.
    try (BufferedReader secondPassReader = new BufferedReader(new FileReader(firstPassTempFile))) {
        parser = new CSVParser(secondPassReader, inFormat.withHeader());
        String[] caseRow = new String[headers.size()];
        for (CSVRecord record : parser) {
            if (!record.isConsistent()) {
                List<String> args = Arrays.asList(new String[] { "" + (parser.getCurrentLineNumber() - 1),
                        "" + headers.size(), "" + record.size() });
                throw new IOException(BundleUtil.getStringFromBundle("ingest.csv.recordMismatch", args));
            }
            for (i = 0; i < headers.size(); i++) {
                String varString = record.get(i);
                if (isNumericVariable[i]) {
                    if (varString == null || varString.isEmpty() || varString.equalsIgnoreCase("NA")) {
                        // Missing value - represented as an empty string in
                        // the final tab file
                        caseRow[i] = "";
                    } else if (varString.equalsIgnoreCase("NaN")) {
                        // "Not a Number" special value:
                        caseRow[i] = "NaN";
                    } else if (varString.equalsIgnoreCase("Inf") || varString.equalsIgnoreCase("+Inf")) {
                        // Positive infinity:
                        caseRow[i] = "Inf";
                    } else if (varString.equalsIgnoreCase("-Inf")) {
                        // Negative infinity:
                        caseRow[i] = "-Inf";
                    } else if (varString.equalsIgnoreCase("null")) {
                        // By request from Gus - "NULL" is recognized as a
                        // numeric zero:
                        caseRow[i] = isIntegerVariable[i] ? "0" : "0.0";
                    } else {
                        /* No re-formatting is done on any other numeric values.
                         * We'll save them as they were, for archival purposes.
                         * (An alternative - rounding to 15 digits and emitting an
                         * IEEE 754-style scientific notation - was previously
                         * kept here as commented-out code.)
                         */
                        caseRow[i] = varString;
                    }
                } else if (isTimeVariable[i] || isDateVariable[i]) {
                    // Time and Dates are stored NOT quoted (don't ask).
                    if (varString != null) {
                        // Dealing with quotes:
                        // remove the leading and trailing quotes, if present:
                        varString = varString.replaceFirst("^\"*", "");
                        varString = varString.replaceFirst("\"*$", "");
                        caseRow[i] = varString;
                    } else {
                        caseRow[i] = "";
                    }
                } else {
                    // Treat as a String:
                    // Strings are stored in tab files quoted;
                    // Missing values are stored as an empty string
                    // between two tabs (or one tab and the new line);
                    // Empty strings stored as "" (quoted empty string).
                    // For the purposes of this CSV ingest reader, we are going
                    // to assume that all the empty strings in the file are
                    // indeed empty strings, and NOT missing values:
                    if (varString != null) {
                        // escape the quotes, newlines, and tabs:
                        varString = varString.replace("\"", "\\\"");
                        varString = varString.replace("\n", "\\n");
                        varString = varString.replace("\t", "\\t");
                        // final pair of quotes:
                        varString = "\"" + varString + "\"";
                        caseRow[i] = varString;
                    } else {
                        caseRow[i] = "\"\"";
                    }
                }
            }
            finalOut.println(StringUtils.join(caseRow, "\t"));
        }
    }
    long linecount = parser.getRecordNumber();
    finalOut.close();
    parser.close();
    dbglog.fine("Tmp File: " + firstPassTempFile);
    // Firstpass file is deleted to prevent tmp from filling up.
    firstPassTempFile.delete();
    // Sanity check: both passes must have seen the same number of rows.
    if (dataTable.getCaseQuantity().intValue() != linecount) {
        List<String> args = Arrays
                .asList(new String[] { "" + dataTable.getCaseQuantity().intValue(), "" + linecount });
        throw new IOException(BundleUtil.getStringFromBundle("ingest.csv.line_mismatch", args));
    }
    return (int) linecount;
}
From source file:com.datafibers.kafka.connect.FileGenericSourceTask.java
/** * Decode Csv to struct according to schema form Confluent schema registry * @param line//w ww. j a va2 s . c o m * @return struct of decoded */ public Struct structDecodingFromCsv(String line) { if (line.length() > 0) { Struct struct = new Struct(dataSchema); JsonNode json = null; try { // TODO support other type of files fro here CSVParser csvParser = CSVFormat.EXCEL.withIgnoreEmptyLines().withIgnoreHeaderCase() .withRecordSeparator('\n').withQuote('"').withEscape('\\').withDelimiter(',').withTrim() .parse(new StringReader(line)); // Since this is single line parser, we get element 0 only CSVRecord entry = csvParser.getRecords().get(0); List<org.apache.kafka.connect.data.Field> fields = dataSchema.fields(); int schema_fields_size = fields.size(); log.info("schema_fields_size = " + schema_fields_size); for (int index = 0; index <= schema_fields_size - 1; index++) { Object value = null; org.apache.kafka.connect.data.Field theField = fields.get(index); log.info("printed indexed " + index + " fields: " + theField.name() + ":" + theField.schema().type()); if (theField != null) { switch (theField.schema().type()) { case STRING: { value = entry.get(index); break; } case INT32: { value = Integer.parseInt(entry.get(index)); break; } case BOOLEAN: { value = Boolean.parseBoolean(entry.get(index)); break; } default: value = entry.get(index); } } struct.put(theField.name(), value); } } catch (IOException ex) { throw new ConnectException(String.format("Unable to parse %s into a valid CSV", filename), ex); } return struct; } return null; }
From source file:com.kdmanalytics.toif.report.internal.importWizard.TsvImportWizardPage.java
/** * Perform the actual load./*w ww . ja va2 s . c om*/ * * @return */ public boolean finish() { // Check source file final String name = editor.getStringValue(); setErrorMessage("Importing " + name + " into " + project + "..."); IPath location = new Path(name); File file = location.toFile(); Reader in = null; CSVParser parser = null; try { in = new FileReader(file); CSVFormat format = CSVFormat.EXCEL.withDelimiter('\t').withIgnoreEmptyLines(); parser = new CSVParser(in, format); System.err.println("FILE: " + name); Map<Integer, String> lookup = new HashMap<Integer, String>(); boolean header = true; for (CSVRecord record : parser) { int size = record.size(); IFile ifile = null; String tool = null; String description = null; int line = 0; int offset = 0; int trust = 0; Boolean status = null; int kdmLine = 0; String cwe = null; String sfp = null; // Read the header first if (header) { System.err.print(" "); for (int i = 0; i < size; i++) { if (i > 0) System.err.print(","); String cell = record.get(i); lookup.put(i, cell); System.err.print(cell); } header = false; System.err.println(); System.err.println(" ------------------------------------------"); } // Otherwise this is a data row else { for (int i = 0; i < size; i++) { String cell = record.get(i); String colName = lookup.get(i); if ("Resource".equals(colName)) { IFileGroup group = new FileGroup(cell); try { IResource resource = MemberUtil.findMembers(project, group); if (resource != null) { ifile = (IFile) resource; } } catch (CoreException e) { e.printStackTrace(); } } else if ("SFP".equals(colName)) { sfp = cell; } else if ("CWE".equals(colName)) { cwe = cell; } // Valid is *old* name for "Citing Status" else if ("Valid".equals(colName)) { if (cell != null && !cell.trim().isEmpty()) { status = Boolean.parseBoolean(cell); } } else if ("Citing Status".equals(colName)) { if (cell != null && !cell.trim().isEmpty()) { status = Boolean.parseBoolean(cell); } } else if ("Trust".equals(colName)) { if (cell != null && 
!cell.trim().isEmpty()) { try { trust = Integer.parseInt(cell); } catch (NumberFormatException e) { } } } else if ("Confidence".equals(colName)) { if (cell != null && !cell.trim().isEmpty()) { try { trust = Integer.parseInt(cell); } catch (NumberFormatException e) { } } } else if ("Line Number".equals(colName)) { if (cell != null && !cell.trim().isEmpty()) { try { line = Integer.parseInt(cell); } catch (NumberFormatException e) { } } } else if ("KDM Line Number".equals(colName)) { if (cell != null && !cell.trim().isEmpty()) { try { kdmLine = Integer.parseInt(cell); } catch (NumberFormatException e) { } } } // "Generator Tool" is *old* name for "SCA Tool" else if ("Generator Tool".equals(colName)) { tool = cell; } else if ("SCA tool".equalsIgnoreCase(colName)) { tool = cell; } else if ("Weakness Description".equals(colName)) { description = cell; } else { System.err.println("WARNING: Unknown column name '" + colName + "'"); } } System.err.print(" "); System.err.print(sfp); System.err.print(","); System.err.print(cwe); System.err.print(","); System.err.print(status); System.err.print(","); System.err.print(trust); System.err.print(","); System.err.print(ifile); System.err.print(","); System.err.print(line); System.err.print(","); System.err.print(kdmLine); System.err.print(","); System.err.print(tool); System.err.print(","); System.err.print(description); System.err.println(); if (ifile != null) { // Create an associated finding. This will allow us to // set the citing status for the finding. If the // finding does not actually exist in the database this information // is still stored in case the finding exists in the future. 
FindingData finding = new FindingData(ifile, tool, description, line, offset, cwe, sfp); if (status != null) { finding.cite(status); } } } } try { IWorkbenchWindow window = PlatformUI.getWorkbench().getActiveWorkbenchWindow(); if (window != null) { IWorkbenchPage page = window.getActivePage(); if (page != null) { FindingView view = (FindingView) page.showView("com.kdmanalytics.toif.views.FindingView"); view.refresh(); } } } catch (PartInitException e) { e.printStackTrace(); } } catch (IOException e) { e.printStackTrace(); } finally { if (parser != null) { try { parser.close(); } catch (IOException e) { e.printStackTrace(); } } if (in != null) { try { in.close(); } catch (IOException e) { e.printStackTrace(); } } } // PlatformUI.getWorkbench().getDisplay().asyncExec(new Runnable() // { // public void run() // { // final ToifReportImportJob job = new ToifReportImportJob("Import SFP/CWE Data", project, // name); // job.setUser(true); // job.setPriority(Job.BUILD); // job.setRule(project); // job.schedule(); // } // }); return true; }
From source file:io.swagger.inflector.controllers.SwaggerOperationController.java
@Override public Response apply(ContainerRequestContext ctx) { List<Parameter> parameters = operation.getParameters(); final RequestContext requestContext = createContext(ctx); String path = ctx.getUriInfo().getPath(); Map<String, Map<String, String>> formMap = new HashMap<String, Map<String, String>>(); Map<String, File> inputStreams = new HashMap<String, File>(); Object[] args = new Object[parameters.size() + 1]; if (parameters != null) { int i = 0; args[i] = requestContext;//from w ww . j a va 2s.co m i += 1; List<ValidationMessage> missingParams = new ArrayList<ValidationMessage>(); UriInfo uri = ctx.getUriInfo(); String formDataString = null; String[] parts = null; Set<String> existingKeys = new HashSet<String>(); for (Iterator<String> x = uri.getQueryParameters().keySet().iterator(); x.hasNext();) { existingKeys.add(x.next() + ": qp"); } for (Iterator<String> x = uri.getPathParameters().keySet().iterator(); x.hasNext();) { existingKeys.add(x.next() + ": pp"); } for (Iterator<String> x = ctx.getHeaders().keySet().iterator(); x.hasNext();) { String key = x.next(); // if(!commonHeaders.contains(key)) // existingKeys.add(key); } MediaType mt = requestContext.getMediaType(); for (Parameter p : parameters) { Map<String, String> headers = new HashMap<String, String>(); String name = null; if (p instanceof FormParameter) { if (formDataString == null) { // can only read stream once if (mt.isCompatible(MediaType.MULTIPART_FORM_DATA_TYPE)) { // get the boundary String boundary = mt.getParameters().get("boundary"); if (boundary != null) { try { InputStream output = ctx.getEntityStream(); MultipartStream multipartStream = new MultipartStream(output, boundary.getBytes()); boolean nextPart = multipartStream.skipPreamble(); while (nextPart) { String header = multipartStream.readHeaders(); // process headers if (header != null) { CSVFormat format = CSVFormat.DEFAULT.withDelimiter(';') .withRecordSeparator("="); Iterable<CSVRecord> records = format.parse(new 
StringReader(header)); for (CSVRecord r : records) { for (int j = 0; j < r.size(); j++) { String string = r.get(j); Iterable<CSVRecord> outerString = CSVFormat.DEFAULT .withDelimiter('=').parse(new StringReader(string)); for (CSVRecord outerKvPair : outerString) { if (outerKvPair.size() == 2) { String key = outerKvPair.get(0).trim(); String value = outerKvPair.get(1).trim(); if ("name".equals(key)) { name = value; } headers.put(key, value); } else { Iterable<CSVRecord> innerString = CSVFormat.DEFAULT .withDelimiter(':') .parse(new StringReader(string)); for (CSVRecord innerKVPair : innerString) { if (innerKVPair.size() == 2) { String key = innerKVPair.get(0).trim(); String value = innerKVPair.get(1).trim(); if ("name".equals(key)) { name = value; } headers.put(key, value); } } } } if (name != null) { formMap.put(name, headers); } } } } String filename = extractFilenameFromHeaders(headers); if (filename != null) { try { File file = new File(Files.createTempDir(), filename); file.deleteOnExit(); file.getParentFile().deleteOnExit(); FileOutputStream fo = new FileOutputStream(file); multipartStream.readBodyData(fo); inputStreams.put(name, file); } catch (Exception e) { LOGGER.error("Failed to extract uploaded file", e); } } else { ByteArrayOutputStream bo = new ByteArrayOutputStream(); multipartStream.readBodyData(bo); String value = bo.toString(); headers.put(name, value); } if (name != null) { formMap.put(name, headers); } headers = new HashMap<>(); name = null; nextPart = multipartStream.readBoundary(); } } catch (IOException e) { e.printStackTrace(); } } } else { try { formDataString = IOUtils.toString(ctx.getEntityStream(), "UTF-8"); parts = formDataString.split("&"); for (String part : parts) { String[] kv = part.split("="); existingKeys.add(kv[0] + ": fp"); } } catch (IOException e) { e.printStackTrace(); } } } } } for (Parameter parameter : parameters) { String in = parameter.getIn(); Object o = null; try { if ("formData".equals(in)) { SerializableParameter sp 
= (SerializableParameter) parameter; String name = parameter.getName(); if (mt.isCompatible(MediaType.MULTIPART_FORM_DATA_TYPE)) { // look in the form map Map<String, String> headers = formMap.get(name); if (headers != null && headers.size() > 0) { if ("file".equals(sp.getType())) { o = inputStreams.get(name); } else { Object obj = headers.get(parameter.getName()); if (obj != null) { JavaType jt = parameterClasses[i]; Class<?> cls = jt.getRawClass(); List<String> os = Arrays.asList(obj.toString()); try { o = validator.convertAndValidate(os, parameter, cls, definitions); } catch (ConversionException e) { missingParams.add(e.getError()); } catch (ValidationException e) { missingParams.add(e.getValidationMessage()); } } } } } else { if (formDataString != null) { for (String part : parts) { String[] kv = part.split("="); if (kv != null) { if (kv.length > 0) { existingKeys.remove(kv[0] + ": fp"); } if (kv.length == 2) { // TODO how to handle arrays here? String key = kv[0]; try { String value = URLDecoder.decode(kv[1], "utf-8"); if (parameter.getName().equals(key)) { JavaType jt = parameterClasses[i]; Class<?> cls = jt.getRawClass(); try { o = validator.convertAndValidate(Arrays.asList(value), parameter, cls, definitions); } catch (ConversionException e) { missingParams.add(e.getError()); } catch (ValidationException e) { missingParams.add(e.getValidationMessage()); } } } catch (UnsupportedEncodingException e) { LOGGER.error("unable to decode value for " + key); } } } } } } } else { try { String paramName = parameter.getName(); if ("query".equals(in)) { existingKeys.remove(paramName + ": qp"); } if ("path".equals(in)) { existingKeys.remove(paramName + ": pp"); } JavaType jt = parameterClasses[i]; Class<?> cls = jt.getRawClass(); if ("body".equals(in)) { if (ctx.hasEntity()) { BodyParameter body = (BodyParameter) parameter; o = EntityProcessorFactory.readValue(ctx.getMediaType(), ctx.getEntityStream(), cls); if (o != null) { validate(o, body.getSchema(), 
SchemaValidator.Direction.INPUT); } } else if (parameter.getRequired()) { ValidationException e = new ValidationException(); e.message(new ValidationMessage() .message("The input body `" + paramName + "` is required")); throw e; } } if ("query".equals(in)) { o = validator.convertAndValidate(uri.getQueryParameters().get(parameter.getName()), parameter, cls, definitions); } else if ("path".equals(in)) { o = validator.convertAndValidate(uri.getPathParameters().get(parameter.getName()), parameter, cls, definitions); } else if ("header".equals(in)) { o = validator.convertAndValidate(ctx.getHeaders().get(parameter.getName()), parameter, cls, definitions); } } catch (ConversionException e) { missingParams.add(e.getError()); } catch (ValidationException e) { missingParams.add(e.getValidationMessage()); } } } catch (NumberFormatException e) { LOGGER.error("Couldn't find " + parameter.getName() + " (" + in + ") to " + parameterClasses[i], e); } args[i] = o; i += 1; } if (existingKeys.size() > 0) { LOGGER.debug("unexpected keys: " + existingKeys); } if (missingParams.size() > 0) { StringBuilder builder = new StringBuilder(); builder.append("Input error"); if (missingParams.size() > 1) { builder.append("s"); } builder.append(": "); int count = 0; for (ValidationMessage message : missingParams) { if (count > 0) { builder.append(", "); } if (message != null && message.getMessage() != null) { builder.append(message.getMessage()); } else { builder.append("no additional input"); } count += 1; } int statusCode = config.getInvalidRequestStatusCode(); ApiError error = new ApiError().code(statusCode).message(builder.toString()); throw new ApiException(error); } } try { if (method != null) { LOGGER.info("calling method " + method + " on controller " + this.controller + " with args " + Arrays.toString(args)); try { Object response = method.invoke(controller, args); if (response instanceof ResponseContext) { ResponseContext wrapper = (ResponseContext) response; ResponseBuilder builder = 
Response.status(wrapper.getStatus()); // response headers for (String key : wrapper.getHeaders().keySet()) { List<String> v = wrapper.getHeaders().get(key); if (v.size() == 1) { builder.header(key, v.get(0)); } else { builder.header(key, v); } } // entity if (wrapper.getEntity() != null) { builder.entity(wrapper.getEntity()); // content type if (wrapper.getContentType() != null) { builder.type(wrapper.getContentType()); } else { final ContextResolver<ContentTypeSelector> selector = providersProvider.get() .getContextResolver(ContentTypeSelector.class, MediaType.WILDCARD_TYPE); if (selector != null) { selector.getContext(getClass()).apply(ctx.getAcceptableMediaTypes(), builder); } } if (operation.getResponses() != null) { String responseCode = String.valueOf(wrapper.getStatus()); io.swagger.models.Response responseSchema = operation.getResponses() .get(responseCode); if (responseSchema == null) { // try default response schema responseSchema = operation.getResponses().get("default"); } if (responseSchema != null && responseSchema.getSchema() != null) { validate(wrapper.getEntity(), responseSchema.getSchema(), SchemaValidator.Direction.OUTPUT); } else { LOGGER.debug( "no response schema for code " + responseCode + " to validate against"); } } } return builder.build(); } return Response.ok().entity(response).build(); } catch (IllegalArgumentException | IllegalAccessException | InvocationTargetException e) { for (Throwable cause = e.getCause(); cause != null;) { if (cause instanceof ApiException) { throw (ApiException) cause; } final Throwable next = cause.getCause(); cause = next == cause || next == null ? 
null : next; } throw new ApiException(ApiErrorUtils.createInternalError(), e); } } Map<String, io.swagger.models.Response> responses = operation.getResponses(); if (responses != null) { String[] keys = new String[responses.keySet().size()]; Arrays.sort(responses.keySet().toArray(keys)); int code = 0; String defaultKey = null; for (String key : keys) { if (key.startsWith("2")) { defaultKey = key; code = Integer.parseInt(key); break; } if ("default".equals(key)) { defaultKey = key; code = 200; break; } if (key.startsWith("3")) { // we use the 3xx responses as defaults defaultKey = key; code = Integer.parseInt(key); } } if (defaultKey != null) { ResponseBuilder builder = Response.status(code); io.swagger.models.Response response = responses.get(defaultKey); if (response.getHeaders() != null && response.getHeaders().size() > 0) { for (String key : response.getHeaders().keySet()) { Property headerProperty = response.getHeaders().get(key); Object output = ExampleBuilder.fromProperty(headerProperty, definitions); if (output instanceof ArrayExample) { output = ((ArrayExample) output).asString(); } else if (output instanceof ObjectExample) { LOGGER.debug( "not serializing output example, only primitives or arrays of primitives are supported"); } else { output = ((Example) output).asString(); } builder.header(key, output); } } Map<String, Object> examples = response.getExamples(); if (examples != null) { for (MediaType mediaType : requestContext.getAcceptableMediaTypes()) { for (String key : examples.keySet()) { if (MediaType.valueOf(key).isCompatible(mediaType)) { builder.entity(examples.get(key)).type(mediaType); return builder.build(); } } } } Object output = ExampleBuilder.fromProperty(response.getSchema(), definitions); if (output != null) { ResponseContext resp = new ResponseContext().entity(output); setContentType(requestContext, resp, operation); builder.entity(output); if (resp.getContentType() != null) { // this comes from the operation itself 
builder.type(resp.getContentType()); } else { // get acceptable content types List<EntityProcessor> processors = EntityProcessorFactory.getProcessors(); MediaType responseMediaType = null; // take first compatible one for (EntityProcessor processor : processors) { if (responseMediaType != null) { break; } for (MediaType mt : requestContext.getAcceptableMediaTypes()) { LOGGER.debug("checking type " + mt.toString() + " against " + processor.getClass().getName()); if (processor.supports(mt)) { builder.type(mt); responseMediaType = mt; break; } } } if (responseMediaType == null) { // no match based on Accept header, use first processor in list for (EntityProcessor processor : processors) { List<MediaType> supportedTypes = processor.getSupportedMediaTypes(); if (supportedTypes.size() > 0) { builder.type(supportedTypes.get(0)); break; } } } } builder.entity(output); } return builder.build(); } else { LOGGER.debug("no response type to map to, assume 200"); code = 200; } return Response.status(code).build(); } return Response.ok().build(); } finally { for (String key : inputStreams.keySet()) { File file = inputStreams.get(key); if (file != null) { LOGGER.debug("deleting file " + file.getPath()); file.delete(); } } } }