List of usage examples for org.apache.commons.csv CSVPrinter flush
@Override public void flush() throws IOException
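Before the project examples, a minimal sketch of the pattern they all share (the class name and record values here are illustrative assumptions, not taken from any of the projects below): flush() forwards any output buffered by the printer's underlying Writer on to the destination, which is what makes the printed records visible to whoever consumes the stream before the printer is closed.

import java.io.BufferedWriter;
import java.io.IOException;
import java.io.OutputStreamWriter;

import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVPrinter;

public class CSVPrinterFlushExample {
    public static void main(String[] args) throws IOException {
        // BufferedWriter holds output back until it is flushed or closed
        BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(System.out));
        CSVPrinter printer = new CSVPrinter(writer, CSVFormat.DEFAULT);
        printer.printRecord("id", "name");   // illustrative header row
        printer.printRecord(1, "example");   // illustrative data row
        // flush() propagates to the underlying writer, so the two records
        // actually reach System.out even though the printer stays open
        printer.flush();
    }
}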
From source file:com.ibm.g11n.pipeline.example.MultiBundleCSVFilter.java
@Override
public void merge(InputStream baseStream, OutputStream outStream, Map<String, LanguageBundle> languageBundles,
        FilterOptions options) throws IOException, ResourceFilterException {
    // create key-value map for each bundle
    Map<String, Map<String, String>> kvMaps = new HashMap<String, Map<String, String>>();
    for (Entry<String, LanguageBundle> bundleEntry : languageBundles.entrySet()) {
        LanguageBundle languageBundle = bundleEntry.getValue();
        Map<String, String> kvMap = new HashMap<String, String>();
        for (ResourceString resString : languageBundle.getResourceStrings()) {
            kvMap.put(resString.getKey(), resString.getValue());
        }
        kvMaps.put(bundleEntry.getKey(), kvMap);
    }

    CSVParser parser = CSVParser.parse(baseStream, StandardCharsets.UTF_8,
            CSVFormat.RFC4180.withHeader("module", "key", "value").withSkipHeaderRecord(true));
    BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(outStream, StandardCharsets.UTF_8));
    CSVPrinter printer = CSVFormat.RFC4180.withHeader("module", "key", "value").print(writer);

    for (CSVRecord record : parser) {
        String module = record.get(0);
        String key = record.get(1);
        String value = record.get(2);
        Map<String, String> moduleKVMap = kvMaps.get(module);
        if (moduleKVMap != null) {
            String trValue = moduleKVMap.get(key);
            if (trValue != null) {
                value = trValue;
            }
        }
        printer.printRecord(module, key, value);
    }
    printer.flush();
}
From source file:com.ibm.g11n.pipeline.example.MultiBundleCSVFilter.java
@Override
public void write(OutputStream outStream, Map<String, LanguageBundle> languageBundles, FilterOptions options)
        throws IOException, ResourceFilterException {
    BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(outStream, StandardCharsets.UTF_8));
    CSVPrinter printer = CSVFormat.RFC4180.withHeader("module", "key", "value").print(writer);

    // Sort by bundle
    TreeMap<String, LanguageBundle> sortedBundles = new TreeMap<>(languageBundles);
    for (Entry<String, LanguageBundle> bundleEntry : sortedBundles.entrySet()) {
        String module = bundleEntry.getKey();
        LanguageBundle languageBundle = bundleEntry.getValue();
        for (ResourceString resString : languageBundle.getSortedResourceStrings()) {
            printer.printRecord(module, resString.getKey(), resString.getValue());
        }
    }
    printer.flush();
}
From source file:biz.webgate.dominoext.poi.component.kernel.simpleviewexport.CSVExportProcessor.java
public void process2HTTP(ExportModel expModel, UISimpleViewExport uis, HttpServletResponse hsr,
        DateTimeHelper dth) {
    try {
        ByteArrayOutputStream csvBAOS = new ByteArrayOutputStream();
        OutputStreamWriter csvWriter = new OutputStreamWriter(csvBAOS);
        CSVPrinter csvPrinter = new CSVPrinter(csvWriter, CSVFormat.DEFAULT);

        // BUILDING HEADER
        if (uis.isIncludeHeader()) {
            for (ExportColumn expColumn : expModel.getColumns()) {
                csvPrinter.print(expColumn.getColumnName());
            }
            csvPrinter.println();
        }

        // Processing Values
        for (ExportDataRow expRow : expModel.getRows()) {
            for (ExportColumn expColumn : expModel.getColumns()) {
                csvPrinter.print(convertValue(expRow.getValue(expColumn.getPosition()), expColumn, dth));
            }
            csvPrinter.println();
        }
        csvPrinter.flush();

        hsr.setContentType("text/csv");
        hsr.setHeader("Cache-Control", "no-cache");
        hsr.setDateHeader("Expires", -1);
        hsr.setContentLength(csvBAOS.size());
        hsr.addHeader("Content-disposition", "inline; filename=\"" + uis.getDownloadFileName() + "\"");

        OutputStream os = hsr.getOutputStream();
        csvBAOS.writeTo(os);
        os.close();
    } catch (Exception e) {
        ErrorPageBuilder.getInstance().processError(hsr, "Error during SVE-Generation (CSV Export)", e);
    }
}
From source file:at.alladin.rmbt.statisticServer.export.ExportResource.java
@Get
public Representation request(final String entity) {
    // Before doing anything => check if a cached file already exists and is new enough
    String property = System.getProperty("java.io.tmpdir");
    final String filename_zip;
    final String filename_csv;

    // allow filtering by month/year
    int year = -1;
    int month = -1;
    int hours = -1;
    boolean hoursExport = false;
    boolean dateExport = false;

    if (getRequest().getAttributes().containsKey("hours")) { // export by hours
        try {
            hours = Integer.parseInt(getRequest().getAttributes().get("hours").toString());
        } catch (NumberFormatException ex) {
            // Nothing -> just fall back
        }
        if (hours <= 7 * 24 && hours >= 1) { // limit to 1 week (avoid DoS)
            hoursExport = true;
        }
    } else if (!hoursExport && getRequest().getAttributes().containsKey("year")) { // export by month/year
        try {
            year = Integer.parseInt(getRequest().getAttributes().get("year").toString());
            month = Integer.parseInt(getRequest().getAttributes().get("month").toString());
        } catch (NumberFormatException ex) {
            // Nothing -> just fall back
        }
        if (year < 2099 && month > 0 && month <= 12 && year > 2000) {
            dateExport = true;
        }
    }

    if (hoursExport) {
        filename_zip = FILENAME_ZIP_HOURS.replace("%HOURS%", String.format("%03d", hours));
        filename_csv = FILENAME_CSV_HOURS.replace("%HOURS%", String.format("%03d", hours));
        cacheThresholdMs = 5 * 60 * 1000; // 5 minutes
    } else if (dateExport) {
        filename_zip = FILENAME_ZIP.replace("%YEAR%", Integer.toString(year)).replace("%MONTH%",
                String.format("%02d", month));
        filename_csv = FILENAME_CSV.replace("%YEAR%", Integer.toString(year)).replace("%MONTH%",
                String.format("%02d", month));
        cacheThresholdMs = 23 * 60 * 60 * 1000; // 23 hours
    } else {
        filename_zip = FILENAME_ZIP_CURRENT;
        filename_csv = FILENAME_CSV_CURRENT;
        cacheThresholdMs = 3 * 60 * 60 * 1000; // 3 hours
    }

    final File cachedFile = new File(property + File.separator + ((zip) ? filename_zip : filename_csv));
    final File generatingFile = new File(
            property + File.separator + ((zip) ? filename_zip : filename_csv) + "_tmp");
    if (cachedFile.exists()) {
        // check if file has been recently created OR a file is currently being created
        if (((cachedFile.lastModified() + cacheThresholdMs) > (new Date()).getTime())
                || (generatingFile.exists()
                        && (generatingFile.lastModified() + cacheThresholdMs) > (new Date()).getTime())) {
            // if so, return the cached file instead of a cost-intensive new one
            final OutputRepresentation result = new OutputRepresentation(
                    zip ? MediaType.APPLICATION_ZIP : MediaType.TEXT_CSV) {
                @Override
                public void write(OutputStream out) throws IOException {
                    InputStream is = new FileInputStream(cachedFile);
                    IOUtils.copy(is, out);
                    out.close();
                }
            };
            if (zip) {
                final Disposition disposition = new Disposition(Disposition.TYPE_ATTACHMENT);
                disposition.setFilename(filename_zip);
                result.setDisposition(disposition);
            }
            return result;
        }
    }

    final String timeClause;
    if (dateExport)
        timeClause = " AND (EXTRACT (month FROM t.time AT TIME ZONE 'UTC') = " + month
                + ") AND (EXTRACT (year FROM t.time AT TIME ZONE 'UTC') = " + year + ") ";
    else if (hoursExport)
        timeClause = " AND time > now() - interval '" + hours + " hours' ";
    else
        timeClause = " AND time > current_date - interval '31 days' ";

    final String sql = "SELECT"
            + " ('P' || t.open_uuid) open_uuid,"
            + " ('O' || t.open_test_uuid) open_test_uuid,"
            + " to_char(t.time AT TIME ZONE 'UTC', 'YYYY-MM-DD HH24:MI:SS') time_utc,"
            + " nt.group_name cat_technology,"
            + " nt.name network_type,"
            + " (CASE WHEN (t.geo_accuracy < ?) AND (t.geo_provider != 'manual') AND (t.geo_provider != 'geocoder') THEN"
            + " t.geo_lat"
            + " WHEN (t.geo_accuracy < ?) THEN"
            + " ROUND(t.geo_lat*1111)/1111"
            + " ELSE null"
            + " END) lat,"
            + " (CASE WHEN (t.geo_accuracy < ?) AND (t.geo_provider != 'manual') AND (t.geo_provider != 'geocoder') THEN"
            + " t.geo_long"
            + " WHEN (t.geo_accuracy < ?) THEN"
            + " ROUND(t.geo_long*741)/741 "
            + " ELSE null"
            + " END) long,"
            + " (CASE WHEN ((t.geo_provider = 'manual') OR (t.geo_provider = 'geocoder')) THEN"
            + " 'rastered'" // make raster transparent
            + " ELSE t.geo_provider"
            + " END) loc_src,"
            + " (CASE WHEN (t.geo_accuracy < ?) AND (t.geo_provider != 'manual') AND (t.geo_provider != 'geocoder') "
            + " THEN round(t.geo_accuracy::float * 10)/10 "
            + " WHEN (t.geo_accuracy < 100) AND ((t.geo_provider = 'manual') OR (t.geo_provider = 'geocoder')) THEN 100" // limit accuracy to 100m
            + " WHEN (t.geo_accuracy < ?) THEN round(t.geo_accuracy::float * 10)/10"
            + " ELSE null END) loc_accuracy, "
            + " (CASE WHEN (t.zip_code < 1000 OR t.zip_code > 9999) THEN null ELSE t.zip_code END) zip_code,"
            + " t.gkz gkz,"
            + " t.country_location country_location,"
            + " t.speed_download download_kbit,"
            + " t.speed_upload upload_kbit,"
            + " round(t.ping_median::float / 100000)/10 ping_ms,"
            + " t.lte_rsrp,"
            + " t.lte_rsrq,"
            + " ts.name server_name,"
            + " duration test_duration,"
            + " num_threads,"
            + " t.plattform platform,"
            + " COALESCE(adm.fullname, t.model) model,"
            + " client_software_version client_version,"
            + " network_operator network_mcc_mnc,"
            + " network_operator_name network_name,"
            + " network_sim_operator sim_mcc_mnc,"
            + " nat_type,"
            + " public_ip_asn asn,"
            + " client_public_ip_anonymized ip_anonym,"
            + " (ndt.s2cspd*1000)::int ndt_download_kbit,"
            + " (ndt.c2sspd*1000)::int ndt_upload_kbit,"
            + " COALESCE(t.implausible, false) implausible,"
            + " t.signal_strength"
            + " FROM test t"
            + " LEFT JOIN network_type nt ON nt.uid=t.network_type"
            + " LEFT JOIN device_map adm ON adm.codename=t.model"
            + " LEFT JOIN test_server ts ON ts.uid=t.server_id"
            + " LEFT JOIN test_ndt ndt ON t.uid=ndt.test_id"
            + " WHERE t.deleted = false" + timeClause
            + " AND status = 'FINISHED'"
            + " ORDER BY t.uid";

    final String[] columns;
    final List<String[]> data = new ArrayList<>();
    PreparedStatement ps = null;
    ResultSet rs = null;
    try {
        ps = conn.prepareStatement(sql);

        // insert filter for accuracy
        double accuracy = Double.parseDouble(settings.getString("RMBT_GEO_ACCURACY_DETAIL_LIMIT"));
        ps.setDouble(1, accuracy);
        ps.setDouble(2, accuracy);
        ps.setDouble(3, accuracy);
        ps.setDouble(4, accuracy);
        ps.setDouble(5, accuracy);
        ps.setDouble(6, accuracy);

        if (!ps.execute())
            return null;
        rs = ps.getResultSet();

        final ResultSetMetaData meta = rs.getMetaData();
        final int colCnt = meta.getColumnCount();
        columns = new String[colCnt];
        for (int i = 0; i < colCnt; i++)
            columns[i] = meta.getColumnName(i + 1);

        while (rs.next()) {
            final String[] line = new String[colCnt];
            for (int i = 0; i < colCnt; i++) {
                final Object obj = rs.getObject(i + 1);
                line[i] = obj == null ? null : obj.toString();
            }
            data.add(line);
        }
    } catch (final SQLException e) {
        e.printStackTrace();
        return null;
    } finally {
        try {
            if (rs != null)
                rs.close();
            if (ps != null)
                ps.close();
        } catch (final SQLException e) {
            e.printStackTrace();
        }
    }

    final OutputRepresentation result = new OutputRepresentation(
            zip ? MediaType.APPLICATION_ZIP : MediaType.TEXT_CSV) {
        @Override
        public void write(OutputStream out) throws IOException {
            // cache in file => create temporary file (to handle errors while fulfilling a request)
            String property = System.getProperty("java.io.tmpdir");
            final File cachedFile = new File(
                    property + File.separator + ((zip) ? filename_zip : filename_csv) + "_tmp");
            OutputStream outf = new FileOutputStream(cachedFile);

            if (zip) {
                final ZipOutputStream zos = new ZipOutputStream(outf);
                final ZipEntry zeLicense = new ZipEntry("LIZENZ.txt");
                zos.putNextEntry(zeLicense);
                final InputStream licenseIS = getClass().getResourceAsStream("DATA_LICENSE.txt");
                IOUtils.copy(licenseIS, zos);
                licenseIS.close();

                final ZipEntry zeCsv = new ZipEntry(filename_csv);
                zos.putNextEntry(zeCsv);
                outf = zos;
            }

            final OutputStreamWriter osw = new OutputStreamWriter(outf);
            final CSVPrinter csvPrinter = new CSVPrinter(osw, csvFormat);

            for (final String c : columns)
                csvPrinter.print(c);
            csvPrinter.println();

            for (final String[] line : data) {
                for (final String f : line)
                    csvPrinter.print(f);
                csvPrinter.println();
            }
            csvPrinter.flush();

            if (zip)
                outf.close();

            // if we reach this code, the data is now cached in a temporary tmp-file,
            // so rename the file for "production use";
            // concurrency issues should be solved by the operating system
            File newCacheFile = new File(property + File.separator + ((zip) ? filename_zip : filename_csv));
            Files.move(cachedFile.toPath(), newCacheFile.toPath(), StandardCopyOption.ATOMIC_MOVE,
                    StandardCopyOption.REPLACE_EXISTING);

            FileInputStream fis = new FileInputStream(newCacheFile);
            IOUtils.copy(fis, out);
            fis.close();
            out.close();
        }
    };
    if (zip) {
        final Disposition disposition = new Disposition(Disposition.TYPE_ATTACHMENT);
        disposition.setFilename(filename_zip);
        result.setDisposition(disposition);
    }
    return result;
}
From source file:act.installer.wikipedia.ImportantChemicalsWikipedia.java
/**
 * This function writes the important chemicals set to a TSV file.
 * @param outputPath a String indicating where the file should be written (including its name)
 */
public void writeToTSV(String outputPath) {
    try {
        BufferedWriter writer = new BufferedWriter(new FileWriter(outputPath));
        CSVPrinter printer = new CSVPrinter(writer, TSV_FORMAT);
        printer.printComment("This file has been generated by the ImportantChemicalsWikipedia.java script.");
        printer.printComment("Format: WIKIPEDIA<tab><wikipedia url><tab><inchi><tab><metadata>");

        for (ImportantChemical importantChemical : importantChemicalsWikipedia) {
            List<String> nextLine = new ArrayList<>();
            nextLine.add(importantChemical.getType());
            nextLine.add(importantChemical.getDbid());
            nextLine.add(importantChemical.getInchi());
            nextLine.add(mapper.writeValueAsString(importantChemical.getMetadata()));
            printer.printRecord(nextLine);
        }
        printer.flush();
        writer.close();
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
}
From source file:biz.webgate.dominoext.poi.component.kernel.CSVProcessor.java
public ByteArrayOutputStream generateCSV(UICSV csvDef, FacesContext context) throws IOException, POIException {
    ByteArrayOutputStream csvBAOS = new ByteArrayOutputStream();
    OutputStreamWriter csvWriter = new OutputStreamWriter(csvBAOS);
    CSVPrinter csvPrinter = new CSVPrinter(csvWriter, CSVFormat.DEFAULT);

    List<CSVColumn> lstColumns = csvDef.getColumns();
    Collections.sort(lstColumns, new Comparator<CSVColumn>() {
        public int compare(CSVColumn o1, CSVColumn o2) {
            Integer p1 = Integer.valueOf(o1.getPosition());
            Integer p2 = Integer.valueOf(o2.getPosition());
            return p1.compareTo(p2);
        }
    });

    if (csvDef.isIncludeHeader()) {
        for (CSVColumn cl : lstColumns) {
            csvPrinter.print(cl.getTitle());
        }
        csvPrinter.println();
    }

    // Fetch and process the data source.
    if (csvDef.getDataSource() != null) {
        EmbeddedDataSourceExportProcessor.getInstance().process(lstColumns, csvDef, csvPrinter, context);
    } else {
        XPagesDataSourceExportProcessor.getInstance().process(lstColumns, csvDef, csvPrinter, context);
    }
    csvPrinter.flush();
    return csvBAOS;
}
From source file:edu.harvard.hms.dbmi.bd2k.irct.ws.rs.resultconverter.CSVTabularDataConverter.java
@Override
public StreamingOutput createStream(final Result result) {
    StreamingOutput stream = new StreamingOutput() {
        @Override
        public void write(OutputStream outputStream) throws IOException, WebApplicationException {
            ResultSet rs = null;
            CSVPrinter printer = null;
            try {
                rs = (ResultSet) result.getData();
                rs.load(result.getResultSetLocation());
                printer = new CSVPrinter(new OutputStreamWriter(outputStream), CSVFormat.DEFAULT);

                String[] columnHeaders = new String[rs.getColumnSize()];
                for (int i = 0; i < rs.getColumnSize(); i++) {
                    columnHeaders[i] = rs.getColumn(i).getName();
                }
                printer.printRecord((Object[]) columnHeaders);

                rs.beforeFirst();
                while (rs.next()) {
                    String[] row = new String[rs.getColumnSize()];
                    for (int i = 0; i < rs.getColumnSize(); i++) {
                        row[i] = rs.getString(i);
                    }
                    printer.printRecord((Object[]) row);
                }
                printer.flush();
            } catch (ResultSetException | PersistableException e) {
                log.info("Error creating CSV Stream: " + e.getMessage());
            } finally {
                if (printer != null) {
                    printer.close();
                }
                if (rs != null && !rs.isClosed()) {
                    try {
                        rs.close();
                    } catch (ResultSetException e) {
                        e.printStackTrace();
                    }
                }
                if (outputStream != null) {
                    outputStream.close();
                }
            }
        }
    };
    return stream;
}
From source file:mekhq.Utilities.java
/**
 * Export a JTable to a CSV file
 * @param table
 * @param file
 * @return report
 */
public static String exportTabletoCSV(JTable table, File file) {
    String report;
    try {
        TableModel model = table.getModel();
        BufferedWriter writer = Files.newBufferedWriter(Paths.get(file.getPath()));

        String[] columns = new String[model.getColumnCount()];
        for (int i = 0; i < model.getColumnCount(); i++) {
            columns[i] = model.getColumnName(i);
        }
        CSVPrinter csvPrinter = new CSVPrinter(writer, CSVFormat.DEFAULT.withHeader(columns));

        for (int i = 0; i < model.getRowCount(); i++) {
            Object[] towrite = new String[model.getColumnCount()];
            for (int j = 0; j < model.getColumnCount(); j++) {
                // use regex to remove any HTML tags; guard against null cell values
                Object value = model.getValueAt(i, j);
                towrite[j] = (value == null) ? "" : value.toString().replaceAll("\\<[^>]*>", "");
            }
            csvPrinter.printRecord(towrite);
        }
        csvPrinter.flush();
        csvPrinter.close();

        report = model.getRowCount() + " " + resourceMap.getString("RowsWritten.text");
    } catch (Exception ioe) {
        MekHQ.getLogger().log(Utilities.class, "exportTabletoCSV", LogLevel.INFO, "Error exporting JTable");
        report = "Error exporting JTable. See log for details.";
    }
    return report;
}
From source file:mekhq.campaign.finances.Finances.java
public String exportFinances(String path, String format) {
    String report;
    try {
        BufferedWriter writer = Files.newBufferedWriter(Paths.get(path));
        CSVPrinter csvPrinter = new CSVPrinter(writer,
                CSVFormat.DEFAULT.withHeader("Date", "Category", "Description", "Amount", "RunningTotal"));
        SimpleDateFormat df = new SimpleDateFormat("MM/dd/yyyy");

        int running_total = 0;
        for (int i = 0; i < transactions.size(); i++) {
            running_total += transactions.get(i).getAmount();
            csvPrinter.printRecord(df.format(transactions.get(i).getDate()),
                    transactions.get(i).getCategoryName(), transactions.get(i).getDescription(),
                    transactions.get(i).getAmount(), running_total);
        }
        csvPrinter.flush();
        csvPrinter.close();

        report = transactions.size() + " " + resourceMap.getString("FinanceExport.text");
    } catch (IOException ioe) {
        MekHQ.getLogger().log(getClass(), "exportFinances", LogLevel.INFO,
                "Error exporting finances to " + format);
        report = "Error exporting finances. See log for details.";
    }
    return report;
}
From source file:com.rodaxsoft.mailgun.CampaignManager.java
/**
 * Saves campaign events to a CSV file with the following format:
 * <code><campaign name>_(<campaign id>)_<timestamp>.csv</code>
 * @param campaignId The campaign ID
 * @throws ContextedException if a processing error occurs
 * @throws IOException if an I/O error occurs
 */
void saveCampaignEventsToCSV(String campaignId) throws ContextedException, IOException {
    Campaign campaign = getCampaign(campaignId);

    SimpleDateFormat format = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss");
    String dateTime = format.format(new Date());

    String fileName;
    if (campaign != null) {
        String name = StringUtils.replace(campaign.getName(), " ", "_");
        fileName = new StringBuilder(name).append("_(").append(campaignId).append(")_").append(dateTime)
                .append(".csv").toString();
    } else {
        fileName = campaignId + "_" + dateTime + ".csv";
    }

    CSVPrinter csvPrinter = null;
    PrintWriter pw = null;
    CSVFormat csvFormat = null;
    try {
        pw = new PrintWriter(fileName);
        final List<Map<String, Object>> events = getEvents(campaignId);
        for (Map<String, Object> map : events) {
            // derive the header from the first event's keys
            if (null == csvPrinter) {
                final Set<String> keySet = map.keySet();
                int size = keySet.size();
                String[] keys = keySet.toArray(new String[size]);
                csvFormat = CSVFormat.DEFAULT.withHeader(keys);
                csvPrinter = new CSVPrinter(pw, csvFormat);
            }
            // expected event keys: city, domain, tags, timestamp, region, ip, country, recipient, event, user_vars
            String[] headers = csvFormat.getHeader();
            for (String key : headers) {
                csvPrinter.print(map.get(key));
            }
            csvPrinter.println();
        }
    } finally {
        if (csvPrinter != null) {
            csvPrinter.flush();
        }
        IOUtils.closeQuietly(csvPrinter);
        IOUtils.closeQuietly(pw); // also close the writer when no events were printed
    }
}