List of usage examples for java.io BufferedWriter newLine
public void newLine() throws IOException
From source file:com.trackplus.ddl.DataReader.java
/**
 * Exports all table, blob and clob data reachable through the given database
 * connection into SQL script files under {@code dirName}, and records run
 * statistics (per-table record counts, system/db versions, elapsed time) in a
 * companion info file.
 *
 * <p>Five output files are produced: plain data, "clean" data, update data,
 * "clean" update data, and a blob/clob file terminated with an ASCII data
 * separator and a {@code --TMSPROJECTEXCHANGE} marker line.
 *
 * @param databaseInfo connection settings (driver, url, user, password) for
 *                     the source database
 * @param dirName      target directory for the generated files
 * @throws DDLException if any export, file or connection operation fails
 */
public static void writeDataToSql(DatabaseInfo databaseInfo, String dirName) throws DDLException {
    LOGGER.info("Exporting SQL data from \"" + databaseInfo.getUrl() + "\" ...");
    Map<String, String> info = new TreeMap<String, String>();
    java.util.Date d1 = new java.util.Date();
    info.put("start", d1.toString());
    info.put("driver", databaseInfo.getDriver());
    info.put("url", databaseInfo.getUrl());
    // fix: the original put the "user" entry twice; once is enough for a map
    info.put("user", databaseInfo.getUser());
    info.put("usePassword", Boolean.toString(databaseInfo.getPassword() != null));
    String databaseType = MetaDataBL.getDatabaseType(databaseInfo.getUrl());
    info.put(DATABASE_TYPE, databaseType);
    Connection connection = getConnection(databaseInfo);
    // log the database meta data information
    logDatabaseMetaDataInfo(databaseInfo, connection);
    String[] versions = MetaDataBL.getVersions(connection);
    info.put(SYSTEM_VERSION, versions[0]);
    info.put(DB_VERSION, versions[1]);
    StringValueConverter stringValueConverter = new GenericStringValueConverter();
    BufferedWriter writer = createBufferedWriter(dirName + File.separator + FILE_NAME_DATA);
    BufferedWriter writerUpdate = createBufferedWriter(dirName + File.separator + FILE_NAME_DATA_UPDATE);
    BufferedWriter writerClean = createBufferedWriter(dirName + File.separator + FILE_NAME_DATA_CLEAN);
    BufferedWriter writerUpdateClean = createBufferedWriter(
            dirName + File.separator + FILE_NAME_DATA_UPDATE_CLEAN);
    BufferedWriter writerBlob = createBufferedWriter(dirName + File.separator + FILE_NAME_BLOB);
    int idx = 0;
    String[] tableNames = MetaDataBL.getTableNames();
    for (String tableName : tableNames) {
        LOGGER.debug("Processing table: " + tableName + "....");
        int count = getTableData(writer, writerClean, writerUpdate, writerUpdateClean, connection,
                tableName, stringValueConverter);
        // per-table record count, keyed with a leading underscore
        info.put("_" + tableName, count + "");
        LOGGER.debug("Records exported:" + count + "\n");
        idx = idx + count;
    }
    LOGGER.debug("Processing blob data ....");
    int count = getBlobTableData(writerBlob, connection);
    LOGGER.debug(" Blob record exported:" + count + "\n");
    info.put("table_BLOB", count + "");
    idx = idx + count;
    try {
        // terminate the blob section with the separator char and the exchange marker
        char dataSeparator = (char) ASCII_DATA_SEPARATOR;
        writerBlob.write(dataSeparator);
        writerBlob.newLine();
        writerBlob.newLine();
        writerBlob.write("--TMSPROJECTEXCHANGE");
        writerBlob.newLine();
    } catch (IOException e) {
        LOGGER.error("Error on close blob stream file :" + e.getMessage());
        throw new DDLException(e.getMessage(), e);
    }
    LOGGER.debug("Processing clob data ....");
    count = getClobTableData(writerBlob, connection);
    LOGGER.debug(" Clob record exported:" + count + "\n");
    info.put("table_TMSPROJECTEXCHANGE", count + "");
    idx = idx + count;
    info.put("allData", idx + "");
    try {
        writer.flush();
        writer.close();
        writerClean.flush();
        writerClean.close();
        writerUpdate.flush();
        writerUpdate.close();
        writerUpdateClean.flush();
        writerUpdateClean.close();
        writerBlob.flush();
        writerBlob.close();
    } catch (IOException e) {
        LOGGER.error("Error on close stream file: " + e.getMessage());
        throw new DDLException(e.getMessage(), e);
    }
    try {
        connection.close();
    } catch (SQLException e) {
        throw new DDLException(e.getMessage(), e);
    }
    java.util.Date d2 = new java.util.Date();
    long timeSpend = d2.getTime() - d1.getTime();
    info.put("timeSpend", Long.toString(timeSpend));
    writeInfoToFile(info, dirName + File.separator + FILE_NAME_INFO);
    LOGGER.info("Data generated. All records found: " + idx + ". Time spend: " + timeSpend + " ms!");
}
From source file:org.web4thejob.print.CsvPrinter.java
@Override public File print(String title, RenderScheme renderScheme, Query query, List<Entity> entities) { Assert.notNull(renderScheme);/*from w w w.ja v a 2 s .c o m*/ Assert.isTrue(renderScheme.getSchemeType() == SchemeType.LIST_SCHEME); if (entities == null) { Assert.notNull(query); entities = ContextUtil.getDRS().findByQuery(query); } File file; try { String crlf = System.getProperty("line.separator"); file = createTempFile(); BufferedWriter writer = createFileStream(file); writer.write(title + crlf); writer.newLine(); if (query != null && query.hasMasterCriterion()) { writer.write(describeMasterCriteria(query)); writer.newLine(); } if (query != null) { writer.write(describeCriteria(query)); writer.newLine(); } CSVWriter csv = new CSVWriter(writer); List<String> header = new ArrayList<String>(); for (RenderElement item : renderScheme.getElements()) { if (item.getPropertyPath().getLastStep().isBlobType()) continue; header.add(item.getFriendlyName()); } csv.writeNext(header.toArray(new String[header.size()])); ConversionService conversionService = ContextUtil.getBean(ConversionService.class); for (final Entity entity : entities) { writeLine(csv, conversionService, entity, renderScheme); } writer.newLine(); //timestamp List<String> line = new ArrayList<String>(); line.add(L10nMessages.L10N_LABEL_TIMESTAMP.toString()); MessageFormat df = new MessageFormat(""); df.setLocale(CoreUtil.getUserLocale()); df.applyPattern("{0,date,yyyy-MM-dd hh:mm:ss}"); line.add(df.format(new Object[] { new Date() })); csv.writeNext(line.toArray(new String[line.size()])); writer.newLine(); writer.write("powered by web4thejob.org"); writer.close(); } catch (IOException e) { throw new RuntimeException(e); } return file; }
From source file:io.hops.transaction.context.TransactionsStats.java
private void dumpCSVLike() throws IOException { boolean fileExists = getCSVFile().exists(); BufferedWriter writer = getCSVFileWriter(); if (!fileExists) { writer.write(TransactionStat.getHeader()); writer.newLine(); }//from w w w . j a v a 2 s.com for (TransactionStat stat : transactionStats) { writer.write(stat.toString()); writer.newLine(); } }
From source file:com.jayway.maven.plugins.android.phase09package.Apklib2Mojo.java
private File createPropertiesFile(File directory) throws IOException, MojoExecutionException { File projProps = new File(directory, "project.properties"); BufferedWriter out = new BufferedWriter(new FileWriter(projProps)); out.write("android.library=true"); out.newLine(); out.write("target=android-"); out.write(getAndroidSdk().getApiLevel()); out.newLine();/*from w w w. j ava 2 s . c o m*/ out.close(); return projProps; }
From source file:io.hops.experiments.stats.TransactionStatsCumulative.java
public void dumpDepths(String transaction, String[] columns) throws IOException { if (statsDir.exists()) { File depthsStats = new File(statsDir, transaction + "-cumlativestats-depths"); depthsStats.mkdirs();//from ww w. ja v a 2 s . c o m for (int depth : DEPTHS) { File depthStats = new File(depthsStats, String.valueOf(depth)); BufferedWriter writer = new BufferedWriter(new FileWriter(depthStats)); writer.write("# Clients " + Joiner.on(" ").join(columns) + " " + Joiner.on(" ").join(RESOLVING_CACHE_COLUMNS)); writer.newLine(); for (int client : CLIENTS) { File clientDir = new File(statsDir, CLIENT + SEPERATOR + client); if (clientDir.exists()) { File depthDir = new File(clientDir, DEPTH + SEPERATOR + depth); if (depthDir.exists()) { System.out.println(client + " clients - Depth " + depth); writeStats(writer, transaction, columns, depthDir, client); } } } writer.close(); } generateGraphsFor(transaction, depthsStats, columns, "TotalTime", "Clients", "Time (miliseconds)", DEPTH); } }
From source file:net.sf.sessionAnalysis.SessionVisitorArrivalAndCompletionRate.java
public void writeArrivalCompletionRatesAndMaxNumSessions(final String outputdir) throws IOException { FileWriter fw = new FileWriter( outputdir + "/" + this.getClass().getSimpleName() + "-arrivalCompletionRatesAndMaxNumSessions.csv"); BufferedWriter writer = new BufferedWriter(fw); writer.write("timestamp;arrivalRate;completionRate;maxConcurrentSessions"); writer.newLine(); int numBuckets = this.completionRates.length; for (int i = 0; i < numBuckets; i++) { writer.write((minTimestampNanos + i * (resolutionValueNanos)) + ";" + this.arrivalRates[i] + ";" + this.completionRates[i] + ";" + this.maxNumSessionsPerInterval[i]); writer.newLine();/*from ww w . ja va2s. c o m*/ } writer.close(); fw.close(); }
From source file:io.hops.transaction.context.TransactionsStats.java
private void dumpResolvingCacheStats() throws IOException { boolean fileExists = getResolvingCacheCSVFile().exists(); BufferedWriter writer = getResolvingCSVFileWriter(); if (!fileExists) { writer.write(ResolvingCacheStat.getHeader()); writer.newLine(); }//from w w w .ja v a 2s .co m for (ResolvingCacheStat stat : resolvingCacheStats) { writer.write(stat.toString()); writer.newLine(); } }
From source file:com.ms.commons.standalone.pojo.StandaloneJob.java
/** * fullClassName conf????job/*from w w w . ja v a 2 s.c om*/ */ private synchronized void modifyDataSourceProperties(String fullClassName, String identity, String baseStandalonePath) throws Exception { if (StringUtils.contains(fullClassName, "msun")) { String filePath = baseStandalonePath + "/conf/msun.datasource.properties"; String tmpFile = filePath + ".tmp"; File file = new File(filePath); File tmp = new File(tmpFile); tmp.createNewFile(); if (file.exists()) { BufferedReader buffRead = new BufferedReader(new FileReader(file)); BufferedWriter write = new BufferedWriter(new FileWriter(tmp)); String content = null; while ((content = buffRead.readLine()) != null) { if (StringUtils.contains(content, "nisa.client.appname")) { content = "nisa.client.appname=" + identity; } write.write(content); write.newLine(); } write.close(); buffRead.close(); } tmp.renameTo(file); } }
From source file:io.hops.experiments.stats.TransactionStatsCumulative.java
public void dumpClients(String transaction, String[] columns) throws IOException { if (statsDir.exists()) { File clientsStats = new File(statsDir, transaction + "-cumlativestats-clients"); clientsStats.mkdirs();//from ww w . ja v a2s.c om for (int client : CLIENTS) { File clientDir = new File(statsDir, CLIENT + SEPERATOR + client); if (clientDir.exists()) { File clientStats = new File(clientsStats, String.valueOf(client)); BufferedWriter writer = new BufferedWriter(new FileWriter(clientStats)); writer.write("# Depth " + Joiner.on(" ").join(columns) + " " + Joiner.on(" ").join(RESOLVING_CACHE_COLUMNS)); writer.newLine(); for (int depth : DEPTHS) { File depthDir = new File(clientDir, DEPTH + SEPERATOR + depth); if (depthDir.exists()) { System.out.println(client + " clients - Depth " + depth); writeStats(writer, transaction, columns, depthDir, depth); } } writer.close(); } } generateGraphsFor(transaction, clientsStats, columns, "TotalTime", "File Tree Depth", "Time (miliseconds)", "Clients"); } }
From source file:fr.itinerennes.onebusaway.bundle.tasks.GenerateMarkersCsvTask.java
/** * {@inheritDoc}//from w ww. jav a 2s. c om * * @see java.lang.Runnable#run() */ @Override public void run() { final Collection<Stop> busStops = gtfsDao.getAllStops(); LOGGER.info("having {} bus stop", busStops.size()); BufferedWriter out = null; try { out = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(outputFile), CHARSET)); // output stop and stations count out.write(String.valueOf(busStops.size() + bikeStations.size() + subwayStations.size())); out.newLine(); for (final Stop stop : busStops) { out.write(toCsvLine("BUS", stop.getId().toString(), stop.getLon(), stop.getLat(), stop.getName(), stop.getDesc())); out.newLine(); } for (final BikeStation bike : bikeStations) { final String name = StringUtils.capitalize(bike.getName()); out.write(toCsvLine("BIKE", bike.getId(), bike.getLongitude(), bike.getLatitude(), name, "")); out.newLine(); } for (final SubwayStation subway : subwayStations) { final String name = StringUtils.capitalize(subway.getName()); out.write( toCsvLine("SUBWAY", subway.getId(), subway.getLongitude(), subway.getLatitude(), name, "")); out.newLine(); } } catch (final FileNotFoundException e) { LOGGER.error("output file not found", e); } catch (final IOException e) { LOGGER.error("can't write to output file", e); } finally { IOUtils.closeQuietly(out); } }