List of usage examples for java.io.BufferedWriter.newLine()
public void newLine() throws IOException
From source file:com.all.client.util.FileUtil.java
/**
 * Writes each string in {@code stringList} to the file at {@code path}, one
 * per line (platform line separator), overwriting any existing content.
 *
 * Note: uses the platform default charset, as the original did — confirm
 * callers do not depend on a specific encoding.
 *
 * @param stringList lines to write; must not be null
 * @param path       destination file path
 */
public void writeLinesToFile(List<String> stringList, String path) {
    // try-with-resources closes the writer chain even when write() throws,
    // replacing the manual closeWriter(...) calls in the original finally
    // block. close() flushes, so the explicit flush() is no longer needed.
    try (BufferedWriter bw = new BufferedWriter(new FileWriter(path))) {
        for (String line : stringList) {
            bw.write(line);
            bw.newLine();
        }
    } catch (IOException e) {
        // best-effort: errors are logged, not propagated (original behavior)
        LOG.error(e, e);
    }
}
From source file:it.unibas.spicy.persistence.json.ExportJsonInstances.java
public void createJsonDocument(String tableName, String schema, INode tableNode, String folderPath, Statement statement) throws SQLException, IOException { BufferedWriter bw = null; ResultSet instancesSet = null; try {/*from w ww . ja v a 2s . c o m*/ File file = new File(folderPath + File.separator + tableName + ".json"); FileWriter fw = new FileWriter(file.getAbsoluteFile()); bw = new BufferedWriter(fw); if (!file.exists()) { file.createNewFile(); } instancesSet = statement.executeQuery( "SELECT row_to_json(\"" + tableName + "\") FROM " + schema + ".\"" + tableName + "\";"); //check to see if the result set is empty if (instancesSet.isBeforeFirst()) { bw.write("["); bw.newLine(); while (instancesSet.next()) { bw.write(instancesSet.getString(1)); //if it is not the last result if (!instancesSet.isLast()) { bw.write(","); } bw.newLine(); } bw.write("]"); } //alternative for one-row json /*instancesSet = statement.executeQuery("SELECT array_to_json(array_agg("+"\""+tableName+"\")) FROM "+schema+"\""+tableName+"\";"); while (instancesSet.next() && instancesSet.getString(1)!=null){ bw.write(instancesSet.getString(1)); } }*/ } finally { bw.close(); instancesSet.close(); } }
From source file:gtu._work.etc.GoogleContactUI.java
/**
 * Builds and shows the right-click popup menu for {@code googleTable}:
 * charset selection, Google-contacts CSV import/export, clipboard paste,
 * and generic row/column manipulation items.
 *
 * All failures are routed to JCommonUtil.handleException (dialog-based
 * error reporting); nothing is thrown to the caller.
 *
 * @param evt the mouse event that triggered the popup
 */
void googleTableMouseClicked(MouseEvent evt) {
    try {
        JPopupMenuUtil popupUtil = JPopupMenuUtil.newInstance(googleTable).applyEvent(evt);

        // CHANGE ENCODE — let the user pick the charset used when reading CSV files
        popupUtil.addJMenuItem("set encode", new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                try {
                    // default to UTF8 when the dialog is cancelled / left empty
                    String code = StringUtils.defaultString(JOptionPaneUtil.newInstance().iconPlainMessage()
                            .showInputDialog("input file encode", "ENCODE"), "UTF8");
                    // Charset.forName validates the name; throws on bad input
                    encode = Charset.forName(code).displayName();
                } catch (Exception ex) {
                    JCommonUtil.handleException(ex);
                }
                System.err.println("encode : " + encode);
            }
        });

        // SIMPLE LOAD GOOGLE CSV FILE — appends rows from a CSV into the table model
        popupUtil.addJMenuItem("open Google CSV file", new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                File file = JFileChooserUtil.newInstance().selectFileOnly().addAcceptFile("csv", ".csv")
                        .showOpenDialog().getApproveSelectedFile();
                if (file == null) {
                    errorMessage("file is not correct!");
                    return;
                }
                try {
                    if (file.getName().endsWith(".csv")) {
                        DefaultTableModel model = (DefaultTableModel) googleTable.getModel();
                        LineNumberReader reader = new LineNumberReader(
                                new InputStreamReader(new FileInputStream(file), GOOGLE_CVS_ENCODE));
                        for (String line = null; (line = reader.readLine()) != null;) {
                            // skip the CSV header row
                            if (reader.getLineNumber() == 1) {
                                continue;
                            }
                            // NOTE(review): naive split — fields containing quoted
                            // commas will break; confirm this is acceptable input
                            model.addRow(line.split(","));
                        }
                        reader.close();
                        googleTable.setModel(model);
                        JTableUtil.newInstance(googleTable).hiddenAllEmptyColumn();
                    }
                } catch (Exception ex) {
                    JCommonUtil.handleException(ex);
                }
            }
        });

        // SAVE CSV FILE FOR GOOGLE — writes header row + table rows as CSV
        popupUtil.addJMenuItem("save to Google CVS file", new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                File file = JFileChooserUtil.newInstance().selectFileOnly().addAcceptFile(".csv", ".csv")
                        .showSaveDialog().getApproveSelectedFile();
                if (file == null) {
                    errorMessage("file is not correct!");
                    return;
                }
                // force a .csv extension on the chosen file name
                file = FileUtil.getIndicateFileExtension(file, ".csv");
                try {
                    BufferedWriter writer = new BufferedWriter(
                            new OutputStreamWriter(new FileOutputStream(file), GOOGLE_CVS_ENCODE));
                    // header row from the fixed googleColumns list
                    StringBuilder sb = new StringBuilder();
                    for (Object title : googleColumns) {
                        sb.append(title + ",");
                    }
                    sb.deleteCharAt(sb.length() - 1); // drop trailing comma
                    System.out.println(sb);
                    writer.write(sb.toString());
                    writer.newLine();
                    // data rows; literal "null" cell values are written as empty
                    DefaultTableModel model = (DefaultTableModel) googleTable.getModel();
                    for (int row = 0; row < model.getRowCount(); row++) {
                        sb = new StringBuilder();
                        for (int col = 0; col < model.getColumnCount(); col++) {
                            String colVal = StringUtils.defaultString((String) model.getValueAt(row, col), "");
                            if (colVal.equalsIgnoreCase("null")) {
                                colVal = "";
                            }
                            sb.append(colVal + ",");
                        }
                        sb.deleteCharAt(sb.length() - 1); // drop trailing comma
                        System.out.println(sb);
                        writer.write(sb.toString());
                        writer.newLine();
                    }
                    writer.flush();
                    writer.close();
                } catch (Exception ex) {
                    JCommonUtil.handleException(ex);
                }
            }
        });

        // PASTE CLIPBOARD — whole-grid paste vs. single-value paste into selection
        popupUtil.addJMenuItem("paste clipboard", new ActionListener() {
            public void actionPerformed(ActionEvent paramActionEvent) {
                JTableUtil.newInstance(googleTable).pasteFromClipboard_multiRowData(true);
            }
        });
        popupUtil.addJMenuItem("paste clipboard to selected cell", new ActionListener() {
            public void actionPerformed(ActionEvent paramActionEvent) {
                JTableUtil.newInstance(googleTable).pasteFromClipboard_singleValueToSelectedCell();
            }
        });

        // generic table-manipulation items supplied by JTableUtil, relabelled
        JMenuItem addEmptyRowItem = JTableUtil.newInstance(googleTable).jMenuItem_addRow(false, "add row count?");
        addEmptyRowItem.setText("add row");
        JMenuItem removeColumnItem = JTableUtil.newInstance(googleTable).jMenuItem_removeColumn(null);
        removeColumnItem.setText("remove column");
        JMenuItem removeRowItem = JTableUtil.newInstance(googleTable).jMenuItem_removeRow(null);
        removeRowItem.setText("remove row");
        JMenuItem removeAllRowItem = JTableUtil.newInstance(googleTable)
                .jMenuItem_removeAllRow("remove all row?");
        removeAllRowItem.setText("remove all row");
        JMenuItem clearSelectedCellItem = JTableUtil.newInstance(googleTable)
                .jMenuItem_clearSelectedCell("are you sure clear selected area?");
        clearSelectedCellItem.setText("clear selected area");
        popupUtil.addJMenuItem(addEmptyRowItem, removeColumnItem, removeRowItem, removeAllRowItem,
                clearSelectedCellItem);
        popupUtil.show();
    } catch (Exception ex) {
        JCommonUtil.handleException(ex);
    }
}
From source file:com.emc.vipr.sync.CasMigrationTest.java
private void testSyncClipList(int numClips, int maxBlobSize) throws Exception { FPPool sourcePool = new FPPool(connectString1); FPPool destPool = new FPPool(connectString2); // create random data (capture summary for comparison) StringWriter sourceSummary = new StringWriter(); List<String> clipIds = createTestClips(sourcePool, maxBlobSize, numClips, sourceSummary); // write clip file File clipFile = File.createTempFile("clip", "lst"); clipFile.deleteOnExit();//from ww w. j av a 2 s . c om BufferedWriter writer = new BufferedWriter(new FileWriter(clipFile)); for (String clipId : clipIds) { writer.write(clipId); writer.newLine(); } writer.close(); ViPRSync sync = createViPRSync(connectString1, connectString2, 20, true); ((CasSource) sync.getSource()).setClipIdFile(clipFile.getAbsolutePath()); sync.run(); System.out.println(sync.getStatsString()); String destSummary = summarize(destPool, clipIds); delete(sourcePool, clipIds); delete(destPool, clipIds); Assert.assertEquals("query summaries different", sourceSummary.toString(), destSummary); }
From source file:com.emc.ecs.sync.CasMigrationTest.java
private void testSyncClipList(int numClips, int maxBlobSize) throws Exception { FPPool sourcePool = new FPPool(connectString1); FPPool destPool = new FPPool(connectString2); // create random data (capture summary for comparison) StringWriter sourceSummary = new StringWriter(); List<String> clipIds = createTestClips(sourcePool, maxBlobSize, numClips, sourceSummary); // write clip file File clipFile = File.createTempFile("clip", "lst"); clipFile.deleteOnExit();//from w ww . j a v a2s. co m BufferedWriter writer = new BufferedWriter(new FileWriter(clipFile)); for (String clipId : clipIds) { writer.write(clipId); writer.newLine(); } writer.close(); EcsSync sync = createEcsSync(connectString1, connectString2, CAS_THREADS, true); ((CasSource) sync.getSource()).setClipIdFile(clipFile.getAbsolutePath()); run(sync); String destSummary = summarize(destPool, clipIds); delete(sourcePool, clipIds); delete(destPool, clipIds); Assert.assertEquals("query summaries different", sourceSummary.toString(), destSummary); }
From source file:net.rptools.lib.io.PackedFile.java
/** * Write the serialized object to the given path in the ZIP file; as the data is an object it is first converted to * XML and character set encoding will take place as the data is written to the (temporary) file. * /* w w w.ja v a 2s . com*/ * @param path * location within the ZIP file * @param obj * the object to be written * @throws IOException */ public void putFile(String path, Object obj) throws IOException { File explodedFile = putFileImpl(path); FileOutputStream fos = new FileOutputStream(explodedFile); OutputStreamWriter osw = new OutputStreamWriter(fos, "UTF-8"); BufferedWriter bw = new BufferedWriter(osw); xstream.toXML(obj, bw); bw.newLine(); // Not necessary but editing the file looks nicer. ;-) IOUtils.closeQuietly(bw); }
From source file:edu.indiana.soic.ts.mapreduce.pwd.PairWiseDistance.java
private void partitionData(String sequenceFile, int noOfSequences, int blockSize, FileSystem fs, int noOfDivisions, Configuration jobConf, Path inputDir) throws FileNotFoundException, IOException, URISyntaxException { // Break the sequences file in to parts based on the block size. Stores // the parts in HDFS and add them to the Hadoop distributed cache. Path path = new Path(sequenceFile); BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(fs.open(path))); LOG.info("noOfDivisions : " + noOfDivisions); LOG.info("blockSize : " + blockSize); for (int partNo = 0; partNo < noOfDivisions; partNo++) { //// w ww . j a va2 s .c o m String filePartName = Constants.HDFS_SEQ_FILENAME + "_" + partNo; Path inputFilePart = new Path(inputDir, filePartName); OutputStream partOutStream = fs.create(inputFilePart); BufferedWriter bufferedWriter = new BufferedWriter(new OutputStreamWriter(partOutStream)); for (int sequenceIndex = 0; ((sequenceIndex < blockSize) & (sequenceIndex + (partNo * blockSize) < noOfSequences)); sequenceIndex++) { String line; line = bufferedReader.readLine(); if (line == null) { throw new IOException("Cannot read the sequence from input file."); } // write the sequence name bufferedWriter.write(line); bufferedWriter.newLine(); } bufferedWriter.flush(); bufferedWriter.close(); // Adding the sequences file to Hadoop cache URI cFileURI = new URI(inputFilePart.toUri() + "#" + filePartName); DistributedCache.addCacheFile(cFileURI, jobConf); DistributedCache.createSymlink(jobConf); } }
From source file:com.itemanalysis.jmetrik.workspace.JmetrikPreferencesManager.java
private void createLogProperties(String logHome) { //directory should already exist //create log4j properties file if it does not exist String header = "#DO NOT EDIT - JMETRIK LOG PROPERTIES FILE - DO NOT EDIT"; String fullPropertiesName = (logHome + "/" + DEFAULT_LOG_PROPERTY_FILE_V4); String fullLogFileName = (logHome + "/" + DEFAULT_LOG_NAME); String fullScriptLogFileName = (logHome + "/" + DEFAULT_SCRIPT_LOG_NAME); File f = new File(fullPropertiesName); if (!f.exists()) { try {//from ww w. j a v a 2s . co m createLogHome(logHome); f.createNewFile(); BufferedWriter bw = new BufferedWriter(new FileWriter(f)); bw.append(header); bw.newLine(); bw.append("log4j.logger.jmetrik-logger=ALL, adminAppender"); bw.newLine(); bw.append("log4j.logger.jmetrik-script-logger=INFO, scriptAppender"); bw.newLine(); bw.append("log4j.additivity.jmetrik-logger=false"); bw.newLine(); bw.append("log4j.additivity.jmetrik-script-logger=false"); bw.newLine(); //Main appender processes all levels bw.append("log4j.appender.adminAppender=org.apache.log4j.FileAppender"); bw.newLine(); bw.append("log4j.appender.adminAppender.layout=org.apache.log4j.PatternLayout"); bw.newLine(); bw.append("log4j.appender.adminAppender.File=" + fullLogFileName); bw.newLine(); bw.append("log4j.appender.adminAppender.Append=false"); bw.newLine(); bw.append("log4j.appender.adminAppender.layout.ConversionPattern=[%p] %d{DATE} %n%m%n%n"); bw.newLine(); //Script appender processes scripts only bw.append("log4j.appender.scriptAppender=org.apache.log4j.FileAppender"); bw.newLine(); bw.append("log4j.appender.scriptAppender.layout=org.apache.log4j.PatternLayout"); bw.newLine(); bw.append("log4j.appender.scriptAppender.File=" + fullScriptLogFileName); bw.newLine(); bw.append("log4j.appender.scriptAppender.Append=false"); bw.newLine(); bw.append("log4j.appender.scriptAppender.layout.ConversionPattern=%m%n%n"); bw.newLine(); bw.close(); } catch (IOException ex) { firePropertyChange("error", "", "Error - Log properties file 
could not be created."); } } }
From source file:com.healthmarketscience.jackcess.util.ExportUtil.java
/** * Copy a table in this database into a new delimited text file. * /*from w w w.j av a2 s. c o m*/ * @param cursor * Cursor to export * @param out * Writer to export to * @param header * If <code>true</code> the first line contains the column names * @param delim * The column delimiter, <code>null</code> for default (comma) * @param quote * The quote character * @param filter * valid export filter * * @see Builder */ public static void exportWriter(Cursor cursor, BufferedWriter out, boolean header, String delim, char quote, ExportFilter filter) throws IOException { String delimiter = (delim == null) ? DEFAULT_DELIMITER : delim; // create pattern which will indicate whether or not a value needs to be // quoted or not (contains delimiter, separator, or newline) Pattern needsQuotePattern = Pattern .compile("(?:" + Pattern.quote(delimiter) + ")|(?:" + Pattern.quote("" + quote) + ")|(?:[\n\r])"); List<? extends Column> origCols = cursor.getTable().getColumns(); List<Column> columns = new ArrayList<Column>(origCols); columns = filter.filterColumns(columns); Collection<String> columnNames = null; if (!origCols.equals(columns)) { // columns have been filtered columnNames = new HashSet<String>(); for (Column c : columns) { columnNames.add(c.getName()); } } // print the header row (if desired) if (header) { for (Iterator<Column> iter = columns.iterator(); iter.hasNext();) { writeValue(out, iter.next().getName(), quote, needsQuotePattern); if (iter.hasNext()) { out.write(delimiter); } } out.newLine(); } // print the data rows Object[] unfilteredRowData = new Object[columns.size()]; Row row; while ((row = cursor.getNextRow(columnNames)) != null) { // fill raw row data in array for (int i = 0; i < columns.size(); i++) { unfilteredRowData[i] = columns.get(i).getRowValue(row); } // apply filter Object[] rowData = filter.filterRow(unfilteredRowData); if (rowData == null) { continue; } // print row for (int i = 0; i < columns.size(); i++) { Object obj = rowData[i]; if (obj != 
null) { String value = null; if (obj instanceof byte[]) { value = ByteUtil.toHexString((byte[]) obj); } else { value = String.valueOf(rowData[i]); } writeValue(out, value, quote, needsQuotePattern); } if (i < columns.size() - 1) { out.write(delimiter); } } out.newLine(); } out.flush(); }
From source file:edu.cmu.lti.oaqa.knn4qa.apps.GenFeaturesAppImpl.java
@Override void procResults(String queryID, Map<String, String> docFields, CandidateEntry[] scoredDocs, int numRet, Map<String, DenseVector> docFeats) throws IOException { BufferedWriter featOut = mhOutFiles.get(numRet); if (null == featOut) throw new RuntimeException("Bug, output file is not init. for numRet=" + numRet); for (CandidateEntry e : scoredDocs) { String label = e.mIsRelev ? "1" : "0"; String docId = e.mDocId;//from ww w .j av a 2 s . c om DenseVector vect = docFeats.get(docId); StringBuffer sb = new StringBuffer(); sb.append(label + " "); sb.append("qid:" + queryID); for (int fn = 0; fn < vect.size(); ++fn) // Note that feature numbers should start from 1 or else some libraries like RankLib will not work correctly! sb.append(" " + (fn + 1) + ":" + vect.get(fn)); featOut.write(sb.toString()); featOut.newLine(); } }