List of usage examples for java.io PrintWriter println
public void println(Object x)
From source file:at.tuwien.ifs.somtoolbox.data.InputDataWriter.java
/** * Writes input data in the SOMPAK format (see * http://www.cis.hut.fi/projects/somtoolbox/package/docs2/som_read_data.html) *//*from ww w. j a v a 2 s. com*/ public static void writeAsSOMPAK(InputData data, String fileName) throws IOException { Logger.getLogger("at.tuwien.ifs.somtoolbox") .info("Writing input data as SOMPAK file to '" + fileName + "'."); PrintWriter writer = FileUtils.openFileForWriting("SOMPAK data", fileName, false); // number of dimensions first, simply the number writer.println(data.dim()); // now all component names TemplateVector tv = data.templateVector(); if (tv == null) { Logger.getLogger("at.tuwien.ifs.somtoolbox") .info("Template vector not loaded - creating a generic one."); tv = new SOMLibTemplateVector(data.numVectors(), data.dim()); } writer.println( SOMPAKInputData.INDICATOR_COMPONENTS + " " + StringUtils.toString(tv.getLabels(), "", "", " ")); // now all data, appended by the class name for (int i = 0; i < data.numVectors(); i++) { for (int j = 0; j < data.dim(); j++) { writer.print(data.getValue(i, j)); if (j + 1 < data.dim()) { writer.print(" "); } } if (data.classInformation() != null) { writer.print(" " + data.classInformation().getClassName(i)); } writer.println(); } writer.close(); }
From source file:it.geosolutions.geobatch.imagemosaic.ImageMosaicProperties.java
/** * If the regex file do not exists, build it using the passed configuration and return the * corresponding properties object/*from w w w . j a v a 2 s. c o m*/ * * @param regexFile * @param configuration * @return * @throws NullPointerException * @throws IOException */ private static Properties createRegexFile(File regexFile, String regex) throws NullPointerException, IOException { if (!regexFile.exists()) { FileWriter outFile = null; PrintWriter out = null; if (regex != null) { try { outFile = new FileWriter(regexFile); out = new PrintWriter(outFile); // Write text to file out.println("regex=" + regex); } catch (IOException e) { if (LOGGER.isErrorEnabled()) LOGGER.error("Error occurred while writing " + regexFile.getAbsolutePath() + " file!", e); } finally { if (out != null) { out.flush(); out.close(); } outFile = null; out = null; } } else throw new NullPointerException("Unable to build the property file using a null regex string"); return getPropertyFile(regexFile); } return null; }
From source file:gov.nih.nci.caarray.application.translation.geosoft.GeoSoftFileWriterUtil.java
private static void writeCharacteristics(String key, String value, PrintWriter out, Set<String> alreadyWritten) { if (StringUtils.isNotBlank(value)) { final String entry = "!Sample_characteristics=" + key + ':' + value; if (alreadyWritten.add(entry)) { out.println(entry); }// ww w . j av a 2 s. c om } }
From source file:com.txtweb.wikipedia.Wikipedia.java
private static void sendResponse(HttpServletResponse httpResponse, String response) { try {// w w w.java 2 s .c o m httpResponse.setContentType("text/html; charset=UTF-8"); PrintWriter out = httpResponse.getWriter(); // Add all the surrounding HTML String htmlResponse = "<html><head><title>Wikipedia</title>" + "<meta http-equiv='Content-Type' content='text/html; charset=UTF-8' />" + "<meta name='" + APPKEY_NAME + "' content='" + APPKEY_CONTENT + "' />" + "</head><body>" + response + "</body></html>"; out.println(htmlResponse); } catch (IOException e) { // } }
From source file:com.ebay.erl.mobius.core.mapred.ConfigurableJob.java
private static void writePartitionFile(JobConf job, Sampler sampler) { try {/* w ww .ja va 2 s . com*/ //////////////////////////////////////////////// // first, getting samples from the data sources //////////////////////////////////////////////// LOGGER.info("Running local sampling for job [" + job.getJobName() + "]"); InputFormat inf = job.getInputFormat(); Object[] samples = sampler.getSample(inf, job); LOGGER.info("Samples retrieved, sorting..."); //////////////////////////////////////////////// // sort the samples //////////////////////////////////////////////// RawComparator comparator = job.getOutputKeyComparator(); Arrays.sort(samples, comparator); if (job.getBoolean("mobius.print.sample", false)) { PrintWriter pw = new PrintWriter( new OutputStreamWriter(new GZIPOutputStream(new BufferedOutputStream(new FileOutputStream( new File(job.get("mobius.sample.file", "./samples.txt.gz"))))))); for (Object obj : samples) { pw.println(obj); } pw.flush(); pw.close(); } //////////////////////////////////////////////// // start to write partition files //////////////////////////////////////////////// FileSystem fs = FileSystem.get(job); Path partitionFile = fs.makeQualified(new Path(TotalOrderPartitioner.getPartitionFile(job))); while (fs.exists(partitionFile)) { partitionFile = new Path(partitionFile.toString() + "." 
+ System.currentTimeMillis()); } fs.deleteOnExit(partitionFile); TotalOrderPartitioner.setPartitionFile(job, partitionFile); LOGGER.info("write partition file to:" + partitionFile.toString()); int reducersNbr = job.getNumReduceTasks(); Set<Object> wroteSamples = new HashSet<Object>(); SequenceFile.Writer writer = SequenceFile.createWriter(fs, job, partitionFile, Tuple.class, NullWritable.class); float avgReduceSize = samples.length / reducersNbr; int lastBegin = 0; for (int i = 0; i < samples.length;) { // trying to distribute the load for every reducer evenly, // dividing the <code>samples</code> into a set of blocks // separated by boundaries, objects that selected from the // <code>samples</code> array, and each blocks should have // about the same size. // find the last index of element that equals to samples[i], as // such element might appear multiple times in the samples. int upperBound = Util.findUpperBound(samples, samples[i], comparator); int lowerBound = i;//Util.findLowerBound(samples, samples[i], comparator); // the repeat time of samples[i], if the key itself is too big // select it as boundary int currentElemSize = upperBound - lowerBound + 1; if (currentElemSize > avgReduceSize * 2) // greater than two times of average reducer size { // the current element is too big, greater than // two times of the <code>avgReduceSize</code>, // put itself as boundary writer.append(((DataJoinKey) samples[i]).getKey(), NullWritable.get()); wroteSamples.add(((DataJoinKey) samples[i]).getKey()); //pw.println(samples[i]); // immediate put the next element to the boundary, // the next element starts at <code> upperBound+1 // </code>, to prevent the current one consume even // more. 
if (upperBound + 1 < samples.length) { writer.append(((DataJoinKey) samples[upperBound + 1]).getKey(), NullWritable.get()); wroteSamples.add(((DataJoinKey) samples[upperBound + 1]).getKey()); //pw.println(samples[upperBound+1]); // move on to the next element of <code>samples[upperBound+1]/code> lastBegin = Util.findUpperBound(samples, samples[upperBound + 1], comparator) + 1; i = lastBegin; } else { break; } } else { // current element is small enough to be consider // with previous group int size = upperBound - lastBegin; if (size > avgReduceSize) { // by including the current elements, we have // found a block that's big enough, select it // as boundary writer.append(((DataJoinKey) samples[i]).getKey(), NullWritable.get()); wroteSamples.add(((DataJoinKey) samples[i]).getKey()); //pw.println(samples[i]); i = upperBound + 1; lastBegin = i; } else { i = upperBound + 1; } } } writer.close(); // if the number of wrote samples doesn't equals to number of // reducer minus one, then it means the key spaces is too small // hence TotalOrderPartitioner won't work, it works only if // the partition boundaries are distinct. // // we need to change the number of reducers if (wroteSamples.size() + 1 != reducersNbr) { LOGGER.info("Write complete, but key space is too small, sample size=" + wroteSamples.size() + ", reducer size:" + (reducersNbr)); LOGGER.info("Set the reducer size to:" + (wroteSamples.size() + 1)); // add 1 because the wrote samples define boundary, ex, if // the sample size is two with two element [300, 1000], then // there should be 3 reducers, one for handling i<300, one // for n300<=i<1000, and another one for 1000<=i job.setNumReduceTasks((wroteSamples.size() + 1)); } samples = null; } catch (IOException e) { LOGGER.error(e.getMessage(), e); throw new RuntimeException(e); } }
From source file:barrysw19.calculon.icc.ICCInterface.java
private static void saveChat(String s) { try {//from w w w .j a v a 2s. co m PrintWriter pw = new PrintWriter(new FileWriter("c:/Development/chatlog.txt", true)); pw.println(s); pw.close(); } catch (IOException e) { LOG.error("Error writing chatlog", e); } }
From source file:com.netscape.cmsutil.util.Utils.java
/**
 * Copies a text file line by line from {@code orig} to {@code dest}.
 *
 * <p>Note: uses {@code FileReader}/{@code FileWriter}, i.e. the platform default charset
 * (pre-Java 18) — presumably acceptable for the config files this handles; TODO confirm.
 * Each line is written with {@code println}, so line separators are normalized to the
 * platform separator and the output always ends with one.
 *
 * @param orig path of the source file
 * @param dest path of the destination file
 * @throws Exception if either file cannot be opened, read, or written
 */
public static void copy(String orig, String dest) throws Exception {
    // try-with-resources closes both streams on every path (the original used
    // a hand-rolled finally block).
    try (BufferedReader in = new BufferedReader(new FileReader(orig));
            PrintWriter out = new PrintWriter(new BufferedWriter(new FileWriter(dest)))) {
        // FIX: the original looped on in.ready(), which only reports whether a
        // read would block — not end-of-stream — and can terminate the copy
        // early. readLine() returning null is the reliable EOF signal.
        String line;
        while ((line = in.readLine()) != null) {
            out.println(line);
        }
    }
}
From source file:com.mgmtp.perfload.loadprofiles.util.PlotFileCreator.java
/**
 * Creates a plot of the start times of load events for all given load curve assignments. This
 * plot is normally used for diagnostic purposes.
 *
 * <p>Output format: for each (client, assignment) pair, a header line followed by one
 * "x; y" data line per matching event, then a blank separator line.
 *
 * @param file the plot file (written as UTF-8)
 * @param eventList list of events to be plotted
 * @param loadCurveAssignments load curve assignments, by which the load events are grouped
 * @param nClients number of clients
 * @param timeUnitPlot time unit of the plot (event times are scaled from hours to this unit)
 * @throws IOException if the plot file cannot be written
 */
public static void createPlot(final File file, final Collection<LoadEvent> eventList,
        final List<LoadCurveAssignment> loadCurveAssignments, final int nClients, final String timeUnitPlot)
        throws IOException {
    // scale factor from hours (the curves' native unit) to the requested plot unit
    double timeScalingFactor = LoadCurveCalculator.getTimeScalingFactor(LoadCurveCalculator.timeUnit_hour,
            timeUnitPlot);
    int nAssignements = loadCurveAssignments.size();
    // operationsOfType[assignment][client] = number of events of that operation for that client
    int[][] operationsOfType = new int[nAssignements][nClients];
    String[] operationNames = new String[nAssignements];
    for (int iAssignement = 0; iAssignement < nAssignements; iAssignement++) {
        operationNames[iAssignement] = loadCurveAssignments.get(iAssignement).getOperationName();
    }
    // count events per (operation, client); only used for the log output below
    for (int iClient = 0; iClient < nClients; iClient++) {
        for (LoadEvent event : eventList) {
            if (event.getClientId() == iClient) {
                int operationId = getOperationId(event.getOperation().getName(), operationNames);
                operationsOfType[operationId][iClient]++;
            }
        }
    }
    PrintWriter pw = null;
    try {
        pw = new PrintWriter(file, "UTF-8");
        Format format = NumberFormat.getNumberInstance();
        // one section per (client, assignment) pair, in client-major order
        for (int iClient = 0; iClient < nClients; iClient++) {
            for (int iAssignement = 0; iAssignement < nAssignements; iAssignement++) {
                // section header naming the time unit, client, curve, and operation
                pw.println("Eventtime " + timeUnitPlot + "; Client " + iClient + " "
                        + loadCurveAssignments.get(iAssignement).getLoadCurve().getName() + " "
                        + operationNames[iAssignement]);
                log.info("Writing " + operationsOfType[iAssignement][iClient] + " operations of type "
                        + loadCurveAssignments.get(iAssignement).getOperationName() + " for client " + iClient);
                // one data line per event of this client and operation:
                // x = scaled event time, y = load curve value at that time
                for (LoadEvent event : eventList) {
                    if (event.getClientId() == iClient) {
                        if (event.getOperation().getName().equals(operationNames[iAssignement])) {
                            double x = timeScalingFactor * event.getTime();
                            double y = LoadCurveCalculator
                                    .r(loadCurveAssignments.get(iAssignement).getLoadCurve(), event.getTime());
                            pw.println(format.format(x) + "; " + format.format(y));
                        }
                    }
                }
                // blank line terminates the section
                pw.println();
            }
        }
    } finally {
        IOUtils.closeQuietly(pw);
    }
}
From source file:net.metanotion.sqlc.SqlcPhp.java
public static int makeMethod(final PrintWriter writer, final SQLMethod m, final DoWrap qe, final int level, final int[] gensym, final int[] braces, final boolean retValue) { for (final QueryExpr<PreparedStatement> e : qe.exprs) { makeMethod(writer, m, e, level + 1, gensym, braces, retValue); }// www. j a v a 2 s. co m if (level == 0) { writer.println("\t\t\treturn $_" + (gensym[0] - 1) + ";"); while (braces[0] > 0) { writer.println("}"); braces[0]--; } } return gensym[0] - 1; }
From source file:com.me.edu.Servlet.ElasticSearch_Backup.java
/**
 * Fetches a single document from Elasticsearch and prints its metadata (index, type, id,
 * version) and source as HTML lines to the given writer.
 */
public static void getDocument(PrintWriter out, Client client, String index, String type, String id) {
    // Synchronous get request for the addressed document.
    final GetResponse response = client.prepareGet(index, type, id).execute().actionGet();
    final Map<String, Object> source = response.getSource();
    // One <br>-terminated line per metadata field, then the raw source map.
    out.println("Index: " + response.getIndex() + "<br>");
    out.println("Type: " + response.getType() + "<br>");
    out.println("Id: " + response.getId() + "<br>");
    out.println("Version: " + response.getVersion() + "<br>");
    out.println(source + "<br>");
}