Usage examples for java.io.PrintStream.close()
public void close()
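PrintStream implements AutoCloseable, so since Java 7 close() is usually best left to try-with-resources rather than called by hand. A minimal sketch (the file name is illustrative):

    import java.io.FileNotFoundException;
    import java.io.PrintStream;

    public class PrintStreamCloseExample {
        public static void main(String[] args) throws FileNotFoundException {
            // try-with-resources invokes out.close() automatically,
            // even if println throws
            try (PrintStream out = new PrintStream("example.txt")) {
                out.println("hello");
            }
            // the stream (and its underlying FileOutputStream) is closed here
        }
    }

Note that close() also closes the stream the PrintStream wraps, so a separate close on the underlying OutputStream (as in the first example below) is redundant but harmless.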
From source file:com.panet.imeta.shared.SharedObjects.java
public void saveToFile() throws IOException, KettleException {
    OutputStream outputStream = KettleVFS.getOutputStream(filename, false);
    PrintStream out = new PrintStream(outputStream);
    out.print(XMLHandler.getXMLHeader(Const.XML_ENCODING));
    out.println("<" + XML_TAG + ">");
    Collection<SharedObjectInterface> collection = objectsMap.values();
    for (SharedObjectInterface sharedObject : collection) {
        out.println(sharedObject.getXML());
    }
    out.println("</" + XML_TAG + ">");
    out.flush();
    out.close();
    outputStream.close();
}
From source file:org.apache.pig.test.TestLocal.java
@Test
public void testDefinedFunctions() throws Throwable {
    File tmpFile = File.createTempFile("test", ".txt");
    PrintStream ps = new PrintStream(new FileOutputStream(tmpFile));
    for (int i = 0; i < 1; i++) {
        ps.println(i);
    }
    ps.close();
    pig.registerFunction("foo", new FuncSpec(MyApply.class.getName() + "('foo')"));
    String query = "foreach (group (load '"
            + Util.generateURI(tmpFile.toString(), pig.getPigContext()) + "' using "
            + MyStorage.class.getName() + "()) by " + MyGroup.class.getName()
            + "('all')) generate flatten(foo($1)) ;";
    System.out.println(query);
    pig.registerQuery("asdf_id = " + query);
    Iterator it = pig.openIterator("asdf_id");
    tmpFile.delete();
    Tuple t;
    int count = 0;
    while (it.hasNext()) {
        t = (Tuple) it.next();
        assertEquals("foo", t.get(0).toString());
        Integer.parseInt(t.get(1).toString());
        count++;
    }
    assertEquals(count, MyStorage.COUNT);
}
From source file:org.apache.pig.test.TestLocal.java
public void definedFunctions(String[][] data) throws Throwable {
    File tmpFile = TestHelper.createTempFile(data);
    PrintStream ps = new PrintStream(new FileOutputStream(tmpFile));
    for (int i = 0; i < 1; i++) {
        ps.println(i);
    }
    ps.close();
    pig.registerFunction("foo", new FuncSpec(MyApply.class.getName() + "('foo')"));
    String query = "foreach (group (load '"
            + Util.generateURI(tmpFile.toString(), pig.getPigContext()) + "' using "
            + MyStorage.class.getName() + "()) by " + MyGroup.class.getName()
            + "('all')) generate flatten(foo($1)) ;";
    System.out.println(query);
    pig.registerQuery("asdf_id = " + query);
    Iterator it = pig.openIterator("asdf_id");
    tmpFile.delete();
    Tuple t;
    int count = 0;
    while (it.hasNext()) {
        t = (Tuple) it.next();
        assertEquals("foo", t.get(0).toString());
        // compare string contents, not references (the original used != "")
        if (!t.get(1).toString().isEmpty()) {
            Integer.parseInt(t.get(1).toString());
        }
        count++;
    }
    assertEquals(count, MyStorage.COUNT);
}
From source file:org.apache.pig.test.TestLocal.java
@Test
public void testQualifiedFunctions() throws Throwable {
    // create file
    File tmpFile = File.createTempFile("test", ".txt");
    PrintStream ps = new PrintStream(new FileOutputStream(tmpFile));
    for (int i = 0; i < 1; i++) {
        ps.println(i);
    }
    ps.close();
    // execute query
    String query = "foreach (group (load '"
            + Util.generateURI(tmpFile.toString(), pig.getPigContext()) + "' using "
            + MyStorage.class.getName() + "()) by " + MyGroup.class.getName()
            + "('all')) generate flatten(" + MyApply.class.getName() + "($1)) ;";
    System.out.println(query);
    pig.registerQuery("asdf_id = " + query);
    // verify query
    Iterator<Tuple> it = pig.openIterator("asdf_id");
    Tuple t;
    int count = 0;
    while (it.hasNext()) {
        t = it.next();
        assertEquals(t.get(0).toString(), "Got");
        Integer.parseInt(t.get(1).toString());
        count++;
    }
    assertEquals(MyStorage.COUNT, count);
    tmpFile.delete();
}
From source file:org.apache.hadoop.fs.dfsioe.TestDFSIOEnh.java
@Deprecated
protected static void analyzeResult(FileSystem fs, int testType, long execTime, String resFileName,
        int nrFiles, long fileSize, long tStart, int plotInterval, long sampleUnit, int threshold,
        String tputResFileName, boolean tputReportEach, boolean tputReportTotal) throws IOException {
    // the original report
    // TestDFSIO.analyzeResult(fs, testType, execTime, resFileName);
    long tasks = 0;
    long size = 0;
    long time = 0;
    float rate = 0;
    float sqrate = 0;
    Path reduceFile;
    if (testType == TEST_TYPE_WRITE)
        reduceFile = new Path(DfsioeConfig.getInstance().getWriteDir(fsConfig), "part-00000");
    else
        reduceFile = new Path(DfsioeConfig.getInstance().getReadDir(fsConfig), "part-00000");
    float loggingTime = 0;
    String line;
    ArrayList<String> wrSamples = new ArrayList<String>();
    int maxslot = (int) (execTime / plotInterval) + 1;
    int[] concurrency = new int[maxslot + 1];
    for (int i = 0; i < maxslot + 1; i++)
        concurrency[i] = 0;
    DataInputStream in = null;
    BufferedReader lines = null;
    try {
        in = new DataInputStream(fs.open(reduceFile));
        lines = new BufferedReader(new InputStreamReader(in));
        while ((line = lines.readLine()) != null) {
            StringTokenizer tokens = new StringTokenizer(line, " \t\n\r\f%");
            String attr = tokens.nextToken();
            if (attr.endsWith(":time")) {
                time = Long.parseLong(tokens.nextToken());
            } else if (attr.endsWith(":logging_time")) {
                loggingTime = Float.parseFloat(tokens.nextToken());
            } else if (attr.endsWith(":tput_samples")) {
                String[] tags = attr.split(":");
                wrSamples.add(tags[1]);
                wrSamples.add(tokens.nextToken());
            } else if (attr.endsWith(":io_start_end")) {
                String[] t = tokens.nextToken().split(";");
                int start = (int) ((Long.parseLong(t[0]) - tStart) / plotInterval) + 1;
                int end = (int) ((Long.parseLong(t[1]) - tStart) / plotInterval) - 1;
                if (start < 0)
                    start = 0;
                for (int i = start; i <= end; i++) {
                    if (i > concurrency.length - 1)
                        break;
                    concurrency[i]++;
                }
            } else if (attr.endsWith(":tasks")) {
                tasks = Long.parseLong(tokens.nextToken());
            } else if (attr.endsWith(":size")) {
                size = Long.parseLong(tokens.nextToken());
            } else if (attr.endsWith(":rate")) {
                rate = Float.parseFloat(tokens.nextToken());
            } else if (attr.endsWith(":sqrate")) {
                sqrate = Float.parseFloat(tokens.nextToken());
            }
        }
    } finally {
        if (in != null)
            in.close();
        if (lines != null)
            lines.close();
    }
    double med = rate / 1000 / tasks;
    double stdDev = Math.sqrt(Math.abs(sqrate / 1000 / tasks - med * med));
    String resultLines[] = {
            "----- TestDFSIO ----- : "
                    + ((testType == TEST_TYPE_WRITE) ? "write"
                            : (testType == TEST_TYPE_READ) ? "read" : "unknown"),
            " Date & time: " + new Date(System.currentTimeMillis()),
            " Number of files: " + tasks,
            "Total MBytes processed: " + size / MEGA,
            " Throughput mb/sec: " + size * 1000.0 / (time * MEGA),
            "Average IO rate mb/sec: " + med,
            " IO rate std deviation: " + stdDev,
            " Test exec time sec: " + (float) execTime / 1000,
            "" };
    String[] tputResultLines = analyzeTputSamples(wrSamples, nrFiles, fileSize, tStart, execTime,
            concurrency, plotInterval, sampleUnit, threshold, tputResFileName, tputReportTotal,
            tputReportEach);
    String enhResultLines[] = {
            "-- Extended Metrics -- : "
                    + ((testType == TEST_TYPE_WRITE) ? "write"
                            : (testType == TEST_TYPE_READ) ? "read" : "unknown"),
            "Result file name : " + tputResFileName,
            "Sampling overhead : " + (loggingTime / time) * 100 + "%",
            "Reference Start Time : " + String.valueOf(tStart) };
    PrintStream res = new PrintStream(new FileOutputStream(new File(resFileName), true));
    for (int i = 0; i < resultLines.length; i++) {
        LOG.info(resultLines[i]);
        res.println(resultLines[i]);
    }
    for (int i = 0; i < enhResultLines.length; i++) {
        LOG.info(enhResultLines[i]);
        res.println(enhResultLines[i]);
    }
    for (int j = 0; j < tputResultLines.length; j++) {
        LOG.info(tputResultLines[j]);
        res.println(tputResultLines[j]);
    }
    res.close();
}
From source file:org.apache.hadoop.fs.dfsioe.TestDFSIOEnh.java
/**
 * Analyze aggregated throughput samples.
 */
@Deprecated
protected static String[] analyzeTputSamples(ArrayList<String> wrSamples, int nrFiles, long fileSize,
        long tStart, long execTime, int[] concurrency, int plotInterval, long sampleUnit, int threshold,
        String tputResFile, boolean writeReportTotal, boolean writeReportEach) throws IOException {
    int maxslot = (int) (execTime / plotInterval) + 1;
    double[] bytesTotal = new double[maxslot + 1];
    double[] bytesChanged = new double[maxslot + 1];
    double[] resultValue = new double[maxslot + 1];
    int[] resultSample = new int[maxslot + 1];
    // initialize the arrays
    for (int i = 0; i <= maxslot; i++) {
        bytesTotal[i] = 0;
        bytesChanged[i] = 0;
    }
    for (int f = 0; f < nrFiles; f++) {
        // clear up the arrays
        for (int j = 0; j < maxslot + 1; j++) {
            resultValue[j] = 0;
            resultSample[j] = 0;
        }
        // add to interpolator
        LinearInterpolator processor = new LinearInterpolator();
        double min_timeSpot = Double.MAX_VALUE;
        double max_timeSpot = 0;
        for (int i = 0; i < wrSamples.size(); i += 2) {
            String wrFileName = wrSamples.get(i);
            if (!wrFileName.equals(getFileName(f)))
                continue;
            String sampleLog = wrSamples.get(i + 1);
            String[] samples = sampleLog.split(";");
            // collect the samples
            for (int j = 0; !samples[j].startsWith("EoR"); j++) {
                String[] items = samples[j].split(":");
                long timeStamp = Long.parseLong(items[0]);
                long bytes = Long.parseLong(items[1]);
                double timePassed = (timeStamp - tStart) / (double) plotInterval;
                if (timePassed > max_timeSpot)
                    max_timeSpot = timePassed;
                if (timePassed < min_timeSpot)
                    min_timeSpot = timePassed;
                processor.add((double) timePassed, (double) bytes);
            }
        }
        processor.add(0, 0);
        processor.add(maxslot + 0.1, fileSize);
        // get value for each time slot
        for (int k = 0; k <= maxslot; k++) {
            resultValue[k] = processor.get(k);
        }
        if (writeReportEach) {
            PrintStream res = new PrintStream(new FileOutputStream(
                    new File(tputResFile + "test_io_" + String.valueOf(f) + ".csv"), true));
            for (int i = 0; i <= maxslot - 1; i++)
                bytesChanged[i] = resultValue[i + 1] - resultValue[i];
            bytesChanged[maxslot] = 0;
            for (int ri = 0; ri <= maxslot; ri++)
                res.println(ri + "," + resultValue[ri] / (double) sampleUnit + ","
                        + bytesChanged[ri] / (double) sampleUnit);
            res.close();
        }
        // add into total bytes
        for (int k = 0; k <= maxslot; k++)
            bytesTotal[k] += resultValue[k];
    }
    // change unit
    for (int i = 0; i <= maxslot; i++)
        bytesTotal[i] /= (double) sampleUnit;
    // calculate the aggregated throughput
    for (int i = 0; i <= maxslot - 1; i++)
        bytesChanged[i] = bytesTotal[i + 1] - bytesTotal[i];
    bytesChanged[maxslot] = 0;
    if (writeReportTotal) {
        PrintStream res = new PrintStream(new FileOutputStream(new File(tputResFile), true));
        for (int ri = 0; ri <= maxslot; ri++)
            res.println(ri + "," + bytesTotal[ri] + "," + bytesChanged[ri]);
        res.close();
    }
    String unit = "";
    if (sampleUnit == KILO)
        unit = "kb";
    else if (sampleUnit == MEGA)
        unit = "mb";
    else if (sampleUnit == 1)
        unit = "b";
    else if (sampleUnit == GIGA)
        unit = "gb";
    return calcSummary(bytesChanged, concurrency, threshold, unit);
}
From source file:org.apache.hadoop.mapreduce.jobhistory.TestHistoryViewerPrinter.java
private String run(HistoryViewerPrinter printer) throws Exception {
    ByteArrayOutputStream boas = new ByteArrayOutputStream();
    PrintStream out = new PrintStream(boas, true);
    printer.print(out);
    out.close();
    String outStr = boas.toString("UTF-8");
    LOG.info("out = " + outStr);
    return outStr;
}
From source file:org.apache.pig.test.TestFilterOpNumeric.java
@Test
public void testNumericGte() throws Throwable {
    File tmpFile = File.createTempFile("test", "txt");
    PrintStream ps = new PrintStream(new FileOutputStream(tmpFile));
    for (int i = 0; i < LOOP_COUNT; i++) {
        if (i % 5 == 0) {
            ps.println(i + ":" + (double) i);
        } else if (i % 3 == 0) {
            ps.println(i - 1 + ":" + (double) (i));
        } else {
            ps.println(i + 1 + ":" + (double) (i));
        }
    }
    ps.close();
    pig.registerQuery("A=load '"
            + Util.encodeEscape(Util.generateURI(tmpFile.toString(), pig.getPigContext()))
            + "' using " + PigStorage.class.getName() + "(':');");
    String query = "A = filter A by ($0 > $1 or $0 >= $1);";
    log.info(query);
    pig.registerQuery(query);
    Iterator<Tuple> it = pig.openIterator("A");
    tmpFile.delete();
    while (it.hasNext()) {
        Tuple t = it.next();
        Double first = Double.valueOf(t.get(0).toString());
        Double second = Double.valueOf(t.get(1).toString());
        assertTrue(first.compareTo(second) >= 0);
    }
}
From source file:org.apache.pig.test.TestFilterOpNumeric.java
@Test
public void testNumericLte() throws Throwable {
    File tmpFile = File.createTempFile("test", "txt");
    PrintStream ps = new PrintStream(new FileOutputStream(tmpFile));
    for (int i = 0; i < LOOP_COUNT; i++) {
        if (i % 5 == 0) {
            ps.println(i + ":" + (double) i);
        } else if (i % 3 == 0) {
            ps.println(i - 1 + ":" + (double) (i));
        } else {
            ps.println(i + 1 + ":" + (double) (i));
        }
    }
    ps.close();
    pig.registerQuery("A=load '"
            + Util.encodeEscape(Util.generateURI(tmpFile.toString(), pig.getPigContext()))
            + "' using " + PigStorage.class.getName() + "(':') as (a: double, b:double);");
    String query = "A = filter A by ($0 <= $1 or $0 < $1);";
    log.info(query);
    pig.registerQuery(query);
    Iterator<Tuple> it = pig.openIterator("A");
    tmpFile.delete();
    while (it.hasNext()) {
        Tuple t = it.next();
        Double first = Double.valueOf(t.get(0).toString());
        Double second = Double.valueOf(t.get(1).toString());
        assertTrue(first.compareTo(second) <= 0);
    }
}
From source file:com.zhonghui.tool.controller.HttpClient.java
/**
 * Sends the message to the server over an HTTP POST connection.
 *
 * @param connection the URL connection to write to
 * @param message    the message body to send
 * @param encoder    the character encoding for the output stream
 * @throws Exception if connecting or writing fails
 */
private void requestServer(final URLConnection connection, String message, String encoder)
        throws Exception {
    PrintStream out = null;
    try {
        connection.connect();
        out = new PrintStream(connection.getOutputStream(), false, encoder);
        out.print(message);
        out.flush();
    } catch (Exception e) {
        throw e;
    } finally {
        if (null != out) {
            out.close();
        }
    }
}