List of usage examples for java.io.PrintStream.close()
public void close()
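Before the project-specific examples below, here is a minimal sketch of the basic pattern (the file name "example.txt" and the messages are placeholders, not taken from any of the sources): an explicit close() call, and the equivalent try-with-resources form, which calls close() automatically because PrintStream implements AutoCloseable. Note that PrintStream.close() never throws IOException; write errors are reported through checkError() instead.

import java.io.FileOutputStream;
import java.io.IOException;
import java.io.PrintStream;

public class PrintStreamCloseSketch {
    public static void main(String[] args) throws IOException {
        // Explicit close(): flushes the stream and releases the underlying file handle.
        PrintStream ps = new PrintStream(new FileOutputStream("example.txt"));
        ps.println("hello");
        ps.close();

        // Equivalent with try-with-resources: close() runs automatically,
        // even if println() throws, because PrintStream implements AutoCloseable.
        try (PrintStream ps2 = new PrintStream(new FileOutputStream("example.txt", true))) {
            ps2.println("world");
        }
    }
}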
From source file:org.apache.pig.test.TestLocal.java
@Test
public void testStoreFunction() throws Throwable {
    File tmpFile = File.createTempFile("test", ".txt");
    PrintStream ps = new PrintStream(new FileOutputStream(tmpFile));
    for (int i = 0; i < 10; i++) {
        ps.println(i + "\t" + i);
    }
    ps.close();

    // Load, Execute and Store query
    String query = "foreach (load '" + Util.generateURI(tmpFile.toString(), pig.getPigContext())
            + "') generate $0,$1;";
    System.out.println(query);
    pig.registerQuery("asdf_id = " + query);
    try {
        pig.deleteFile("frog");
    } catch (Exception e) {
    }
    pig.store("asdf_id", "frog", MyStorage.class.getName() + "()");

    // verify query
    InputStream is = FileLocalizer.open("frog", pig.getPigContext());
    BufferedReader br = new BufferedReader(new InputStreamReader(is));
    String line;
    int i = 0;
    while ((line = br.readLine()) != null) {
        assertEquals(line, Integer.toString(i) + '-' + Integer.toString(i));
        i++;
    }
    br.close();
    try {
        pig.deleteFile("frog");
    } catch (Exception e) {
    }
    tmpFile.delete();
}
From source file:com.meetingninja.csse.database.ProjectDatabaseAdapter.java
public static Project createProject(Project p) throws IOException, MalformedURLException {
    // Server URL setup
    String _url = getBaseUri().build().toString();

    // establish connection
    URL url = new URL(_url);
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.setRequestMethod("POST");
    addRequestHeader(conn, true);

    // prepare POST payload
    ByteArrayOutputStream json = new ByteArrayOutputStream();
    // this type of print stream allows us to get a string easily
    PrintStream ps = new PrintStream(json);
    // Create a generator to build the JSON string
    JsonGenerator jgen = JFACTORY.createGenerator(ps, JsonEncoding.UTF8);

    // Build JSON Object
    jgen.writeStartObject();
    jgen.writeStringField(Keys.Project.TITLE, p.getProjectTitle());
    jgen.writeArrayFieldStart(Keys.Project.MEETINGS);
    for (Meeting meeting : p.getMeetings()) {
        jgen.writeStartObject();
        jgen.writeStringField(Keys.Meeting.ID, meeting.getID());
        jgen.writeEndObject();
    }
    jgen.writeEndArray();
    jgen.writeArrayFieldStart(Keys.Project.NOTES);
    for (Note note : p.getNotes()) {
        jgen.writeStartObject();
        jgen.writeStringField(Keys.Note.ID, note.getID());
        jgen.writeEndObject();
    }
    jgen.writeEndArray();
    jgen.writeArrayFieldStart(Keys.Project.MEMBERS);
    for (User member : p.getMembers()) {
        jgen.writeStartObject();
        jgen.writeStringField(Keys.User.ID, member.getID());
        jgen.writeEndObject();
    }
    jgen.writeEndArray();
    jgen.writeEndObject();
    jgen.close();

    // Get JSON Object payload from print stream
    String payload = json.toString("UTF8");
    ps.close();

    // send payload
    sendPostPayload(conn, payload);
    String response = getServerResponse(conn);

    // prepare to get the id of the created Meeting
    // Map<String, String> responseMap = new HashMap<String, String>();
    /*
     * result should get valid={"meetingID":"##"}
     */
    String result = new String();
    if (!response.isEmpty()) {
        // responseMap = MAPPER.readValue(response,
        //         new TypeReference<HashMap<String, String>>() {
        //         });
        JsonNode projectNode = MAPPER.readTree(response);
        if (!projectNode.has(Keys.Project.ID)) {
            result = "invalid";
        } else
            result = projectNode.get(Keys.Project.ID).asText();
    }
    if (!result.equalsIgnoreCase("invalid"))
        p.setProjectID(result);
    conn.disconnect();
    return p;
}
From source file:edu.emory.mathcs.nlp.zzz.CSVSentiment.java
public void toTSV(String inputFile) throws Exception {
    CSVParser parser = new CSVParser(IOUtils.createBufferedReader(inputFile), CSVFormat.DEFAULT);
    PrintStream fout = IOUtils.createBufferedPrintStream(inputFile + ".tsv");
    List<CSVRecord> records = parser.getRecords();
    List<Token> tokens;
    CSVRecord record;
    int label;

    System.out.println(inputFile);

    for (int i = 0; i < records.size(); i++) {
        if (i == 0)
            continue;
        record = records.get(i);
        label = toIntLabel(record.get(0));
        tokens = tokenizer.tokenize(record.get(6));
        fout.println(label + "\t" + Joiner.join(tokens, " ", Token::getWordForm));
    }

    fout.close();
    parser.close();
}
From source file:gestionale.persistence.DAOSalva.java
public void scriviFileSalva(int numeroAvii) {
    numeroAvii--;
    if (numeroAvii >= 0) {
        try {
            PrintStream ps = new PrintStream(new FileOutputStream("License.txt"));
            String caratteriCifrati = encrypt("" + numeroAvii);
            // logger.info(caratteriCifrati);
            ps.println(caratteriCifrati);
            ps.close();
        } catch (FileNotFoundException ex) {
            logger.info("FileNotFoundException: " + ex.getMessage());
        }
    }
}
From source file:azkaban.jobtype.ReportalHiveRunner.java
@Override
protected void runReportal() throws Exception {
    System.out.println("Reportal Hive: Setting up Hive");
    HiveConf conf = new HiveConf(SessionState.class);

    if (System.getenv("HADOOP_TOKEN_FILE_LOCATION") != null) {
        conf.set("mapreduce.job.credentials.binary", System.getenv("HADOOP_TOKEN_FILE_LOCATION"));
    }

    File tempTSVFile = new File("./temp.tsv");
    OutputStream tsvTempOutputStream = new BoundedOutputStream(
            new BufferedOutputStream(new FileOutputStream(tempTSVFile)), outputCapacity);
    PrintStream logOut = System.out;

    // NOTE: It is critical to do this here so that log4j is reinitialized
    // before any of the other core hive classes are loaded
    // criccomini@linkedin.com: I disabled this because it appears to swallow
    // all future logging (even outside of hive).
    // SessionState.initHiveLog4j();

    String orig = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEAUXJARS);

    CliSessionState sessionState = new CliSessionState(conf);
    sessionState.in = System.in;
    sessionState.out = new PrintStream(tsvTempOutputStream, true, "UTF-8");
    sessionState.err = new PrintStream(logOut, true, "UTF-8");

    OptionsProcessor oproc = new OptionsProcessor();

    // Feed in Hive Args
    String[] args = buildHiveArgs();
    if (!oproc.process_stage1(args)) {
        throw new Exception("unable to parse options stage 1");
    }
    if (!oproc.process_stage2(sessionState)) {
        throw new Exception("unable to parse options stage 2");
    }

    // Set all properties specified via command line
    for (Map.Entry<Object, Object> item : sessionState.cmdProperties.entrySet()) {
        conf.set((String) item.getKey(), (String) item.getValue());
    }

    SessionState.start(sessionState);

    String expanded = expandHiveAuxJarsPath(orig);
    if (orig == null || orig.equals(expanded)) {
        System.out.println("Hive aux jars variable not expanded");
    } else {
        System.out.println("Expanded aux jars variable from [" + orig + "] to [" + expanded + "]");
        HiveConf.setVar(conf, HiveConf.ConfVars.HIVEAUXJARS, expanded);
    }

    if (!ShimLoader.getHadoopShims().usesJobShell()) {
        // hadoop-20 and above - we need to augment classpath using hiveconf
        // components
        // see also: code in ExecDriver.java
        ClassLoader loader = conf.getClassLoader();
        String auxJars = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEAUXJARS);
        System.out.println("Got auxJars = " + auxJars);

        if (StringUtils.isNotBlank(auxJars)) {
            loader = Utilities.addToClassPath(loader, StringUtils.split(auxJars, ","));
        }
        conf.setClassLoader(loader);
        Thread.currentThread().setContextClassLoader(loader);
    }

    CliDriver cli = new CliDriver();
    int returnValue = 0;
    String prefix = "";

    returnValue = cli.processLine("set hive.cli.print.header=true;");
    String[] queries = jobQuery.split("\n");
    for (String line : queries) {
        if (!prefix.isEmpty()) {
            prefix += '\n';
        }
        if (line.trim().endsWith(";") && !line.trim().endsWith("\\;")) {
            line = prefix + line;
            line = injectVariables(line);
            System.out.println("Reportal Hive: Running Hive Query: " + line);
            System.out.println("Reportal Hive: HiveConf HIVEAUXJARS: "
                    + HiveConf.getVar(conf, HiveConf.ConfVars.HIVEAUXJARS));
            returnValue = cli.processLine(line);
            prefix = "";
        } else {
            prefix = prefix + line;
            continue;
        }
    }

    tsvTempOutputStream.close();

    // convert tsv to csv and write it to disk
    System.out.println("Reportal Hive: Converting output");
    InputStream tsvTempInputStream = new BufferedInputStream(new FileInputStream(tempTSVFile));
    Scanner rowScanner = new Scanner(tsvTempInputStream);
    PrintStream csvOutputStream = new PrintStream(outputStream);
    while (rowScanner.hasNextLine()) {
        String tsvLine = rowScanner.nextLine();
        // strip all quotes, and then quote the columns
        csvOutputStream.println("\"" + tsvLine.replace("\"", "").replace("\t", "\",\"") + "\"");
    }
    rowScanner.close();
    csvOutputStream.close();

    // Flush the temp file out
    tempTSVFile.delete();

    if (returnValue != 0) {
        throw new Exception("Hive query finished with a non zero return code");
    }

    System.out.println("Reportal Hive: Ended successfully");
}
From source file:eu.scape_project.tika_identify.TikaIdentification.java
public void startApplication() throws IOException {
    long startClock = System.currentTimeMillis();
    File dir = new File(appConfig.getInputStr());
    if (!dir.isDirectory()) {
        throw new IllegalArgumentException("Input is not a directory: " + appConfig.getInputStr());
    }
    PrintStream pout = null;
    String outputPathStr = appConfig.getOutputStr();
    if (outputPathStr != null) {
        FileOutputStream fos;
        try {
            fos = new FileOutputStream(outputPathStr, true);
            pout = new PrintStream(fos);
            System.setOut(pout);
        } catch (FileNotFoundException ex) {
            LOG.error("File not found error", ex);
        }
    }
    this.processFiles(new File(appConfig.getInputStr()));
    if (pout != null) {
        pout.close();
    }
    long elapsedTimeMillis = System.currentTimeMillis() - startClock;
    LOG.info("Identification finished after " + elapsedTimeMillis + " milliseconds");
}
From source file:JavaFiles.AbstractWsToolClient.java
/**
 * Write a string to a file.
 *
 * @param file the file to create/write to
 * @param data the string to write
 * @return an integer value indicating success/failure
 * @throws IOException if there is a problem with the file operations
 */
public int writeFile(File file, String data) throws IOException {
    printDebugMessage("writeFile", "Begin", 1);
    printDebugMessage("writeFile", "file: " + file.getName(), 2);
    printDebugMessage("writeFile", "data: " + data.length() + " characters", 2);
    OutputStream os = new FileOutputStream(file);
    PrintStream p = new PrintStream(os);
    p.println(data);
    p.close();
    printDebugMessage("writeFile", "End", 1);
    return 0;
}
From source file:com.ning.metrics.collector.processing.db.DatabaseCounterStorage.java
/**
 * Serialize the given rolled-up counter data's distribution to a byte
 * array for storage in a blob.
 *
 * @param counter
 * @return
 * @throws java.io.IOException technically, but unlikely because all ops are
 *         in memory
 */
public static byte[] serializeDistribution(RolledUpCounterData counter) throws IOException {
    ByteArrayOutputStream result = new ByteArrayOutputStream();
    GZIPOutputStream zipStream = new GZIPOutputStream(result);
    PrintStream printer = new PrintStream(zipStream, true, "UTF-8");
    int index = 0;

    // iterate through all the entries in the distribution and generate a
    // serialization of the form:
    //
    // uniqueId1|count1\n
    // uniqueId2|count2\n
    // ...
    //
    // and then gzip the result into a byte array
    for (Map.Entry<String, Integer> entry : counter.getDistribution().entrySet()) {
        String id = entry.getKey();
        int value = entry.getValue() == null ? 0 : entry.getValue();

        // Don't write unique ids that have a zero count
        if (value == 0) {
            continue;
        }
        if (index++ > 0) {
            printer.println();
        }

        printer.print(id);
        printer.print('|');
        printer.print(Integer.toString(value));
    }

    zipStream.finish();
    printer.close();

    return result.toByteArray();
}
From source file:eu.scape_project.droid_identify.DroidIdentification.java
public void startApplication() throws IOException {
    long startClock = System.currentTimeMillis();
    File dir = new File(appConfig.getInputStr());
    // if (!dir.isDirectory()) {
    //     throw new IllegalArgumentException("Input is not a directory: " + appConfig.getInputStr());
    // }
    PrintStream pout = null;
    String outputPathStr = appConfig.getOutputStr();
    if (outputPathStr != null) {
        FileOutputStream fos;
        try {
            fos = new FileOutputStream(outputPathStr, true);
            pout = new PrintStream(fos);
            System.setOut(pout);
        } catch (FileNotFoundException ex) {
            LOG.error("File not found error", ex);
        }
    }
    this.processFiles(new File(appConfig.getInputStr()));
    if (pout != null) {
        pout.close();
    }
    long elapsedTimeMillis = System.currentTimeMillis() - startClock;
    LOG.info("Identification finished after " + elapsedTimeMillis + " milliseconds");
}
From source file:eu.scape_project.up2ti.output.SimpleKeyValueOutputWriter.java
/**
 * Record method for command line application.
 *
 * @param resultMap Result map where K: recordkey-identificationtype, V:
 *        tool identificationtype identificationresult)
 */
@Override
public void write(HashMap<String, List<String>> resultMap) {
    Iterator iter = resultMap.keySet().iterator();
    while (iter.hasNext()) {
        String key = (String) iter.next();
        List<String> valueList = resultMap.get(key);
        PrintStream pout = null;
        if (outputPathStr != null) {
            FileOutputStream fos;
            try {
                fos = new FileOutputStream(outputPathStr, true);
                pout = new PrintStream(fos);
                System.setOut(pout);
            } catch (FileNotFoundException ex) {
                LOG.error("File not found error", ex);
            }
        }
        for (String value : valueList) {
            System.out.println(key + separator + value);
        }
        if (pout != null) {
            pout.close();
        }
    }
}