List of usage examples for java.io PrintStream close
public void close()
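Closes the stream: the stream is first flushed, then its underlying output stream is closed. Before the real-world examples below, here is a minimal illustrative sketch (not taken from any of the listed projects) of the two close patterns that recur throughout them: an explicit close() in a finally block, guarded so that System.out is never closed, and the equivalent try-with-resources form. The file name "example.txt" is an arbitrary placeholder.

import java.io.FileOutputStream;
import java.io.IOException;
import java.io.PrintStream;

public class PrintStreamCloseExample {
    public static void main(String[] args) throws IOException {
        // Pattern 1: explicit close() in a finally block. The guard ensures
        // we only close a stream we opened ourselves, never System.out.
        PrintStream out = System.out;
        try {
            if (args.length > 0) {
                out = new PrintStream(new FileOutputStream(args[0]));
            }
            out.println("hello");
        } finally {
            if (out != System.out) {
                out.close(); // flushes, then closes the underlying FileOutputStream
            }
        }

        // Pattern 2: try-with-resources (Java 7+) closes the stream
        // automatically, even if the body throws.
        try (PrintStream ps = new PrintStream(new FileOutputStream("example.txt"))) {
            ps.println("hello");
        }
    }
}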
From source file:iDynoOptimizer.MOEAFramework26.src.org.moeaframework.analysis.sensitivity.SampleGenerator.java
@Override
public void run(CommandLine commandLine) throws IOException {
    ParameterFile parameterFile = new ParameterFile(new File(commandLine.getOptionValue("parameterFile")));
    int N = Integer.parseInt(commandLine.getOptionValue("numberOfSamples"));
    int D = parameterFile.size();
    Sequence sequence = null;

    if (commandLine.hasOption("method")) {
        OptionCompleter completer = new OptionCompleter("uniform", "latin", "sobol", "saltelli");
        String method = completer.lookup(commandLine.getOptionValue("method"));

        if (method.equals("latin")) {
            sequence = new LatinHypercube();
        } else if (method.equals("sobol")) {
            sequence = new Sobol();
        } else if (method.equals("saltelli")) {
            N *= (2 * D + 2);
            sequence = new Saltelli();
        } else if (method.equals("uniform")) {
            sequence = new Uniform();
        } else {
            throw new IllegalArgumentException("invalid method: " + commandLine.getOptionValue("method"));
        }
    } else {
        sequence = new Sobol();
    }

    if (commandLine.hasOption("seed")) {
        PRNG.setSeed(Long.parseLong(commandLine.getOptionValue("seed")));
    }

    PrintStream output = System.out;

    try {
        if (commandLine.hasOption("output")) {
            output = new PrintStream(
                    new BufferedOutputStream(new FileOutputStream(commandLine.getOptionValue("output"))));
        }

        double[][] samples = sequence.generate(N, D);

        for (int i = 0; i < N; i++) {
            output.print(parameterFile.get(0).getLowerBound()
                    + samples[i][0] * (parameterFile.get(0).getUpperBound() - parameterFile.get(0).getLowerBound()));

            for (int j = 1; j < D; j++) {
                output.print(' ');
                output.print(parameterFile.get(j).getLowerBound()
                        + samples[i][j] * (parameterFile.get(j).getUpperBound() - parameterFile.get(j).getLowerBound()));
            }

            output.println();
        }
    } finally {
        if ((output != null) && (output != System.out)) {
            output.close();
        }
    }
}
From source file:it.unimi.di.big.mg4j.tool.Combine.java
public void run() throws ConfigurationException, IOException {
    final ProgressLogger pl = new ProgressLogger(LOGGER, logInterval, TimeUnit.MILLISECONDS);
    pl.displayFreeMemory = true;

    final int maxDocSize;

    if (writeSizes) {
        LOGGER.info("Combining sizes...");
        final OutputBitStream sizesOutputBitStream = new OutputBitStream(
                ioFactory.getOutputStream(outputBasename + DiskBasedIndex.SIZES_EXTENSION), bufferSize, false);
        maxDocSize = combineSizes(sizesOutputBitStream);
        sizesOutputBitStream.close();
        LOGGER.info("Sizes combined.");
    } else
        maxDocSize = -1;

    // To write the new term list
    final PrintWriter termFile = new PrintWriter(new BufferedWriter(new OutputStreamWriter(
            ioFactory.getOutputStream(outputBasename + DiskBasedIndex.TERMS_EXTENSION), "UTF-8"), bufferSize));

    // The current term
    MutableString currTerm;
    long totalOccurrency = 0;

    pl.expectedUpdates = haveOccurrencies ? numberOfOccurrences : -1;
    pl.itemsName = haveOccurrencies ? "occurrences" : "terms";
    pl.logInterval = logInterval;
    pl.start("Combining lists...");

    int numUsedIndices, k;
    predictedSize = -1;
    predictedLengthNumBits = -1;

    // Discard first zero from offsets
    if (p != 0)
        for (InputBitStream ibs : offsets)
            ibs.readGamma();

    // TODO: use the front of the queue?
    while (!termQueue.isEmpty()) {
        numUsedIndices = 0;
        // We read a new word from the queue, copy it and write it to the term file
        currTerm = term[k = usedIndex[numUsedIndices++] = termQueue.first()].copy();
        if (DEBUG)
            System.err.println("Merging term " + currTerm);
        currTerm.println(termFile);

        if (termReader[k].readLine(term[k]) == null)
            termQueue.dequeue();
        else
            termQueue.changed();

        // Then, we extract all equal words from the queue, accumulating the set of indices in inIndex and currIndex
        while (!termQueue.isEmpty() && term[termQueue.first()].equals(currTerm)) {
            k = usedIndex[numUsedIndices++] = termQueue.first();
            if (termReader[k].readLine(term[k]) == null)
                termQueue.dequeue();
            else
                termQueue.changed();
        }

        if (numUsedIndices > 1)
            Arrays.sort(usedIndex, 0, numUsedIndices);

        // Load index iterators
        for (int i = numUsedIndices; i-- != 0;)
            indexIterator[usedIndex[i]] = indexReader[usedIndex[i]].nextIterator();

        if (haveOccurrencies) {
            // Compute and write the total occurrency. This works for any type of combination.
            totalOccurrency = 0;
            for (int i = numUsedIndices; i-- != 0;)
                totalOccurrency += occurrencies[usedIndex[i]].readLongGamma();
        }

        if (p != 0) {
            predictedSize = 0;
            predictedLengthNumBits = 0;

            for (int i = numUsedIndices; i-- != 0;) {
                if (index[usedIndex[i]] instanceof BitStreamHPIndex) {
                    predictedSize += offsets[usedIndex[i]].readLongGamma();
                    if (hasPositions)
                        predictedLengthNumBits += posNumBits[usedIndex[i]].readLongGamma();
                } else {
                    // Interleaved index: we must subtract the number of bits used for positions
                    // from the length of the overall inverted list
                    final long t = hasPositions ? posNumBits[usedIndex[i]].readLongGamma() : 0;
                    predictedSize += offsets[usedIndex[i]].readLongGamma() - t;
                    predictedLengthNumBits += t;
                }
            }
        }

        combine(numUsedIndices, totalOccurrency);

        /* A trick to get a correct prediction. */
        if (haveOccurrencies)
            pl.count += totalOccurrency - 1;
        pl.update();
    }

    pl.done();
    termFile.close();

    if (!metadataOnly) {
        for (int i = numIndices; i-- != 0;) {
            indexReader[i].close();
            if (haveOccurrencies)
                occurrencies[i].close();
            if (sumsMaxPos[i] != null)
                sumsMaxPos[i].close();
            if (p != 0) {
                offsets[i].close();
                if (posNumBits[i] != null)
                    posNumBits[i].close();
            }
            termReader[i].close();
        }

        final long indexSize = indexWriter.writtenBits();
        indexWriter.close();
        final Properties properties = indexWriter.properties();
        additionalProperties.setProperty(Index.PropertyKeys.SIZE, indexSize);
        additionalProperties.setProperty(Index.PropertyKeys.MAXDOCSIZE, maxDocSize);
        additionalProperties.setProperty(Index.PropertyKeys.MAXCOUNT, maxCount);
        additionalProperties.setProperty(Index.PropertyKeys.OCCURRENCES, numberOfOccurrences);
        properties.addAll(additionalProperties);
        LOGGER.debug("Post-merge properties: " + new ConfigurationMap(properties));
        Scan.saveProperties(ioFactory, properties, outputBasename + DiskBasedIndex.PROPERTIES_EXTENSION);
    }

    final PrintStream stats = new PrintStream(
            ioFactory.getOutputStream(outputBasename + DiskBasedIndex.STATS_EXTENSION));
    if (!metadataOnly)
        indexWriter.printStats(stats);
    stats.close();
}
From source file:com.sap.prd.mobile.ios.mios.XCodeVerificationCheckMojo.java
private Exception performCheck(ClassRealm verificationCheckRealm, final Check checkDesc)
        throws MojoExecutionException {
    getLog().info(String.format("Performing verification check '%s'.", checkDesc.getClazz()));

    if (getLog().isDebugEnabled()) {
        final Charset defaultCharset = Charset.defaultCharset();
        final ByteArrayOutputStream byteOs = new ByteArrayOutputStream();
        final PrintStream ps;

        try {
            ps = new PrintStream(byteOs, true, defaultCharset.name());
        } catch (UnsupportedEncodingException ex) {
            throw new MojoExecutionException(
                    String.format("Charset '%s' cannot be found.", defaultCharset.name()));
        }

        try {
            verificationCheckRealm.display(ps);
            ps.close();
            getLog().debug(String.format("Using classloader for loading verification check '%s':%s%s",
                    checkDesc.getClazz(), System.getProperty("line.separator"),
                    new String(byteOs.toByteArray(), defaultCharset)));
        } finally {
            IOUtils.closeQuietly(ps);
        }
    }

    try {
        final Class<?> verificationCheckClass = Class.forName(checkDesc.getClazz(), true,
                verificationCheckRealm);
        getLog().debug(String.format("Verification check class %s has been loaded by %s.",
                verificationCheckClass.getName(), verificationCheckClass.getClassLoader()));
        getLog().debug(String.format("Verification check super class %s has been loaded by %s.",
                verificationCheckClass.getSuperclass().getName(),
                verificationCheckClass.getSuperclass().getClassLoader()));
        getLog().debug(String.format("%s class used by this class (%s) has been loaded by %s.",
                VerificationCheck.class.getName(), this.getClass().getName(),
                VerificationCheck.class.getClassLoader()));

        for (final String configuration : getConfigurations()) {
            for (final String sdk : getSDKs()) {
                getLog().info(String.format(
                        "Executing verification check: '%s' for configuration '%s' and sdk '%s'.",
                        verificationCheckClass.getName(), configuration, sdk));
                final VerificationCheck verificationCheck = (VerificationCheck) verificationCheckClass
                        .newInstance();
                verificationCheck
                        .setXcodeContext(getXCodeContext(SourceCodeLocation.WORKING_COPY, configuration, sdk));
                verificationCheck.setMavenProject(project);
                verificationCheck.setEffectiveBuildSettings(new EffectiveBuildSettings());
                try {
                    verificationCheck.check();
                } catch (VerificationException ex) {
                    return ex;
                } catch (RuntimeException ex) {
                    return ex;
                }
            }
        }
        return null;
    } catch (ClassNotFoundException ex) {
        throw new MojoExecutionException("Could not load verification check '" + checkDesc.getClazz()
                + "'. Maybe your classpath has not been properly extended. "
                + "Provide the GAV of the project containing the check as attributes as part of the check definition in the check configuration file.",
                ex);
    } catch (NoClassDefFoundError err) {
        getLog().error(String.format(
                "Could not load verification check '%s'. "
                        + "Maybe your classpath has not been properly extended. "
                        + "Additional dependencies need to be declared inside the check definition file: %s",
                checkDesc.getClazz(), err.getMessage()), err);
        throw err;
    } catch (InstantiationException ex) {
        throw new MojoExecutionException(String.format("Could not instantiate verification check '%s': %s",
                checkDesc.getClazz(), ex.getMessage()), ex);
    } catch (IllegalAccessException ex) {
        throw new MojoExecutionException(String.format("Could not access verification check '%s': %s",
                checkDesc.getClazz(), ex.getMessage()), ex);
    }
}
From source file:at.ofai.music.util.WormFileParseException.java
public void writeBeatsAsText(String fileName) throws Exception {
    PrintStream out = new PrintStream(new File(fileName));
    char separator = '\n';
    if (fileName.endsWith(".csv"))
        separator = ',';
    for (Iterator<Event> it = iterator(); it.hasNext();) {
        Event e = it.next();
        out.printf("%5.3f%c", e.keyDown, it.hasNext() ? separator : '\n');
    }
    out.close();
}
From source file:com.moscona.dataSpace.persistence.DirectoryDataStore.java
private void dumpMetaDataFile() throws DataSpaceException {
    File metadataFile = new File(path + "/" + METADATA_FILE_NAME);
    try {
        PrintStream out = new PrintStream(metadataFile);
        try {
            String yaml = new Yaml().dump(metadata);
            out.println(yaml);
        } finally {
            out.close();
        }
    } catch (Exception e) {
        throw new DataSpaceException(
                "Exception while dumping metadata file " + metadataFile.getAbsolutePath() + ": " + e, e);
    }
}
From source file:hudson.plugins.clearcase.changelog.UcmChangeLogSet.java
@Override
public void saveToFile(File changeLogFile) throws IOException {
    FileOutputStream fileOutputStream = new FileOutputStream(changeLogFile);
    PrintStream stream = new PrintStream(fileOutputStream, false, "UTF-8");
    stream.println("<?xml version='1.0' encoding='UTF-8'?>");
    stream.println("<history>");
    for (UcmActivity activity : history) {
        stream.println("\t<entry>");
        stream.println("\t\t<name>" + escapeXml(activity.getName()) + "</name>");
        stream.println("\t\t<headline>" + escapeXml(activity.getHeadline()) + "</headline>");
        stream.println("\t\t<stream>" + escapeXml(activity.getStream()) + "</stream>");
        stream.println("\t\t<user>" + escapeXml(activity.getUser()) + "</user>");
        for (UcmActivity subActivity : activity.getSubActivities()) {
            printSubActivity(stream, subActivity);
        }
        for (hudson.plugins.clearcase.objects.AffectedFile file : activity.getFiles()) {
            printFile(stream, file);
        }
        stream.println("\t</entry>");
    }
    stream.println("</history>");
    stream.close();
    // PrintStream.close() already closes the wrapped fileOutputStream;
    // the explicit close() below is redundant but harmless.
    fileOutputStream.close();
}
From source file:net.gbmb.collector.example.SimpleLogPush.java
private String storeArchive(Collection collection) throws IOException {
    String cid = collection.getId();
    java.util.Collection<CollectionRecord> records = collectionRecords.get(cid);
    // index
    ByteArrayOutputStream indexStream = new ByteArrayOutputStream(DEFAULT_BUFFER_SIZE);
    PrintStream output = new PrintStream(indexStream);
    // zip
    ByteArrayOutputStream archiveStream = new ByteArrayOutputStream(DEFAULT_BUFFER_SIZE);
    ZipOutputStream zos = new ZipOutputStream(archiveStream);
    output.println("Serialize collection: " + collection.getId());
    output.println("    creation date: " + collection.getCreationDate());
    output.println("    end date: " + collection.getEndDate());
    for (CollectionRecord cr : records) {
        output.print(cr.getRecordDate());
        output.print(" ");
        output.println(cr.getContent());
        if (cr.getAttachment() != null) {
            String attName = cr.getAttachment();
            output.println("    > " + attName);
            ZipEntry entry = new ZipEntry(cr.getAttachment());
            zos.putNextEntry(entry);
            InputStream content = temporaryStorage.get(cid, attName);
            IOUtils.copy(content, zos); // note: the attachment input stream is not closed here
        }
    }
    // add the index file
    output.close();
    ZipEntry index = new ZipEntry("index");
    zos.putNextEntry(index);
    IOUtils.write(indexStream.toByteArray(), zos);
    // close zip
    zos.close();
    ByteArrayInputStream content = new ByteArrayInputStream(archiveStream.toByteArray());
    // send to final storage
    return finalStorage.store(cid, content);
}
From source file:it.units.malelab.ege.MappingPropertiesExperimenter.java
public static void main(String[] args) throws IOException, InterruptedException, ExecutionException {
    final int n = 10000;
    final int nDist = 10000;
    // prepare problems and methods
    List<String> problems = Lists.newArrayList("bool-parity5", "bool-mopm3", "sr-keijzer6", "sr-nguyen7",
            "sr-pagie1", "sr-vladislavleva4", "other-klandscapes3", "other-klandscapes7", "other-text");
    List<String> mappers = new ArrayList<>();
    for (int gs : new int[] { 64, 128, 256, 512, 1024 }) {
        mappers.add("ge-" + gs + "-2");
        mappers.add("ge-" + gs + "-4");
        mappers.add("ge-" + gs + "-8");
        mappers.add("ge-" + gs + "-12");
        mappers.add("pige-" + gs + "-4");
        mappers.add("pige-" + gs + "-8");
        mappers.add("pige-" + gs + "-16");
        mappers.add("pige-" + gs + "-24");
        mappers.add("hge-" + gs + "-0");
        mappers.add("whge-" + gs + "-2");
        mappers.add("whge-" + gs + "-3");
        mappers.add("whge-" + gs + "-5");
    }
    mappers.add("sge-0-5");
    mappers.add("sge-0-6");
    mappers.add("sge-0-7");
    mappers.add("sge-0-8");
    mappers.clear();
    mappers.addAll(Lists.newArrayList("ge-1024-8", "pige-1024-16", "hge-1024-0", "whge-1024-3", "sge-0-6"));
    PrintStream filePrintStream = null;
    if (args.length > 0) {
        filePrintStream = new PrintStream(args[0]);
    } else {
        filePrintStream = System.out;
    }
    filePrintStream.printf("problem;mapper;genotypeSize;param;property;value%n");
    // prepare distances
    Distance<Node<String>> phenotypeDistance = new CachedDistance<>(new LeavesEdit<String>());
    Distance<Sequence> genotypeDistance = new CachedDistance<>(new Hamming());
    // iterate
    for (String problemName : problems) {
        for (String mapperName : mappers) {
            System.out.printf("%20.20s, %20.20s", problemName, mapperName);
            // build problem
            Problem<String, NumericFitness> problem = null;
            if (problemName.equals("bool-parity5")) {
                problem = new Parity(5);
            } else if (problemName.equals("bool-mopm3")) {
                problem = new MultipleOutputParallelMultiplier(3);
            } else if (problemName.equals("sr-keijzer6")) {
                problem = new HarmonicCurve();
            } else if (problemName.equals("sr-nguyen7")) {
                problem = new Nguyen7(1);
            } else if (problemName.equals("sr-pagie1")) {
                problem = new Pagie1();
            } else if (problemName.equals("sr-vladislavleva4")) {
                problem = new Vladislavleva4(1);
            } else if (problemName.equals("other-klandscapes3")) {
                problem = new KLandscapes(3);
            } else if (problemName.equals("other-klandscapes7")) {
                problem = new KLandscapes(7);
            } else if (problemName.equals("other-text")) {
                problem = new Text();
            }
            // build configuration and evolver
            Mapper mapper = null;
            int genotypeSize = Integer.parseInt(mapperName.split("-")[1]);
            int mapperMainParam = Integer.parseInt(mapperName.split("-")[2]);
            if (mapperName.split("-")[0].equals("ge")) {
                mapper = new StandardGEMapper<>(mapperMainParam, 1, problem.getGrammar());
            } else if (mapperName.split("-")[0].equals("pige")) {
                mapper = new PiGEMapper<>(mapperMainParam, 1, problem.getGrammar());
            } else if (mapperName.split("-")[0].equals("sge")) {
                mapper = new SGEMapper<>(mapperMainParam, problem.getGrammar());
            } else if (mapperName.split("-")[0].equals("hge")) {
                mapper = new HierarchicalMapper<>(problem.getGrammar());
            } else if (mapperName.split("-")[0].equals("whge")) {
                mapper = new WeightedHierarchicalMapper<>(mapperMainParam, false, true, problem.getGrammar());
            }
            // prepare things
            Random random = new Random(1);
            Set<Sequence> genotypes = new LinkedHashSet<>(n);
            // build genotypes
            if (mapperName.split("-")[0].equals("sge")) {
                SGEGenotypeFactory<String> factory = new SGEGenotypeFactory<>((SGEMapper) mapper);
                while (genotypes.size() < n) {
                    genotypes.add(factory.build(random));
                }
                genotypeSize = factory.getBitSize();
            } else {
                BitsGenotypeFactory factory = new BitsGenotypeFactory(genotypeSize);
                while (genotypes.size() < n) {
                    genotypes.add(factory.build(random));
                }
            }
            // build and fill map
            Multimap<Node<String>, Sequence> multimap = HashMultimap.create();
            int progress = 0;
            for (Sequence genotype : genotypes) {
                Node<String> phenotype;
                try {
                    if (mapperName.split("-")[0].equals("sge")) {
                        phenotype = mapper.map((SGEGenotype<String>) genotype, new HashMap<>());
                    } else {
                        phenotype = mapper.map((BitsGenotype) genotype, new HashMap<>());
                    }
                } catch (MappingException e) {
                    phenotype = Node.EMPTY_TREE;
                }
                multimap.put(phenotype, genotype);
                progress = progress + 1;
                if (progress % Math.round(n / 10) == 0) {
                    System.out.print(".");
                }
            }
            System.out.println();
            // compute distances
            List<Pair<Double, Double>> allDistances = new ArrayList<>();
            List<Pair<Double, Double>> allValidDistances = new ArrayList<>();
            Multimap<Node<String>, Double> genotypeDistances = ArrayListMultimap.create();
            for (Node<String> phenotype : multimap.keySet()) {
                for (Sequence genotype1 : multimap.get(phenotype)) {
                    for (Sequence genotype2 : multimap.get(phenotype)) {
                        double gDistance = genotypeDistance.d(genotype1, genotype2);
                        genotypeDistances.put(phenotype, gDistance);
                        if (genotypeDistances.get(phenotype).size() > nDist) {
                            break;
                        }
                    }
                    if (genotypeDistances.get(phenotype).size() > nDist) {
                        break;
                    }
                }
            }
            List<Map.Entry<Node<String>, Sequence>> entries = new ArrayList<>(multimap.entries());
            Collections.shuffle(entries, random);
            for (Map.Entry<Node<String>, Sequence> entry1 : entries) {
                for (Map.Entry<Node<String>, Sequence> entry2 : entries) {
                    double gDistance = genotypeDistance.d(entry1.getValue(), entry2.getValue());
                    double pDistance = phenotypeDistance.d(entry1.getKey(), entry2.getKey());
                    allDistances.add(new Pair<>(gDistance, pDistance));
                    if (!Node.EMPTY_TREE.equals(entry1.getKey()) && !Node.EMPTY_TREE.equals(entry2.getKey())) {
                        allValidDistances.add(new Pair<>(gDistance, pDistance));
                    }
                    if (allDistances.size() > nDist) {
                        break;
                    }
                }
                if (allDistances.size() > nDist) {
                    break;
                }
            }
            // compute properties
            double invalidity = (double) multimap.get(Node.EMPTY_TREE).size() / (double) genotypes.size();
            double redundancy = 1 - (double) multimap.keySet().size() / (double) genotypes.size();
            double validRedundancy = redundancy;
            if (multimap.keySet().contains(Node.EMPTY_TREE)) {
                validRedundancy = 1 - ((double) multimap.keySet().size() - 1d)
                        / (double) (genotypes.size() - multimap.get(Node.EMPTY_TREE).size());
            }
            double locality = Utils.pearsonCorrelation(allDistances);
            double validLocality = Utils.pearsonCorrelation(allValidDistances);
            double[] sizes = new double[multimap.keySet().size()];
            double[] meanGenotypeDistances = new double[multimap.keySet().size()];
            int invalidIndex = -1;
            int c = 0;
            for (Node<String> phenotype : multimap.keySet()) {
                if (Node.EMPTY_TREE.equals(phenotype)) {
                    invalidIndex = c;
                }
                sizes[c] = multimap.get(phenotype).size();
                double[] distances = new double[genotypeDistances.get(phenotype).size()];
                int k = 0;
                for (Double distance : genotypeDistances.get(phenotype)) {
                    distances[k] = distance;
                    k = k + 1;
                }
                meanGenotypeDistances[c] = StatUtils.mean(distances);
                c = c + 1;
            }
            double nonUniformity = Math.sqrt(StatUtils.variance(sizes)) / StatUtils.mean(sizes);
            double nonSynonymousity = StatUtils.mean(meanGenotypeDistances) / StatUtils.mean(firsts(allDistances));
            double validNonUniformity = nonUniformity;
            double validNonSynonymousity = nonSynonymousity;
            if (invalidIndex != -1) {
                double[] validSizes = new double[multimap.keySet().size() - 1];
                double[] validMeanGenotypeDistances = new double[multimap.keySet().size() - 1];
                if (invalidIndex > 0) {
                    System.arraycopy(sizes, 0, validSizes, 0, invalidIndex);
                    System.arraycopy(meanGenotypeDistances, 0, validMeanGenotypeDistances, 0, invalidIndex);
                }
                System.arraycopy(sizes, invalidIndex + 1, validSizes, invalidIndex,
                        sizes.length - invalidIndex - 1);
                System.arraycopy(meanGenotypeDistances, invalidIndex + 1, validMeanGenotypeDistances,
                        invalidIndex, meanGenotypeDistances.length - invalidIndex - 1);
                validNonUniformity = Math.sqrt(StatUtils.variance(validSizes)) / StatUtils.mean(validSizes);
                validNonSynonymousity = StatUtils.mean(validMeanGenotypeDistances)
                        / StatUtils.mean(firsts(allValidDistances));
            }
            // print properties
            filePrintStream.printf("%s;%s;%d;%d;invalidity;%f %n", problemName, mapperName.split("-")[0],
                    genotypeSize, mapperMainParam, invalidity);
            filePrintStream.printf("%s;%s;%d;%d;redundancy;%f %n", problemName, mapperName.split("-")[0],
                    genotypeSize, mapperMainParam, redundancy);
            filePrintStream.printf("%s;%s;%d;%d;validRedundancy;%f %n", problemName, mapperName.split("-")[0],
                    genotypeSize, mapperMainParam, validRedundancy);
            filePrintStream.printf("%s;%s;%d;%d;locality;%f %n", problemName, mapperName.split("-")[0],
                    genotypeSize, mapperMainParam, locality);
            filePrintStream.printf("%s;%s;%d;%d;validLocality;%f %n", problemName, mapperName.split("-")[0],
                    genotypeSize, mapperMainParam, validLocality);
            filePrintStream.printf("%s;%s;%d;%d;nonUniformity;%f %n", problemName, mapperName.split("-")[0],
                    genotypeSize, mapperMainParam, nonUniformity);
            filePrintStream.printf("%s;%s;%d;%d;validNonUniformity;%f %n", problemName, mapperName.split("-")[0],
                    genotypeSize, mapperMainParam, validNonUniformity);
            filePrintStream.printf("%s;%s;%d;%d;nonSynonymousity;%f %n", problemName, mapperName.split("-")[0],
                    genotypeSize, mapperMainParam, nonSynonymousity);
            filePrintStream.printf("%s;%s;%d;%d;validNonSynonymousity;%f %n", problemName,
                    mapperName.split("-")[0], genotypeSize, mapperMainParam, validNonSynonymousity);
        }
    }
    // Close only if we opened a file; when printing to System.out, leave it open.
    if (filePrintStream != null && filePrintStream != System.out) {
        filePrintStream.close();
    }
}
From source file:com.inrista.loggliest.Loggly.java
private static void logToFile(List<JSONObject> msgBatch) {
    if (msgBatch.isEmpty())
        return;

    try {
        // Size of logs on disk
        File dir = mContext.getDir(LOG_FOLDER, Context.MODE_PRIVATE);
        long totalSize = 0;
        File[] logFiles = dir.listFiles();
        for (File logFile : logFiles)
            totalSize += logFile.length();

        // Check if size of logs on disk exceeds the limit, drop
        // oldest messages in this case
        if (totalSize > mMaxSizeOnDisk) {
            int numFiles = logFiles.length;
            if (numFiles <= 1)
                return;
            oldestLogFile().delete();
        }

        // Create a new log file if necessary
        if (mRecentLogFile == null || mRecentLogFile.length() > LOGGLY_MAX_POST_SIZE)
            mRecentLogFile = createLogFile();

        PrintStream ps = new PrintStream(new FileOutputStream(mRecentLogFile, true));
        for (JSONObject msg : msgBatch) {
            try {
                if (msg.has(UPDATE_STICKY_INFO_MSG)) {
                    mStickyInfo.put(msg.getString("key"), msg.getString("msg"));
                    continue;
                }

                // Reformat timestamp to ISO-8601 as expected by Loggly
                long timestamp = msg.getLong("timestamp");
                msg.remove("timestamp");
                msg.put("timestamp", mDateFormat.format(timestamp));

                // Append default info
                if (mAppendDefaultInfo) {
                    msg.put("appversionname", mAppVersionName);
                    msg.put("appversioncode", Integer.toString(mAppVersionCode));
                    msg.put("devicemodel", android.os.Build.MODEL);
                    msg.put("androidversioncode", Integer.toString(android.os.Build.VERSION.SDK_INT));
                }

                // Append sticky info
                if (!mStickyInfo.isEmpty()) {
                    for (String key : mStickyInfo.keySet())
                        msg.put(key, mStickyInfo.get(key));
                }
            } catch (JSONException e) {
                // ignore: the message is still written below, just without the extra fields
            }
            ps.println(msg.toString().replace("\n", "\\n"));
        }
        ps.close();
    } catch (FileNotFoundException e) {
        // the log file could not be opened; this batch is silently dropped
    }
}
From source file:org.apache.pig.backend.hadoop.executionengine.MRExecutionEngine.java
@Override
public void explain(LogicalPlan lp, PigContext pc, PrintStream ps, String format, boolean verbose, File file,
        String suffix) throws PlanException, VisitorException, IOException, FrontendException {
    PrintStream pps = ps;
    PrintStream eps = ps;

    try {
        if (file != null) {
            pps = new PrintStream(new File(file, "physical_plan-" + suffix));
            eps = new PrintStream(new File(file, "exec_plan-" + suffix));
        }

        PhysicalPlan pp = compile(lp, pc.getProperties());
        pp.explain(pps, format, verbose);

        MapRedUtil.checkLeafIsStore(pp, pigContext);

        launcher.explain(pp, pigContext, eps, format, verbose);
    } finally {
        launcher.reset();
        // Only close the streams if we opened them.
        if (file != null) {
            pps.close();
            eps.close();
        }
    }
}