Example usage for java.io PrintStream close

List of usage examples for java.io PrintStream close

Introduction

On this page you can find example usages of java.io.PrintStream.close().

Prototype

public void close() 

Source Link

Document

Closes the stream.

Usage

From source file:br.pcfl.up.mail.store.WebDavStore.java

/**
 * Returns a string of the stacktrace for a Throwable to allow for easy inline printing of errors.
 *
 * @param t the throwable whose stack trace should be rendered; must not be null
 * @return the full stack trace text, exactly as produced by {@code printStackTrace}
 */
private String processException(Throwable t) {
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    // try-with-resources guarantees the stream is flushed and closed even if
    // printStackTrace fails; closing has no effect on the in-memory buffer.
    try (PrintStream ps = new PrintStream(baos)) {
        t.printStackTrace(ps);
    }
    return baos.toString();
}

From source file:org.apache.hadoop.hbase.rest.PerformanceEvaluation.java

/**
 * Creates the job input file in HDFS under a timestamped directory and fills
 * it with one client-description line per (chunk, client) pair, written in
 * randomized order.
 *
 * @param c the Hadoop configuration used to obtain the filesystem
 * @return the directory containing the generated input file
 * @throws IOException if the filesystem cannot be accessed or written
 */
private Path writeInputFile(final Configuration c) throws IOException {
    // Inputs live under PERF_EVAL_DIR/<timestamp>/inputs
    final SimpleDateFormat timestampFormat = new SimpleDateFormat("yyyyMMddHHmmss");
    final Path jobDir = new Path(PERF_EVAL_DIR, timestampFormat.format(new Date()));
    final Path inputDir = new Path(jobDir, "inputs");

    final FileSystem fs = FileSystem.get(c);
    fs.mkdirs(inputDir);
    final Path inputFile = new Path(inputDir, "input.txt");
    final PrintStream out = new PrintStream(fs.create(inputFile));
    // Randomize line order by keying each line on its murmur hash; the
    // TreeMap then iterates in hash order rather than generation order.
    final Map<Integer, String> shuffled = new TreeMap<Integer, String>();
    final Hash hasher = MurmurHash.getInstance();
    final int perClientRows = this.R / this.N;
    try {
        for (int chunk = 0; chunk < 10; chunk++) {
            for (int client = 0; client < N; client++) {
                final String line = "tableName=" + this.tableName
                        + ", startRow=" + ((client * perClientRows) + (chunk * (perClientRows / 10)))
                        + ", perClientRunRows=" + (perClientRows / 10)
                        + ", totalRows=" + this.R
                        + ", clients=" + this.N
                        + ", flushCommits=" + this.flushCommits
                        + ", writeToWAL=" + this.writeToWAL
                        + ", useTags=" + this.useTags
                        + ", noOfTags=" + this.noOfTags;
                shuffled.put(hasher.hash(Bytes.toBytes(line)), line);
            }
        }
        for (final Map.Entry<Integer, String> entry : shuffled.entrySet()) {
            out.println(entry.getValue());
        }
    } finally {
        out.close();
    }
    return inputDir;
}

From source file:kieker.tools.traceAnalysis.TraceAnalysisTool.java

/**
 * Writes a trace equivalence class report to the file derived from the given
 * output filename prefix: one line per equivalence class, listing its
 * cardinality, execution count, representative trace id and max stack depth.
 *
 * @param outputFnPrefixL prefix from which the report filename is derived
 * @param traceEquivFilter filter holding the computed equivalence class map
 * @return true on success, false if the output file could not be created
 * @throws IOException on canonical-path resolution or stream-close failure
 */
private boolean writeTraceEquivalenceReport(final String outputFnPrefixL,
        final TraceEquivalenceClassFilter traceEquivFilter) throws IOException {
    boolean retVal = true;
    final String outputFn = new File(outputFnPrefixL).getCanonicalPath();
    FileOutputStream fos = null;
    PrintStream ps = null;
    try {
        fos = new FileOutputStream(outputFn);
        ps = new PrintStream(fos, false, ENCODING);
        int numClasses = 0;
        final Map<ExecutionTrace, Integer> classMap = traceEquivFilter.getEquivalenceClassMap(); // NOPMD (UseConcurrentHashMap)
        for (final Entry<ExecutionTrace, Integer> e : classMap.entrySet()) {
            final ExecutionTrace t = e.getKey();
            ps.println("Class " + numClasses++ + " ; cardinality: " + e.getValue() + "; # executions: "
                    + t.getLength() + "; representative: " + t.getTraceId() + "; max. stack depth: "
                    + t.getMaxEss());
        }
        if (LOG.isDebugEnabled()) {
            LOG.debug("");
            LOG.debug("#");
            LOG.debug("# Plugin: " + "Trace equivalence report");
            LOG.debug("Wrote " + numClasses + " equivalence class" + (numClasses > 1 ? "es" : "") + " to file '"
                    + outputFn + "'"); // NOCS
        }
    } catch (final FileNotFoundException e) {
        LOG.error("File not found", e);
        retVal = false;
    } finally {
        if (ps != null) {
            ps.close();
        } else if (fos != null) {
            // PrintStream construction failed (e.g. unsupported encoding):
            // close the already-opened file stream to avoid a descriptor leak.
            fos.close();
        }
    }

    return retVal;
}

From source file:com.tamingtext.tagrecommender.TestStackOverflowTagger.java

/**
 * Runs the tag recommender over the StackOverflow test stream, scoring the
 * predicted tags against each post's actual tags, and writes summary
 * statistics and per-tag accuracy to the configured output file.
 *
 * Progress stats are dumped to stderr every 100 posts; final stats go to
 * both stderr and the output file.
 */
public void execute() {
    PrintStream out = null;

    try {
        OpenObjectIntHashMap<String> tagCounts = new OpenObjectIntHashMap<String>();
        OpenObjectIntHashMap<String> tagCorrect = new OpenObjectIntHashMap<String>();
        loadTags(tagCounts);

        StackOverflowStream stream = new StackOverflowStream();
        stream.open(inputFile.getAbsolutePath());

        out = new PrintStream(new FileOutputStream(outputFile));

        int correctTagCount = 0;
        int postCount = 0;

        HashSet<String> postTags = new HashSet<String>();
        float postPctCorrect;

        int totalSingleCorrect = 0; // posts with at least one correct tag
        int totalHalfCorrect = 0; // posts with >= 50% of their tags predicted

        for (StackOverflowPost post : stream) {
            correctTagCount = 0;
            postCount++;

            postTags.clear();
            postTags.addAll(post.getTags());
            // Tally occurrences of each known (pre-loaded) tag in the test data.
            for (String tag : post.getTags()) {
                if (tagCounts.containsKey(tag)) {
                    tagCounts.adjustOrPutValue(tag, 1, 1);
                }
            }

            ScoreTag[] tags = client.getTags(post.getTitle() + "\n" + post.getBody(), maxTags);

            for (ScoreTag tag : tags) {
                if (postTags.contains(tag.getTag())) {
                    correctTagCount += 1;
                    tagCorrect.adjustOrPutValue(tag.getTag(), 1, 1);
                }
            }

            if (correctTagCount > 0) {
                totalSingleCorrect += 1;
            }

            // Guard the division: a post with no tags previously produced NaN
            // (which also failed the >= test, so the outcome is unchanged).
            postPctCorrect = postTags.isEmpty() ? 0.0f : correctTagCount / (float) postTags.size();
            if (postPctCorrect >= 0.50f) {
                totalHalfCorrect += 1;
            }

            if ((postCount % 100) == 0) {
                dumpStats(System.err, postCount, totalSingleCorrect, totalHalfCorrect);
            }

        }

        dumpStats(System.err, postCount, totalSingleCorrect, totalHalfCorrect);
        dumpStats(out, postCount, totalSingleCorrect, totalHalfCorrect);
        dumpTags(out, tagCounts, tagCorrect);
    } catch (Exception ex) {
        // Use the cause constructor rather than initCause(): it preserves the
        // cause chain and gives the wrapper a meaningful message.
        throw new RuntimeException(ex);
    } finally {
        if (out != null) {
            out.close();
        }
    }
}

From source file:iDynoOptimizer.MOEAFramework26.src.org.moeaframework.analysis.sensitivity.ExtractData.java

@Override
public void run(CommandLine commandLine) throws Exception {
    // Column separator for the output; defaults to a single space.
    String separator = commandLine.hasOption("separator") ? commandLine.getOptionValue("separator") : " ";

    // Remaining positional args name the fields/metrics to extract per entry.
    String[] fields = commandLine.getArgs();

    // indicators are prepared, run the data extraction routine
    ResultFileReader input = null; // opened below; closed in its matching finally
    PrintStream output = null;

    try {
        // setup the problem
        if (commandLine.hasOption("problem")) {
            problem = ProblemFactory.getInstance().getProblem(commandLine.getOptionValue("problem"));
        } else {
            // No named problem: fall back to a stub sized by --dimension.
            problem = new ProblemStub(Integer.parseInt(commandLine.getOptionValue("dimension")));
        }

        try {
            input = new ResultFileReader(problem, new File(commandLine.getOptionValue("input")));

            try {
                // Write to --output if given, otherwise to stdout.
                output = commandLine.hasOption("output")
                        ? new PrintStream(new File(commandLine.getOptionValue("output")))
                        : System.out;

                // optionally print header line
                if (!commandLine.hasOption("noheader")) {
                    output.print('#');

                    for (int i = 0; i < fields.length; i++) {
                        if (i > 0) {
                            output.print(separator);
                        }

                        output.print(fields[i]);
                    }

                    output.println();
                }

                // process entries
                while (input.hasNext()) {
                    ResultEntry entry = input.next();
                    Properties properties = entry.getProperties();

                    for (int i = 0; i < fields.length; i++) {
                        if (i > 0) {
                            output.print(separator);
                        }

                        if (properties.containsKey(fields[i])) {
                            // Field stored directly in the entry's properties.
                            output.print(properties.getProperty(fields[i]));
                        } else if (fields[i].startsWith("+")) {
                            // "+name" denotes a derived value computed on the fly.
                            output.print(evaluate(fields[i].substring(1), entry, commandLine));
                        } else {
                            throw new FrameworkException("missing field");
                        }
                    }

                    output.println();
                }
            } finally {
                // Never close System.out; only close a file-backed stream.
                if ((output != null) && (output != System.out)) {
                    output.close();
                }
            }
        } finally {
            if (input != null) {
                input.close();
            }
        }
    } finally {
        if (problem != null) {
            problem.close();
        }
    }
}

From source file:com.surevine.alfresco.webscript.gsa.canuserseeitems.CanUserSeeItemsCommandWebscriptImpl.java

/**
 * Entry point for the canUserSeeItems webscript: reads an XML payload naming
 * a set of NodeRefs and a run-as user, evaluates item visibility and writes
 * the XML result to the response. Failures are mapped to HTTP status codes,
 * echoed to the caller (when the output stream is available) and logged.
 *
 * @param request  the inbound webscript request carrying the XML payload
 * @param response the webscript response the XML result is written to
 * @throws IOException if reading the request payload fails
 */
@Override
public void execute(WebScriptRequest request, WebScriptResponse response) throws IOException {
    if (_logger.isDebugEnabled()) {
        _logger.debug("Beginning canUserSeeItems Webscript");
    }
    PrintStream ps = null;
    try {

        ps = new PrintStream(response.getOutputStream());

        InputStream payloadInputStream = request.getContent().getInputStream();
        StringWriter writer = new StringWriter();
        IOUtils.copy(payloadInputStream, writer, "UTF-8");
        String requestXMLString = writer.toString();
        requestXMLString = URLDecoder.decode(requestXMLString, "UTF-8");
        if (_logger.isDebugEnabled()) {
            _logger.debug("Request: " + requestXMLString);
        }

        if (null == requestXMLString) {
            throw new GSAInvalidParameterException("Request payload XML is empty", null, 500177);
        }

        Map<Integer, Object> parsedRequestDataMap = this.requestToNodeRefCollection(requestXMLString);

        Collection<NodeRef> nodeRefs = (Collection<NodeRef>) parsedRequestDataMap.get(NODE_REFS_KEY);
        String runAsUser = (String) parsedRequestDataMap.get(RUN_AS_USER_KEY);

        String XMLResponseString = this.getXMLResponse(nodeRefs, runAsUser);
        if (_logger.isDebugEnabled()) {
            _logger.debug("Response: " + XMLResponseString);
        }
        ps.println(XMLResponseString); //Write the response to the target OutputStream

    }
    //If we catch an exception, return an appropriate HTTP response code and a summary of the exception, then dump the full
    //details of the exception to the logs.  Non-GSA Exceptions are wrapped, non-Exception Throwables are left to percolate upward
    catch (GSAInvalidParameterException e) {
        response.setStatus(400); //Something wrong with the parameters so we use Bad Request
        if (ps != null) { // ps is null if getOutputStream() itself failed
            ps.println(e);
        }
        _logger.error(e.getMessage(), e);
    } catch (GSAProcessingException exx) {
        response.setStatus(500); //Internal Server Error
        if (ps != null) {
            ps.println(exx);
        }
        _logger.error(exx.getMessage(), exx);
    } catch (Exception ex) {
        response.setStatus(500);
        if (ps != null) {
            ps.println(new GSAProcessingException("Exception occurred processing the command: " + ex, ex, 1000));
        }
        _logger.error(ex.getMessage(), ex);
    } finally {
        if (ps != null) {
            ps.close();
        }
    }

}

From source file:edu.cornell.med.icb.goby.modes.TallyBasesMode.java

/**
 * Run the tally bases mode: compares per-base counts between exactly two
 * count archives, finds positions whose log fold change exceeds the cutoff,
 * and tallies the surrounding genomic context to the output file.
 *
 * @throws java.io.IOException error reading / writing
 */
@Override
public void execute() throws IOException {
    if (basenames.length != 2) {
        System.err.println("Exactly two basenames are supported at this time.");
        System.exit(1);
    }
    final CountsArchiveReader[] archives = new CountsArchiveReader[basenames.length];
    int i = 0;
    for (final String basename : basenames) {
        archives[i++] = new CountsArchiveReader(basename, alternativeCountArhive);
    }

    final CountsArchiveReader archiveA = archives[0];
    final CountsArchiveReader archiveB = archives[1];
    // keep only common reference sequences between the two input count archives.
    final ObjectSet<String> identifiers = new ObjectOpenHashSet<String>();
    identifiers.addAll(archiveA.getIdentifiers());
    identifiers.retainAll(archiveB.getIdentifiers());
    // find the optimal offset A vs B:
    final int offset = offsetString.equals("auto") ? optimizeOffset(archiveA, archiveB, identifiers)
            : Integer.parseInt(offsetString);
    System.out.println("offset: " + offset);

    final RandomAccessSequenceCache cache = new RandomAccessSequenceCache();
    if (cache.canLoad(genomeCacheFilename)) {
        try {
            cache.load(genomeCacheFilename);
        } catch (ClassNotFoundException e) {
            System.err.println("Cannot load cache from disk. Consider deleting the cache and rebuilding.");
            e.printStackTrace();
            System.exit(1);
        }
    } else {
        // No cache yet: build it from the FASTA genome (plain or gzipped).
        Reader reader = null;
        try {
            if (genomeFilename.endsWith(".fa") || genomeFilename.endsWith(".fasta")) {
                reader = new FileReader(genomeFilename);
                cache.loadFasta(reader);
            } else if (genomeFilename.endsWith(".fa.gz") || genomeFilename.endsWith(".fasta.gz")) {
                reader = new InputStreamReader(new GZIPInputStream(new FileInputStream(genomeFilename)));
                cache.loadFasta(reader);
            } else {
                System.err.println("The format of the input file is not supported at this time.");
                System.exit(1);
            }
        } finally {
            IOUtils.closeQuietly(reader);
        }
    }

    System.out.println("Will use genome cache basename: " + genomeCacheFilename);
    cache.save(genomeCacheFilename);
    final Random random = new Random(new Date().getTime());

    final double delta = cutoff;
    final int countThreshold = 30; // minimum combined count before a position is considered
    final PrintStream output = new PrintStream(outputFilename);
    try { // ensure the output stream is closed even on error (previously leaked)
        writeHeader(output, windowSize);
        for (final String referenceSequenceId : identifiers) {
            if (isReferenceIncluded(referenceSequenceId)) {

                final int referenceIndex = cache.getReferenceIndex(referenceSequenceId);
                if (referenceIndex != -1) {
                    // sequence in cache.
                    System.out.println("Processing sequence " + referenceSequenceId);
                    final double sumA = getSumOfCounts(archiveA.getCountReader(referenceSequenceId));
                    final double sumB = getSumOfCounts(archiveB.getCountReader(referenceSequenceId));
                    final int referenceSize = cache.getSequenceSize(referenceIndex);
                    // process this sequence:
                    final AnyTransitionCountsIterator iterator = new AnyTransitionCountsIterator(
                            archiveA.getCountReader(referenceSequenceId),
                            new OffsetCountsReader(archiveB.getCountReader(referenceSequenceId), offset));
                    try {
                        while (iterator.hasNextTransition()) {
                            iterator.nextTransition();
                            final int position = iterator.getPosition();
                            final int countA = iterator.getCount(0);
                            final int countB = iterator.getCount(1);

                            if (countA + countB >= countThreshold) {
                                // log fold change of normalized counts:
                                // log((1+countA)/sumA) - log((1+countB)/sumB)
                                final double foldChange = Math.log1p(countA) - Math.log1p(countB) - Math.log(sumA)
                                        + Math.log(sumB);
                                if (foldChange >= delta || foldChange <= -delta) {
                                    // sub-sample qualifying positions at sampleRate
                                    if (random.nextDouble() < sampleRate) {
                                        tallyPosition(cache, referenceIndex, position, foldChange, windowSize,
                                                referenceSize, referenceSequenceId, output, countA, countB, sumA, sumB);
                                    }
                                }
                            }
                        }
                    } finally {
                        // previously skipped when tallyPosition threw
                        iterator.close();
                    }
                }
            }
            output.flush();
        }
    } finally {
        output.close();
    }
}

From source file:org.apache.giraph.master.BspServiceMaster.java

/**
 * Write superstep metrics to own file in HDFS, creating the metrics
 * directory on first use and refusing to overwrite an existing file.
 *
 * @param superstep the current superstep
 * @param aggregatedMetrics the aggregated metrics to write
 */
private void printAggregatedMetricsToHDFS(long superstep, AggregatedMetrics aggregatedMetrics) {
    ImmutableClassesGiraphConfiguration conf = getConfiguration();
    PrintStream metricsStream = null;
    String metricsDir = GiraphConstants.METRICS_DIRECTORY.get(conf);
    Path dir = new Path(metricsDir);
    Path outFile = new Path(metricsDir + Path.SEPARATOR_CHAR + "superstep_" + superstep + ".metrics");
    try {
        FileSystem fs = FileSystem.get(conf);
        if (!fs.exists(dir)) {
            fs.mkdirs(dir);
        }
        if (fs.exists(outFile)) {
            throw new RuntimeException("printAggregatedMetricsToHDFS: metrics file exists");
        }
        metricsStream = new PrintStream(fs.create(outFile), false, Charset.defaultCharset().name());
        aggregatedMetrics.print(superstep, metricsStream);
    } catch (IOException e) {
        throw new RuntimeException("printAggregatedMetricsToHDFS: error creating metrics file", e);
    } finally {
        if (metricsStream != null) {
            metricsStream.close();
        }
    }
}

From source file:iDynoOptimizer.MOEAFramework26.src.org.moeaframework.analysis.sensitivity.Negater.java

@Override
public void run(CommandLine commandLine) throws Exception {
    // Per-objective negation directions: non-zero means "negate this column".
    TypedProperties properties = TypedProperties.withProperty("direction",
            commandLine.getOptionValue("direction"));
    int[] directions = properties.getIntArray("direction", null);

    // Each positional argument is a file to negate in place.
    outer: for (String filename : commandLine.getArgs()) {
        List<String> lines = new ArrayList<String>();
        String entry = null; // current line being read
        BufferedReader reader = null;
        PrintStream writer = null;

        // read the entire file
        try {
            reader = new BufferedReader(new FileReader(filename));

            while ((entry = reader.readLine()) != null) {
                lines.add(entry);
            }
        } finally {
            if (reader != null) {
                reader.close();
            }
        }

        // validate the file to detect any errors prior to overwriting
        for (String line : lines) {
            try {
                // Comment lines ("#" or "//") are passed through unvalidated.
                if (!line.startsWith("#") && !line.startsWith("//")) {
                    String[] tokens = line.split("\\s+");

                    if (tokens.length != directions.length) {
                        System.err.println("unable to negate values in " + filename
                                + ", incorrect number of values in a row");
                        // Skip this file entirely; the original is untouched.
                        continue outer;
                    }

                    // Parse-check every column that will be negated.
                    for (int j = 0; j < tokens.length; j++) {
                        if (directions[j] != 0) {
                            Double.parseDouble(tokens[j]);
                        }
                    }
                }
            } catch (NumberFormatException e) {
                System.err.println("unable to negate values in " + filename + ", unable to parse number");
                continue outer;
            }
        }

        // overwrite the file
        try {
            writer = new PrintStream(new File(filename));

            for (String line : lines) {
                if (line.startsWith("#") || line.startsWith("//")) {
                    writer.println(line);
                } else {
                    String[] tokens = line.split("\\s+");

                    for (int j = 0; j < tokens.length; j++) {
                        if (j > 0) {
                            writer.print(' ');
                        }

                        if (directions[j] == 0) {
                            // Column not negated: copy the token verbatim.
                            writer.print(tokens[j]);
                        } else {
                            double value = Double.parseDouble(tokens[j]);
                            writer.print(-value);
                        }
                    }

                    writer.println();
                }
            }
        } finally {
            if (writer != null) {
                writer.close();
            }
        }
    }
}

From source file:edu.umass.cs.mallet.util.bibsonomy.IEInterface.java

/**
 * Tags each instance group read from inputFile with the trained CRF and
 * writes the tagged output (best path plus 10-best results) to
 * "&lt;inputFile&gt;_tagged", falling back to stdout if that file cannot be opened.
 *
 * @param inputFile file containing the instance groups to tag
 * @param sgml whether to emit SGML-style markup in the tagged output
 * @param seperator regex separating instance groups; also echoed between results
 * @throws IllegalArgumentException if the input file cannot be read
 */
public void viterbiCRF(File inputFile, boolean sgml, String seperator) {

    assert (pipe != null);
    InstanceList instancelist = new InstanceList(pipe);

    Reader reader;
    try {
        reader = new FileReader(inputFile);
    } catch (Exception e) {
        throw new IllegalArgumentException("Can't read file " + inputFile);
    }

    instancelist.add(new LineGroupIterator(reader, Pattern.compile(seperator), true));

    String outputFileStr = inputFile.toString() + "_tagged";

    System.out.println(inputFile.toString() + " ---> " + outputFileStr);

    PrintStream taggedOut = null;
    try {
        FileOutputStream fos = new FileOutputStream(outputFileStr);
        taggedOut = new PrintStream(fos);
    } catch (IOException e) {
        logger.warn("Couldn't open output file '" + outputFileStr + "'");
    }

    if (taggedOut == null) {
        taggedOut = System.out; // fall back to stdout rather than failing
    }

    // NOTE: the original also accumulated every result into a local String
    // via += (quadratic cost) that was never read; that dead work is removed.
    for (int i = 0; i < instancelist.size(); i++) {
        Instance instance = instancelist.getInstance(i);
        String crfStr = viterbiCRFInstance(instance, sgml);

        taggedOut.println(seperator);
        taggedOut.println(
                " instance accuracy= " + instance_error_num + "/" + instance_size + "=" + instance_accuracy);
        taggedOut.println(crfStr);

        // N-best tagging
        int N = 10;
        crfStr = viterbiCRFInstance_NBest(instance, sgml, N);
        taggedOut.println("N-best result:");
        taggedOut.println(seperator);
        taggedOut.println(crfStr);
    }

    if (taggedOut != System.out) {
        taggedOut.close();
    }

}