Example usage for java.io PrintStream close

List of usage examples for java.io PrintStream close

Introduction

This page lists example usages of java.io.PrintStream.close().

Prototype

public void close() 

Document

Closes the stream. This is done by flushing the stream and then closing the underlying output stream.
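
A minimal, self-contained sketch of the call is shown below; the class name, the file name "example.txt", and the printed text are illustrative placeholders, not taken from the examples that follow.

import java.io.FileOutputStream;
import java.io.IOException;
import java.io.PrintStream;

public class PrintStreamCloseExample {
    public static void main(String[] args) throws IOException {
        PrintStream ps = new PrintStream(new FileOutputStream("example.txt"));
        try {
            ps.println("hello");
        } finally {
            // close() flushes any buffered output and then closes the underlying FileOutputStream
            ps.close();
        }

        // Since Java 7, try-with-resources issues the close() call automatically
        try (PrintStream ps2 = new PrintStream(new FileOutputStream("example.txt"))) {
            ps2.println("hello again");
        }
    }
}

Unlike most streams, PrintStream never throws IOException from close(); failures set an internal error flag that can be queried with checkError().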

Usage

From source file:org.apache.pig.test.TestLocal.java

@Test
public void testBigGroupAllWithNull() throws Throwable {

    int LOOP_COUNT = 4 * 1024;
    File tmpFile = File.createTempFile(this.getClass().getName(), ".txt");
    PrintStream ps = new PrintStream(new FileOutputStream(tmpFile));
    long nonNullCnt = 0;
    for (int i = 0; i < LOOP_COUNT; i++) {
        if (i % 10 == 0) {
            // every tenth record is an empty line, producing the null values this test exercises
            ps.println("");
        } else {
            ps.println(i);
            nonNullCnt++;
        }
    }
    ps.close();

    assertEquals(new Double(nonNullCnt), bigGroupAll(tmpFile));

    tmpFile.delete();

}

From source file:key.access.manager.HttpHandler.java

public String connect(String url, ArrayList data) throws IOException {
    try {
        // open a connection to the site
        URL connectionUrl = new URL(url);
        URLConnection con = connectionUrl.openConnection();
        // activate the output
        con.setDoOutput(true);
        PrintStream ps = new PrintStream(con.getOutputStream());
        // send your parameters to your site
        for (int i = 0; i < data.size(); i++) {
            ps.print(data.get(i));
            //System.out.println(data.get(i));
            if (i != data.size() - 1) {
                ps.print("&");
            }
        }

        // we have to get the input stream in order to actually send the request
        InputStream inStream = con.getInputStream();
        Scanner s = new Scanner(inStream).useDelimiter("\\A");
        String result = s.hasNext() ? s.next() : "";
        System.out.println(result);

        // close the print stream
        ps.close();
        return result;
    } catch (MalformedURLException e) {
        e.printStackTrace();
        return "error";
    } catch (IOException e) {
        e.printStackTrace();
        return "error";
    }
}

From source file:com.protheos.graphstream.JSONSender.java

/**
 * Send JSONObject message to Gephi server
 *
 * @param obj
 *            , the JSON message content
 * @param operation
 *            , the operation sending to the server, like "updateGraph",
 *            "getGraph"
 */
private void doSend(JSONObject obj, String operation) {

    try {
        URL url = new URL("http", host, port, "/" + workspace + "?operation=" + operation + "&format=JSON");

        URLConnection connection = url.openConnection();

        connection.setDoOutput(true);
        connection.connect();

        OutputStream outputStream = null;
        PrintStream out = null;
        try {
            outputStream = connection.getOutputStream();
            out = new PrintStream(outputStream, true);

            out.print(obj.toString() + EOL);
            out.flush();
            out.close();

            // send event message to the server and read the result from the
            // server
            InputStream inputStream = connection.getInputStream();
            BufferedReader bf = new BufferedReader(new InputStreamReader(inputStream));
            String line;
            while ((line = bf.readLine()) != null) {
                // if (debug) debug(line);
            }
            inputStream.close();
        } catch (UnknownServiceException e) {
            // protocol doesn't support output
            e.printStackTrace();
            return;
        }
    } catch (IOException ex) {
        ex.printStackTrace();
    }
}

From source file:edu.umn.cs.spatialHadoop.visualization.MultilevelPlot.java

public static Job plot(Path[] inPaths, Path outPath, Class<? extends Plotter> plotterClass,
        OperationsParams params) throws IOException, InterruptedException, ClassNotFoundException {
    if (params.getBoolean("showmem", false)) {
        // Run a thread that keeps track of used memory
        Thread memThread = new Thread(new Thread() {
            @Override
            public void run() {
                Runtime runtime = Runtime.getRuntime();
                while (true) {
                    try {
                        Thread.sleep(60000);
                    } catch (InterruptedException e) {
                        e.printStackTrace();
                    }
                    runtime.gc();
                    LOG.info("Memory usage: "
                            + ((runtime.totalMemory() - runtime.freeMemory()) / (1024 * 1024 * 1024)) + "GB.");
                }
            }
        });
        memThread.setDaemon(true);
        memThread.start();
    }

    // Decide how to run it based on range of levels to generate
    String[] strLevels = params.get("levels", "7").split("\\.\\.");
    int minLevel, maxLevel;
    if (strLevels.length == 1) {
        minLevel = 0;
        maxLevel = Integer.parseInt(strLevels[0]) - 1;
    } else {
        minLevel = Integer.parseInt(strLevels[0]);
        maxLevel = Integer.parseInt(strLevels[1]);
    }
    // Create an output directory that will hold the output of the two jobs
    FileSystem outFS = outPath.getFileSystem(params);
    outFS.mkdirs(outPath);

    Job runningJob = null;
    if (OperationsParams.isLocal(params, inPaths)) {
        // Plot local
        plotLocal(inPaths, outPath, plotterClass, params);
    } else {
        int maxLevelWithFlatPartitioning = params.getInt(FlatPartitioningLevelThreshold, 4);
        if (minLevel <= maxLevelWithFlatPartitioning) {
            OperationsParams flatPartitioning = new OperationsParams(params);
            flatPartitioning.set("levels", minLevel + ".." + Math.min(maxLevelWithFlatPartitioning, maxLevel));
            flatPartitioning.set("partition", "flat");
            LOG.info("Using flat partitioning in levels " + flatPartitioning.get("levels"));
            runningJob = plotMapReduce(inPaths, new Path(outPath, "flat"), plotterClass, flatPartitioning);
        }
        if (maxLevel > maxLevelWithFlatPartitioning) {
            OperationsParams pyramidPartitioning = new OperationsParams(params);
            pyramidPartitioning.set("levels",
                    Math.max(minLevel, maxLevelWithFlatPartitioning + 1) + ".." + maxLevel);
            pyramidPartitioning.set("partition", "pyramid");
            LOG.info("Using pyramid partitioning in levels " + pyramidPartitioning.get("levels"));
            runningJob = plotMapReduce(inPaths, new Path(outPath, "pyramid"), plotterClass,
                    pyramidPartitioning);
        }
        // Write a new HTML file that displays both parts of the pyramid
        // Add an HTML file that visualizes the result using Google Maps
        LineReader templateFileReader = new LineReader(
                MultilevelPlot.class.getResourceAsStream("/zoom_view.html"));
        PrintStream htmlOut = new PrintStream(outFS.create(new Path(outPath, "index.html")));
        Text line = new Text();
        while (templateFileReader.readLine(line) > 0) {
            String lineStr = line.toString();
            lineStr = lineStr.replace("#{TILE_WIDTH}", Integer.toString(params.getInt("tilewidth", 256)));
            lineStr = lineStr.replace("#{TILE_HEIGHT}", Integer.toString(params.getInt("tileheight", 256)));
            lineStr = lineStr.replace("#{MAX_ZOOM}", Integer.toString(maxLevel));
            lineStr = lineStr.replace("#{MIN_ZOOM}", Integer.toString(minLevel));
            lineStr = lineStr.replace("#{TILE_URL}", "(zoom <= " + maxLevelWithFlatPartitioning
                    + "? 'flat' : 'pyramid')+('/tile-' + zoom + '-' + coord.x + '-' + coord.y + '.png')");

            htmlOut.println(lineStr);
        }
        templateFileReader.close();
        htmlOut.close();
    }

    return runningJob;
}

From source file:com.adaptris.core.AdaptrisMessageCase.java

@Test
public void testOutputStream() throws Exception {
    AdaptrisMessage msg1 = createMessage();

    PrintStream out = new PrintStream(msg1.getOutputStream());
    out.print(PAYLOAD2);
    // w/o closing the output stream, it's not going to be equal
    assertNotSame(PAYLOAD2, msg1.getContent());
    out.close();
    assertEquals(PAYLOAD2, msg1.getContent());
}

From source file:com.google.flightmap.parsing.faa.nfd.NfdParser.java

private void parseAndDumpIataToIcao() throws IOException {
    final BufferedReader in = new BufferedReader(new FileReader(nfd));
    final PrintStream out = new PrintStream(iataToIcao);

    Matcher airportArincMatcher;

    // Airport data variables
    String icao, iata;
    String line;
    try {
        while ((line = in.readLine()) != null) {
            airportArincMatcher = airportArincPattern.matcher(line);
            if (!airportArincMatcher.matches()) {
                // Not an airport entry
                continue;
            }

            icao = airportArincMatcher.group(1).trim();
            iata = airportArincMatcher.group(2).trim();
            if (!(iata.isEmpty() || icao.isEmpty()) && !iata.equals(icao))
                out.println(iata + " " + icao);
        }
    } finally {
        out.close();
        in.close();
    }
}

From source file:fr.in2p3.maven.plugin.DependencyXmlMojo.java

public void execute() throws MojoExecutionException, MojoFailureException {
    // create output directory
    if (!outputDirectory.exists()) {
        outputDirectory.mkdir();
    }

    // build dependencies tree
    DependencyNode root;
    // Filter does NOT work
    //        AndArtifactFilter scopeFilter = new AndArtifactFilter();
    //        scopeFilter.add(new ScopeArtifactFilter(Artifact.SCOPE_COMPILE));
    //        scopeFilter.add(new ScopeArtifactFilter(Artifact.SCOPE_RUNTIME));
    try {
        root = dependencyTreeBuilder.buildDependencyTree(project, localRepository, factory,
                artifactMetadataSource, null, collector);
        CollectingDependencyNodeVisitor visitor = new CollectingDependencyNodeVisitor();

        root.accept(visitor);
        List<DependencyNode> nodes = visitor.getNodes();
        includedArtifacts = new HashMap<String, String>(nodes.size());
        for (DependencyNode dependencyNode : nodes) {
            int state = dependencyNode.getState();
            Artifact artifact = dependencyNode.getArtifact();
            if (state == DependencyNode.INCLUDED) {
                includedArtifacts.put(artifact.getArtifactId(), artifact.getVersion());
            }
        }
    } catch (DependencyTreeBuilderException e) {
        throw new MojoExecutionException("Unable to build dependency tree", e);
    }

    // dump
    PrintStream out;
    try {
        out = new PrintStream(new FileOutputStream(outputFile));
    } catch (FileNotFoundException e) {
        throw new MojoExecutionException("Failed to create output file", e);
    }
    out.println("<project version=\"" + project.getVersion() + "\">");
    for (Iterator it = root.getChildren().iterator(); it.hasNext();) {
        DependencyNode child = (DependencyNode) it.next();
        dump(child, out);
    }
    out.println("</project>");
    out.close();
}

From source file:com.yahoo.ycsb.bulk.hbase.BulkDataGeneratorJob.java

/** Create the input file used for launching the maps */
void createInputFile(Job job, String workdir) throws IOException {
    Configuration conf = job.getConfiguration();
    FileSystem fs = FileSystem.get(conf);
    Path inpath = new Path(workdir + "/inputkeyranges.txt");
    PrintStream out = new PrintStream(new BufferedOutputStream(fs.create(inpath)));
    long start = conf.getLong(ARG_KEY_RANGE_START, 0);
    long end = conf.getLong(ARG_KEY_RANGE_END, 0);
    int parts = conf.getInt(ARG_KEY_RANGE_PARTITIONS, 1);

    writeRanges(start, end, parts, out);
    out.close();

    TextInputFormat.setInputPaths(job, inpath);
    // NLineInputFormat.setInputPaths(job, inpath);

    /* compute the max input split size */
    //        long max_split = fs.getFileStatus( inpath ).getLen() / parts;
    //        TextInputFormat.setMaxInputSplitSize(job, max_split);

    // JobConf jc = new JobConf(conf);
    // jc.setNumMapTasks(parts);
}

From source file:edu.umn.cs.spatialHadoop.visualization.MultilevelPlot.java

private static void plotLocal(Path[] inFiles, final Path outPath, final Class<? extends Plotter> plotterClass,
        final OperationsParams params) throws IOException, InterruptedException, ClassNotFoundException {
    final boolean vflip = params.getBoolean("vflip", true);

    OperationsParams mbrParams = new OperationsParams(params);
    mbrParams.setBoolean("background", false);
    final Rectangle inputMBR = params.get("mbr") != null ? params.getShape("mbr").getMBR()
            : FileMBR.fileMBR(inFiles, mbrParams);
    OperationsParams.setShape(params, InputMBR, inputMBR);

    // Retrieve desired output image size and keep aspect ratio if needed
    int tileWidth = params.getInt("tilewidth", 256);
    int tileHeight = params.getInt("tileheight", 256);
    // Adjust width and height if aspect ratio is to be kept
    if (params.getBoolean("keepratio", true)) {
        // Expand input file to a rectangle for compatibility with the pyramid
        // structure
        if (inputMBR.getWidth() > inputMBR.getHeight()) {
            inputMBR.y1 -= (inputMBR.getWidth() - inputMBR.getHeight()) / 2;
            inputMBR.y2 = inputMBR.y1 + inputMBR.getWidth();
        } else {
            inputMBR.x1 -= (inputMBR.getHeight() - inputMBR.getWidth()) / 2;
            inputMBR.x2 = inputMBR.x1 + inputMBR.getHeight();
        }
    }

    String outFName = outPath.getName();
    int extensionStart = outFName.lastIndexOf('.');
    final String extension = extensionStart == -1 ? ".png" : outFName.substring(extensionStart);

    // Start reading input file
    Vector<InputSplit> splits = new Vector<InputSplit>();
    final SpatialInputFormat3<Rectangle, Shape> inputFormat = new SpatialInputFormat3<Rectangle, Shape>();
    for (Path inFile : inFiles) {
        FileSystem inFs = inFile.getFileSystem(params);
        if (!OperationsParams.isWildcard(inFile) && inFs.exists(inFile) && !inFs.isDirectory(inFile)) {
            if (SpatialSite.NonHiddenFileFilter.accept(inFile)) {
                // Use the normal input format splitter to add this non-hidden file
                Job job = Job.getInstance(params);
                SpatialInputFormat3.addInputPath(job, inFile);
                splits.addAll(inputFormat.getSplits(job));
            } else {
                // A hidden file, add it immediately as one split
                // This is useful if the input is a hidden file which is automatically
                // skipped by FileInputFormat. We need to plot a hidden file for the case
                // of plotting partition boundaries of a spatial index
                splits.add(new FileSplit(inFile, 0, inFs.getFileStatus(inFile).getLen(), new String[0]));
            }
        } else {
            Job job = Job.getInstance(params);
            SpatialInputFormat3.addInputPath(job, inFile);
            splits.addAll(inputFormat.getSplits(job));
        }
    }

    try {
        Plotter plotter = plotterClass.newInstance();
        plotter.configure(params);

        String[] strLevels = params.get("levels", "7").split("\\.\\.");
        int minLevel, maxLevel;
        if (strLevels.length == 1) {
            minLevel = 0;
            maxLevel = Integer.parseInt(strLevels[0]);
        } else {
            minLevel = Integer.parseInt(strLevels[0]);
            maxLevel = Integer.parseInt(strLevels[1]);
        }

        GridInfo bottomGrid = new GridInfo(inputMBR.x1, inputMBR.y1, inputMBR.x2, inputMBR.y2);
        bottomGrid.rows = bottomGrid.columns = 1 << maxLevel;

        TileIndex key = new TileIndex();

        // All canvases in the pyramid, one per tile
        Map<TileIndex, Canvas> canvases = new HashMap<TileIndex, Canvas>();
        for (InputSplit split : splits) {
            FileSplit fsplit = (FileSplit) split;
            RecordReader<Rectangle, Iterable<Shape>> reader = inputFormat.createRecordReader(fsplit, null);
            if (reader instanceof SpatialRecordReader3) {
                ((SpatialRecordReader3) reader).initialize(fsplit, params);
            } else if (reader instanceof RTreeRecordReader3) {
                ((RTreeRecordReader3) reader).initialize(fsplit, params);
            } else if (reader instanceof HDFRecordReader) {
                ((HDFRecordReader) reader).initialize(fsplit, params);
            } else {
                throw new RuntimeException("Unknown record reader");
            }

            while (reader.nextKeyValue()) {
                Rectangle partition = reader.getCurrentKey();
                if (!partition.isValid())
                    partition.set(inputMBR);

                Iterable<Shape> shapes = reader.getCurrentValue();

                for (Shape shape : shapes) {
                    Rectangle shapeMBR = shape.getMBR();
                    if (shapeMBR == null)
                        continue;
                    java.awt.Rectangle overlappingCells = bottomGrid.getOverlappingCells(shapeMBR);
                    // Iterate over levels from bottom up
                    for (key.level = maxLevel; key.level >= minLevel; key.level--) {
                        for (key.x = overlappingCells.x; key.x < overlappingCells.x
                                + overlappingCells.width; key.x++) {
                            for (key.y = overlappingCells.y; key.y < overlappingCells.y
                                    + overlappingCells.height; key.y++) {
                                Canvas canvas = canvases.get(key);
                                if (canvas == null) {
                                    Rectangle tileMBR = new Rectangle();
                                    int gridSize = 1 << key.level;
                                    tileMBR.x1 = (inputMBR.x1 * (gridSize - key.x) + inputMBR.x2 * key.x)
                                            / gridSize;
                                    tileMBR.x2 = (inputMBR.x1 * (gridSize - (key.x + 1))
                                            + inputMBR.x2 * (key.x + 1)) / gridSize;
                                    tileMBR.y1 = (inputMBR.y1 * (gridSize - key.y) + inputMBR.y2 * key.y)
                                            / gridSize;
                                    tileMBR.y2 = (inputMBR.y1 * (gridSize - (key.y + 1))
                                            + inputMBR.y2 * (key.y + 1)) / gridSize;
                                    canvas = plotter.createCanvas(tileWidth, tileHeight, tileMBR);
                                    canvases.put(key.clone(), canvas);
                                }
                                plotter.plot(canvas, shape);
                            }
                        }
                        // Update overlappingCells for the higher level
                        int updatedX1 = overlappingCells.x / 2;
                        int updatedY1 = overlappingCells.y / 2;
                        int updatedX2 = (overlappingCells.x + overlappingCells.width - 1) / 2;
                        int updatedY2 = (overlappingCells.y + overlappingCells.height - 1) / 2;
                        overlappingCells.x = updatedX1;
                        overlappingCells.y = updatedY1;
                        overlappingCells.width = updatedX2 - updatedX1 + 1;
                        overlappingCells.height = updatedY2 - updatedY1 + 1;
                    }
                }
            }
            reader.close();
        }

        // Done with all splits. Write output to disk
        LOG.info("Done with plotting. Now writing the output");
        final FileSystem outFS = outPath.getFileSystem(params);

        LOG.info("Writing default empty image");
        // Write a default empty image to be displayed for non-generated tiles
        BufferedImage emptyImg = new BufferedImage(tileWidth, tileHeight, BufferedImage.TYPE_INT_ARGB);
        Graphics2D g = new SimpleGraphics(emptyImg);
        g.setBackground(new Color(0, 0, 0, 0));
        g.clearRect(0, 0, tileWidth, tileHeight);
        g.dispose();

        // Write an HTML file to browse the multilevel image
        OutputStream out = outFS.create(new Path(outPath, "default.png"));
        ImageIO.write(emptyImg, "png", out);
        out.close();

        // Add an HTML file that visualizes the result using Google Maps
        LOG.info("Writing the HTML viewer file");
        LineReader templateFileReader = new LineReader(
                MultilevelPlot.class.getResourceAsStream("/zoom_view.html"));
        PrintStream htmlOut = new PrintStream(outFS.create(new Path(outPath, "index.html")));
        Text line = new Text();
        while (templateFileReader.readLine(line) > 0) {
            String lineStr = line.toString();
            lineStr = lineStr.replace("#{TILE_WIDTH}", Integer.toString(tileWidth));
            lineStr = lineStr.replace("#{TILE_HEIGHT}", Integer.toString(tileHeight));
            lineStr = lineStr.replace("#{MAX_ZOOM}", Integer.toString(maxLevel));
            lineStr = lineStr.replace("#{MIN_ZOOM}", Integer.toString(minLevel));
            lineStr = lineStr.replace("#{TILE_URL}",
                    "'tile-' + zoom + '-' + coord.x + '-' + coord.y + '" + extension + "'");

            htmlOut.println(lineStr);
        }
        templateFileReader.close();
        htmlOut.close();

        // Write the tiles
        final Entry<TileIndex, Canvas>[] entries = canvases.entrySet().toArray(new Map.Entry[canvases.size()]);
        // Clear the hash map to save memory as it is no longer needed
        canvases.clear();
        int parallelism = params.getInt("parallel", Runtime.getRuntime().availableProcessors());
        Parallel.forEach(entries.length, new RunnableRange<Object>() {
            @Override
            public Object run(int i1, int i2) {
                boolean output = params.getBoolean("output", true);
                try {
                    Plotter plotter = plotterClass.newInstance();
                    plotter.configure(params);
                    for (int i = i1; i < i2; i++) {
                        Map.Entry<TileIndex, Canvas> entry = entries[i];
                        TileIndex key = entry.getKey();
                        if (vflip)
                            key.y = ((1 << key.level) - 1) - key.y;

                        Path imagePath = new Path(outPath, key.getImageFileName() + extension);
                        // Write this tile to an image
                        DataOutputStream outFile = output ? outFS.create(imagePath)
                                : new DataOutputStream(new NullOutputStream());
                        plotter.writeImage(entry.getValue(), outFile, vflip);
                        outFile.close();

                        // Remove the entry to allow GC to collect it
                        entries[i] = null;
                    }
                    return null;
                } catch (InstantiationException e) {
                    e.printStackTrace();
                } catch (IllegalAccessException e) {
                    e.printStackTrace();
                } catch (IOException e) {
                    e.printStackTrace();
                }
                return null;
            }
        }, parallelism);
    } catch (InstantiationException e) {
        throw new RuntimeException("Error creating rasterizer", e);
    } catch (IllegalAccessException e) {
        throw new RuntimeException("Error creating rasterizer", e);
    }
}

From source file:com.moscona.dataSpace.ExportHelper.java

public void csvExport(AbstractVector vector, String fileName, boolean includeMetaData)
        throws FileNotFoundException, DataSpaceException {
    PrintStream out = new PrintStream(new File(fileName));
    try {
        if (includeMetaData) {
            csvOut(out, "name", vector.getName());
            csvOut(out, "description", vector.getDescription());
            csvOut(out, "base type", vector.getBaseType().name());
            csvOut(out, "sorted", "" + vector.isSorted());
            csvOut(out, "factor", "" + vector.isFactor());

            out.println();
        }

        IVectorIterator iterator = vector.iterator();
        while (iterator.hasNext()) {
            out.println(toCsvString((IScalar) iterator.next()));
        }
    } finally {
        out.close();
    }
}