Example usage for java.io PrintStream close

Introduction

On this page you can find example usage for java.io.PrintStream.close().

Prototype

public void close() 

Document

Closes the stream. This is done by flushing the stream and then closing the underlying output stream.
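
Before the project-specific usages below, here is a minimal, self-contained sketch of the typical pattern (not taken from any of the listed projects; the output file names are placeholders). Because close() flushes the stream before releasing it, an explicit flush() immediately before close() is redundant, and try-with-resources makes the call implicit.

import java.io.IOException;
import java.io.PrintStream;

public class PrintStreamCloseExample {
    public static void main(String[] args) throws IOException {
        // try-with-resources calls close() automatically when the block exits
        try (PrintStream ps = new PrintStream("example-output.txt")) {
            ps.println("hello");
        }

        // explicit close(); it flushes buffered data and then closes the underlying stream
        PrintStream ps = new PrintStream("example-output2.txt");
        ps.println("world");
        ps.close();
    }
}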

Usage

From source file:NMTFUtils.GetFWords.java

public static void getTopWordsLSFTL(String base) throws IOException {
    int TOP_NUM = 20;
    List<String> dictsLists = FileUtils.readLines(new File(dicpath));
    List<String> indexLists = FileUtils.readLines(new File("G:\\syn_github\\KLSNMF\\KLSNMF\\indext.csv"));
    Map<String, String> wordMaps = new HashMap<>();

    for (String line : dictsLists) {
        String[] split = line.split("@:@");
        if (split.length != 2) {
            wordMaps.put(split[0], "");
        } else {
            wordMaps.put(split[0], split[1]);
        }
    }
    PrintStream ps = new PrintStream(
            "G:\\\\\\journal\\" + base + "\\LSFTLs-TopWordt.txt");
    for (int li = 0; li < indexLists.size(); li++) {
        String[] split = indexLists.get(li).split(",");
        StringBuilder topWords = new StringBuilder();
        for (int i = 0; i < TOP_NUM; i++) {
            String get = wordMaps.get(split[i]);
            if (get != null) {
                topWords.append(get.trim()).append(" ");
            } else {
                System.out.println(split[i]);
                return;
            }
        }
        System.out.println(topWords);
        ps.println("TOP:" + (li + 1) + " " + topWords);
    }
    ps.flush();
    ps.close();

}

From source file:NMTFUtils.GetFWords.java

public static void getTopWordsTriTL(String base) throws IOException {
    int TOP_NUM = 20;
    List<String> dictsLists = FileUtils.readLines(new File(dicpath));
    List<String> indexLists = FileUtils.readLines(new File("F:\\matlab_code\\TriTL\\indext.csv"));
    Map<String, String> wordMaps = new HashMap<>();

    for (String line : dictsLists) {
        String[] split = line.split("@:@");
        if (split.length != 2) {
            wordMaps.put(split[0], "");
        } else {
            wordMaps.put(split[0], split[1]);
        }

    }
    PrintStream ps = new PrintStream(
            "G:\\\\\\journal\\" + base + "\\TRITLt-TopWords.txt");
    for (int li = 0; li < indexLists.size(); li++) {
        String[] split = indexLists.get(li).split(",");
        StringBuilder topWords = new StringBuilder();
        for (int i = 0; i < TOP_NUM; i++) {
            String get = wordMaps.get(split[i]);
            if (get != null) {
                topWords.append(get.trim()).append(" ");
            } else {
                System.out.println(split[i]);
                return;
            }
        }
        System.out.println(topWords);
        ps.println("TOP:" + (li + 1) + " " + topWords);
    }
    ps.flush();
    ps.close();

}

From source file:edu.umn.cs.spatialHadoop.indexing.Indexer.java

private static void indexLocal(Path inPath, final Path outPath, OperationsParams params)
        throws IOException, InterruptedException {
    Job job = Job.getInstance(params);
    final Configuration conf = job.getConfiguration();

    final String sindex = conf.get("sindex");

    // Start reading input file
    List<InputSplit> splits = new ArrayList<InputSplit>();
    final SpatialInputFormat3<Rectangle, Shape> inputFormat = new SpatialInputFormat3<Rectangle, Shape>();
    FileSystem inFs = inPath.getFileSystem(conf);
    FileStatus inFStatus = inFs.getFileStatus(inPath);
    if (inFStatus != null && !inFStatus.isDir()) {
        // One file, retrieve it immediately.
        // This is useful if the input is a hidden file which is automatically
        // skipped by FileInputFormat. We need to plot a hidden file for the case
        // of plotting partition boundaries of a spatial index
        splits.add(new FileSplit(inPath, 0, inFStatus.getLen(), new String[0]));
    } else {
        SpatialInputFormat3.setInputPaths(job, inPath);
        for (InputSplit s : inputFormat.getSplits(job))
            splits.add(s);
    }

    // Copy splits to a final array to be used in parallel
    final FileSplit[] fsplits = splits.toArray(new FileSplit[splits.size()]);
    boolean replicate = PartitionerReplicate.get(sindex);

    // Set input file MBR if not already set
    Rectangle inputMBR = (Rectangle) OperationsParams.getShape(conf, "mbr");
    if (inputMBR == null) {
        inputMBR = FileMBR.fileMBR(inPath, new OperationsParams(conf));
        OperationsParams.setShape(conf, "mbr", inputMBR);
    }

    setLocalIndexer(conf, sindex);
    final Partitioner partitioner = createPartitioner(inPath, outPath, conf, sindex);

    final IndexRecordWriter<Shape> recordWriter = new IndexRecordWriter<Shape>(partitioner, replicate, sindex,
            outPath, conf);
    for (FileSplit fsplit : fsplits) {
        RecordReader<Rectangle, Iterable<Shape>> reader = inputFormat.createRecordReader(fsplit, null);
        if (reader instanceof SpatialRecordReader3) {
            ((SpatialRecordReader3) reader).initialize(fsplit, conf);
        } else if (reader instanceof RTreeRecordReader3) {
            ((RTreeRecordReader3) reader).initialize(fsplit, conf);
        } else if (reader instanceof HDFRecordReader) {
            ((HDFRecordReader) reader).initialize(fsplit, conf);
        } else {
            throw new RuntimeException("Unknown record reader");
        }

        final IntWritable partitionID = new IntWritable();

        while (reader.nextKeyValue()) {
            Iterable<Shape> shapes = reader.getCurrentValue();
            if (replicate) {
                for (final Shape s : shapes) {
                    partitioner.overlapPartitions(s, new ResultCollector<Integer>() {
                        @Override
                        public void collect(Integer id) {
                            partitionID.set(id);
                            try {
                                recordWriter.write(partitionID, s);
                            } catch (IOException e) {
                                throw new RuntimeException(e);
                            }
                        }
                    });
                }
            } else {
                for (final Shape s : shapes) {
                    int pid = partitioner.overlapPartition(s);
                    if (pid != -1) {
                        partitionID.set(pid);
                        recordWriter.write(partitionID, s);
                    }
                }
            }
        }
        reader.close();
    }
    recordWriter.close(null);

    // Write the WKT formatted master file
    Path masterPath = new Path(outPath, "_master." + sindex);
    FileSystem outFs = outPath.getFileSystem(params);
    Path wktPath = new Path(outPath, "_" + sindex + ".wkt");
    PrintStream wktOut = new PrintStream(outFs.create(wktPath));
    wktOut.println("ID\tBoundaries\tRecord Count\tSize\tFile name");
    Text tempLine = new Text2();
    Partition tempPartition = new Partition();
    LineReader in = new LineReader(outFs.open(masterPath));
    while (in.readLine(tempLine) > 0) {
        tempPartition.fromText(tempLine);
        wktOut.println(tempPartition.toWKT());
    }
    in.close();
    wktOut.close();
}

From source file:com.meetingninja.csse.database.AgendaDatabaseAdapter.java

public static JsonNode update(String agendaID, Map<String, String> key_values)
        throws JsonGenerationException, IOException, InterruptedException {
    // prepare POST payload
    ByteArrayOutputStream json = new ByteArrayOutputStream();
    // this type of print stream allows us to get a string easily
    PrintStream ps = new PrintStream(json);

    // Create a generator to build the JSON string
    JsonGenerator jgen = JFACTORY.createGenerator(ps, JsonEncoding.UTF8);
    for (String key : key_values.keySet()) {
        jgen.flush();
        // Build JSON Object
        jgen.writeStartObject();
        jgen.writeStringField(Keys.Agenda.ID, agendaID);
        jgen.writeStringField("field", key);
        jgen.writeStringField("value", key_values.get(key));
        jgen.writeEndObject();
        jgen.writeRaw("\f"); // write a form-feed to separate the payloads
    }

    jgen.close();
    // Get JSON Object payload from print stream
    String payload = json.toString("UTF8");
    ps.close();
    // The backend can only update a single field at a time
    String[] payloads = payload.split("\f\\s*"); // split at each form-feed
    Thread t = new Thread(new Thread(new Runnable() {
        @Override
        public void run() {
            try {
                Thread.sleep(500);
            } catch (InterruptedException e) {
                e.getLocalizedMessage();
            }
        }
    }));
    String response = "";
    for (String p : payloads) {
        t.run();
        response = updateHelper(p);
    }
    return MAPPER.readTree(response);
}
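
Several of these examples wrap a ByteArrayOutputStream in a PrintStream purely so the generated JSON can be captured as a String before the stream is closed. A stripped-down, hypothetical sketch of just that pattern (the field name and value are placeholders, and the Jackson factory is created inline rather than shared) might look like this:

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.PrintStream;

import com.fasterxml.jackson.core.JsonEncoding;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;

public class JsonPayloadSketch {
    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream json = new ByteArrayOutputStream();
        // the PrintStream hands an OutputStream to Jackson while the bytes
        // written so far stay recoverable through the ByteArrayOutputStream
        PrintStream ps = new PrintStream(json);

        JsonGenerator jgen = new JsonFactory().createGenerator(ps, JsonEncoding.UTF8);
        jgen.writeStartObject();
        jgen.writeStringField("field", "value"); // placeholder key/value
        jgen.writeEndObject();
        jgen.close(); // flushes the generator's buffer into the PrintStream

        String payload = json.toString("UTF-8");
        ps.close(); // release the stream once the payload has been captured
        System.out.println(payload);
    }
}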

From source file:edu.umn.cs.spatialHadoop.operations.FileMBR.java

public static Partition fileMBRLocal(Path[] inFiles, final OperationsParams params)
        throws IOException, InterruptedException {
    // 1- Split the input path/file to get splits that can be processed independently
    final SpatialInputFormat3<Rectangle, Shape> inputFormat = new SpatialInputFormat3<Rectangle, Shape>();
    Job job = Job.getInstance(params);
    SpatialInputFormat3.setInputPaths(job, inFiles);
    final List<org.apache.hadoop.mapreduce.InputSplit> splits = inputFormat.getSplits(job);
    int parallelism = params.getInt("parallel", Runtime.getRuntime().availableProcessors());

    // 2- Process splits in parallel
    List<Map<String, Partition>> allMbrs = Parallel.forEach(splits.size(),
            new RunnableRange<Map<String, Partition>>() {
                @Override
                public Map<String, Partition> run(int i1, int i2) {
                    Map<String, Partition> mbrs = new HashMap<String, Partition>();
                    for (int i = i1; i < i2; i++) {
                        try {
                            org.apache.hadoop.mapreduce.lib.input.FileSplit fsplit = (org.apache.hadoop.mapreduce.lib.input.FileSplit) splits
                                    .get(i);
                            final RecordReader<Rectangle, Iterable<Shape>> reader = inputFormat
                                    .createRecordReader(fsplit, null);
                            if (reader instanceof SpatialRecordReader3) {
                                ((SpatialRecordReader3) reader).initialize(fsplit, params);
                            } else if (reader instanceof RTreeRecordReader3) {
                                ((RTreeRecordReader3) reader).initialize(fsplit, params);
                            } else if (reader instanceof HDFRecordReader) {
                                ((HDFRecordReader) reader).initialize(fsplit, params);
                            } else {
                                throw new RuntimeException("Unknown record reader");
                            }
                            Partition p = mbrs.get(fsplit.getPath().getName());
                            if (p == null) {
                                p = new Partition();
                                p.filename = fsplit.getPath().getName();
                                p.cellId = p.filename.hashCode();
                                p.size = 0;
                                p.recordCount = 0;
                                p.set(Double.MAX_VALUE, Double.MAX_VALUE, -Double.MAX_VALUE, -Double.MAX_VALUE);
                                mbrs.put(p.filename, p);
                            }
                            Text temp = new Text2();
                            while (reader.nextKeyValue()) {
                                Iterable<Shape> shapes = reader.getCurrentValue();
                                for (Shape s : shapes) {
                                    Rectangle mbr = s.getMBR();
                                    if (mbr != null)
                                        p.expand(mbr);
                                    p.recordCount++;
                                    temp.clear();
                                    s.toText(temp);
                                    p.size += temp.getLength() + 1;
                                }
                            }
                        } catch (IOException e) {
                            throw new RuntimeException(e);
                        } catch (InterruptedException e) {
                            throw new RuntimeException(e);
                        }
                    }
                    return mbrs;
                }
            }, parallelism);
    Map<String, Partition> mbrs = allMbrs.remove(allMbrs.size() - 1);
    for (Map<String, Partition> list : allMbrs) {
        for (Partition p1 : list.values()) {
            Partition p2 = mbrs.get(p1.filename);
            if (p2 != null) {
                p2.expand(p1);
            } else {
                mbrs.put(p1.filename, p1);
            }
        }
    }

    // Cache the final result, if needed
    for (Path inFile : inFiles) {
        FileSystem inFs = inFile.getFileSystem(params);
        if (!inFs.getFileStatus(inFile).isDir())
            continue;
        Path gindex_path = new Path(inFile, "_master.heap");
        // Answer has been already cached (may be by another job)
        if (inFs.exists(gindex_path))
            continue;
        FileStatus[] files = inFs.listStatus(inFile, SpatialSite.NonHiddenFileFilter);
        PrintStream wktout = new PrintStream(inFs.create(new Path(inFile, "_heap.wkt"), false));
        PrintStream gout = new PrintStream(inFs.create(gindex_path, false));

        Text text = new Text2();
        for (FileStatus file : files) {
            text.clear();
            Partition p = mbrs.get(file.getPath().getName());
            gout.println(p.toText(text).toString());
            wktout.println(p.toWKT());
        }

        wktout.close();
        gout.close();
    }

    // Return the final answer
    Partition finalResult = new Partition();
    finalResult.size = finalResult.recordCount = 0;
    finalResult.x1 = finalResult.y1 = Double.MAX_VALUE;
    finalResult.x2 = finalResult.y2 = -Double.MAX_VALUE;
    for (Partition p2 : mbrs.values())
        finalResult.expand(p2);
    return finalResult;
}

From source file:com.meetingninja.csse.database.GroupDatabaseAdapter.java

public static Group createGroup(Group g) throws IOException, MalformedURLException {
    // Server URL setup
    String _url = getBaseUri().build().toString();

    // establish connection
    URL url = new URL(_url);
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();

    conn.setRequestMethod("POST");
    addRequestHeader(conn, true);

    // prepare POST payload
    ByteArrayOutputStream json = new ByteArrayOutputStream();
    // this type of print stream allows us to get a string easily
    PrintStream ps = new PrintStream(json);
    // Create a generator to build the JSON string
    JsonGenerator jgen = JFACTORY.createGenerator(ps, JsonEncoding.UTF8);

    // Build JSON Object
    jgen.writeStartObject();
    jgen.writeStringField(Keys.Group.TITLE, g.getGroupTitle());
    jgen.writeArrayFieldStart(Keys.Group.MEMBERS);
    for (User member : g.getMembers()) {
        jgen.writeStartObject();
        jgen.writeStringField(Keys.User.ID, member.getID());
        jgen.writeEndObject();

    }
    jgen.writeEndArray();
    jgen.writeEndObject();
    jgen.close();

    // Get JSON Object payload from print stream
    String payload = json.toString("UTF8");
    ps.close();

    // send payload
    int responseCode = sendPostPayload(conn, payload);
    String response = getServerResponse(conn);

    // prepare to get the id of the created Meeting
    // Map<String, String> responseMap = new HashMap<String, String>();

    /*
     * result should get valid={"meetingID":"##"}
     */
    String result = new String();
    if (!response.isEmpty()) {
        // responseMap = MAPPER.readValue(response,
        // new TypeReference<HashMap<String, String>>() {
        // });
        JsonNode groupNode = MAPPER.readTree(response);
        if (!groupNode.has(Keys.Group.ID)) {
            result = "invalid";
        } else
            result = groupNode.get(Keys.Group.ID).asText();
    }

    if (!result.equalsIgnoreCase("invalid"))
        g.setID(result);

    conn.disconnect();
    return g;
}

From source file:org.apache.mahout.utils.MatrixDumper.java

private static void exportCSV(Path inputPath, String outputFile, boolean doLabels) throws IOException {
    SequenceFileValueIterator<MatrixWritable> it = new SequenceFileValueIterator<>(inputPath, true,
            new Configuration());
    Matrix m = it.next().get();
    it.close();
    PrintStream ps = getPrintStream(outputFile);
    String[] columnLabels = getLabels(m.numCols(), m.getColumnLabelBindings(), "col");
    String[] rowLabels = getLabels(m.numRows(), m.getRowLabelBindings(), "row");
    if (doLabels) {
        ps.print("rowid,");
        ps.print(columnLabels[0]);
        for (int c = 1; c < m.numCols(); c++) {
            ps.print(',' + columnLabels[c]);
        }
        ps.println();
    }
    for (int r = 0; r < m.numRows(); r++) {
        if (doLabels) {
            ps.print(rowLabels[0] + ',');
        }
        ps.print(Double.toString(m.getQuick(r, 0)));
        for (int c = 1; c < m.numCols(); c++) {
            ps.print(",");
            ps.print(Double.toString(m.getQuick(r, c)));
        }
        ps.println();
    }
    if (ps != System.out) {
        ps.close();
    }
}

From source file:org.apache.ofbiz.base.util.KeyStoreUtil.java

public static Certificate pemToCert(Reader r) throws IOException, CertificateException {
    String header = "-----BEGIN CERTIFICATE-----";
    String footer = "-----END CERTIFICATE-----";

    BufferedReader reader = new BufferedReader(r);
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    PrintStream ps = new PrintStream(baos);

    String line;

    // ignore up to the header
    while ((line = reader.readLine()) != null && !line.equals(header)) {
    }

    // no header found
    if (line == null) {
        throw new IOException("Error reading certificate, missing BEGIN boundary");
    }

    // in between the header and footer is the actual certificate
    while ((line = reader.readLine()) != null && !line.equals(footer)) {
        line = line.replaceAll("\\s", "");
        ps.print(line);
    }

    // no footer found
    if (line == null) {
        throw new IOException("Error reading certificate, missing END boundary");
    }
    ps.close();

    // decode the buffer to a X509Certificate

    CertificateFactory cf = CertificateFactory.getInstance("X.509");
    byte[] certBytes = Base64.decodeBase64(baos.toByteArray());
    return cf.generateCertificate(new ByteArrayInputStream(certBytes));
}

From source file:com.meetingninja.csse.database.AgendaDatabaseAdapter.java

public static Agenda createAgenda(Agenda create) throws IOException {
    Agenda newAgenda = new Agenda(create);
    // Server URL setup
    String _url = getBaseUri().build().toString();
    // Establish connection
    URL url = new URL(_url);
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();

    // add request header
    conn.setRequestMethod(IRequest.POST);
    addRequestHeader(conn, true);

    // prepare POST payload
    ByteArrayOutputStream json = new ByteArrayOutputStream();
    // this type of print stream allows us to get a string easily
    PrintStream ps = new PrintStream(json);
    // Create a generator to build the JSON string
    JsonGenerator jgen = JFACTORY.createGenerator(ps, JsonEncoding.UTF8);

    // Build JSON Object
    jgen.writeStartObject(); // start agenda
    jgen.writeStringField(Keys.Agenda.TITLE, create.getTitle());
    jgen.writeStringField(Keys.Agenda.MEETING, create.getAttachedMeetingID());
    jgen.writeArrayFieldStart(Keys.Agenda.TOPIC); // start topics
    MAPPER.writeValue(jgen, create.getTopics()); // recursively does
    // subtopics
    jgen.writeEndArray(); // end topics
    jgen.writeEndObject(); // end agenda
    jgen.close();

    // Get JSON Object payload from print stream
    String payload = json.toString("UTF8");
    ps.close();

    // Send payload
    int responseCode = sendPostPayload(conn, payload);
    String response = getServerResponse(conn);

    newAgenda = parseAgenda(MAPPER.readTree(response));
    return newAgenda;
}

From source file:com.meetingninja.csse.database.NotesDatabaseAdapter.java

public static String createNote(Note n) throws Exception {
    // Server URL setup
    String _url = getBaseUri().build().toString();

    // Establish connection
    URL url = new URL(_url);
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();

    // add request header
    conn.setRequestMethod(IRequest.POST);
    //addRequestHeader(conn, true);

    // prepare POST payload
    ByteArrayOutputStream json = new ByteArrayOutputStream();
    // this type of print stream allows us to get a string easily
    PrintStream ps = new PrintStream(json);
    // Create a generator to build the JSON string
    JsonGenerator jgen = JFACTORY.createGenerator(ps, JsonEncoding.UTF8);

    // Build JSON Object
    jgen.writeStartObject();
    jgen.writeStringField(Keys.Note.CREATED_BY, n.getCreatedBy());
    jgen.writeStringField(Keys.Note.TITLE, n.getTitle());
    jgen.writeStringField(Keys.Note.DESC, n.getDescription());
    jgen.writeStringField(Keys.Note.CONTENT, n.getContent());
    jgen.writeStringField(Keys.Note.UPDATED, n.getDateCreated());
    jgen.writeEndObject();
    jgen.close();

    // Get JSON Object payload from print stream
    String payload = json.toString("UTF8");
    Log.d("CREATENOTE_payload", payload);
    ps.close();

    // Send payload
    int responseCode = sendPostPayload(conn, payload);
    String response = getServerResponse(conn);
    Log.d("CREATENOTE_response", response);

    String ID = "";
    if (!response.isEmpty()) {
        JsonNode tree = MAPPER.readTree(response);
        if (!tree.has(Keys.Note.ID))
            ID = "-1";
        else
            ID = tree.get(Keys.Note.ID).asText();
    }

    conn.disconnect();
    return ID;
}