Example usage for java.io PrintWriter close

List of usage examples for java.io PrintWriter close

Introduction

On this page you can find example usages of java.io.PrintWriter.close().

Prototype

public void close() 

Document

Closes the stream and releases any system resources associated with it.
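
Before the collected usages, here is a minimal, self-contained sketch (not taken from any of the source files below) of the two common ways close() is reached: an explicit call in a finally block, and the implicit call made by try-with-resources, since PrintWriter implements AutoCloseable. The file name used here is arbitrary.

import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;

public class PrintWriterCloseExample {
    public static void main(String[] args) throws IOException {
        // Explicit close() in a finally block, as in most of the examples below.
        PrintWriter out = new PrintWriter(new FileWriter("example.txt"));
        try {
            out.println("written before an explicit close()");
        } finally {
            out.close(); // flushes buffered output and releases the underlying stream
        }

        // try-with-resources calls close() automatically, even if an exception is thrown.
        try (PrintWriter out2 = new PrintWriter(new FileWriter("example.txt", true))) {
            out2.println("written before an implicit close()");
        }
    }
}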

Usage

From source file:com.ikon.servlet.DownloadServlet.java

public void doGet(HttpServletRequest request, HttpServletResponse response)
        throws IOException, ServletException {
    request.setCharacterEncoding("UTF-8");
    String userId = request.getRemoteUser();
    String path = WebUtils.getString(request, "path");
    String uuid = WebUtils.getString(request, "uuid");
    boolean inline = WebUtils.getBoolean(request, "inline");
    InputStream is = null;

    try {
        // Now a document can be located by UUID
        if (uuid != null && !uuid.equals("")) {
            path = OKMRepository.getInstance().getNodePath(null, uuid);
        }

        if (path != null) {
            Document doc = OKMDocument.getInstance().getProperties(null, path);
            String fileName = PathUtils.getName(doc.getPath());
            log.info("Download {} by {} ({})",
                    new Object[] { path, userId, (inline ? "inline" : "attachment") });
            is = OKMDocument.getInstance().getContent(null, path, false);
            WebUtils.sendFile(request, response, fileName, doc.getMimeType(), inline, is);
        } else {
            response.setContentType("text/plain; charset=UTF-8");
            PrintWriter out = response.getWriter();
            out.println("Missing document reference");
            out.close();
        }
    } catch (PathNotFoundException e) {
        log.warn(e.getMessage(), e);
        response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR,
                "PathNotFoundException: " + e.getMessage());
    } catch (RepositoryException e) {
        log.warn(e.getMessage(), e);
        response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR,
                "RepositoryException: " + e.getMessage());
    } catch (Exception e) {
        log.warn(e.getMessage(), e);
        response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, e.getMessage());
    } finally {
        IOUtils.closeQuietly(is);
    }
}

From source file:edu.cornell.mannlib.vitro.webapp.servlet.ConceptSearchServlet.java

@Override
protected void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
    super.doGet(req, resp);
    VitroRequest vreq = new VitroRequest(req);

    try {
        ServletContext ctx = vreq.getSession().getServletContext();
        //Captures both concept list and any errors if they exist
        ConceptInfo conceptInfo = new ConceptInfo();
        conceptInfo.setSemanticServicesError(null);

        //JSON output should be written out
        List<Concept> results = null;
        try {
            results = ConceptSearchServiceUtils.getSearchResults(ctx, vreq);
        } catch (Exception ex) {
            SemanticServicesError semanticServicesError = new SemanticServicesError("Exception encountered ",
                    ex.getMessage(), "fatal");
            log.error("An error occurred retrieving search results", ex);
            conceptInfo.setSemanticServicesError(semanticServicesError);
        }
        conceptInfo.setConceptList(results);

        String json = renderJson(conceptInfo);

        json = StringUtils.replaceChars(json, "\r\t\n", "");
        PrintWriter writer = resp.getWriter();
        resp.setContentType("application/json");
        writer.write(json);
        writer.close();

    } catch (Exception ex) {
        log.warn(ex, ex);
    }
}

From source file:com.seer.datacruncher.services.HttpFileUpload.java

public void doGet(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {
    response.setContentType("text/html");
    PrintWriter out = response.getWriter();
    out.write("Success");
    out.flush();
    out.close();
}

From source file:cn.vlabs.umt.ui.servlet.AppProxyServlet.java

private void genHalfProxy(HttpServletRequest request, HttpServletResponse response) throws IOException {
    String appname = request.getParameter("appname");
    AppService service = (AppService) factory.getBean("ApplicationService");
    Application app = service.getApplication(appname);

    AppCredContent content = new AppCredContent();
    content.setAppKeyId(app.getKeyid());
    content.setIpAddress(RequestUtil.getRemoteIP(request));
    content.setPrincipal(new AppPrincipal(appname));
    Date tomorrow = DateUtils.addDays(new Date(), 1);
    content.setValidTime(DateFormatUtils.format(tomorrow, "yyyy-MM-dd hh:mm:ss"));

    Application umt = service.getApplication("umt");
    content.setUMTKeyId(umt.getKeyid());

    // UMTCredential cred= (UMTCredential)factory.getBean("UMTCredUtil");
    SignedEnvelope env = new SignedEnvelope(content.toXML());
    // env.genSignature(cred.getUMTKey()); kevin deleted //TODO //FIXME

    response.setCharacterEncoding("UTF-8");
    PrintWriter out = response.getWriter();
    out.println(env.toXML());
    out.flush();
    out.close();
}

From source file:de.tudarmstadt.tk.statistics.importer.ExternalResultsReader.java

public static void readLODPipelineTrainTest(String pathToDirectory) {
    Locale.setDefault(Locale.ENGLISH);

    String[] semanticFeatures = new String[] { "Baseline", "+ALL", "+LOC", "+TIME", "+LOD", "+LOC+TIME",
            "+LOC+LOD", "+TIME+LOD", "+TYPES", "+CAT" };
    String[] measures = new String[] { "Percent Correct", "Weighted Precision", "Weighted Recall",
            "Weighted F-Measure" };
    String outFileName = "AggregatedCVRandom.csv";

    logger.log(Level.INFO, String.format("Importing data from directory %s.", pathToDirectory));

    // Method requires input directory. Check this condition.
    File directory = new File(pathToDirectory);
    if (!directory.isDirectory()) {
        System.err.println("Please specify a directory with the source .csv files. Aborting.");
        return;
    }

    //Empty previous output file, if there was one
    File outputFile = new File(directory, outFileName);
    if (outputFile.exists()) {
        outputFile.delete();
    }
    try {
        String header = "Train;Test;Classifier;FeatureSet;Measure;Value";

        PrintWriter out = new PrintWriter(new FileWriter(outputFile, true));
        out.println(header);
        out.close();
    } catch (IOException e) {
        System.err.println("Error while writing aggregated Train-Test file.");
        e.printStackTrace();
    }

    // prepare files lists
    HashMap<String, ArrayList<File>> filesMap = new HashMap<>();

    // read all subdirectories that match the city names
    File[] subdirs = directory.listFiles((FileFilter) DirectoryFileFilter.DIRECTORY);

    //Iterate all subdirectories
    for (File subDirectory : subdirs) {

        // get train set name
        String trainSetName = subDirectory.getName();

        // iterate all files in directory
        File[] filesInDirectory = subDirectory.listFiles();
        List<File> fileList = Arrays.asList(filesInDirectory);

        for (File subDirFile : fileList) {
            // get name of test data set
            String[] filenameTokens = subDirFile.getName().split("To");
            //String testDataName = filenameTokens[1].substring(0, filenameTokens[1].length() - 11);

            String testDataName;

            // if only this string is left, then CV
            if (filenameTokens[1].equals("Results.csv")) {
                testDataName = trainSetName;
            } else {
                testDataName = filenameTokens[1].split("Results.csv")[0];
                testDataName = testDataName.split("2C.csv|4C.csv|.csv")[0];
            }

            // put current file to test data name -> this way all files
            // corresponding to the same test set are in one map
            if (filesMap.get(testDataName) != null) {
                // get existing list and add file
                ArrayList<File> currentFileList = filesMap.get(testDataName);
                currentFileList.add(subDirFile);
            } else {
                // create new list and add current file
                ArrayList<File> newFileList = new ArrayList<>();
                newFileList.add(subDirFile);
                filesMap.put(testDataName, newFileList);
            }
        }

        ArrayList<String> outputRows = new ArrayList<String>();
        int nrDifferentClassifiers = 0;

        // iterate all files of one map
        Iterator<Entry<String, ArrayList<File>>> it = filesMap.entrySet().iterator();
        while (it.hasNext()) {
            Map.Entry pairs = (Map.Entry) it.next();
            String testSetName = (String) pairs.getKey();
            ArrayList<File> testFiles = (ArrayList<File>) pairs.getValue();

            nrDifferentClassifiers = testFiles.size();

            // initialize data store
            ArrayList<HashMap<String, Object>> values = new ArrayList<>();

            // get rows for first file to initialize store
            List<String[]> inputRowsFirstFile = readAndCheckCSV(testFiles.get(0).getAbsolutePath(), ';');

            for (int i = 0; i < inputRowsFirstFile.size(); i++) {
                HashMap<String, Object> currentRowValues = new HashMap<>();
                currentRowValues.put("semanticFeature", "");
                currentRowValues.put("classifierParameters", "");
                currentRowValues.put("aggregatedMeasureValues", new double[measures.length]);
                currentRowValues.put("nGrams", "");
                values.add(currentRowValues);
            }

            // get results from other files
            for (File testFile : testFiles) {
                // Only analyse files with .csv extension
                if (!FilenameUtils.getExtension(testFile.getName().toLowerCase()).equals("csv")
                        || testFile.getName().equals("AggregatedTrainTest.csv")) {
                    continue;
                }
                // check file for consistency
                List<String[]> inputRows = readAndCheckCSV(testFile.getAbsolutePath(), ';');

                // check if length matches first file
                if (!(inputRows.size() == values.size())) {
                    // TODO error message
                } else {
                    for (int i = 0; i < inputRows.size(); i++) {
                        String[] inputCells = inputRows.get(i);

                        // read current values and compare with entries
                        String semanticFeature = semanticFeatures[i % semanticFeatures.length];

                        if (values.get(i).get("semanticFeature") == "") {
                            values.get(i).put("semanticFeature", semanticFeature);
                        } else {
                            if (values.get(i).get("semanticFeature").equals(semanticFeature) == false) {
                                System.err.println("Semantic Features do not match.");
                                System.exit(1);
                            }
                        }

                        // needs rework as we do aggregation here
                        // String classifierParameters = inputCells[0];
                        //
                        // if (values.get(i).get("classifierParameters") ==
                        // "")
                        // {
                        // values.get(i).put("classifierParameters",
                        // classifierParameters);
                        // }
                        // else
                        // {
                        // if
                        // (values.get(i).get("classifierParameters").equals(classifierParameters)
                        // == false)
                        // {
                        // System.err.println("Classifier parameters do not match.");
                        // System.exit(1);
                        // }
                        // }

                        String nGrams = inputCells[12];

                        if (values.get(i).get("nGrams") == "") {
                            values.get(i).put("nGrams", nGrams);
                        } else {
                            if (values.get(i).get("nGrams").equals(nGrams) == false) {
                                System.err.println("N Gram Length does not match.");
                                System.exit(1);
                            }
                        }

                        // get and aggregate values
                        for (int j = 0; j < measures.length; j++) {
                            if (j == 0) {
                                //double currentValue = ((double[]) values.get(i).get("aggregatedMeasureValues"))[j];
                                double valueInFile = Double.parseDouble(inputCells[j + 16]) / 100;

                                ((double[]) values.get(i).get("aggregatedMeasureValues"))[j] += valueInFile;
                            } else {
                                //double currentValue = ((double[]) values.get(i).get("aggregatedMeasureValues"))[j];
                                double valueInFile = Double.parseDouble(inputCells[j + 16]);
                                ((double[]) values.get(i).get("aggregatedMeasureValues"))[j] += valueInFile;
                            }
                        }
                    }
                }
            }

            // write aggregated results to file
            for (HashMap<String, Object> currentValues : values) {
                String semFeature = (String) currentValues.get("semanticFeature");
                String nGrams = (String) currentValues.get("nGrams");
                String featureSet = String.format("%s, nGrams: %s", semFeature, nGrams);

                for (int j = 0; j < measures.length; j++) {
                    String outputRow = String.format("%s;%s;%s;%s;%s;%f", trainSetName, testSetName, "0",
                            featureSet, measures[j],
                            ((double[]) currentValues.get("aggregatedMeasureValues"))[j]
                                    / nrDifferentClassifiers);
                    outputRows.add(outputRow);
                }
            }

            // avoids a ConcurrentModificationException
            it.remove();
        }

        // Write aggregated data to a new file
        try {
            PrintWriter out = new PrintWriter(new FileWriter(outputFile, true));
            for (String s : outputRows) {
                out.println(s);
            }
            out.close();
        } catch (IOException e) {
            System.err.println("Error while writing aggregated Train-Test file.");
            e.printStackTrace();
        }
    }

    logger.log(Level.INFO,
            String.format("Finished import. The aggregated data was written to %s.", outFileName));

}

From source file:com.mockey.ui.ProxyInfoServlet.java

/**
 * 
 * @param req
 *            basic request
 * @param resp
 *            basic resp
 * @throws ServletException
 *             basic
 * @throws IOException
 *             basic
 */
public void doPost(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
    ProxyServerModel proxyInfo = new ProxyServerModel();

    proxyInfo.setProxyPassword(req.getParameter("proxyPassword"));
    proxyInfo.setProxyUsername(req.getParameter("proxyUsername"));
    proxyInfo.setProxyUrl(req.getParameter("proxyUrl"));
    String enabled = req.getParameter("proxyEnabled");
    boolean proxyEnabled = false;
    try {
        proxyEnabled = Boolean.parseBoolean(enabled);
    } catch (Exception e) {
        e.printStackTrace();
    }
    proxyInfo.setProxyEnabled(proxyEnabled);
    store.setProxy(proxyInfo);
    JSONObject responseObject = new JSONObject();
    JSONObject successMessage = new JSONObject();

    try {
        successMessage.put("success", "Proxy settings updated.");
        responseObject.put("result", successMessage);

    } catch (JSONException e) {

    }
    resp.setContentType("application/json");
    PrintWriter out = resp.getWriter();
    out.println(responseObject.toString());
    out.flush();
    out.close();
    return;
}

From source file:net.bashtech.geobot.BotManager.java

public static String postRemoteDataSongRequest(String urlString, String channel, String requester) {
    if (BotManager.getInstance().CoeBotTVAPIKey.length() > 4) {
        URL url;
        HttpURLConnection conn;

        try {
            url = new URL("http://coebot.tv/api/v1/reqsongs/add/" + channel.toLowerCase() + "$"
                    + BotManager.getInstance().CoeBotTVAPIKey + "$" + BotManager.getInstance().nick);

            String postData = "url=" + URLEncoder.encode(urlString, "UTF-8") + "&requestedBy="
                    + URLEncoder.encode(requester, "UTF-8");
            conn = (HttpURLConnection) url.openConnection();
            System.out.println(postData);
            conn.setDoOutput(true);
            conn.setRequestMethod("POST");
            conn.setRequestProperty("User-Agent", "CoeBot");
            conn.setRequestProperty("Content-Type", "application/x-www-form-urlencoded");
            conn.setRequestProperty("Content-Length", "" + Integer.toString(postData.getBytes().length));

            // conn.setConnectTimeout(5 * 1000);
            // conn.setReadTimeout(5 * 1000);

            PrintWriter out = new PrintWriter(conn.getOutputStream());
            out.print(postData);
            out.close();
            String response = "";
            if (conn.getResponseCode() < 400) {

                Scanner inStream = new Scanner(conn.getInputStream());

                while (inStream.hasNextLine()) {
                    response += (inStream.nextLine());
                }
                inStream.close();
            } else {
                Scanner inStream = new Scanner(conn.getErrorStream());

                while (inStream.hasNextLine()) {
                    response += (inStream.nextLine());
                }
                inStream.close();
            }
            System.out.println(response);

            return response;

        } catch (MalformedURLException ex) {
            ex.printStackTrace();
        } catch (IOException ex) {
            ex.printStackTrace();

        }

        return null;
    } else {
        return null;
    }

}

From source file:com.bitranger.parknshop.seller.controller.SellerManageItemCtrl.java

@RequestMapping(value = "/seller/modifyItem", method = RequestMethod.GET)
public void clickModify(HttpServletRequest request, HttpServletResponse response) throws IOException {

    String itemId = request.getParameter("id");

    PsItem psItem = psItemDAO.findById(Integer.parseInt(itemId));

    Set<PsTag> tags = psItem.getPsTags();

    Set<ModifyTag> modiyTags = new HashSet<ModifyTag>();

    Iterator<PsTag> it = tags.iterator();

    while (it.hasNext()) {

        PsTag tag = it.next();

        ModifyTag modifyTag = new ModifyTag();
        modifyTag.setId(tag.getId());
        modifyTag.setName(tag.getName());
        modiyTags.add(modifyTag);
    }

    ModifyItem modifyItem = new ModifyItem();
    modifyItem.setTags(modiyTags);

    PsCategory psCategory = psItem.getPsCategory();
    ModifyCategoty modifyCategoty = new ModifyCategoty();
    modifyCategoty.setId(psCategory.getId());
    modifyCategoty.setDescription(psCategory.getName());
    modifyItem.setModifyCategory(modifyCategoty);

    modifyItem.setName(psItem.getName());
    modifyItem.setDescription(psItem.getIntroduction());
    modifyItem.setExtra1(psItem.getExtra1());
    modifyItem.setPrice(psItem.getPrice());
    modifyItem.setUrlPictuere(psItem.getUrlPicture());

    JSONArray jsonArray = JSONArray.fromObject(modifyItem);

    PrintWriter out = response.getWriter();
    out.write(jsonArray.toString());
    out.flush();
    out.close();

}

From source file:org.biopax.validator.Main.java

protected static void runBatch(Validator validator, Collection<Resource> resources) throws IOException {

    //collect all reports in this object (only if --output option was used)
    final ValidatorResponse consolidatedReport = new ValidatorResponse();

    // Read from the batch and validate from file, id or url, line-by-line (stops on first empty line)
    for (Resource resource : resources) {
        Validation result = new Validation(new IdentifierImpl(), resource.getDescription(), autofix, null,
                maxErrors, profile);
        result.setDescription(resource.getDescription());
        log.info("BioPAX DATA IMPORT FROM: " + result.getDescription());
        try {
            validator.importModel(result, resource.getInputStream());
            validator.validate(result);

            //if autofix is enabled, then do normalize too
            if (autofix) {
                Model model = (Model) result.getModel();
                Normalizer normalizer = new Normalizer();
                normalizer.setXmlBase(xmlBase); //if xmlBase is null, the model's one is used
                normalizer.normalize(model);
            }

            if (output != null)
                consolidatedReport.addValidationResult(result);

        } catch (Exception e) {
            log.error("failed", e);
        }

        final String filename = outFileName(result);
        PrintWriter writer;

        // save modified (normalized) biopax if the option was used
        if (autofix) {
            Model model = (Model) result.getModel();
            (new SimpleIOHandler()).convertToOWL(model, new FileOutputStream(filename + EXT));
        }

        // remove the BioPAX data before writing report
        result.setModel(null);
        result.setModelData(null);

        // save the individual validation results
        //unless the user specified the output file explicitly
        if (output == null || output.isEmpty()) {
            writer = new PrintWriter(filename + ".validation." + outFormat);
            Source xsltSrc = (outFormat.equalsIgnoreCase("html"))
                    ? new StreamSource(ctx.getResource("classpath:html-result.xsl").getInputStream())
                    : null;
            ValidatorUtils.write(result, writer, xsltSrc);
            writer.close();
        }

        validator.getResults().remove(result);
        log.info("Done with " + filename);
    }

    // save if the user specified the output file explicitly
    if (output != null) {
        Writer writer = new PrintWriter(output);
        Source xsltSrc = (outFormat.equalsIgnoreCase("html"))
                ? new StreamSource(ctx.getResource("classpath:html-result.xsl").getInputStream())
                : null;
        ValidatorUtils.write(consolidatedReport, writer, xsltSrc);
        writer.close();
    }
}

From source file:gate.termraider.output.PairCsvGenerator.java

public void generateAndSaveCsv(AbstractPairbank pairbank, Number threshold, File outputFile)
        throws GateException {
    this.pairbank = pairbank;
    this.debugMode = pairbank.getDebugMode();
    this.scorePropertyName = pairbank.getScoreProperty();
    PrintWriter writer = initializeWriter(outputFile);
    generateCsv(writer, threshold);
    writer.flush();
    writer.close();
    if (debugMode) {
        System.out.println("Pairbank: saved CSV in " + outputFile.getAbsolutePath());
    }

}