Example usage for java.io Writer close

List of usage examples for java.io Writer close

Introduction

On this page you can find an example usage for java.io Writer close.

Prototype

public abstract void close() throws IOException;

Source Link

Document

Closes the stream, flushing it first.

Usage

From source file:edu.indiana.lib.twinpeaks.util.DomUtils.java

/**
 * Write formatted XML text to a String.
 * @param object The XML Document, HTML Document, or Element to write
 * @return String containing the formatted document text
 * @throws DomException/*w ww .j av a 2  s.  c o  m*/
 */
public static String serialize(Object object) throws DomException {
    ByteArrayOutputStream stream = null;
    Writer writer = null;

    try {
        stream = new ByteArrayOutputStream();
        writer = new OutputStreamWriter(stream, ENCODING);

        if (object instanceof Document) {
            serializeXml((Node) ((Document) object).getDocumentElement(), writer);
        } else if (object instanceof Element) {
            serializeXml((Node) object, writer);
        } else {
            throw new IllegalArgumentException("Unexpected object for serialzation: " + object.toString());
        }
        return stream.toString();

    } catch (Exception e) {
        throw new DomException(e.toString());

    } finally {
        try {
            if (writer != null)
                writer.close();
        } catch (Exception ignore) {
        }
        try {
            if (stream != null)
                stream.close();
        } catch (Exception ignore) {
        }
    }
}

From source file:edu.monash.merc.struts2.dispatcher.RssResult.java

/**
 * Renders the RSS/Atom feed found on the value stack to the HTTP response.
 *
 * @param actionInvocation the current Struts2 action invocation
 * @throws IllegalArgumentException if the {@code feedName} parameter is blank
 * @throws RuntimeException if the feed object is not on the stack or
 *         writing the feed to the response fails
 */
public void execute(ActionInvocation actionInvocation) throws Exception {
    if (StringUtils.isBlank(feedName)) {
        String msg = ("Required parameter with the name [" + feedName + "] not found. "
                + "Make sure you have the param tag set.");
        logger.error(msg);
        throw new IllegalArgumentException(msg);
    }

    // Set the content type for the response, falling back to the default.
    HttpServletResponse response = (HttpServletResponse) actionInvocation.getInvocationContext()
            .get(StrutsStatics.HTTP_RESPONSE);
    if (StringUtils.isBlank(mimeType)) {
        mimeType = DEFAULT_MIME_TYPE;
    }
    response.setContentType(mimeType);

    SyndFeed feed = (SyndFeed) actionInvocation.getStack().findValue(feedName);
    if (feed != null) {
        if (StringUtils.isBlank(encoding)) {
            encoding = feed.getEncoding();
        }
        // Set the response encoding (only if one was configured or carried
        // by the feed itself).
        if (StringUtils.isNotBlank(encoding)) {
            response.setCharacterEncoding(encoding);
        }
        // Set the feed type if one was explicitly configured.
        if (StringUtils.isNotBlank(feedType)) {
            feed.setFeedType(feedType);
        }
        SyndFeedOutput feedOutput = new SyndFeedOutput();
        // Use the response writer to stream the feed to the client.
        Writer out = null;
        try {
            out = response.getWriter();
            feedOutput.output(feed, out);
        } catch (Exception ex) {
            // Log with the throwable so the full stack trace is preserved;
            // the message alone rarely identifies the root cause.
            logger.error("Could not write the feed: " + ex.getMessage(), ex);
            throw new RuntimeException(ex);
        } finally {
            if (out != null) {
                out.close();
            }
        }
    } else {
        String errormsg = "Did not find the object on the stack with name '" + feedName + "'";
        logger.error(errormsg);
        throw new RuntimeException(errormsg);
    }
}

From source file:it.unimi.di.big.mg4j.document.DocumentCollectionTest.java

/**
 * Checks that an InputStreamDocumentSequence built from a '\u0000'-separated
 * UTF-8 stream yields exactly the second field of each document.
 */
@Test
public void testInputStreamSequence() throws IOException, ConfigurationException {
    System.err.println("Checking input stream (text field only)");
    // Extract only field number 1, and write it out with separator '\u0000'
    MutableString res = new MutableString();
    String[][] justSecondField = new String[ndoc][1];
    for (int i = 0; i < ndoc; i++) {
        res.append(document[i][1] + "\u0000");
        justSecondField[i][0] = document[i][1];
    }
    String resString = res.toString();
    // Write the sequence to a file (in UTF-8); try-with-resources guarantees
    // the writer is flushed and closed even if write() throws.
    File streamFile = new File(tempDir, "stream");
    try (Writer resWriter = new OutputStreamWriter(new FileOutputStream(streamFile), "UTF-8")) {
        resWriter.write(resString);
    }
    // Read it back as an input-stream document sequence.
    InputStream is = new FileInputStream(streamFile);
    DocumentSequence seq = new InputStreamDocumentSequence(is, '\u0000',
            new IdentityDocumentFactory(DEFAULT_PROPERTIES));
    try {
        checkAllDocumentsSeq(seq, new String[] { "text" }, justSecondField);
    } finally {
        // Close the sequence even when the check fails; presumably this also
        // releases the underlying stream — confirm against DocumentSequence.
        seq.close();
    }
}

From source file:com.all.client.util.FileUtil.java

/**
 * Quietly closes the given writer, logging (rather than propagating) any
 * exception raised by {@link Writer#close()}.
 *
 * @param writer the writer to close; may be {@code null}, in which case
 *        this method does nothing
 */
private void closeWriter(Writer writer) {
    if (writer != null) {
        try {
            writer.close();
        } catch (IOException e) {
            // Closing is best-effort; record the failure and move on.
            LOG.info(e, e);
        }
    }
}

From source file:ixa.pipe.ned_ukb.Annotate.java

/**
 * Disambiguates the named entities of a KAF/NAF document against Wikipedia
 * using UKB, attaching the resulting links as external references.
 *
 * Pipeline: (1) build a UKB context string from entity lemmas plus the
 * remaining term forms/lemmas (matched via an external merge_match.pl
 * script); (2) run the UKB executable over that context; (3) attach the
 * winning link (and optionally a cross-lingual mapping) to each entity;
 * (4) for entities UKB left unlinked, fall back to a most-frequent-sense
 * lookup (mfs.pl).
 *
 * @param kaf     the document whose entities are annotated in place
 * @param scripts directory containing merge_match.pl and mfs.pl
 * @param ukbExec path to the UKB executable
 * @param ukbKb   UKB knowledge-base file (its basename is used as the
 *                external-reference resource name)
 * @param ukbDict UKB dictionary file
 * @param wikiDb  Wikipedia database passed to the perl scripts
 * @throws Exception if any of the external processes or I/O fails
 */
public void disambiguateNEsToKAF(KAFDocument kaf, String scripts, String ukbExec, String ukbKb, String ukbDict,
        String wikiDb) throws Exception {

    String resourceExternalRef = ukbKb.substring(ukbKb.lastIndexOf("/") + 1);

    List<String> neIds = new ArrayList<String>();
    String ukbContext = "naf\n";

    // One context token per entity: joined lemma ## entity id #1.
    List<Entity> entities = kaf.getEntities();
    for (Entity entity : entities) {
        String entityId = entity.getId();
        String entityLemma = "";
        List<Term> entityTerms = entity.getTerms();
        for (Term term : entityTerms) {
            String tId = term.getId();
            neIds.add(tId);
            if (!entityLemma.equals("")) {
                entityLemma += "_";
            }
            entityLemma += term.getLemma().toLowerCase();
        }
        ukbContext += entityLemma + "##" + entityId + "#1 ";
    }

    String formsContext2Match = "";
    String lemmasContext2Match = "";

    // Collect the non-entity terms as form@@offset / lemma@@offset pairs;
    // terms containing the "@@" marker or spaces are skipped because they
    // would break the pair encoding below.
    List<Term> terms = kaf.getTerms();
    for (Term term : terms) {
        if (!neIds.contains(term.getId())) {
            if (!(term.getForm().contains("@@")) && !(term.getForm().contains(" "))) {
                formsContext2Match += term.getForm().toLowerCase() + "@@" + term.getWFs().get(0).getOffset()
                        + " ";
                lemmasContext2Match += term.getLemma().toLowerCase() + "@@" + term.getWFs().get(0).getOffset()
                        + " ";
            }
        }
    }

    // create UKB context
    String[] cmdMatch = { "perl", scripts + "/merge_match.pl", "-d", wikiDb, "--t1", formsContext2Match, "--t2",
            lemmasContext2Match };

    Process pMatch = Runtime.getRuntime().exec(cmdMatch);

    // NOTE(review): stdout is drained fully before stderr; if the script
    // writes enough to stderr to fill the pipe buffer, both sides can
    // deadlock — consider draining the streams concurrently.
    String matchedContext = "";
    String outputLineContext = "";
    BufferedReader outputContextStream = new BufferedReader(
            new InputStreamReader(pMatch.getInputStream(), "UTF-8"));
    while ((outputLineContext = outputContextStream.readLine()) != null) {
        matchedContext += outputLineContext + "\n";
    }
    outputContextStream.close();

    String errorContext = "";
    BufferedReader errorContextStream = new BufferedReader(new InputStreamReader(pMatch.getErrorStream()));
    while ((errorContext = errorContextStream.readLine()) != null) {
        System.err.println("MERGE_MATCH ERROR: " + errorContext);
    }
    errorContextStream.close();

    pMatch.waitFor();

    // Append the matched spots to the UKB context as string##offset#1.
    String[] contextStrings = matchedContext.split(" ");
    for (String contextString : contextStrings) {
        if (contextString.equals(""))
            continue;
        contextString = contextString.trim();

        //ContextString = spot_string@@spot_offset
        String[] contextWordOffset = contextString.split("@@");
        ukbContext += contextWordOffset[0] + "##" + contextWordOffset[1] + "#1 ";
    }

    // Write the context to a temporary file for UKB to consume.
    File contextTmpFile = File.createTempFile("context", ".tmp");
    contextTmpFile.deleteOnExit();
    String contextTmpFileName = contextTmpFile.getAbsolutePath();

    Writer contextFile = new BufferedWriter(
            new OutputStreamWriter(new FileOutputStream(contextTmpFile), "UTF-8"));
    try {
        contextFile.write(ukbContext);
    } finally {
        contextFile.close();
    }

    // run UKB
    // NOTE(review): this command is a single concatenated string handed to
    // Runtime.exec(String), which tokenizes on whitespace — paths containing
    // spaces will break; the String[] form (as used for cmdMatch) is safer.
    String cmdUkb = ukbExec
            + " --prank_damping 0.90 --prank_iter 15 --allranks --minput --nopos --ppr_w2w --dict_weight -K "
            + ukbKb + " -D " + ukbDict + " " + contextTmpFileName;

    Process pUkb = Runtime.getRuntime().exec(cmdUkb);

    String outputUkb = "";
    String outputLineUkb = "";
    BufferedReader outputUkbStream = new BufferedReader(new InputStreamReader(pUkb.getInputStream(), "UTF-8"));
    while ((outputLineUkb = outputUkbStream.readLine()) != null) {
        outputUkb += outputLineUkb + "\n";
    }
    outputUkbStream.close();

    String errorUkb = "";
    BufferedReader errorUkbStream = new BufferedReader(new InputStreamReader(pUkb.getErrorStream()));
    while ((errorUkb = errorUkbStream.readLine()) != null) {
        System.err.println("UKB ERROR: " + errorUkb);
    }
    errorUkbStream.close();

    pUkb.waitFor();

    // UKB output (one line): context_id word_id (concept_id(/weight)?)+ !! lemma   (there are 2 spaces after word_id)
    // UKB output example:    naf e12  Norvegia/0.999998 Norvegiako_bandera/2.25207e-06 !! norvegia
    Map<String, String> entityLinks = new HashMap<String, String>(); // e12 --> Norvegia/0.999998
    String ukbDisambiguations[] = outputUkb.split("\n");
    for (String ukbDisambiguation : ukbDisambiguations) {
        if (ukbDisambiguation.startsWith("!! -v"))
            continue;
        // ukbLine[1] = word_id, ukbLine[3] = top concept/weight (index 3
        // because of the double space after word_id documented above).
        String ukbLine[] = ukbDisambiguation.split(" ");
        entityLinks.put(ukbLine[1], ukbLine[3]);
    }

    // UKB links to KAF
    for (Entity entity : entities) {
        String entityId = entity.getId();
        if (entityLinks.containsKey(entityId)) {
            // Split "Page/0.99" into reference and confidence.
            String reference = entityLinks.get(entityId).split("/")[0];
            String confidence = entityLinks.get(entityId).split("/")[1];
            String ref2 = reference;
            reference = "http://" + language + ".wikipedia.org/wiki/" + reference;
            ExternalRef externalRef = kaf.newExternalRef(resourceExternalRef, reference);
            externalRef.setConfidence(Float.parseFloat(confidence));
            externalRef.setSource(language);
            externalRef.setReftype(language);
            entity.addExternalRef(externalRef);
            // Optionally attach the English cross-lingual mapping as well.
            if (cross) {
                String mappingRef = getMappingRef(reference);
                if (mappingRef != null) {
                    ExternalRef enRef = kaf.newExternalRef(this.resourceMapping, mappingRef);
                    enRef.setConfidence(Float.parseFloat(confidence));
                    enRef.setSource(language);
                    enRef.setReftype("en");
                    entity.addExternalRef(enRef);
                }
            }
        } else { // UKB didn't assign any link to this entity. Try with MFS
            String cmdMfs = "perl " + scripts + "/mfs.pl -d " + wikiDb;
            Process pMfs = Runtime.getRuntime().exec(cmdMfs);

            // Rebuild the underscore-joined lemma for this entity.
            String entityLemma = "";
            List<Term> entityTerms = entity.getTerms();
            for (Term term : entityTerms) {
                if (!entityLemma.equals("")) {
                    entityLemma += "_";
                }
                entityLemma += term.getLemma().toLowerCase();
            }

            // Feed the lemma to mfs.pl on stdin.
            // NOTE(review): getBytes() uses the platform default charset here,
            // unlike the UTF-8 used elsewhere — confirm this is intended.
            OutputStream stdinMfs = pMfs.getOutputStream();
            stdinMfs.write(entityLemma.getBytes());
            stdinMfs.flush();
            stdinMfs.close();

            // NOTE(review): readLine() may return null if mfs.pl produces no
            // output, which would NPE at the outputMfs.equals("NILL") check
            // below — consider a null guard.
            String outputMfs = "";
            BufferedReader outputMfsStream = new BufferedReader(
                    new InputStreamReader(pMfs.getInputStream(), "UTF-8"));
            outputMfs = outputMfsStream.readLine();
            outputMfsStream.close();

            String errorMfs = "";
            BufferedReader errorMfsStream = new BufferedReader(new InputStreamReader(pMfs.getErrorStream()));
            while ((errorMfs = errorMfsStream.readLine()) != null) {
                System.err.println("MFS ERROR: " + errorMfs);
            }
            errorMfsStream.close();

            pMfs.waitFor();
            // "NILL" marks "no sense found"; anything else is a page title.
            if (!outputMfs.equals("NILL")) {
                String reference = outputMfs;
                String confidence = "1";
                reference = "http://" + language + ".wikipedia.org/wiki/" + reference;
                ExternalRef externalRef = kaf.newExternalRef("MFS_" + resourceExternalRef, reference);
                externalRef.setConfidence(Float.parseFloat(confidence));
                externalRef.setSource(language);
                externalRef.setReftype(language);
                entity.addExternalRef(externalRef);
                if (cross) {
                    String mappingRef = getMappingRef(reference);
                    if (mappingRef != null) {
                        ExternalRef enRef = kaf.newExternalRef(this.resourceMapping, mappingRef);
                        enRef.setConfidence(Float.parseFloat(confidence));
                        enRef.setSource(language);
                        enRef.setReftype("en");
                        entity.addExternalRef(enRef);
                    }
                }

            }
        }
    }

}

From source file:seava.j4e.web.controller.ui.extjs.DependencyLoader.java

/**
 * Resolves all frame component dependencies for the given bundle/name pair
 * and concatenates their content into the target file (UTF-8).
 *
 * @param bundle the bundle identifier
 * @param name the frame component name
 * @param file the output file to write the packed content into
 * @throws Exception if dependency resolution or writing fails
 */
public void packFrameCmp(String bundle, String name, File file) throws Exception {

    List<String> dependencies = new ArrayList<String>();
    this.resolveFrameDependencies(bundle, name, null, dependencies, null);
    Writer out = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(file), "UTF-8"));
    try {
        // Append each resolved dependency in resolution order.
        for (String dependency : dependencies) {
            this.writeContentCmp(dependency, out);
        }
    } finally {
        out.close();
    }
}

From source file:grails.converters.JSON.java

/**
 * Flushes and closes the writer used for rendering, swallowing (but
 * logging) any failure so that render cleanup never propagates an error.
 *
 * @param out the writer to flush and close
 */
private void finalizeRender(Writer out) {
    try {
        out.flush();
        out.close();
    } catch (Exception e) {
        // Pass the exception as the log throwable so the stack trace is
        // preserved; the message alone rarely identifies the failure.
        log.warn("Unexpected exception while closing a writer: " + e.getMessage(), e);
    }
}

From source file:seava.j4e.web.controller.ui.extjs.DependencyLoader.java

/**
 * Packs the translation files for the frame components into a single
 * output file (written as ISO-8859-1).
 *
 * @param bundle the bundle identifier
 * @param name the frame component name
 * @param language the translation language code
 * @param file the output file to write the packed translations into
 * @throws Exception if dependency resolution or writing fails
 */
public void packFrameTrl(String bundle, String name, String language, File file) throws Exception {
    List<String> dependencies = new ArrayList<String>();
    this.resolveFrameDependencies(bundle, name, language, null, dependencies);
    Writer out = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(file), "ISO-8859-1"));
    try {
        // Append each resolved translation in resolution order.
        for (String dependency : dependencies) {
            this.writeContentTrl(dependency, language, out);
        }
    } finally {
        out.close();
    }
}

From source file:org.callimachusproject.repository.CalliRepository.java

/**
 * Replaces the blob stored at the given URI with the supplied content.
 *
 * @param uri the URI identifying the blob to replace
 * @param content the new character content to store
 * @throws OpenRDFException if the repository connection fails
 * @throws IOException if writing the blob fails
 */
public void storeBlob(String uri, String content) throws OpenRDFException, IOException {
    ObjectConnection connection = getConnection();
    try {
        logger.warn("Replacing {}", uri);
        Writer blobWriter = connection.getBlobObject(uri).openWriter();
        try {
            blobWriter.write(content);
        } finally {
            // Close the writer before releasing the connection.
            blobWriter.close();
        }
    } finally {
        connection.close();
    }
}

From source file:eu.gyza.eap.eapsocialontology.ontology.SocialOntology.java

/**
 * Serializes the inference ontology model to a new file named
 * "new_" + ontologyResource, as RDF/XML encoded in UTF-8.
 * Failures to open the file are logged and abort the save.
 */
public void save() {
    String fileName = "new_" + ontologyResource;
    Writer out2 = null;
    try {
        out2 = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(fileName), "UTF-8"));
    } catch (IOException ex) {
        // NOTE(review): logging under TwitterOntologyController's name looks
        // copy-pasted from another class — confirm the intended logger.
        Logger.getLogger(TwitterOntologyController.class.getName()).log(Level.SEVERE, null, ex);
        // Opening the file failed: bail out rather than passing a null
        // Writer to infOntModel.write() (which would throw a
        // NullPointerException, followed by a second NPE from
        // out2.close() in the finally block).
        return;
    }
    try {
        infOntModel.write(out2, "RDF/XML");
    } finally {
        try {
            out2.close();
        } catch (IOException closeException) {
            // Best-effort close; nothing useful can be done here.
        }
    }

}