Example usage for java.io.StringWriter.close()

List of usage examples for java.io.StringWriter.close()

Introduction

On this page you can find example usage of java.io.StringWriter.close().

Prototype

public void close() throws IOException 

Document

Closing a StringWriter has no effect. The methods in this class can be called after the stream has been closed without generating an IOException.
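
Below is a minimal, self-contained sketch (not taken from any of the projects listed further down) that illustrates this behaviour: content written to a StringWriter stays available through toString(), and close() is a no-op, although it is still declared to throw IOException.

import java.io.IOException;
import java.io.StringWriter;

public class StringWriterCloseExample {

    public static void main(String[] args) throws IOException {
        final StringWriter writer = new StringWriter();
        writer.write("hello, ");
        writer.append("world");

        // the buffered content can be read at any time
        final String result = writer.toString();

        // close() has no effect on a StringWriter and the writer remains usable afterwards,
        // but callers still have to handle or declare the checked IOException
        writer.close();

        System.out.println(result); // prints "hello, world"
    }
}

The examples below nevertheless call close(), typically for symmetry with other Writer implementations or because the StringWriter is cleaned up in a finally block or try-with-resources alongside resources that do need closing.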

Usage

From source file: de.tu_dortmund.ub.data.dswarm.Transform.java

/**
 * configuration and processing of the task
 *
 * @param inputDataModelID
 * @param projectIDs
 * @param outputDataModelID
 * @return
 */
private String executeTask(final String inputDataModelID, final Collection<String> projectIDs,
        final String outputDataModelID, final String serviceName, final String engineDswarmAPI,
        final Optional<Boolean> optionalDoIngestOnTheFly, final Optional<Boolean> optionalDoExportOnTheFly,
        final Optional<String> optionalExportMimeType, final Optional<String> optionalExportFileExtension)
        throws Exception {

    final JsonArray mappings = getMappingsFromProjects(projectIDs, serviceName, engineDswarmAPI);
    final JsonObject inputDataModel = getDataModel(inputDataModelID, serviceName, engineDswarmAPI);
    final JsonObject outputDataModel = getDataModel(outputDataModelID, serviceName, engineDswarmAPI);
    final Optional<JsonObject> optionalSkipFilter = getSkipFilter(serviceName, engineDswarmAPI);

    // build the task JSON
    final String persistString = config.getProperty(TPUStatics.PERSIST_IN_DMP_IDENTIFIER);

    final boolean persist;

    if (persistString != null && !persistString.trim().isEmpty()) {

        persist = Boolean.valueOf(persistString);
    } else {

        // default is false
        persist = false;
    }

    final StringWriter stringWriter = new StringWriter();
    final JsonGenerator jp = Json.createGenerator(stringWriter);

    jp.writeStartObject();
    jp.write(DswarmBackendStatics.PERSIST_IDENTIFIER, persist);
    // default for now: true, i.e., no content will be returned
    jp.write(DswarmBackendStatics.DO_NOT_RETURN_DATA_IDENTIFIER, true);
    // default for now: true, i.e., if a schema is attached, it will be utilised (instead of being derived from the data resource)
    jp.write(DswarmBackendStatics.UTILISE_EXISTING_INPUT_IDENTIFIER, true);

    if (optionalDoIngestOnTheFly.isPresent()) {

        final Boolean doIngestOnTheFly = optionalDoIngestOnTheFly.get();

        if (doIngestOnTheFly) {

            LOG.info(String.format("[%s][%d] do ingest on-the-fly", serviceName, cnt));
        }

        jp.write(DswarmBackendStatics.DO_INGEST_ON_THE_FLY, doIngestOnTheFly);
    }

    if (optionalDoExportOnTheFly.isPresent()) {

        final Boolean doExportOnTheFly = optionalDoExportOnTheFly.get();

        if (doExportOnTheFly) {

            LOG.info(String.format("[%s][%d] do export on-the-fly", serviceName, cnt));
        }

        jp.write(DswarmBackendStatics.DO_EXPORT_ON_THE_FLY, doExportOnTheFly);
    }

    jp.write(DswarmBackendStatics.DO_VERSIONING_ON_RESULT_IDENTIFIER, false);

    // task
    jp.writeStartObject(DswarmBackendStatics.TASK_IDENTIFIER);
    jp.write(DswarmBackendStatics.NAME_IDENTIFIER, "Task Batch-Prozess 'CrossRef'");
    jp.write(DswarmBackendStatics.DESCRIPTION_IDENTIFIER,
            "Task Batch-Prozess 'CrossRef' zum InputDataModel 'inputDataModelID '");

    // job
    jp.writeStartObject(DswarmBackendStatics.JOB_IDENTIFIER);
    jp.write(DswarmBackendStatics.UUID_IDENTIFIER, UUID.randomUUID().toString());
    jp.write(DswarmBackendStatics.MAPPINGS_IDENTIFIER, mappings);

    if (optionalSkipFilter.isPresent()) {

        jp.write(DswarmBackendStatics.SKIP_FILTER_IDENTIFIER, optionalSkipFilter.get());
    }

    jp.writeEnd();

    jp.write(DswarmBackendStatics.INPUT_DATA_MODEL_IDENTIFIER, inputDataModel);
    jp.write(DswarmBackendStatics.OUTPUT_DATA_MODEL_IDENTIFIER, outputDataModel);

    // end task
    jp.writeEnd();

    // end request
    jp.writeEnd();

    jp.flush();
    jp.close();

    final String task = stringWriter.toString();
    stringWriter.flush();
    stringWriter.close();

    LOG.debug(String.format("[%s][%d] task : %s", serviceName, cnt, task));

    try (final CloseableHttpClient httpclient = HttpClients.createDefault()) {

        // POST /dmp/tasks/
        final HttpPost httpPost = new HttpPost(engineDswarmAPI + DswarmBackendStatics.TASKS_ENDPOINT);
        final StringEntity stringEntity = new StringEntity(task, ContentType.APPLICATION_JSON);
        stringEntity.setChunked(true);

        final String mimetype;

        if (optionalDoExportOnTheFly.isPresent() && optionalDoExportOnTheFly.get()) {

            if (optionalExportMimeType.isPresent()) {

                mimetype = optionalExportMimeType.get();
            } else {

                // default export mime type is XML
                mimetype = APIStatics.APPLICATION_XML_MIMETYPE;
            }
        } else {

            mimetype = APIStatics.APPLICATION_JSON_MIMETYPE;
        }

        httpPost.setHeader(HttpHeaders.ACCEPT, mimetype);
        //httpPost.setHeader(HttpHeaders.TRANSFER_ENCODING, CHUNKED_TRANSFER_ENCODING);

        httpPost.setEntity(stringEntity);

        final Header[] requestHeaders = httpPost.getAllHeaders();

        final String printedRequestHeaders = printHeaders(requestHeaders);

        LOG.info(String.format("[%s][%d] request : %s :: request headers : \n'%s' :: body : '%s'", serviceName,
                cnt, httpPost.getRequestLine(), printedRequestHeaders, stringEntity));

        try (final CloseableHttpResponse httpResponse = httpclient.execute(httpPost)) {

            final Header[] responseHeaders = httpResponse.getAllHeaders();

            final String printedResponseHeaders = printHeaders(responseHeaders);

            LOG.info(String.format("[%s][%d]  response headers : \n'%s'", serviceName, cnt,
                    printedResponseHeaders));

            final int statusCode = httpResponse.getStatusLine().getStatusCode();

            switch (statusCode) {

            case 204: {

                LOG.info(String.format("[%s][%d] %d : %s", serviceName, cnt, statusCode,
                        httpResponse.getStatusLine().getReasonPhrase()));

                EntityUtils.consume(httpResponse.getEntity());

                return "success";
            }
            case 200: {

                if (optionalDoExportOnTheFly.isPresent() && optionalDoExportOnTheFly.get()) {

                    LOG.info(String.format("[%s][%d] %d : %s", serviceName, cnt, statusCode,
                            httpResponse.getStatusLine().getReasonPhrase()));

                    final String exportFileExtension;

                    if (optionalExportFileExtension.isPresent()) {

                        exportFileExtension = optionalExportFileExtension.get();
                    } else {

                        // XML as default file ending
                        exportFileExtension = TaskProcessingUnit.XML_FILE_ENDING;
                    }

                    // write result to file
                    final String fileName = TPUUtil.writeResultToFile(httpResponse, config,
                            outputDataModelID + "-" + inputDataModelID + "-" + cnt, exportFileExtension);

                    return "success - exported XML to '" + fileName + "'";
                }
            }
            default: {

                LOG.error(String.format("[%s][%d] %d : %s", serviceName, cnt, statusCode,
                        httpResponse.getStatusLine().getReasonPhrase()));

                final String response = TPUUtil.getResponseMessage(httpResponse);

                throw new Exception("something went wrong at task execution: " + response);
            }
            }
        }
    }
}

From source file: com.seajas.search.contender.service.modifier.ArchiveModifierService.java

/**
 * Handle the given archive's content by sending each entry to the given handler.
 *
 * @param archive
 * @param handler
 * @throws Exception
 */
public void handleArchive(final Archive archive, final ArchiveResultHandler handler) throws Exception {
    // Create a validating SAX parser

    final SAXParserFactory parserFactory = SAXParserFactory.newInstance();

    parserFactory.setValidating(true);
    parserFactory.setNamespaceAware(true);

    // Create a common transformer per thread

    Transformer transformer = null;

    try {
        transformer = transformerCache.getTransformer(archive.getId(), "archive",
                archive.getModificationDate());

        if (transformer == null)
            transformer = transformerCache.putContent(archive.getId(), "archive", archive.getModificationDate(),
                    archive.getTransformerContent());
    } catch (TransformerConfigurationException e) {
        logger.error("Unable to generate a (cached) transformer from the given content", e);

        return;
    } catch (TransformerFactoryConfigurationError e) {
        logger.error("Unable to generate a (cached) transformer from the given content", e);

        return;
    }

    // Store and process the files

    try {
        Map<File, String> storedResults = storeAndDecompressFiles(archive);

        List<String> deletedLinks = new ArrayList<String>();

        // Only process deletes when a deletion expression has been provided

        if (StringUtils.hasText(archive.getDeletionExpression())) {
            // Process all entries beforehand, so to exclude deletes from the final result

            for (Map.Entry<File, String> storedResult : storedResults.entrySet()) {
                File storedResultsFolder = storedResult.getKey();

                for (File entryLocation : storedResultsFolder.listFiles())
                    if (entryLocation.getName().matches(archive.getDeletionExpression())) {
                        String deleteLink = entryLocation.getName().replaceAll(archive.getDeletionExpression(),
                                archive.getInternalLink());

                        deletedLinks.add(deleteLink);

                        // Delete the actual link

                        cacheService
                                .addDeleted(new CacheService.DeletedEntry(archive.getCollection(), deleteLink));
                    }
            }
        }

        // Now process the stored results themselves

        for (Map.Entry<File, String> storedResult : storedResults.entrySet()) {
            File storedResultsFolder = storedResult.getKey();

            // Create the descriptions folder if it doesn't already exist

            File descriptionsFolderLocation = new File(storedResultsFolder, "descriptions");

            if (!descriptionsFolderLocation.exists())
                descriptionsFolderLocation.mkdirs();

            for (File entryLocation : storedResultsFolder.listFiles()) {
                if (entryLocation.isDirectory()) {
                    if (!entryLocation.getName().equals("descriptions"))
                        logger.warn("Unknown folder '" + entryLocation.getName()
                                + "' found in decompressed archive folder '"
                                + storedResultsFolder.getAbsolutePath() + "'");

                    continue;
                } else if (StringUtils.hasText(archive.getDeletionExpression())
                        && entryLocation.getName().matches(archive.getDeletionExpression()))
                    continue;

                InputStream transformationInputStream = null;

                try {
                    transformationInputStream = new BufferedInputStream(new FileInputStream(entryLocation));

                    // Now determine the content type and create a reader in case of structured content

                    MediaType entryMediaType = autoDetectParser.getDetector().detect(transformationInputStream,
                            new Metadata());

                    if (!(entryMediaType.getSubtype().equals("xml")
                            || entryMediaType.getSubtype().endsWith("+xml"))) {
                        logger.warn("Archive entry " + entryLocation.getAbsolutePath() + " contains "
                                + entryMediaType + " data which is unstructured, ignoring");

                        continue;
                    }
                } catch (IOException e) {
                    logger.error("Could not close input stream during archive processing", e);

                    if (transformationInputStream != null)
                        transformationInputStream.close();

                    continue;
                }

                // Process it as (semi-)structured content

                XmlReader transformationReader = new XmlReader(transformationInputStream, true);
                StringWriter transformationWriter = new StringWriter();

                try {
                    transformer.setOutputProperty(OutputKeys.ENCODING, "UTF8");

                    // Use a SAX reader for entity resolution

                    XMLReader xmlReader = parserFactory.newSAXParser().getXMLReader();
                    InputSource inputSource = new InputSource(transformationReader);

                    xmlReader.setEntityResolver(transformerCache.getEntityResolver());
                    inputSource.setSystemId("file://" + transformerCache.getDtdImportPath() + "/template.xsl");

                    // Perform the actual transformation

                    transformer.setParameter("substituteUrl", archive.getInternalLink());
                    transformer.transform(new SAXSource(xmlReader, inputSource),
                            new StreamResult(transformationWriter));
                } catch (TransformerException e) {
                    logger.error("Unable to perform content transformation for entry "
                            + entryLocation.getAbsolutePath());

                    continue;
                } catch (SAXException e) {
                    logger.error("Unable to perform content transformation for entry "
                            + entryLocation.getAbsolutePath());

                    continue;
                } catch (ParserConfigurationException e) {
                    logger.error("Unable to perform content transformation for entry "
                            + entryLocation.getAbsolutePath());

                    continue;
                } finally {
                    transformationInputStream.close();
                    transformationReader.close();
                }

                // Create a syndication feed from the given result

                String resultContent = transformationWriter.toString();

                SyndFeed resultFeed = null;

                try {
                    SyndFeedInput feedInput = new SyndFeedInput();

                    resultFeed = feedInput.build(new StringReader(resultContent));
                } catch (FeedException e) {
                    logger.error("Could not parse the feed resulting from the archive entry transformation");

                    continue;
                } finally {
                    transformationWriter.close();
                }

                // Write the <description> content to a separate file and add it as an <enclosure>

                if (resultFeed.getEntries().size() > 0) {
                    Integer entryNumber = 0;

                    for (SyndEntry feedEntry : (Collection<SyndEntry>) resultFeed.getEntries()) {
                        if (!deletedLinks.contains(feedEntry.getLink())) {
                            String description = feedEntry.getDescription().getValue().trim();

                            File descriptionLocation = new File(descriptionsFolderLocation,
                                    stripExtension(entryLocation.getName()) + "-" + entryNumber++
                                            + (feedEntry.getDescription().getType().equals("text/html")
                                                    ? ".html"
                                                    : ".xml"));

                            Writer descriptionWriter = new OutputStreamWriter(
                                    new FileOutputStream(descriptionLocation), "UTF-8");

                            if (!description.endsWith("</html>"))
                                descriptionWriter.write("<html>\n<head>\n\t<title>" + feedEntry.getTitle()
                                        + "</title>\n</head>\n<body>\n");
                            descriptionWriter.write(description);
                            if (!description.endsWith("</html>"))
                                descriptionWriter.write("\n</body>\n</html>");

                            descriptionWriter.flush();
                            descriptionWriter.close();

                            // Remove the link from the processed cache should it already be in there, taking care of updates

                            cacheService.deleteElement(feedEntry.getLink());

                            // Then offer it up to the handler

                            if (logger.isDebugEnabled())
                                logger.debug("Adding result content (" + entryNumber
                                        + ") for archive entry with path " + entryLocation.getAbsolutePath());

                            try {
                                // NOTE: The encoding of 'UTF-8' is implied for archive-related files

                                handler.process(new URI(feedEntry.getLink()), archive.getHostname(), feedEntry,
                                        resultFeed);
                            } catch (FeedException e) {
                                logger.error(String.format(
                                        "Could not offer feed entry with link '%s' - invalid entry",
                                        feedEntry.getLink()), e);
                            } catch (URISyntaxException e) {
                                logger.error(String.format(
                                        "Could not offer feed entry with link '%s' - invalid link",
                                        feedEntry.getLink()), e);
                            }
                        } else
                            logger.info("Skipping over feed entry with link '" + feedEntry.getLink()
                                    + "' - marked for deletion");
                    }
                } else if (logger.isDebugEnabled())
                    logger.debug("No entries were found in archive entry with path "
                            + entryLocation.getAbsolutePath());
            }

            logger.info("Finished processing archive with name " + storedResult.getValue());

            // Now archive the entry in the cache

            cacheService.addArchived(storedResult.getValue());
        }

        logger.info("Finishing archive populator thread");
    } catch (IOException e) {
        logger.error("Could not close input stream during archive processing", e);
    }
}

From source file: de.intranda.goobi.plugins.CSICMixedImport.java

private List<Record> retrieveOriginalRecordsFromFile() {

    List<Record> recordList = new ArrayList<Record>();
    HashMap<String, String> filteredRecordStrings = new HashMap<String, String>();
    int count = 0;

    if (importFile.getName().endsWith("zip")) {
        logger.info("Extracting zip archive");
        HashMap<String, byte[]> recordStrings = unzipFile(importFile);
        // Check all records first to see what's inside
        for (String key : recordStrings.keySet()) {
            byte[] bytes = recordStrings.get(key);
            InputStream bais = null;
            if (key.endsWith(".zip")) {
                // Another zip-archive
                logger.debug("Extracting inner archive " + key + "; size = " + bytes.length + " bytes");
                try {
                    bais = new ByteArrayInputStream(bytes);
                    // FileOutputStream fout = new FileOutputStream(new File(importFolder, "temp.zip"));
                    // fout.write(bytes);
                    // fout.close();
                    HashMap<String, String> tempRecords = unzipStream(bais);
                    filteredRecordStrings.putAll(tempRecords);
                } catch (Exception e) {
                    logger.error("Unable to read inner zip file");
                } finally {
                    if (bais != null) {
                        try {
                            bais.close();
                        } catch (IOException e) {
                            logger.error(e);
                        }
                    }
                }
            } else if (key.endsWith(".xml")) {
                BufferedReader br = null;
                // ByteArrayInputStream bais = null;
                InputStreamReader isr = null;
                try {
                    bais = new ByteArrayInputStream(bytes);
                    isr = new InputStreamReader(bais);
                    br = new BufferedReader(isr);
                    StringBuffer sb = new StringBuffer();
                    String line;
                    while ((line = br.readLine()) != null) {
                        sb.append(line).append("\n");
                    }
                    filteredRecordStrings.put(key, sb.toString());
                } catch (IOException e) {
                    logger.error("Unable to read METS file from zip-Archive");
                } finally {
                    try {
                        if (bais != null) {
                            bais.close();
                        }
                        if (isr != null) {
                            isr.close();
                        }
                        if (br != null) {
                            br.close();
                        }
                    } catch (IOException e) {
                        logger.error("Error closing String reader");
                    }
                }
            }
        }

        for (String key : filteredRecordStrings.keySet()) {
            if (!key.endsWith(".xml")) {
                continue;
            }
            String importFileName = key;
            String importData = filteredRecordStrings.get(key);
            logger.debug("Extracting record " + ++count);
            Record rec = new Record();
            // System.out.println("Data from Zip-File:\n " + importData);
            rec.setData(importData);
            logger.debug("Getting record " + importFileName);
            rec.setId(importFileName.substring(0, importFileName.indexOf(".")));
            recordList.add(rec);
        }
    } else {
        logger.info("Importing single record file");
        InputStream input = null;
        StringWriter writer = null;
        try {
            logger.debug("loaded file: " + importFile.getAbsolutePath());
            String importFileName = importFile.getName();
            input = new FileInputStream(importFile);
            Record rec = new Record();
            writer = new StringWriter();
            IOUtils.copy(input, writer, encoding);
            rec.setData(writer.toString());
            rec.setId(importFileName.substring(0, importFileName.indexOf(".")));
            recordList.add(rec);
        } catch (FileNotFoundException e) {
            logger.error(e.getMessage(), e);
        } catch (IOException e) {
            logger.error(e.getMessage(), e);
        } finally {
            if (input != null) {
                try {
                    if (writer != null)
                        writer.close();
                    input.close();
                } catch (IOException e) {
                    logger.error(e.getMessage(), e);
                }
            }
        }
    }
    return recordList;

}

From source file: org.apache.solr.handler.extraction.ExtractingDocumentLoader.java

@Override
public void load(SolrQueryRequest req, SolrQueryResponse rsp, ContentStream stream,
        UpdateRequestProcessor processor) throws Exception {
    Parser parser = null;
    String streamType = req.getParams().get(ExtractingParams.STREAM_TYPE, null);
    if (streamType != null) {
        //Cache?  Parsers are lightweight to construct and thread-safe, so I'm told
        MediaType mt = MediaType.parse(streamType.trim().toLowerCase(Locale.ROOT));
        parser = new DefaultParser(config.getMediaTypeRegistry()).getParsers().get(mt);
    } else {
        parser = autoDetectParser;
    }
    if (parser != null) {
        Metadata metadata = new Metadata();

        // If you specify the resource name (the filename, roughly) with this parameter,
        // then Tika can make use of it in guessing the appropriate MIME type:
        String resourceName = req.getParams().get(ExtractingParams.RESOURCE_NAME, null);
        if (resourceName != null) {
            metadata.add(TikaMetadataKeys.RESOURCE_NAME_KEY, resourceName);
        }
        // Provide stream's content type as hint for auto detection
        if (stream.getContentType() != null) {
            metadata.add(HttpHeaders.CONTENT_TYPE, stream.getContentType());
        }

        InputStream inputStream = null;
        try {
            inputStream = stream.getStream();
            metadata.add(ExtractingMetadataConstants.STREAM_NAME, stream.getName());
            metadata.add(ExtractingMetadataConstants.STREAM_SOURCE_INFO, stream.getSourceInfo());
            metadata.add(ExtractingMetadataConstants.STREAM_SIZE, String.valueOf(stream.getSize()));
            metadata.add(ExtractingMetadataConstants.STREAM_CONTENT_TYPE, stream.getContentType());
            // HtmlParser and TXTParser regard Metadata.CONTENT_ENCODING in metadata
            String charset = ContentStreamBase.getCharsetFromContentType(stream.getContentType());
            if (charset != null) {
                metadata.add(HttpHeaders.CONTENT_ENCODING, charset);
            }

            String xpathExpr = params.get(ExtractingParams.XPATH_EXPRESSION);
            boolean extractOnly = params.getBool(ExtractingParams.EXTRACT_ONLY, false);
            SolrContentHandler handler = factory.createSolrContentHandler(metadata, params, req.getSchema());
            ContentHandler parsingHandler = handler;

            StringWriter writer = null;
            BaseMarkupSerializer serializer = null;
            if (extractOnly == true) {
                String extractFormat = params.get(ExtractingParams.EXTRACT_FORMAT, "xml");
                writer = new StringWriter();
                if (extractFormat.equals(TEXT_FORMAT)) {
                    serializer = new TextSerializer();
                    serializer.setOutputCharStream(writer);
                    serializer.setOutputFormat(new OutputFormat("Text", "UTF-8", true));
                } else {
                    serializer = new XMLSerializer(writer, new OutputFormat("XML", "UTF-8", true));
                }
                if (xpathExpr != null) {
                    Matcher matcher = PARSER.parse(xpathExpr);
                    serializer.startDocument();//The MatchingContentHandler does not invoke startDocument.  See http://tika.markmail.org/message/kknu3hw7argwiqin
                    parsingHandler = new MatchingContentHandler(serializer, matcher);
                } else {
                    parsingHandler = serializer;
                }
            } else if (xpathExpr != null) {
                Matcher matcher = PARSER.parse(xpathExpr);
                parsingHandler = new MatchingContentHandler(handler, matcher);
            } //else leave it as is

            try {
                //potentially use a wrapper handler for parsing, but we still need the SolrContentHandler for getting the document.
                ParseContext context = parseContextConfig.create();

                context.set(Parser.class, parser);
                context.set(HtmlMapper.class, MostlyPassthroughHtmlMapper.INSTANCE);

                // Password handling
                RegexRulesPasswordProvider epp = new RegexRulesPasswordProvider();
                String pwMapFile = params.get(ExtractingParams.PASSWORD_MAP_FILE);
                if (pwMapFile != null && pwMapFile.length() > 0) {
                    InputStream is = req.getCore().getResourceLoader().openResource(pwMapFile);
                    if (is != null) {
                        log.debug("Password file supplied: " + pwMapFile);
                        epp.parse(is);
                    }
                }
                context.set(PasswordProvider.class, epp);
                String resourcePassword = params.get(ExtractingParams.RESOURCE_PASSWORD);
                if (resourcePassword != null) {
                    epp.setExplicitPassword(resourcePassword);
                    log.debug("Literal password supplied for file " + resourceName);
                }
                parser.parse(inputStream, parsingHandler, metadata, context);
            } catch (TikaException e) {
                if (ignoreTikaException)
                    log.warn(new StringBuilder("skip extracting text due to ").append(e.getLocalizedMessage())
                            .append(". metadata=").append(metadata.toString()).toString());
                else
                    throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
            }
            if (extractOnly == false) {
                addDoc(handler);
            } else {
                //serializer is not null, so we need to call endDoc on it if using xpath
                if (xpathExpr != null) {
                    serializer.endDocument();
                }
                rsp.add(stream.getName(), writer.toString());
                writer.close();
                String[] names = metadata.names();
                NamedList metadataNL = new NamedList();
                for (int i = 0; i < names.length; i++) {
                    String[] vals = metadata.getValues(names[i]);
                    metadataNL.add(names[i], vals);
                }
                rsp.add(stream.getName() + "_metadata", metadataNL);
            }
        } catch (SAXException e) {
            throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
        } finally {
            IOUtils.closeQuietly(inputStream);
        }
    } else {
        throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
                "Stream type of " + streamType + " didn't match any known parsers.  Please supply the "
                        + ExtractingParams.STREAM_TYPE + " parameter.");
    }
}

From source file: com.beligum.core.utils.AssetPacker.java

private static String minify(File file) {
    String retVal = "";

    StringReader in = null;
    StringWriter out = null;
    try {
        retVal = FileUtils.readFileToString(file);

        /*
         * Pre-processing
         */

        // remove 'special comments' so they get removed anyway (for libraries à la bootstrap, jquery, ...)
        retVal = retVal.replace("/*!", "/*");

        in = new StringReader(retVal);
        out = new StringWriter();

        if (file != null && file.exists()) {

            if (file.getAbsolutePath().endsWith(".js")) {
                JavaScriptCompressor compressor = new JavaScriptCompressor(in, new ErrorReporter() {
                    public void warning(String message, String sourceName, int line, String lineSource,
                            int lineOffset) {
                        Logger.warn(message);
                    }

                    public void error(String message, String sourceName, int line, String lineSource,
                            int lineOffset) {
                        Logger.error(message);
                    }

                    public EvaluatorException runtimeError(String message, String sourceName, int line,
                            String lineSource, int lineOffset) {
                        error(message, sourceName, line, lineSource, lineOffset);
                        return new EvaluatorException(message);
                    }
                });

                /*
                 * Display informational messages and warnings.
                 */
                boolean verbose = false;

                /*
                 * This minifies AND obfuscates local symbols, disable to minify only. 
                 */
                boolean munge = true;

                /* Preserve unnecessary semicolons (such as right before a '}') This option
                 * is useful when compressed code has to be run through JSLint (which is the
                 * case of YUI for example)
                 */
                boolean preserveAllSemiColons = false;

                /*
                 * Disable all the built-in micro optimizations.
                 */
                boolean disableOptimizations = false;

                compressor.compress(out, -1, munge, verbose, preserveAllSemiColons, disableOptimizations);
                retVal = out.toString();

                /*
                 * For Google Closure, switched to YuiCompressor cause it also provided css support
                 * 
                com.google.javascript.jscomp.Compiler compiler = new com.google.javascript.jscomp.Compiler();
                CompilerOptions options = new CompilerOptions();
                        
                CompilationLevel.SIMPLE_OPTIMIZATIONS.setOptionsForCompilationLevel(options);
                //WarningLevel.QUIET.setOptionsForWarningLevel(options);
                //compiler.setLoggingLevel(Level.ALL);
                        
                //options.setLanguageIn(CompilerOptions.LanguageMode.ECMASCRIPT5);
                //Processes goog.provide() and goog.require() calls
                //options.closurePass = true;
                        
                List<SourceFile> externs = new ArrayList<SourceFile>();
                List<SourceFile> inputs = new ArrayList<SourceFile>();
                inputs.add(SourceFile.fromFile(file));
                        
                com.google.javascript.jscomp.Result compileResult = compiler.compile(externs, inputs, options);
                if (compileResult.success) {
                retVal = compiler.toSource();
                }
                else {
                throw new Exception(compileResult.debugLog);
                }
                */
            } else if (file.getAbsolutePath().endsWith(".css")) {
                File minFile = new File(
                        file.getAbsolutePath().substring(0, file.getAbsolutePath().length() - ".css".length())
                                + ".min.css");
                //we always re-minify, just to be sure...
                CssCompressor cssCompressor = new CssCompressor(in);
                cssCompressor.compress(out, -1);
                retVal = out.toString();
            } else {
                throw new Exception("Can't minify this file; unknown source type.");
            }
        } else {
            Logger.error("Trying to minify a file that doesn't exist: " + file.getAbsolutePath());
        }
    } catch (Exception e) {
        Logger.warn("Error while minifying file " + file.getAbsolutePath(), e);
    } finally {
        if (in != null) {
            in.close();
        }
        if (out != null) {
            try {
                out.close();
            } catch (IOException e) {
            }
        }
    }

    return retVal;
}

From source file: org.apache.hadoop.conf.TestConfiguration.java

public void testDumpConfiguration() throws IOException {
    StringWriter outWriter = new StringWriter();
    Configuration.dumpConfiguration(conf, outWriter);
    String jsonStr = outWriter.toString();
    ObjectMapper mapper = new ObjectMapper();
    JsonConfiguration jconf = mapper.readValue(jsonStr, JsonConfiguration.class);
    int defaultLength = jconf.getProperties().length;

    // add 3 keys to the existing configuration properties
    out = new BufferedWriter(new FileWriter(CONFIG));
    startConfig();
    appendProperty("test.key1", "value1");
    appendProperty("test.key2", "value2", true);
    appendProperty("test.key3", "value3");
    endConfig();
    Path fileResource = new Path(CONFIG);
    conf.addResource(fileResource);
    out.close();

    outWriter = new StringWriter();
    Configuration.dumpConfiguration(conf, outWriter);
    jsonStr = outWriter.toString();
    mapper = new ObjectMapper();
    jconf = mapper.readValue(jsonStr, JsonConfiguration.class);
    int length = jconf.getProperties().length;
    // check for consistency in the number of properties parsed in Json format.
    assertEquals(length, defaultLength + 3);

    //change few keys in another resource file
    out = new BufferedWriter(new FileWriter(CONFIG2));
    startConfig();
    appendProperty("test.key1", "newValue1");
    appendProperty("test.key2", "newValue2");
    endConfig();
    Path fileResource1 = new Path(CONFIG2);
    conf.addResource(fileResource1);
    out.close();

    outWriter = new StringWriter();
    Configuration.dumpConfiguration(conf, outWriter);
    jsonStr = outWriter.toString();
    mapper = new ObjectMapper();
    jconf = mapper.readValue(jsonStr, JsonConfiguration.class);

    // put the keys and their corresponding attributes into a hashmap for their 
    // efficient retrieval
    HashMap<String, JsonProperty> confDump = new HashMap<String, JsonProperty>();
    for (JsonProperty prop : jconf.getProperties()) {
        confDump.put(prop.getKey(), prop);
    }
    // check if the value and resource of test.key1 is changed
    assertEquals("newValue1", confDump.get("test.key1").getValue());
    assertEquals(false, confDump.get("test.key1").getIsFinal());
    assertEquals(fileResource1.toString(), confDump.get("test.key1").getResource());
    // check if final parameter test.key2 is not changed, since it is first 
    // loaded as final parameter
    assertEquals("value2", confDump.get("test.key2").getValue());
    assertEquals(true, confDump.get("test.key2").getIsFinal());
    assertEquals(fileResource.toString(), confDump.get("test.key2").getResource());
    // check for other keys which are not modified later
    assertEquals("value3", confDump.get("test.key3").getValue());
    assertEquals(false, confDump.get("test.key3").getIsFinal());
    assertEquals(fileResource.toString(), confDump.get("test.key3").getResource());
    // check for resource to be "Unknown" for keys which are loaded using 'set' 
    // and expansion of properties
    conf.set("test.key4", "value4");
    conf.set("test.key5", "value5");
    conf.set("test.key6", "${test.key5}");
    outWriter = new StringWriter();
    Configuration.dumpConfiguration(conf, outWriter);
    jsonStr = outWriter.toString();
    mapper = new ObjectMapper();
    jconf = mapper.readValue(jsonStr, JsonConfiguration.class);
    confDump = new HashMap<String, JsonProperty>();
    for (JsonProperty prop : jconf.getProperties()) {
        confDump.put(prop.getKey(), prop);
    }
    assertEquals("value5", confDump.get("test.key6").getValue());
    assertEquals("programatically", confDump.get("test.key4").getResource());
    outWriter.close();
}

From source file: org.netbeans.nbbuild.MakeJnlp2.java

private void generateFiles() throws IOException, BuildException {

    final Set<String> declaredLocales = new HashSet<String>();

    final boolean useAllLocales;

    if ("*".equals(includelocales)) {
        useAllLocales = true;
    } else if ("".equals(includelocales)) {
        useAllLocales = false;
    } else {
        useAllLocales = false;
        StringTokenizer tokenizer = new StringTokenizer(includelocales, ",");
        while (tokenizer.hasMoreElements()) {
            declaredLocales.add(tokenizer.nextToken());
        }
    }

    final Set<String> indirectFilePaths = new HashSet<String>();
    for (FileSet fs : new FileSet[] { indirectJars, indirectFiles }) {
        if (fs != null) {
            DirectoryScanner scan = fs.getDirectoryScanner(getProject());
            for (String f : scan.getIncludedFiles()) {
                indirectFilePaths.add(f.replace(File.pathSeparatorChar, '/'));
            }
        }
    }

    final ExecutorService executorService = Executors.newFixedThreadPool(nbThreads);

    final List<BuildException> exceptions = new ArrayList<BuildException>();

    for (final Iterator fileIt = files.iterator(); fileIt.hasNext();) {
        if (!exceptions.isEmpty()) {
            break;
        }

        final FileResource fr = (FileResource) fileIt.next();
        final File jar = fr.getFile();

        if (!jar.canRead()) {
            throw new BuildException("Cannot read file: " + jar);
        }

        //
        if (optimize && checkDuplicate(jar).isPresent()) {
            continue;
        }
        //

        executorService.execute(new Runnable() {
            @Override
            public void run() {
                JarFile theJar = null;
                try {
                    theJar = new JarFile(jar);

                    String codenamebase = JarWithModuleAttributes
                            .extractCodeName(theJar.getManifest().getMainAttributes());
                    if (codenamebase == null) {
                        throw new BuildException("Not a NetBeans Module: " + jar);
                    }
                    {
                        int slash = codenamebase.indexOf('/');
                        if (slash >= 0) {
                            codenamebase = codenamebase.substring(0, slash);
                        }
                    }
                    String dashcnb = codenamebase.replace('.', '-');

                    String title;
                    String oneline;
                    String shrt;
                    String osDep = null;

                    {
                        String bundle = theJar.getManifest().getMainAttributes()
                                .getValue("OpenIDE-Module-Localizing-Bundle");
                        Properties prop = new Properties();
                        if (bundle != null) {
                            ZipEntry en = theJar.getEntry(bundle);
                            if (en == null) {
                                throw new BuildException("Cannot find entry: " + bundle + " in file: " + jar);
                            }
                            InputStream is = theJar.getInputStream(en);
                            prop.load(is);
                            is.close();
                        }
                        title = prop.getProperty("OpenIDE-Module-Name", codenamebase);
                        oneline = prop.getProperty("OpenIDE-Module-Short-Description", title);
                        shrt = prop.getProperty("OpenIDE-Module-Long-Description", oneline);
                    }

                    {
                        String osMan = theJar.getManifest().getMainAttributes()
                                .getValue("OpenIDE-Module-Requires");
                        if (osMan != null) {
                            if (osMan.indexOf("org.openide.modules.os.MacOSX") >= 0) { // NOI18N
                                osDep = "Mac OS X"; // NOI18N
                            } else if (osMan.indexOf("org.openide.modules.os.Linux") >= 0) { // NOI18N
                                osDep = "Linux"; // NOI18N
                            } else if (osMan.indexOf("org.openide.modules.os.Solaris") >= 0) { // NOI18N
                                osDep = "Solaris"; // NOI18N
                            } else if (osMan.indexOf("org.openide.modules.os.Windows") >= 0) { // NOI18N
                                osDep = "Windows"; // NOI18N
                            }
                        }
                    }

                    Map<String, List<File>> localizedFiles = verifyExtensions(jar, theJar.getManifest(),
                            dashcnb, codenamebase, verify, indirectFilePaths);

                    executedLocales = localizedFiles.keySet();

                    new File(targetFile, dashcnb).mkdir();

                    File signed = new File(new File(targetFile, dashcnb), jar.getName());

                    // +p
                    final JarConfigResolved jarConfig = signOrCopy(jar, signed);

                    File jnlp = new File(targetFile, dashcnb + ".jnlp");
                    StringWriter writeJNLP = new StringWriter();
                    writeJNLP.write("<?xml version='1.0' encoding='UTF-8'?>\n");
                    writeJNLP.write(
                            "<!DOCTYPE jnlp PUBLIC \"-//Sun Microsystems, Inc//DTD JNLP Descriptor 6.0//EN\" \"http://java.sun.com/dtd/JNLP-6.0.dtd\">\n");
                    writeJNLP.write("<jnlp spec='1.0+' codebase='" + codebase + "'>\n");
                    writeJNLP.write("  <information>\n");
                    writeJNLP.write("   <title>" + XMLUtil.toElementContent(title) + "</title>\n");
                    writeJNLP.write("   <vendor>NetBeans</vendor>\n");
                    writeJNLP.write("   <description kind='one-line'>" + XMLUtil.toElementContent(oneline)
                            + "</description>\n");
                    writeJNLP.write("   <description kind='short'>" + XMLUtil.toElementContent(shrt)
                            + "</description>\n");
                    writeJNLP.write("  </information>\n");

                    String realPermissions = permissions;
                    if ((jarConfig != null) && (jarConfig.getExtraManifestAttributes() != null)) {
                        String jarPermissions = jarConfig.getExtraManifestAttributes().getValue("Permissions");

                        if (jarPermissions != null) {
                            if ("all-permissions".equals(jarPermissions)) {
                                realPermissions = "<security><all-permissions/></security>\n";
                            } else {
                                realPermissions = "";
                            }
                        }
                    }

                    writeJNLP.write(realPermissions);

                    if (osDep == null) {
                        writeJNLP.write("  <resources>\n");
                    } else {
                        writeJNLP.write("  <resources os='" + osDep + "'>\n");
                    }
                    writeJNLP.write("<property name=\"jnlp.packEnabled\" value=\"" + String.valueOf(pack200)
                            + "\"/>\n");
                    writeJNLP.write(constructJarHref(jar, dashcnb));

                    processExtensions(jar, theJar.getManifest(), writeJNLP, dashcnb, codebase, realPermissions);
                    processIndirectJars(writeJNLP, dashcnb);
                    processIndirectFiles(writeJNLP, dashcnb);

                    writeJNLP.write("  </resources>\n");

                    if (useAllLocales || !declaredLocales.isEmpty()) {

                        // write down locales
                        for (Map.Entry<String, List<File>> e : localizedFiles.entrySet()) {
                            final String locale = e.getKey();

                            if (!declaredLocales.isEmpty() && !declaredLocales.contains(locale)) {
                                continue;
                            }

                            final List<File> allFiles = e.getValue();

                            writeJNLP.write("  <resources locale='" + locale + "'>\n");

                            for (File n : allFiles) {
                                log("generating locale " + locale + " for " + n, Project.MSG_VERBOSE);
                                String name = n.getName();
                                String clusterRootPrefix = jar.getParent() + File.separatorChar;
                                String absname = n.getAbsolutePath();
                                if (absname.startsWith(clusterRootPrefix)) {
                                    name = absname.substring(clusterRootPrefix.length())
                                            .replace(File.separatorChar, '-');
                                }
                                File t = new File(new File(targetFile, dashcnb), name);
                                signOrCopy(n, t);
                                writeJNLP.write(constructJarHref(n, dashcnb, name));
                            }

                            writeJNLP.write("  </resources>\n");

                        }
                    }

                    writeJNLP.write("  <component-desc/>\n");
                    writeJNLP.write("</jnlp>\n");
                    writeJNLP.close();

                    // +p
                    Files.write(writeJNLP.toString(), jnlp, Charset.forName("UTF-8"));
                } catch (Exception e) {
                    exceptions.add(new BuildException(e));
                } finally {
                    if (theJar != null) {
                        try {
                            theJar.close();
                        } catch (IOException e) {
                        }
                    }
                }
            }
        });
    }

    executorService.shutdown();

    try {
        executorService.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS);
    } catch (Exception e) {
        throw new BuildException(e);
    }

    if (!exceptions.isEmpty()) {
        throw exceptions.get(0);
    }
}

From source file: com.aimluck.eip.schedule.util.ScheduleUtils.java

/**
 * Creates the reminder message body for cell phones.
 *
 * @return
 */
public static String createReminderMsgForCellPhone(EipTSchedule schedule, List<ALEipUser> memberList,
        int destUserID) {
    String date_detail = "";
    try {
        date_detail = getMsgDate(schedule);
    } catch (Exception e) {
        return "";
    }

    StringWriter out = null;
    try {
        VelocityService service = (VelocityService) ((TurbineServices) TurbineServices.getInstance())
                .getService(VelocityService.SERVICE_NAME);
        Context context = service.getContext();

        context.put("titleValue", schedule.getName().toString());
        context.put("dateValue", date_detail);

        if (memberList != null) {
            int size = memberList.size();
            int i;
            StringBuffer body = new StringBuffer("");
            for (i = 0; i < size; i++) {
                if (i != 0) {
                    body.append(", ");
                }
                ALEipUser member = memberList.get(i);
                body.append(member.getAliasName());
            }
            context.put("menbersList", body.toString());
        }

        ALEipUser destUser;
        try {
            destUser = ALEipUtils.getALEipUser(destUserID);
        } catch (ALDBErrorException ex) {
            logger.error("schedule", ex);
            return "";
        }

        context.put("Alias", ALOrgUtilsService.getAlias());

        context.put("globalUrl1",
                ALMailUtils.getGlobalurl() + "?key=" + ALCellularUtils.getCellularKey(destUser));

        out = new StringWriter();
        service.handleRequest(context, "mail/scheduleReminder.vm", out);
        out.flush();
        return out.toString();
    } catch (IllegalArgumentException e) {

    } catch (RuntimeException e) {
        String message = e.getMessage();
        logger.warn(message, e);
        e.printStackTrace();
    } catch (Exception e) {
        String message = e.getMessage();
        logger.warn(message, e);
        e.printStackTrace();
    } finally {
        if (out != null) {
            try {
                out.close();
            } catch (IOException e) {
                // ignore
            }
        }
    }
    return null;
}

From source file: com.aimluck.eip.schedule.util.ScheduleUtils.java

/**
 * Creates the reminder message body for PCs.
 *
 * @return
 */
public static String createReminderMsgForPc(EipTSchedule schedule, List<ALEipUser> memberList) {
    boolean enableAsp = JetspeedResources.getBoolean("aipo.asp", false);

    String date_detail = "";

    try {

        date_detail = getMsgDate(schedule);
    } catch (Exception e) {
        return "";
    }

    StringWriter out = null;
    try {
        VelocityService service = (VelocityService) ((TurbineServices) TurbineServices.getInstance())
                .getService(VelocityService.SERVICE_NAME);
        Context context = service.getContext();

        context.put("titleValue", schedule.getName().toString());
        context.put("dateValue", date_detail);

        if (schedule.getPlace().toString().length() > 0) {
            context.put("placeValue", schedule.getPlace().toString());
        }

        if (schedule.getNote().toString().length() > 0) {
            context.put("noteValue", schedule.getNote().toString());
        }

        if (memberList != null) {
            int size = memberList.size();
            int i;
            StringBuffer body = new StringBuffer("");
            for (i = 0; i < size; i++) {
                if (i != 0) {
                    body.append(", ");
                }
                ALEipUser member = memberList.get(i);
                body.append(member.getAliasName());
            }
            context.put("menbersList", body.toString());
        }

        context.put("Alias", ALOrgUtilsService.getAlias());

        if (enableAsp) {
            context.put("globalUrl1", ALMailUtils.getGlobalurl());
        } else {
            context.put("globalurl2", ALMailUtils.getGlobalurl());
            context.put("globalUrl3", ALMailUtils.getLocalurl());
        }

        out = new StringWriter();
        service.handleRequest(context, "mail/scheduleReminder.vm", out);
        out.flush();
        return out.toString();
    } catch (IllegalArgumentException e) {

    } catch (Exception e) {
        String message = e.getMessage();
        logger.warn(message, e);
        e.printStackTrace();
    } finally {
        if (out != null) {
            try {
                out.close();
            } catch (IOException e) {
                // ignore
            }
        }
    }
    return null;
}

From source file: com.aimluck.eip.schedule.util.ScheduleUtils.java

public static String createReminderMsgForMessage(EipTSchedule schedule, List<ALEipUser> memberList) {
    boolean enableAsp = JetspeedResources.getBoolean("aipo.asp", false);

    String date_detail = "";

    try {

        date_detail = getMsgDate(schedule);
    } catch (Exception e) {
        return "";
    }

    StringWriter out = null;
    try {
        VelocityService service = (VelocityService) ((TurbineServices) TurbineServices.getInstance())
                .getService(VelocityService.SERVICE_NAME);
        Context context = service.getContext();

        context.put("titleValue", schedule.getName().toString());
        context.put("dateValue", date_detail);

        if (schedule.getPlace().toString().length() > 0) {
            context.put("placeValue", schedule.getPlace().toString());
        }

        if (schedule.getNote().toString().length() > 0) {
            context.put("noteValue", schedule.getNote().toString());
        }

        if (memberList != null) {
            int size = memberList.size();
            int i;
            StringBuffer body = new StringBuffer("");
            for (i = 0; i < size; i++) {
                if (i != 0) {
                    body.append(", ");
                }
                ALEipUser member = memberList.get(i);
                body.append(member.getAliasName());
            }
            context.put("menbersList", body.toString());
        }

        context.put("Alias", ALOrgUtilsService.getAlias());

        if (enableAsp) {
            context.put("globalUrl1", ALMailUtils.getGlobalurl());
        } else {
            context.put("globalurl2", ALMailUtils.getGlobalurl());
            context.put("globalUrl3", ALMailUtils.getLocalurl());
        }

        out = new StringWriter();
        service.handleRequest(context, "mail/scheduleReminderMessage.vm", out);
        out.flush();
        return out.toString();
    } catch (IllegalArgumentException e) {

    } catch (Exception e) {
        String message = e.getMessage();
        logger.warn(message, e);
        e.printStackTrace();
    } finally {
        if (out != null) {
            try {
                out.close();
            } catch (IOException e) {
                // ignore
            }
        }
    }
    return null;

}