Example usage for java.io OutputStream getClass

List of usage examples for java.io OutputStream getClass

Introduction

In this page you can find the example usage for java.io OutputStream getClass.

Prototype

@HotSpotIntrinsicCandidate
public final native Class<?> getClass();

Source Link

Document

Returns the runtime class of this Object.

Usage

From source file:de.mpg.escidoc.services.exportmanager.Export.java

/**
 * Walk around the itemList XML, fetch all files from components via URIs
 * and put them into the archive {@link OutputStream} aos
 * /*from ww  w.j av  a 2s.  co m*/
 * @param aos
 *            - array {@link OutputStream}
 * @param itemList
 *            - XML with the files to be fetched, see NS:
 *            http://www.escidoc.de/schemas/components/0.7
 * @throws ExportManagerException
 */
private void fetchComponentsDo(OutputStream aos, String itemList) throws ExportManagerException {
    Document doc = parseDocument(itemList);
    NodeIterator ni = getFilteredNodes(new ComponentNodeFilter(), doc);

    // login only once
    String userHandle;
    try {
        userHandle = AdminHelper.loginUser(USER_ID, PASSWORD);
    } catch (Exception e) {
        throw new ExportManagerException("Cannot login", e);
    }

    String fileName;
    Node n;
    while ((n = ni.nextNode()) != null) {

        Element componentElement = (Element) n;
        NodeList nl = componentElement.getElementsByTagNameNS(COMPONENTS_NS, "content");
        Element contentElement = (Element) nl.item(0);
        if (contentElement == null) {
            throw new ExportManagerException(
                    "Wrong item XML: {" + COMPONENTS_NS + "}component element doesn't contain content element. "
                            + "Component id: " + componentElement.getAttributeNS(XLINK_NS, "href"));
        }
        String href = contentElement.getAttributeNS(XLINK_NS, "href");
        String storageStatus = contentElement.getAttribute("storage");

        // get file name
        if ("internal-managed".equals(storageStatus)) {
            NodeIterator nif = ((DocumentTraversal) doc).createNodeIterator(componentElement,
                    NodeFilter.SHOW_ELEMENT, new FileNameNodeFilter(), true);
            Node nf;

            if ((nf = nif.nextNode()) != null) {
                fileName = ((Element) nf).getTextContent();

                // names of files for
                Matcher m = Pattern.compile("^([\\w.]+?)(\\s+|$)", Pattern.CASE_INSENSITIVE | Pattern.DOTALL)
                        .matcher(fileName);
                m.find();
                fileName = m.group(1);
            } else {
                throw new ExportManagerException("Missed file property: {" + COMPONENTS_NS
                        + "}component element doesn't contain file-name element (md-records/md-record/file:file/dc:title). "
                        + "Component id: " + componentElement.getAttributeNS(XLINK_NS, "href"));
            }
        }
        // TODO: the external-managed will be processed later
        else {
            throw new ExportManagerException("Missed internal-managed file in {" + COMPONENTS_NS
                    + "}component: components/component/content[@storage=\"internal-managed\"]"
                    + "Component id: " + componentElement.getAttributeNS(XLINK_NS, "href"));
        }

        logger.info("link to the content: " + href);
        logger.info("storage status: " + storageStatus);
        logger.info("fileName: " + fileName);

        // get file via URI
        String url;
        try {
            url = ServiceLocator.getFrameworkUrl() + href;
        } catch (Exception e) {
            throw new ExportManagerException("Cannot get framework url", e);
        }

        logger.info("url=" + url);
        GetMethod method = new GetMethod(url);

        method.setFollowRedirects(false);
        method.setRequestHeader("Cookie", "escidocCookie=" + userHandle);

        // Execute the method with HttpClient.
        HttpClient client = new HttpClient();
        try {
            ProxyHelper.executeMethod(client, method);
        } catch (Exception e) {
            throw new ExportManagerException("Cannot execute HttpMethod", e);
        }

        int status = method.getStatusCode();
        logger.info("Status=" + status);

        if (status != 200)
            fileName += ".error" + status;

        byte[] responseBody;
        try {
            responseBody = method.getResponseBody();
        } catch (Exception e) {

            throw new ExportManagerException("Cannot get Response Body", e);

        }
        InputStream bis = new BufferedInputStream(new ByteArrayInputStream(responseBody));

        if (aos instanceof ZipOutputStream) {
            ZipEntry ze = new ZipEntry(fileName);
            ze.setSize(responseBody.length);
            try {
                ((ZipOutputStream) aos).putNextEntry(ze);
                writeFromStreamToStream(bis, aos);
                ((ZipOutputStream) aos).closeEntry();
            } catch (Exception e) {
                throw new ExportManagerException("zip2stream generation problem", e);
            }

        } else if (aos instanceof TarOutputStream) {
            TarEntry te = new TarEntry(fileName);
            te.setSize(responseBody.length);
            try {
                ((TarOutputStream) aos).putNextEntry(te);
                writeFromStreamToStream(bis, aos);
                ((TarOutputStream) aos).closeEntry();
            } catch (Exception e) {
                throw new ExportManagerException("tar2stream generation problem", e);
            }
        } else {
            throw new ExportManagerException("Unsupported archive output stream: " + aos.getClass());
        }
        try {
            bis.close();
        } catch (Exception e) {
            throw new ExportManagerException("Cannot close InputStream", e);
        }
    }

}

From source file:org.apache.axiom.om.impl.MTOMXMLStreamWriter.java

/**
 * Creates a new MTOMXMLStreamWriter writing to the given stream with the
 * encoding configured in the supplied format.
 *
 * @param outStream the target output stream
 * @param format the output format; when no charset encoding is set it is
 *               defaulted to {@link OMOutputFormat#DEFAULT_CHAR_SET_ENCODING}
 * @throws XMLStreamException
 * @throws FactoryConfigurationError
 */
public MTOMXMLStreamWriter(OutputStream outStream, OMOutputFormat format)
        throws XMLStreamException, FactoryConfigurationError {
    if (isDebugEnabled) {
        log.debug("Creating MTOMXMLStreamWriter");
        log.debug("OutputStream =" + outStream.getClass());
        log.debug("OMFormat = " + format.toString());
    }
    if (isTraceEnabled) {
        log.trace("Call Stack =" + CommonUtils.callStackToString());
    }
    this.format = format;
    this.outStream = outStream;

    // Fall back to the default charset when none is configured on the format.
    String charSetEncoding = format.getCharSetEncoding();
    if (charSetEncoding == null) {
        charSetEncoding = OMOutputFormat.DEFAULT_CHAR_SET_ENCODING;
        format.setCharSetEncoding(charSetEncoding);
    }

    optimizationPolicy = new OptimizationPolicyImpl(format);

    if (format.isOptimized()) {
        // Optimized (MTOM) output: write the root part through a multipart
        // writer and XOP-encode binary content.
        multipartWriter = new OMMultipartWriter(outStream, format);
        try {
            rootPartOutputStream = multipartWriter.writeRootPart();
        } catch (IOException ex) {
            throw new XMLStreamException(ex);
        }
        ContentIDGenerator contentIDGenerator = new ContentIDGenerator() {
            public String generateContentID(String existingContentID) {
                // Keep an already-assigned content ID; otherwise mint one.
                if (existingContentID != null) {
                    return existingContentID;
                }
                return getNextContentId();
            }
        };
        xmlWriter = new XOPEncodingStreamWriter(
                StAXUtils.createXMLStreamWriter(format.getStAXWriterConfiguration(), rootPartOutputStream,
                        charSetEncoding),
                contentIDGenerator, optimizationPolicy);
    } else {
        // Plain (non-MTOM) output: write XML directly to the stream.
        xmlWriter = StAXUtils.createXMLStreamWriter(format.getStAXWriterConfiguration(), outStream,
                format.getCharSetEncoding());
    }
    xmlStreamWriterFilter = format.getXmlStreamWriterFilter();
    if (xmlStreamWriterFilter != null) {
        if (log.isDebugEnabled()) {
            log.debug("Installing XMLStreamWriterFilter " + xmlStreamWriterFilter);
        }
        // Chain the filter in front of the underlying writer.
        xmlStreamWriterFilter.setDelegate(xmlWriter);
        xmlWriter = xmlStreamWriterFilter;
    }
}

From source file:org.apache.hadoop.hbase.HBaseTestingUtility.java

/**
 * Set maxRecoveryErrorCount in DFSClient.  In 0.20 pre-append its hard-coded to 5 and
 * makes tests linger.  Here is the exception you'll see:
 * <pre>/*from w ww . ja v  a2s.c o  m*/
 * 2010-06-15 11:52:28,511 WARN  [DataStreamer for file /hbase/.logs/hlog.1276627923013 block blk_928005470262850423_1021] hdfs.DFSClient$DFSOutputStream(2657): Error Recovery for block blk_928005470262850423_1021 failed  because recovery from primary datanode 127.0.0.1:53683 failed 4 times.  Pipeline was 127.0.0.1:53687, 127.0.0.1:53683. Will retry...
 * </pre>
 * @param stream A DFSClient.DFSOutputStream.
 * @param max
 * @throws NoSuchFieldException
 * @throws SecurityException
 * @throws IllegalAccessException
 * @throws IllegalArgumentException
 */
public static void setMaxRecoveryErrorCount(final OutputStream stream, final int max) {
    try {
        Class<?>[] clazzes = DFSClient.class.getDeclaredClasses();
        for (Class<?> clazz : clazzes) {
            String className = clazz.getSimpleName();
            if (className.equals("DFSOutputStream")) {
                if (clazz.isInstance(stream)) {
                    Field maxRecoveryErrorCountField = stream.getClass()
                            .getDeclaredField("maxRecoveryErrorCount");
                    maxRecoveryErrorCountField.setAccessible(true);
                    maxRecoveryErrorCountField.setInt(stream, max);
                    break;
                }
            }
        }
    } catch (Exception e) {
        LOG.info("Could not set max recovery field", e);
    }
}

From source file:org.apache.hadoop.hbase.regionserver.wal.TestLogRolling.java

/**
 * Returns the HDFS datanode pipeline backing the given log's output stream,
 * obtained by reflectively invoking DFSOutputStream#getPipeline().
 */
DatanodeInfo[] getPipeline(HLog log)
        throws IllegalArgumentException, IllegalAccessException, InvocationTargetException {
    OutputStream wrappedStream = ((FSHLog) log).getOutputStream();

    // Locate the (non-public) getPipeline() method on the concrete stream class.
    Method pipelineMethod = null;
    for (Method candidate : wrappedStream.getClass().getDeclaredMethods()) {
        if (candidate.getName().endsWith("getPipeline")) {
            pipelineMethod = candidate;
            pipelineMethod.setAccessible(true);
            break;
        }
    }

    assertTrue("Need DFSOutputStream.getPipeline() for this test", null != pipelineMethod);
    return (DatanodeInfo[]) pipelineMethod.invoke(wrappedStream);
}

From source file:org.digidoc4j.impl.DDocContainer.java

/**
 * Writes this DDoc container to the given output stream.
 *
 * @param out stream the container XML is written to
 * @throws DigiDoc4JException wrapping the nested cause of any DigiDocException
 */
@Override
public void save(OutputStream out) {
    logger.debug("Saves to " + out.getClass());
    try {
        ddoc.writeToStream(out);
    } catch (DigiDocException e) {
        // NOTE(review): only the message is logged and only the nested cause is
        // rethrown — the original DigiDocException (and its stack trace) is lost,
        // and getNestedException() may be null. Consider passing `e` through.
        logger.error(e.getMessage());
        throw new DigiDoc4JException(e.getNestedException());
    }
}

From source file:org.freezedry.serialization.ObjectSerializer.java

/**
 * Serializes the given object to the output stream using Java native
 * serialization. On failure, logs the details and rethrows as
 * {@link IllegalArgumentException}.
 *
 * @param object the object to serialize
 * @param output the stream to write the serialized form to
 */
@Override
public synchronized void serialize(final Object object, final OutputStream output) {
    try (final ObjectOutputStream out = new ObjectOutputStream(output)) {
        out.writeObject(object);
    } catch (IOException e) {
        // Assemble a diagnostic message naming the stream and object involved.
        final String message = "Unable to serialize object to output stream:" + Constants.NEW_LINE
                + "  Output Stream Type: " + output.getClass().getName() + Constants.NEW_LINE
                + "  Object Type: " + object.getClass().getName() + Constants.NEW_LINE
                + "  Object: " + object.toString() + Constants.NEW_LINE;
        LOGGER.error(message, e);
        throw new IllegalArgumentException(message, e);
    }
}

From source file:org.microtitan.diffusive.diffuser.serializer.ObjectSerializer.java

/**
 * Serializes the given object to the output stream using Java native
 * serialization. On failure, logs the details and rethrows as
 * {@link IllegalArgumentException}.
 *
 * @param object the object to serialize
 * @param output the stream to write the serialized form to
 */
@Override
public synchronized void serialize(final Object object, final OutputStream output) {
    try (final ObjectOutputStream out = new ObjectOutputStream(output)) {
        out.writeObject(object);
    } catch (IOException e) {
        // FIX: use unsynchronized StringBuilder instead of StringBuffer, and
        // chain append() calls — concatenating with '+' inside append()
        // built intermediate Strings and defeated the builder entirely.
        final StringBuilder message = new StringBuilder();
        message.append("Unable to serialize object to output stream:").append(Constants.NEW_LINE);
        message.append("  Output Stream Type: ").append(output.getClass().getName()).append(Constants.NEW_LINE);
        message.append("  Object Type: ").append(object.getClass().getName()).append(Constants.NEW_LINE);
        message.append("  Object: ").append(object).append(Constants.NEW_LINE);
        LOGGER.error(message.toString(), e);
        throw new IllegalArgumentException(message.toString(), e);
    }
}

From source file:org.pentaho.platform.plugin.services.importexport.ZipExportProcessor.java

/**
 *
 * @param repositoryDir/*from  w w  w.ja  v a 2s  .  c  om*/
 * @param outputStream
 */
@Override
public void exportDirectory(RepositoryFile repositoryDir, OutputStream outputStream, String filePath)
        throws ExportException, IOException {
    addToManifest(repositoryDir);

    List<RepositoryFile> children = this.unifiedRepository.getChildren(repositoryDir.getId());
    for (RepositoryFile repositoryFile : children) {
        if (repositoryFile.isFolder()) {
            if (outputStream.getClass().isAssignableFrom(ZipOutputStream.class)) {
                ZipOutputStream zos = (ZipOutputStream) outputStream;
                ZipEntry entry = new ZipEntry(getZipEntryName(repositoryFile, filePath));
                zos.putNextEntry(entry);
            }
            exportDirectory(repositoryFile, outputStream, filePath);
        } else {
            exportFile(repositoryFile, outputStream, filePath);
        }
    }
    createLocales(repositoryDir, filePath, repositoryDir.isFolder(), outputStream);
}