Example usage for java.io PipedInputStream PipedInputStream

List of usage examples for java.io PipedInputStream PipedInputStream

Introduction

In this page you can find the example usage for java.io PipedInputStream PipedInputStream.

Prototype

public PipedInputStream() 

Source Link

Document

Creates a {@code PipedInputStream} that is not yet connected; connect it later via {@code connect(java.io.PipedOutputStream)} or by passing it to a {@code PipedOutputStream} constructor.

Usage

From source file:com.kdmanalytics.toif.assimilator.Assimilator.java

/**
 * Processes the single .kdm XML file by parsing it on a background thread and
 * piping the generated statements into the repository via
 * {@link #streamStatementsToRepo}.
 *
 * @param kdmFiles
 *          the kdm files found in the input; exactly zero or one entry is
 *          expected — more than one is an error.
 * @throws FileNotFoundException
 *           if a kdm file cannot be opened
 * @throws IOException
 *           on pipe or file I/O failure
 * @throws RepositoryException
 *           on repository write failure
 * @throws ToifException
 *           if more than one .kdm file is given, or if the worker thread
 *           failed; the worker's exception is proxied to the caller
 */
private void processKdmXmlFile(final List<File> kdmFiles)
        throws FileNotFoundException, IOException, RepositoryException, ToifException {
    if (debug) {
        LOG.debug("processing kdm file...");
    }

    PipedInputStream in = new PipedInputStream();
    final PipedOutputStream out = new PipedOutputStream(in);
    // Shared slot used to hand any worker-thread exception back to this thread.
    final ThreadStatus status = new ThreadStatus();

    Thread t = new Thread(new Runnable() {

        @Override
        public void run() {
            KdmXmlHandler handler = null;
            try {
                if (kdmFiles.size() > 1) {
                    final String msg = "There should only be one .kdm file.";
                    LOG.error(msg);
                    throw new ToifException(msg);
                } else if (kdmFiles.size() == 1) {
                    // Only the head of the list is processed.
                    File kdmFile = kdmFiles.get(0);
                    handler = load(kdmFile, out);
                }
                out.flush();
                out.close();

                if (handler == null) {
                    return;
                }
                setNextId(handler.getNextId());
                setSmallestBigNumber(handler.getSmallestBigNumber());
            } catch (IOException e) {
                final String msg = "IO exception whilst processing kdm file. "
                        + "Possibly an existing kdm file is in your input path!";

                LOG.error(msg, e);
                status.exception = new ToifException(msg, e);
            } catch (RepositoryException e) {
                final String msg = "Repository Exception whilst processing kdm file. "
                        + "Possibly an existing kdm file is in your input path!";

                LOG.error(msg, e);
                status.exception = new ToifException(msg, e);
            } catch (ToifException e) {
                // Already the right type; just proxy it back to the caller.
                status.exception = e;
            } finally {
                // Always close the write side so the reader cannot block
                // forever on the pipe. 'out' is final and always non-null.
                try {
                    out.close();
                } catch (IOException e) {
                    // Best effort: the pipe may already be closed.
                    LOG.error("unable to close stream");
                }
            }
        }
    });

    // ---------------------------------------------------------
    // Unable to change logic within the short time frame given so
    // adding a means to catch unknown exceptions in thread
    // ----------------------------------------------------------
    Thread.UncaughtExceptionHandler tueh = new Thread.UncaughtExceptionHandler() {

        public void uncaughtException(Thread th, Throwable ex) {
            LOG.error("Uncaught exception: " + ex);
            status.exception = (Exception) ex;
        }
    };

    t.setUncaughtExceptionHandler(tueh);
    t.start();

    // Consume the pipe on this thread while the worker produces into it.
    streamStatementsToRepo(in);
    try {
        t.join();

        // Check if we encountered an exception during processing and
        // proxy-throw if we have one.
        if (status.exception != null) {
            // Leave alone if already a ToifException
            if (status.exception instanceof ToifException)
                throw (ToifException) status.exception;
            else
                throw new ToifException(status.exception);

        }
    } catch (InterruptedException e) {
        // Restore the interrupt flag so callers can observe it, and
        // preserve the cause instead of dropping it.
        Thread.currentThread().interrupt();
        LOG.error("Interrupted");
        throw new ToifException("Interrupted", e);
    }
}

From source file:com.kdmanalytics.toif.assimilator.Assimilator.java

/**
 * Processes the tkdm files: merges them on a background thread into a piped
 * stream, which this thread consumes into the repository.
 *
 * @param tkdmFiles
 *          the list of tkdm files to process.
 * @return the merger used, so the caller can inspect it.
 * @throws IOException
 *           on pipe I/O failure
 * @throws ToifException
 *           on processing failure while streaming to the repository
 */
private RepositoryMerger processTkdmFiles(final List<File> tkdmFiles) throws IOException, ToifException {
    final PipedInputStream in = new PipedInputStream();
    final PipedOutputStream out = new PipedOutputStream(in);

    // Derive the assembly name from the output file's name, minus extension.
    String assemblyName = "Assembly";
    int position = outputLocation.getName().lastIndexOf(".");
    if (position != -1) {
        assemblyName = outputLocation.getName().substring(0, position);
    }

    final RepositoryMerger kdmMerger = getTkdmMerger(new PrintWriter(out), assemblyName);
    new Thread(new Runnable() {

        @Override
        public void run() {
            try {
                mergeTkdm(kdmMerger, tkdmFiles);
                kdmMerger.close();
            } finally {
                // Always close the write side, even if the merge throws;
                // otherwise streamStatementsToRepo(in) can block forever.
                try {
                    out.close();
                } catch (IOException e) {
                    LOG.error("", e);
                }
            }
        }
    }).start();

    streamStatementsToRepo(in);
    return kdmMerger;
}

From source file:com.kdmanalytics.toif.assimilator.Assimilator.java

/**
 * process the toif files./*ww  w . jav  a2s . c om*/
 * 
 * @param toifFiles
 *          list of toif files to process.
 * @param smallestBigNumber2
 *          the smallest number from the end of the long scale. used for the bnodes at the end of
 *          the repository
 * @param blacklistPath
 *          string that is the name of the directory of the project root.
 * @return
 * @throws IOException
 * @throws ToifException
 */
private void processToifFiles(final List<File> toifFiles, Long id, Long smallestBigNumber2,
        String blacklistPath) throws IOException, ToifException {
    PipedInputStream toifIn = new PipedInputStream();
    final PipedOutputStream toifOut = new PipedOutputStream(toifIn);

    // final ToifMerger toifMerger = getToifMerger(new PrintWriter(toifOut),
    // id, smallestBigNumber2, blacklistPath);

    PrintWriter w = new PrintWriter(toifOut);
    final ToifMerger toifMerger = getToifMerger(w, id, smallestBigNumber2, blacklistPath);
    new Thread(new Runnable() {

        @Override
        public void run() {
            Long offset = mergeToif(toifMerger, toifFiles);

            setOffset(offset);
            try {
                toifOut.close();
            } catch (IOException e) {
                LOG.error("", e);
            }
        }

    }).start();

    streamStatementsToRepo(toifIn);
}

From source file:com.zimbra.cs.mime.Mime.java

/** Returns an {@code InputStream} over the full RFC 822 content of a
 *  {@code MimeMessage}.  JavaMail exposes no direct stream accessor for the
 *  whole message, so a dedicated thread writes the message into a {@code
 *  PipedOutputStream} while the caller reads from the connected pipe. */
public static InputStream getInputStream(MimeMessage mm) throws IOException {
    PipedInputStream source = new PipedInputStream();
    PipedOutputStream sink = new PipedOutputStream(source);

    // Producer thread serializes the message into the pipe's write side.
    Thread writer = new Thread(new MimeMessageOutputThread(mm, sink), "MimeMessageThread");
    writer.start();

    return source;
}

From source file:eu.scape_project.service.ConnectorService.java

/**
 * Marshals the given metadata object into a new datastream at {@code path}
 * and tags the resulting node with a metadata type and schema URI via SPARQL.
 *
 * @param session
 *          the repository session to write into
 * @param metadata
 *          a JAXB-marshallable metadata object; its concrete class selects
 *          the recorded type/schema pair (unknown classes get "unknown"/"")
 * @param path
 *          repository path for the new datastream
 * @throws RepositoryException
 *           on repository failure, or wrapping any I/O or checksum error
 */
private void addMetadata(final Session session, final Object metadata, final String path)
        throws RepositoryException {
    final StringBuilder sparql = new StringBuilder("PREFIX scape: <" + SCAPE_NAMESPACE + "> ");
    try {

        /* use piped streams to copy the data to the repo */
        final PipedInputStream dcSrc = new PipedInputStream();
        final PipedOutputStream dcSink = new PipedOutputStream();
        dcSink.connect(dcSrc);
        new Thread(new Runnable() {

            @Override
            public void run() {
                try {
                    ConnectorService.this.marshaller.getJaxbMarshaller().marshal(metadata, dcSink);
                    dcSink.flush();
                } catch (JAXBException e) {
                    LOG.error(e.getLocalizedMessage(), e);
                } catch (IOException e) {
                    LOG.error(e.getLocalizedMessage(), e);
                } finally {
                    // Always close the write side, even when marshalling
                    // fails; otherwise createDatastream reading dcSrc can
                    // block indefinitely on the pipe.
                    try {
                        dcSink.close();
                    } catch (IOException e) {
                        LOG.error(e.getLocalizedMessage(), e);
                    }
                }
            }
        }).start();

        final Datastream ds = datastreamService.createDatastream(session, path, "text/xml", null, dcSrc);
        final Node desc = ds.getNode();
        desc.addMixin("scape:metadata");

        final IdentifierTranslator subjects = new DefaultIdentifierTranslator();
        final String dsUri = subjects.getSubject(desc.getPath()).getURI();

        /* map the concrete metadata class to its type name and schema URI */
        String type = "unknown";
        String schema = "";

        if (metadata.getClass() == ElementContainer.class) {
            type = "dublin-core";
            schema = "http://purl.org/dc/elements/1.1/";
        } else if (metadata.getClass() == GbsType.class) {
            type = "gbs";
            schema = "http://books.google.com/gbs";
        } else if (metadata.getClass() == Fits.class) {
            type = "fits";
            schema = "http://hul.harvard.edu/ois/xml/ns/fits/fits_output";
        } else if (metadata.getClass() == AudioType.class) {
            type = "audiomd";
            schema = "http://www.loc.gov/audioMD/";
        } else if (metadata.getClass() == RecordType.class) {
            type = "marc21";
            schema = "http://www.loc.gov/MARC21/slim";
        } else if (metadata.getClass() == Mix.class) {
            type = "mix";
            schema = "http://www.loc.gov/mix/v20";
        } else if (metadata.getClass() == VideoType.class) {
            type = "videomd";
            schema = "http://www.loc.gov/videoMD/";
        } else if (metadata.getClass() == PremisComplexType.class) {
            type = "premis-provenance";
            schema = "info:lc/xmlns/premis-v2";
        } else if (metadata.getClass() == RightsComplexType.class) {
            type = "premis-rights";
            schema = "info:lc/xmlns/premis-v2";
        } else if (metadata.getClass() == TextMD.class) {
            type = "textmd";
            schema = "info:lc/xmlns/textmd-v3";
        }

        /* add a sparql query to set the type of this object */
        // NOTE(review): type/schema are internal constants, so the string
        // concatenation here is not an injection risk.
        sparql.append("INSERT DATA {<" + dsUri + "> " + prefix(HAS_TYPE) + " '" + type + "'};");
        sparql.append("INSERT DATA {<" + dsUri + "> " + prefix(HAS_SCHEMA) + " '" + schema + "'};");

        ds.updatePropertiesDataset(subjects, sparql.toString());

    } catch (IOException e) {
        throw new RepositoryException(e);
    } catch (InvalidChecksumException e) {
        throw new RepositoryException(e);
    }
}