Example usage for the java.io SequenceInputStream(InputStream, InputStream) constructor

Introduction

On this page you can find usage examples of the java.io SequenceInputStream(InputStream, InputStream) constructor, collected from open-source projects.

Prototype

public SequenceInputStream(InputStream s1, InputStream s2) 

Document

Initializes a newly created SequenceInputStream by remembering the two arguments, which will be read in order, first s1 and then s2, to provide the bytes to be read from this SequenceInputStream.
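
As a minimal sketch of the behaviour described above (the class name and sample strings are chosen here only for illustration), the two-argument constructor concatenates two streams so that a single read loop first consumes all bytes of s1 and then all bytes of s2:

import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.SequenceInputStream;
import java.nio.charset.StandardCharsets;

public class SequenceInputStreamSketch {
    public static void main(String[] args) throws IOException {
        InputStream first = new ByteArrayInputStream("Hello, ".getBytes(StandardCharsets.UTF_8));
        InputStream second = new ByteArrayInputStream("world!".getBytes(StandardCharsets.UTF_8));

        // Bytes are read from 'first' until it reaches end-of-file, then from 'second'.
        try (InputStream joined = new SequenceInputStream(first, second)) {
            int b;
            while ((b = joined.read()) != -1) {
                System.out.print((char) b);
            }
        }
        // Output: Hello, world!
    }
}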

Usage

From source file:com.nttec.everychan.chans.nullchan.NullchanccModule.java

@SuppressLint("SimpleDateFormat")
@Override
protected WakabaReader getKusabaReader(InputStream stream, UrlPageModel urlModel) {
    Reader reader;
    if (urlModel != null && urlModel.chanName != null && urlModel.chanName.equals("expand")) {
        stream = new SequenceInputStream(new ByteArrayInputStream("<form id=\"delform\">".getBytes()), stream);
        reader = new BufferedReader(new InputStreamReader(stream));
    } else {
        reader = new ReplacingReader(new BufferedReader(new InputStreamReader(stream)),
                "<form id=\"delform20\"", "<form id=\"delform\"");
    }
    return new Instant0chanReader(reader, canCloudflare());
}

From source file:org.datavec.image.loader.CifarLoader.java

public void load() {
    if (!cifarRawFilesExist() && !fullDir.exists()) {
        generateMaps();
        fullDir.mkdir();

        log.info("Downloading {}...", localDir);
        downloadAndUntar(cifarDataMap, new File(BASE_DIR, localDir));
    }
    try {
        Collection<File> subFiles = FileUtils.listFiles(fullDir, new String[] { "bin" }, true);
        Iterator trainIter = subFiles.iterator();
        trainInputStream = new SequenceInputStream(new FileInputStream((File) trainIter.next()),
                new FileInputStream((File) trainIter.next()));
        while (trainIter.hasNext()) {
            File nextFile = (File) trainIter.next();
            if (!TESTFILENAME.equals(nextFile.getName()))
                trainInputStream = new SequenceInputStream(trainInputStream, new FileInputStream(nextFile));
        }
        testInputStream = new FileInputStream(new File(fullDir, TESTFILENAME));
    } catch (Exception e) {
        e.printStackTrace();
    }

    if (labels.isEmpty())
        defineLabels();

    if (useSpecialPreProcessCifar && train && !cifarProcessedFilesExists()) {
        for (int i = fileNum + 1; i <= (TRAINFILENAMES.length); i++) {
            inputStream = trainInputStream;
            DataSet result = convertDataSet(numToConvertDS);
            result.save(new File(trainFilesSerialized + i + ".ser"));
        }
        //            for (int i = 1; i <= (TRAINFILENAMES.length); i++){
        //                normalizeCifar(new File(trainFilesSerialized + i + ".ser"));
        //            }
        inputStream = testInputStream;
        DataSet result = convertDataSet(numToConvertDS);
        result.save(new File(testFilesSerialized));
        //            normalizeCifar(new File(testFilesSerialized));
    }
    setInputStream();
}
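
The CifarLoader example above chains many files by repeatedly nesting the two-argument constructor around the previous result. As an alternative sketch (not part of the CifarLoader code; the class and method names below are illustrative), java.io also provides SequenceInputStream(Enumeration<? extends InputStream>), which concatenates any number of streams in a single call:

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.SequenceInputStream;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

public class ConcatenatedFileStream {
    // Opens every file and chains the streams so they read back-to-back.
    public static InputStream concatenate(List<File> files) throws IOException {
        List<InputStream> streams = new ArrayList<>();
        for (File f : files) {
            streams.add(new FileInputStream(f));
        }
        // The Enumeration-based constructor accepts any number of streams,
        // avoiding the pairwise nesting used in the CifarLoader code above.
        return new SequenceInputStream(Collections.enumeration(streams));
    }
}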

From source file:pl.otros.logview.api.io.UtilsTest.java

@Test
public void testSequeceRead() throws Exception {
    String url = HTTP_NOT_GZIPPED;
    FileObject resolveFile = fsManager.resolveFile(url);
    InputStream httpInputStream = resolveFile.getContent().getInputStream();
    byte[] buff = Utils.loadProbe(httpInputStream, 10000);
    // int read = httpInputStream.read(buff);

    ByteArrayInputStream bin = new ByteArrayInputStream(buff);

    SequenceInputStream sequenceInputStream = new SequenceInputStream(bin, httpInputStream);

    byte[] byteArray = IOUtils.toByteArray(new ObservableInputStreamImpl(sequenceInputStream));

    LoadingInfo loadingInfo = Utils.openFileObject(fsManager.resolveFile(url), false);
    byte[] byteArrayUtils = IOUtils.toByteArray(loadingInfo.getContentInputStream());
    AssertJUnit.assertEquals(byteArrayUtils.length, byteArray.length);
}

From source file:org.apache.sling.distribution.packaging.impl.DistributionPackageUtils.java

public static InputStream createStreamWithHeader(DistributionPackage distributionPackage) throws IOException {

    DistributionPackageInfo packageInfo = distributionPackage.getInfo();
    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
    Map<String, Object> headerInfo = new HashMap<String, Object>();
    headerInfo.put(DistributionPackageInfo.PROPERTY_REQUEST_TYPE, packageInfo.getRequestType());
    headerInfo.put(DistributionPackageInfo.PROPERTY_REQUEST_PATHS, packageInfo.getPaths());
    headerInfo.put(PROPERTY_REMOTE_PACKAGE_ID, distributionPackage.getId());
    if (packageInfo.containsKey("reference-required")) {
        headerInfo.put("reference-required", packageInfo.get("reference-required"));
        log.info("setting reference-required to {}", packageInfo.get("reference-required"));
    }
    writeInfo(outputStream, headerInfo);

    InputStream headerStream = new ByteArrayInputStream(outputStream.toByteArray());
    InputStream bodyStream = distributionPackage.createInputStream();
    return new SequenceInputStream(headerStream, bodyStream);
}

From source file:pl.otros.logview.api.io.UtilsTest.java

@Test
public void testSequeceReadGzipped() throws Exception {
    String url = HTTP_GZIPPED;
    FileObject resolveFile = fsManager.resolveFile(url);
    InputStream httpInputStream = resolveFile.getContent().getInputStream();
    byte[] buff = Utils.loadProbe(httpInputStream, 10000);
    // int read = httpInputStream.read(buff);

    ByteArrayInputStream bin = new ByteArrayInputStream(buff);

    SequenceInputStream sequenceInputStream = new SequenceInputStream(bin, httpInputStream);

    byte[] byteArray = IOUtils
            .toByteArray(new GZIPInputStream(new ObservableInputStreamImpl(sequenceInputStream)));

    LoadingInfo loadingInfo = Utils.openFileObject(fsManager.resolveFile(url), false);
    byte[] byteArrayUtils = IOUtils.toByteArray(loadingInfo.getContentInputStream());
    AssertJUnit.assertEquals(byteArrayUtils.length, byteArray.length);
}

From source file:nya.miku.wishmaster.chans.nullchancc.NullchanccModule.java

@SuppressLint("SimpleDateFormat")
@Override
protected WakabaReader getWakabaReader(InputStream stream, UrlPageModel urlModel) {
    if (urlModel != null && urlModel.chanName != null && urlModel.chanName.equals("expand")) {
        stream = new SequenceInputStream(new ByteArrayInputStream("<form id=\"delform\">".getBytes()), stream);
    }
    return new WakabaReader(stream) {
        private final DateFormat dateFormat;
        {
            DateFormatSymbols symbols = new DateFormatSymbols();
            // Russian month abbreviations used by the board's date format
            // (the original Cyrillic string literals were garbled in this listing)
            symbols.setShortMonths(new String[] { "Янв", "Фев", "Мар", "Апр", "Май", "Июн",
                    "Июл", "Авг", "Сен", "Окт", "Ноя", "Дек" });
            dateFormat = new SimpleDateFormat("yyyy MMM dd HH:mm:ss", symbols);
            dateFormat.setTimeZone(TimeZone.getTimeZone("GMT+3"));
        }

        @Override
        protected void parseDate(String date) {
            if (date.length() > 0) {
                date = date.replaceAll("(?:[^\\d]*)(\\d(?:.*))", "$1");
                try {
                    currentPost.timestamp = dateFormat.parse(date).getTime();
                } catch (Exception e) {
                    Logger.e(TAG, "cannot parse date", e);
                }
            }
        }

        @Override
        protected void parseOmittedString(String omitted) {
            if (omitted.indexOf('>') != -1)
                omitted = omitted.substring(omitted.indexOf('>'));
            super.parseOmittedString(omitted);
        }

        @Override
        protected void postprocessPost(PostModel post) {
            Matcher matcher = PATTERN_EMBEDDED.matcher(post.comment);
            while (matcher.find()) {
                String id = matcher.group(1);
                String div = matcher.group(0).toLowerCase(Locale.US);
                String url = null;
                if (div.contains("youtube")) {
                    url = "http://www.youtube.com/watch?v=" + id;
                } else if (div.contains("vimeo")) {
                    url = "http://vimeo.com/" + id;
                } else if (div.contains("coub")) {
                    url = "http://coub.com/view/" + id;
                }
                if (url != null) {
                    AttachmentModel attachment = new AttachmentModel();
                    attachment.type = AttachmentModel.TYPE_OTHER_NOTFILE;
                    attachment.path = url;
                    attachment.thumbnail = div.contains("youtube")
                            ? ("http://img.youtube.com/vi/" + id + "/default.jpg")
                            : null;
                    int oldCount = post.attachments != null ? post.attachments.length : 0;
                    AttachmentModel[] attachments = new AttachmentModel[oldCount + 1];
                    for (int i = 0; i < oldCount; ++i)
                        attachments[i] = post.attachments[i];
                    attachments[oldCount] = attachment;
                    post.attachments = attachments;
                }
            }
        }
    };
}

From source file:com.photon.phresco.framework.impl.ApplicationManagerImpl.java

private BufferedInputStream executeLiquibaseMavenCommand(ProjectInfo projectInfo, ActionType action,
        StringBuilder command, String workingDirectory) throws PhrescoException {
    if (isDebugEnabled) {
        S_LOGGER.debug(
                "Entering Method ApplicationManagerImpl.executeMavenCommand(Project project, ActionType action, StringBuilder command)");
        S_LOGGER.debug("executeMavenCommand() Project Code = " + projectInfo.getProjectCode());
        S_LOGGER.debug("executeMavenCommand() Command = " + command.toString());
        S_LOGGER.debug("executeMavenCommand() ActionType Name = " + action.getActionType());
    }

    createPomArg(projectInfo, command, workingDirectory);
    Commandline cl = new Commandline(command.toString());
    if (StringUtils.isNotEmpty(workingDirectory)) {
        cl.setWorkingDirectory(workingDirectory);
    }
    try {
        Process process = cl.execute();
        InputStream inputStream = process.getInputStream();
        InputStream errorStream = process.getErrorStream();
        SequenceInputStream sequenceInputStream = new SequenceInputStream(inputStream, errorStream);
        return new BufferedInputStream(new MyWrapper(sequenceInputStream));
    } catch (CommandLineException e) {
        throw new PhrescoException(e);
    }
}
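
In the example above, SequenceInputStream lets the caller read the process's standard output followed by its error output through a single stream; note that the error stream is only consulted once standard output reaches end-of-file. When interleaved output is preferable, ProcessBuilder can merge the two streams at the process level instead. A minimal sketch of that alternative (the mvn command is chosen only for illustration and is not taken from the Phresco code):

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;

public class MergedProcessOutput {
    public static void main(String[] args) throws IOException, InterruptedException {
        // redirectErrorStream(true) folds stderr into stdout at the process level,
        // so one stream sees both kinds of output in the order they were produced.
        ProcessBuilder pb = new ProcessBuilder("mvn", "--version"); // illustrative command
        pb.redirectErrorStream(true);
        Process process = pb.start();
        try (BufferedReader reader = new BufferedReader(new InputStreamReader(process.getInputStream()))) {
            String line;
            while ((line = reader.readLine()) != null) {
                System.out.println(line);
            }
        }
        process.waitFor();
    }
}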

From source file:eu.scape_project.arc2warc.ArcMigrator.java

public void migrateRecord(ArcRecordBase arcRecord, boolean arcMetadataRecord)
        throws IOException, URISyntaxException {
    String recordId = getRecordID(arcFile, reader.getStartOffset()).toString();
    WarcRecord warcRecord = WarcRecord.createRecord(writer);
    // Standard headers, url, date, record id
    warcRecord.header.addHeader(WarcConstants.FN_WARC_TARGET_URI, arcRecord.getUrlStr());
    warcRecord.header.addHeader(WarcConstants.FN_WARC_DATE,
            GMTUTCUnixTsFormat.format(arcRecord.getArchiveDate()));
    warcRecord.header.addHeader(WarcConstants.FN_WARC_RECORD_ID, recordId);
    /* Mimetype*/
    if (arcRecord.getContentType() != null) {
        warcRecord.header.addHeader(WarcConstants.FN_CONTENT_TYPE, arcRecord.getContentType().toString());
    } else {
        warcRecord.header.addHeader(WarcConstants.FN_CONTENT_TYPE, MIME_UNKNOWN);
    }
    // Is this metadata about the warc file or is it a "real" record
    String type;
    if (arcMetadataRecord) {
        // ARC metadata record relates to the WARC info record
        warcRecord.header.addHeader(WarcConstants.FN_WARC_CONCURRENT_TO, warcInfoId);
        type = WarcConstants.RT_METADATA;
    } else {
        type = WarcConstants.RT_RESPONSE;
    }
    warcRecord.header.addHeader(WarcConstants.FN_WARC_TYPE, type);
    if (arcRecord.getIpAddress() != null) {
        warcRecord.header.addHeader(WarcConstants.FN_WARC_IP_ADDRESS, arcRecord.getIpAddress());
    }
    // Payload metadata = HTTP Response lines
    String payloadHeader = constructPayloadHeader(arcRecord);
    InputStream payloadContentStream = null;
    if (arcRecord.hasPayload()) {
        InputStream inputStream = arcRecord.getPayloadContent();
        long remaining;
        try {
            remaining = arcRecord.getPayload().getRemaining();
        } catch (IOException e) {
            if (arcRecord.getStartOffset() == 0 && arcRecord.getArchiveLength() == 77) {
                remaining = 0;
            } else {
                throw new IOException(e);
            }
        }
        long contentLength = remaining + payloadHeader.getBytes().length;// WARC content length is payload length + payload header length
        warcRecord.header.addHeader(WarcConstants.FN_CONTENT_LENGTH, contentLength, null);
        PayloadContent payloadContent = new PayloadContent(inputStream, remaining, buffer);
        if (config.isContentTypeIdentification()) {
            TikaIdentificationTask ti = TikaIdentificationTask.getInstance();
            ti.setCurrentItemId(recordId);
            payloadContent.setIdentifier(ti);
            payloadContent.doPayloadIdentification(true);
        }
        payloadContentStream = payloadContent.getPayloadContentAsInputStream();
        if (payloadContent.getDigestStr() != null) {
            warcRecord.header.addHeader(WarcConstants.FN_WARC_PAYLOAD_DIGEST, payloadContent.getDigestStr());
        }
        if (config.isContentTypeIdentification()) {
            warcRecord.header.addHeader(WarcConstants.FN_WARC_IDENTIFIED_PAYLOAD_TYPE,
                    payloadContent.getIdentifiedPayLoadType());
        }
    } else {
        warcRecord.header.addHeader(WarcConstants.FN_CONTENT_LENGTH, 0, null);
    }
    // finished creating header, write it to the WARC record
    writer.writeHeader(warcRecord);
    // Record payload
    if (arcRecord.hasPayload()) {
        // Prepend payload metadata = HTTP Response lines
        ByteArrayInputStream payloadHeaderStream = new ByteArrayInputStream(payloadHeader.getBytes());
        SequenceInputStream sis = new SequenceInputStream(payloadHeaderStream, payloadContentStream);
        writer.streamPayload(sis);
    }
    writer.closeRecord();
}

From source file:com.splicemachine.derby.stream.control.ControlDataSetProcessor.java

private InputStream getFileStream(String s) throws IOException {
    DistributedFileSystem dfs = SIDriver.driver().fileSystem();
    InputStream value;
    if (dfs.getInfo(s).isDirectory()) {
        //we need to open a Stream against each file in the directory
        InputStream inputStream = null;
        boolean sequenced = false;
        try (DirectoryStream<Path> stream = Files.newDirectoryStream(dfs.getPath(s))) {
            for (Path p : stream) {
                if (inputStream == null) {
                    inputStream = newInputStream(dfs, p, StandardOpenOption.READ);
                } else {
                    inputStream = new SequenceInputStream(inputStream,
                            newInputStream(dfs, p, StandardOpenOption.READ));
                }
            }
        }
        value = inputStream;
    } else {
        value = newInputStream(dfs, dfs.getPath(s), StandardOpenOption.READ);
    }
    return value;
}