Example usage for org.apache.commons.io FileUtils openOutputStream

List of usage examples for org.apache.commons.io FileUtils openOutputStream

Introduction

On this page you can find example usages of org.apache.commons.io FileUtils.openOutputStream.

Prototype

public static FileOutputStream openOutputStream(File file) throws IOException 

Source Link

Document

Opens a FileOutputStream for the specified file, checking and creating the parent directory if it does not exist.

Usage

From source file:org.broadinstitute.gatk.utils.io.IOUtils.java

/**
 * Writes an embedded resource to a file.
 * The file is not scheduled for deletion and must be cleaned up by the caller.
 * @param resource Embedded resource.
 * @param file File path to write.
 */
public static void writeResource(Resource resource, File file) {
    final String resourcePath = resource.getPath();
    final InputStream in = resource.getResourceContentsAsStream();
    OutputStream out = null;
    try {
        // openOutputStream creates any missing parent directories before opening.
        out = FileUtils.openOutputStream(file);
        org.apache.commons.io.IOUtils.copy(in, out);
    } catch (IOException e) {
        throw new GATKException(String.format("Unable to copy resource '%s' to '%s'", resourcePath, file), e);
    } finally {
        // Close failures are deliberately suppressed; the copy above already succeeded or threw.
        org.apache.commons.io.IOUtils.closeQuietly(in);
        org.apache.commons.io.IOUtils.closeQuietly(out);
    }
}

From source file:org.broadinstitute.sting.analyzecovariates.AnalyzeCovariates.java

/**
 * Writes one tab-separated ".dat" table per (read group, covariate) pair into OUTPUT_DIR.
 * Each output row lists a covariate value together with its reported quality, empirical
 * quality, mismatch count and observation count.
 */
private void writeDataTables() {

    int numReadGroups = 0;

    // for each read group keyed in the collapsed recalibration table 0
    for (Object readGroupKey : dataManager.getCollapsedTable(0).data.keySet()) {

        // Honor the read-group cap; -1 means "process all read groups".
        if (NUM_READ_GROUPS_TO_PROCESS == -1 || ++numReadGroups <= NUM_READ_GROUPS_TO_PROCESS) {
            String readGroup = readGroupKey.toString();
            RecalDatum readGroupDatum = (RecalDatum) dataManager.getCollapsedTable(0).data.get(readGroupKey);
            logger.info(String.format(
                    "Writing out data tables for read group: %s\twith %s observations\tand aggregate residual error = %.3f",
                    readGroup, readGroupDatum.getNumObservations(),
                    readGroupDatum.empiricalQualDouble(0, MAX_QUALITY_SCORE)
                            - readGroupDatum.getEstimatedQReported()));

            // for each covariate; starts at 1 — presumably index 0 is the read-group
            // covariate itself (it keys the tables above) — TODO confirm
            for (int iii = 1; iii < requestedCovariates.size(); iii++) {
                Covariate cov = requestedCovariates.get(iii);

                // Create a PrintStream for "<readGroup>.<CovariateClass>.dat" under OUTPUT_DIR;
                // openOutputStream creates missing parent directories.
                File outputFile = new File(OUTPUT_DIR,
                        readGroup + "." + cov.getClass().getSimpleName() + ".dat");
                PrintStream output;
                try {
                    output = new PrintStream(FileUtils.openOutputStream(outputFile));
                } catch (IOException e) {
                    throw new UserException.CouldNotCreateOutputFile(outputFile, e);
                }

                try {
                    // Output the header
                    output.println("Covariate\tQreported\tQempirical\tnMismatches\tnBases");

                    // One row per covariate value observed for this read group.
                    for (Object covariateKey : ((Map) dataManager.getCollapsedTable(iii).data.get(readGroupKey))
                            .keySet()) {
                        output.print(covariateKey.toString() + "\t"); // Covariate
                        RecalDatum thisDatum = (RecalDatum) ((Map) dataManager.getCollapsedTable(iii).data
                                .get(readGroupKey)).get(covariateKey);
                        output.print(String.format("%.3f", thisDatum.getEstimatedQReported()) + "\t"); // Qreported
                        output.print(String.format("%.3f", thisDatum.empiricalQualDouble(0, MAX_QUALITY_SCORE))
                                + "\t"); // Qempirical
                        output.print(thisDatum.getNumMismatches() + "\t"); // nMismatches
                        output.println(thisDatum.getNumObservations()); // nBases
                    }
                } finally {
                    // Close the PrintStream
                    IOUtils.closeQuietly(output);
                }
            }
        } else {
            // Cap reached — stop processing further read groups entirely.
            break;
        }

    }
}

From source file:org.broadinstitute.sting.utils.io.IOUtils.java

/**
 * Writes the an embedded resource to a file.
 * File is not scheduled for deletion and must be cleaned up by the caller.
 * @param resource Embedded resource./*  ww w.ja v a 2  s. c o  m*/
 * @param file File path to write.
 */
public static void writeResource(Resource resource, File file) {
    String path = resource.getPath();
    InputStream inputStream = resource.getResourceContentsAsStream();
    OutputStream outputStream = null;
    try {
        outputStream = FileUtils.openOutputStream(file);
        org.apache.commons.io.IOUtils.copy(inputStream, outputStream);
    } catch (IOException e) {
        throw new StingException(String.format("Unable to copy resource '%s' to '%s'", path, file), e);
    } finally {
        org.apache.commons.io.IOUtils.closeQuietly(inputStream);
        org.apache.commons.io.IOUtils.closeQuietly(outputStream);
    }
}

From source file:org.busko.routemanager.services.RouteOutlineServiceImpl.java

/**
 * Writes a GTFS .txt file for the given entities: header line first, optional
 * New Zealand test data next, then one data line per entity.
 * No-op when the entity list is empty.
 */
private void createGtfsTxt(File directory, List gtfsEntities) throws Exception {
    if (gtfsEntities.isEmpty()) {
        return;
    }

    // The first entity supplies the file name and header for the whole file.
    GtfsFormatted first = (GtfsFormatted) gtfsEntities.get(0);
    File file = createFile(directory, first.getGtfsFileName(), null);

    PrintWriter out = null;
    try {
        out = new PrintWriter(FileUtils.openOutputStream(file));
        out.print(first.getGtfsFileHeader());
        out.print("\n");

        String testData = NewZealandTestData.getTestData(first.getGtfsFileName());
        if (testData != null) {
            out.print(testData);
            out.print("\n");
        }

        for (Object entity : gtfsEntities) {
            out.print(((GtfsFormatted) entity).getGtfsData());
            out.print("\n");
        }
    } finally {
        if (out != null) {
            out.close();
        }
    }
}

From source file:org.canova.api.util.ArchiveUtils.java

/**
 * Extracts files to the specified destination
 * @param file the file to extract to/*from   w  ww  .  java  2 s .  c  o m*/
 * @param dest the destination directory
 * @throws java.io.IOException
 */
public static void unzipFileTo(String file, String dest) throws IOException {
    File target = new File(file);
    if (!target.exists())
        throw new IllegalArgumentException("Archive doesnt exist");
    FileInputStream fin = new FileInputStream(target);
    int BUFFER = 2048;
    byte data[] = new byte[BUFFER];

    if (file.endsWith(".zip")) {
        //getFromOrigin the zip file content
        ZipInputStream zis = new ZipInputStream(fin);
        //getFromOrigin the zipped file list entry
        ZipEntry ze = zis.getNextEntry();

        while (ze != null) {
            String fileName = ze.getName();

            File newFile = new File(dest + File.separator + fileName);

            if (ze.isDirectory()) {
                newFile.mkdirs();
                zis.closeEntry();
                ze = zis.getNextEntry();
                continue;
            }

            log.info("file unzip : " + newFile.getAbsoluteFile());

            //create all non exists folders
            //else you will hit FileNotFoundException for compressed folder

            FileOutputStream fos = new FileOutputStream(newFile);

            int len;
            while ((len = zis.read(data)) > 0) {
                fos.write(data, 0, len);
            }

            fos.flush();
            fos.close();
            zis.closeEntry();
            ze = zis.getNextEntry();
        }

        zis.close();

    }

    else if (file.endsWith(".tar")) {

        BufferedInputStream in = new BufferedInputStream(fin);
        TarArchiveInputStream tarIn = new TarArchiveInputStream(in);

        TarArchiveEntry entry = null;

        /** Read the tar entries using the getNextEntry method **/

        while ((entry = (TarArchiveEntry) tarIn.getNextEntry()) != null) {

            log.info("Extracting: " + entry.getName());

            /** If the entry is a directory, createComplex the directory. **/

            if (entry.isDirectory()) {

                File f = new File(dest + File.separator + entry.getName());
                f.mkdirs();
            }
            /**
             * If the entry is a file,write the decompressed file to the disk
             * and close destination stream.
             **/
            else {
                int count;

                FileOutputStream fos = new FileOutputStream(dest + File.separator + entry.getName());
                BufferedOutputStream destStream = new BufferedOutputStream(fos, BUFFER);
                while ((count = tarIn.read(data, 0, BUFFER)) != -1) {
                    destStream.write(data, 0, count);
                }

                destStream.flush();
                ;

                IOUtils.closeQuietly(destStream);
            }
        }

        /** Close the input stream **/

        tarIn.close();
    }

    else if (file.endsWith(".tar.gz") || file.endsWith(".tgz")) {

        BufferedInputStream in = new BufferedInputStream(fin);
        GzipCompressorInputStream gzIn = new GzipCompressorInputStream(in);
        TarArchiveInputStream tarIn = new TarArchiveInputStream(gzIn);

        TarArchiveEntry entry = null;

        /** Read the tar entries using the getNextEntry method **/

        while ((entry = (TarArchiveEntry) tarIn.getNextEntry()) != null) {

            log.info("Extracting: " + entry.getName());

            /** If the entry is a directory, createComplex the directory. **/

            if (entry.isDirectory()) {

                File f = new File(dest + File.separator + entry.getName());
                f.mkdirs();
            }
            /**
             * If the entry is a file,write the decompressed file to the disk
             * and close destination stream.
             **/
            else {
                int count;

                FileOutputStream fos = new FileOutputStream(dest + File.separator + entry.getName());
                BufferedOutputStream destStream = new BufferedOutputStream(fos, BUFFER);
                while ((count = tarIn.read(data, 0, BUFFER)) != -1) {
                    destStream.write(data, 0, count);
                }

                destStream.flush();

                IOUtils.closeQuietly(destStream);
            }
        }

        /** Close the input stream **/

        tarIn.close();
    }

    else if (file.endsWith(".gz")) {
        GZIPInputStream is2 = new GZIPInputStream(fin);
        File extracted = new File(target.getParent(), target.getName().replace(".gz", ""));
        if (extracted.exists())
            extracted.delete();
        extracted.createNewFile();
        OutputStream fos = FileUtils.openOutputStream(extracted);
        IOUtils.copyLarge(is2, fos);
        is2.close();
        fos.flush();
        fos.close();
    }

}

From source file:org.chtijbug.drools.archetype.GuvnorPojoModelInstaller.java

/**
 * Downloads the POJO model jar from the configured Guvnor instance and installs
 * it into the local Maven repository as an artifact.
 *
 * @throws MojoExecutionException if the POJO model cannot be downloaded
 * @throws MojoFailureException never thrown directly; declared by the Mojo contract
 */
@Override
public void execute() throws MojoExecutionException, MojoFailureException {
    logger.debug(" >> execute");
    try {
        RestRepositoryConnector repositoryConnector = new GuvnorRepositoryConnector(host, app,
                organizationalUnitName, repositoryName, pkg, username, password);
        //____ Download the POJO Model from the Guvnor instance
        InputStream inputStream;
        try {
            inputStream = repositoryConnector.getPojoModel();
        } catch (ChtijbugDroolsRestException e) {
            // Fail fast with the cause attached: the original merely logged here and
            // then fell through with a null stream, crashing with an NPE in IOUtils.copy.
            throw new MojoExecutionException("Unable to download the Pojo Model from Guvnor", e);
        }
        //____ First create a TEMP file with the downloaded content
        File tempFile = File.createTempFile(artifactId, ".jar");
        FileOutputStream outputStream = FileUtils.openOutputStream(tempFile);
        try {
            IOUtils.copy(inputStream, outputStream);
        } finally {
            IOUtils.closeQuietly(outputStream);
            IOUtils.closeQuietly(inputStream); // was never closed before
        }
        //____ Install the pojo model into the local repository as a Maven artifact
        InstallRequest installRequest = new InstallRequest();
        Artifact artifact = new DefaultArtifact(artifactGroupId, artifactId, "jar", version);
        artifact = artifact.setFile(tempFile);
        installRequest.addArtifact(artifact);
        repoSystem.install(repoSession, installRequest);

    } catch (IOException e) {
        // Preserved best-effort behavior of the original: log and continue.
        logger.error("Error occurred while creating the file from the Pojo Model", e);
    } catch (InstallationException e) {
        logger.error("Error occurred while installing the Pojo Model artifact", e);
    } finally {
        logger.debug(">> execute");
    }
}

From source file:org.chtijbug.drools.platform.runtime.utils.Xsd2JarTransformerTestCase.java

/**
 * Transforms the bundled model.xsd into a jar, writes it to a temp file, and
 * verifies the jar has a manifest plus the five expected entries.
 */
@Test
public void should_get_all_expected_entries_from_generated_jar_file() throws IOException {
    Xsd2JarTransformer toTest = new Xsd2JarTransformer();

    URL xsdFile = this.getClass().getResource("/model.xsd");

    InputStream modelJarStream = toTest.transformXsd2Jar("org.pymma.drools", new File(xsdFile.getFile()));

    File modelJarFile = File.createTempFile("model", ".jar");
    // IOUtils.copy does not close its arguments — the original leaked both streams.
    FileOutputStream jarOut = FileUtils.openOutputStream(modelJarFile);
    try {
        IOUtils.copy(modelJarStream, jarOut);
    } finally {
        IOUtils.closeQuietly(jarOut);
        IOUtils.closeQuietly(modelJarStream);
    }

    JarInputStream inputStream = new JarInputStream(FileUtils.openInputStream(modelJarFile));
    try {
        assertThat(inputStream.getManifest()).isNotNull();

        List<ZipEntry> allJarEntries = new ArrayList<ZipEntry>();
        ZipEntry entry;
        while ((entry = inputStream.getNextEntry()) != null)
            allJarEntries.add(entry);

        assertThat(allJarEntries).hasSize(5);
    } finally {
        inputStream.close(); // was left open before
    }
}

From source file:org.codehaus.httpcache4j.cache.FileManager.java

/**
 * Copies the given stream into the file the resolver maps this key to.
 * An empty result is treated as a failed write: the file is removed and
 * {@code null} is returned instead.
 */
File createFile(Key key, InputStream stream) throws IOException {
    final File file = fileResolver.resolve(key);

    final FileOutputStream out = FileUtils.openOutputStream(file);
    try {
        IOUtils.copy(stream, out);
    } finally {
        IOUtils.closeQuietly(out);
    }

    if (file.length() > 0) {
        return file;
    }
    file.delete();
    return null;
}

From source file:org.codehaus.httpcache4j.cache.PersistentCacheStorage.java

/**
 * Serializes the in-memory cache to the serialization file.
 * Best effort: write failures are deliberately swallowed — a fresh cache is
 * simply created on the next load.
 */
private void saveCacheToDisk() {
    FileOutputStream out = null;
    try {
        out = FileUtils.openOutputStream(serializationFile);
        SerializationUtils.serialize(cache, out);
    } catch (IOException ignored) {
        // Ignored, we create a new one.
    } finally {
        IOUtils.closeQuietly(out);
    }
}

From source file:org.codehaus.mojo.jsimport.AbstractImportMojo.java

/**
 * Rewrites a lexed source file, substituting the (possibly rewritten) text of
 * MODULE_DECL / REQUIRE_DECL tokens while copying every other character of the
 * underlying character stream verbatim.
 *
 * @param cs          the original character stream the tokens were lexed from
 * @param tokenStream token stream produced from {@code cs}
 * @param outputFile  destination file; openOutputStream creates missing parent dirs
 * @throws IOException if the output cannot be written
 */
private void writeTokenStream(CharStream cs, CommonTokenStream tokenStream, File outputFile)
        throws IOException {
    OutputStream os = new BufferedOutputStream(FileUtils.openOutputStream(outputFile));
    try {
        List<?> tokens = tokenStream.getTokens();
        cs.seek(0);
        for (Object tokenObject : tokens) {
            CommonToken token = (CommonToken) tokenObject;
            if (token.getType() == ECMAScriptLexer.MODULE_DECL
                    || token.getType() == ECMAScriptLexer.REQUIRE_DECL) {
                // Copy the raw stream verbatim up to the start of this token.
                int startIndex = token.getStartIndex();
                while (cs.index() < startIndex) {
                    int streamChar = cs.LA(1);
                    if (streamChar == CharStream.EOF) {
                        break;
                    }
                    // NOTE(review): write(int) emits only the low byte of each char;
                    // this assumes ASCII-range source text — confirm for non-ASCII input.
                    os.write(streamChar);
                    cs.consume();
                }

                // Emit the token's current text in place of the characters it covered.
                CharacterIterator iter = new StringCharacterIterator(token.getText());
                for (char tokenChar = iter.first(); tokenChar != CharacterIterator.DONE; tokenChar = iter
                        .next()) {
                    os.write(tokenChar);
                }

                // Skip past the token's original extent in the character stream.
                cs.seek(token.getStopIndex() + 1);
            }
        }

        // Copy whatever remains after the last substituted token.
        int streamChar;
        while ((streamChar = cs.LA(1)) != CharStream.EOF) {
            os.write(streamChar);
            cs.consume();
        }
    } finally {
        os.close();
    }
}