Example usage for java.nio.file Files newOutputStream

List of usage examples for java.nio.file Files newOutputStream

Introduction

On this page you can find example usage for java.nio.file Files newOutputStream.

Prototype

public static OutputStream newOutputStream(Path path, OpenOption... options) throws IOException 

Source Link

Document

Opens or creates a file, returning an output stream that may be used to write bytes to the file.
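
Before the project examples below, here is a minimal, self-contained sketch of the call. The file name and written bytes are purely illustrative; try-with-resources ensures the stream is closed even if the write fails.

import java.io.IOException;
import java.io.OutputStream;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;

public class NewOutputStreamExample {
    public static void main(String[] args) throws IOException {
        // Hypothetical target file, used only for illustration.
        Path path = Paths.get("example.txt");
        // CREATE creates the file if it does not exist; APPEND writes at the end.
        try (OutputStream out = Files.newOutputStream(path,
                StandardOpenOption.CREATE, StandardOpenOption.APPEND)) {
            out.write("hello".getBytes(StandardCharsets.UTF_8));
        }
    }
}

When no options are given, the method behaves as if CREATE, TRUNCATE_EXISTING, and WRITE were specified.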

Usage

From source file:net.tuples.doitfx.connector.crypto.PasswordManager.java

private boolean generateNewPWStore(Path pStorePath) {

    final OutputStream storeOutStm;

    try {
        storeOutStm = Files.newOutputStream(pStorePath, StandardOpenOption.CREATE);

        pwStore = KeyStore.getInstance("JCEKS");

        pwStore.load(null, null);
        pwStore.store(storeOutStm, pwStorePassword.toCharArray());

        storeOutStm.close();

        return true;

    } catch (IOException ex) {
        Logger.getLogger(PasswordManager.class.getName()).log(Level.SEVERE, null, ex);
    } catch (KeyStoreException | NoSuchAlgorithmException | CertificateException ex) {
        Logger.getLogger(PasswordManager.class.getName()).log(Level.SEVERE, null, ex);
    }

    return false;
}

From source file:org.bimserver.demoplugins.tect.EnergyCalculationServicePlugin.java

@Override
public void newRevision(RunningService runningService, BimServerClientInterface bimServerClientInterface,
        long poid, long roid, String userToken, long soid, SObjectType settings) throws Exception {

    SRevision revision = bimServerClientInterface.getServiceInterface().getRevision(roid);
    if (revision.getComment().equals("Added energy needs")) {
        LOGGER.info("Skipping new revision because seems to be generated by TECT plugin");
        return;
    }
    Date startDate = new Date();
    Long topicId = bimServerClientInterface.getRegistry().registerProgressOnRevisionTopic(
            SProgressTopicType.RUNNING_SERVICE, poid, roid, "Running Energy Calculation");
    SLongActionState state = new SLongActionState();
    state.setTitle("Energy Calculation");
    state.setState(SActionState.STARTED);
    state.setProgress(-1);
    state.setStart(startDate);
    bimServerClientInterface.getRegistry().updateProgressTopic(topicId, state);

    SService service = bimServerClientInterface.getServiceInterface().getService(soid);

    SProject project = bimServerClientInterface.getServiceInterface().getProjectByPoid(poid);
    final IfcModelInterface model = bimServerClientInterface.getModel(project, roid, true, false);

    SerializerPlugin serializerPlugin = getPluginContext()
            .getSerializerPlugin("org.bimserver.ifc.step.serializer.Ifc2x3tc1StepSerializerPlugin");

    String Name = "energyModel" + count;
    String DataDir = getPluginContext().getRootPath().resolve("data").toString();
    count++;

    Serializer serializer = serializerPlugin.createSerializer(null);
    serializer.init(model, null, true);
    Path originalFile = getPluginContext().getRootPath().resolve("data").resolve(Name + ".ifc");
    OutputStream resourceAsOutputStream = Files.newOutputStream(originalFile,
            java.nio.file.StandardOpenOption.CREATE);
    serializer.writeToOutputStream(resourceAsOutputStream, null);
    resourceAsOutputStream.close();

    Path tectExe = getPluginContext().getRootPath().resolve("data").resolve("TECTcommandLine.exe");
    Process process = new ProcessBuilder(tectExe.toString(), DataDir + "\\" + Name + ".ifc",
            DataDir + "\\Climate\\FRA_Paris.Orly.071490_IWEC.epw", DataDir + "\\System\\SystemData.txt",
            DataDir + "\\Envelope\\EnvelopeData.txt", DataDir + "\\" + Name).start();

    InputStream is = process.getInputStream();
    InputStreamReader isr = new InputStreamReader(is);
    BufferedReader br = new BufferedReader(isr);
    String line;

    while ((line = br.readLine()) != null) {
        LOGGER.info(line);
    }

    DeserializerPlugin deserializerPlugin = getPluginContext()
            .getDeserializerPlugin("org.bimserver.ifc.step.deserializer.Ifc2x3tc1StepDeserializerPlugin", true);
    Deserializer deserializer = deserializerPlugin.createDeserializer(null);
    deserializer.init(model.getPackageMetaData());
    Path resultPath = getPluginContext().getRootPath().resolve("data").resolve(Name).resolve(Name + ".ifc");
    File f = resultPath.toFile();
    if (f.exists() && !f.isDirectory()) {

        SDeserializerPluginConfiguration deserializerForExtension = bimServerClientInterface
                .getServiceInterface().getSuggestedDeserializerForExtension("ifc", project.getOid());
        System.out.println("Checking in " + f.toString() + " - " + Formatters.bytesToString(f.length()));
        try {
            bimServerClientInterface.checkin(project.getOid(), "", deserializerForExtension.getOid(), false,
                    Flow.SYNC, resultPath);
        } catch (UserException e) {
            e.printStackTrace();
        }
        FileUtils.deleteDirectory(
                new File(getPluginContext().getRootPath().resolve("data").resolve(Name).toString()));
    }
}

From source file:org.objectpocket.storage.blob.MultiZipBlobStore.java

@Override
public void writeBlobs(Set<Blob> blobs) throws IOException {
    if (blobs == null || blobs.isEmpty()) {
        return;
    }
    if (blobContainerIndex == null) {
        initIndexAndReadFileSystems();
    }
    FileSystem currentWriteFileSystem = null;
    for (Blob blob : blobs) {

        // get blob path
        String path = blob.getPath();
        if (path == null || path.trim().isEmpty()) {
            path = blob.getId();
        }
        path = path.replaceAll("\\\\", "/");

        String selectedBlobContainerName = null;

        // case 1: replace blob data
        String blobContainer = blobContainerIndex.get(path);
        if (blobContainer != null) {
            currentWriteFileSystem = getWriteFileSystem(blobContainer);
            selectedBlobContainerName = blobContainer;
        }

        // case 2: add blob data
        else {

            // create new blob container
            if (lastBlobContainer == null
                    || blob.getBytes().length + lastBlobContainerSize > MAX_BINARY_FILE_SIZE) {
                if (currentWriteFileSystem != null) {
                    currentWriteFileSystem.close();
                }
                createNextBinary();
            }

            currentWriteFileSystem = getWriteFileSystem(lastBlobContainer.getName());
            selectedBlobContainerName = lastBlobContainer.getName();
        }

        // write data to blob container
        String name = path;
        if (path.contains("/")) {
            name = path.substring(path.lastIndexOf("/"));
            path = path.substring(0, path.lastIndexOf("/"));
            while (path.startsWith("/")) {
                path = path.substring(1);
            }
            Path pathInZip = currentWriteFileSystem.getPath(path);
            if (!Files.exists(pathInZip)) {
                Files.createDirectories(pathInZip);
            }
            if (!Files.exists(pathInZip)) {
                throw new IOException("Could not create directory for blob. " + path);
            }
            path = path + name;
        }
        Path fileInZip = currentWriteFileSystem.getPath(path);
        try (OutputStream out = Files.newOutputStream(fileInZip, StandardOpenOption.CREATE)) {
            out.write(blob.getBytes());
        }
        blobContainerIndex.put(path, selectedBlobContainerName);
        blob.setPersisted();
        if (lastBlobContainer.getName().equals(selectedBlobContainerName)) {
            lastBlobContainerSize = lastBlobContainerSize + blob.getBytes().length;
        }

    }

    for (FileSystem fs : writeFileSystems.values()) {
        if (fs != null && fs.isOpen()) {
            fs.close();
        }
    }
    writeFileSystems.clear();

}

From source file:edu.cmu.tetrad.cli.AbstractAlgorithmCli.java

@Override
public void run() {
    AlgorithmType algorithmType = getAlgorithmType();
    if (needsToShowHelp()) {
        showHelp(algorithmType.getCmd());

        return;
    }

    parseOptions();

    String heading = creteHeading(algorithmType);
    String argInfo = createArgsInfo();
    System.out.printf(heading);
    System.out.println(argInfo);
    LOGGER.info(String.format("=== Starting %s: %s", algorithmType.getTitle(), Args.toString(args, ' ')));
    LOGGER.info(argInfo.trim().replaceAll("\n", ",").replaceAll(" = ", "="));

    if (!skipLatest) {
        LatestClient latestClient = LatestClient.getInstance();
        String version = AppTool.jarVersion();
        if (version == null)
            version = "DEVELOPMENT";
        latestClient.checkLatest("causal-cmd", version);
        System.out.println(latestClient.getLatestResult());
    }

    Set<String> excludedVariables = getExcludedVariables();
    runPreDataValidations(excludedVariables);

    DataSet dataSet = AlgorithmCommonTask.readInDataSet(excludedVariables, dataFile,
            getDataReader(dataFile, delimiter));
    runDataValidations(dataSet);

    IKnowledge knowledge = AlgorithmCommonTask.readInPriorKnowledge(knowledgeFile);

    Path outputFile = Paths.get(dirOut.toString(), outputPrefix + ".txt");
    try (PrintStream writer = new PrintStream(
            new BufferedOutputStream(Files.newOutputStream(outputFile, StandardOpenOption.CREATE)))) {
        writer.println(heading);
        writer.println(createRunInfo(excludedVariables, dataSet));

        Algorithm algorithm = getAlgorithm(knowledge);
        Parameters parameters = getParameters();
        if (verbose) {
            parameters.set(ParamAttrs.PRINT_STREAM, writer);
        }

        Graph graph = search(dataSet, algorithm, parameters);
        writer.println();
        writer.println(graph.toString());

        if (isSerializeJson) {
            writeOutJson(outputPrefix, graph, Paths.get(dirOut.toString(), outputPrefix + "_graph.json"));
        }

        if (tetradGraphJson) {
            writeOutTetradGraphJson(graph, Paths.get(dirOut.toString(), outputPrefix + ".json"));
        }
    } catch (Exception exception) {
        LOGGER.error("Run algorithm failed.", exception);
        System.exit(-128);
    }
}

From source file:com.liferay.sync.engine.document.library.handler.DownloadFileHandler.java

protected void copyFile(final SyncFile syncFile, Path filePath, InputStream inputStream, boolean append)
        throws Exception {

    OutputStream outputStream = null;

    Watcher watcher = WatcherManager.getWatcher(getSyncAccountId());

    try {
        Path tempFilePath = FileUtil.getTempFilePath(syncFile);

        boolean exists = FileUtil.exists(filePath);

        if (append) {
            outputStream = Files.newOutputStream(tempFilePath, StandardOpenOption.APPEND);

            IOUtils.copyLarge(inputStream, outputStream);
        } else {
            if (exists && (boolean) getParameterValue("patch")) {
                if (_logger.isDebugEnabled()) {
                    _logger.debug("Patching {}", syncFile.getFilePathName());
                }

                Files.copy(filePath, tempFilePath, StandardCopyOption.REPLACE_EXISTING);

                IODeltaUtil.patch(tempFilePath, inputStream);
            } else {
                Files.copy(inputStream, tempFilePath, StandardCopyOption.REPLACE_EXISTING);
            }
        }

        watcher.addDownloadedFilePathName(filePath.toString());

        if (GetterUtil.getBoolean(syncFile.getLocalExtraSettingValue("restoreEvent"))) {

            syncFile.unsetLocalExtraSetting("restoreEvent");

            syncFile.setUiEvent(SyncFile.UI_EVENT_RESTORED_REMOTE);
        } else if (exists) {
            syncFile.setUiEvent(SyncFile.UI_EVENT_DOWNLOADED_UPDATE);
        } else {
            syncFile.setUiEvent(SyncFile.UI_EVENT_DOWNLOADED_NEW);
        }

        FileKeyUtil.writeFileKey(tempFilePath, String.valueOf(syncFile.getSyncFileId()), false);

        FileUtil.setModifiedTime(tempFilePath, syncFile.getModifiedTime());

        if (MSOfficeFileUtil.isLegacyExcelFile(filePath)) {
            syncFile.setLocalExtraSetting("lastSavedDate", MSOfficeFileUtil.getLastSavedDate(tempFilePath));
        }

        Files.move(tempFilePath, filePath, StandardCopyOption.ATOMIC_MOVE, StandardCopyOption.REPLACE_EXISTING);

        ExecutorService executorService = SyncEngine.getExecutorService();

        Runnable runnable = new Runnable() {

            @Override
            public void run() {
                IODeltaUtil.checksums(syncFile);

                syncFile.setState(SyncFile.STATE_SYNCED);

                SyncFileService.update(syncFile);
            }

        };

        executorService.execute(runnable);
    } catch (FileSystemException fse) {
        if (fse instanceof AccessDeniedException) {
            _logger.error(fse.getMessage(), fse);

            syncFile.setState(SyncFile.STATE_ERROR);
            syncFile.setUiEvent(SyncFile.UI_EVENT_ACCESS_DENIED_LOCAL);

            SyncFileService.update(syncFile);

            return;
        } else if (fse instanceof NoSuchFileException) {
            if (isEventCancelled()) {
                SyncFileService.deleteSyncFile(syncFile);

                return;
            }
        }

        watcher.removeDownloadedFilePathName(filePath.toString());

        String message = fse.getMessage();

        _logger.error(message, fse);

        syncFile.setState(SyncFile.STATE_ERROR);

        if (message.contains("File name too long")) {
            syncFile.setUiEvent(SyncFile.UI_EVENT_FILE_NAME_TOO_LONG);
        }

        SyncFileService.update(syncFile);
    } finally {
        StreamUtil.cleanUp(outputStream);
    }
}

From source file:org.apache.tika.parser.DigestingParserTest.java

private void testMulti(Path tmp, int fileLength, int markLimit, boolean useTikaInputStream) throws IOException {

    OutputStream os = new BufferedOutputStream(Files.newOutputStream(tmp, StandardOpenOption.CREATE));

    for (int i = 0; i < fileLength; i++) {
        os.write(random.nextInt());
    }
    os.flush();
    os.close();

    Metadata truth = new Metadata();
    addTruth(tmp, CommonsDigester.DigestAlgorithm.MD5, truth);
    addTruth(tmp, CommonsDigester.DigestAlgorithm.SHA1, truth);
    addTruth(tmp, CommonsDigester.DigestAlgorithm.SHA512, truth);

    checkMulti(truth, tmp, fileLength, markLimit, useTikaInputStream, CommonsDigester.DigestAlgorithm.SHA512,
            CommonsDigester.DigestAlgorithm.SHA1, CommonsDigester.DigestAlgorithm.MD5);

    checkMulti(truth, tmp, fileLength, markLimit, useTikaInputStream, CommonsDigester.DigestAlgorithm.MD5,
            CommonsDigester.DigestAlgorithm.SHA1);

    checkMulti(truth, tmp, fileLength, markLimit, useTikaInputStream, CommonsDigester.DigestAlgorithm.SHA1,
            CommonsDigester.DigestAlgorithm.SHA512, CommonsDigester.DigestAlgorithm.MD5);

    checkMulti(truth, tmp, fileLength, markLimit, useTikaInputStream, CommonsDigester.DigestAlgorithm.SHA1);

    checkMulti(truth, tmp, fileLength, markLimit, useTikaInputStream, CommonsDigester.DigestAlgorithm.MD5);

}

From source file:org.apache.tika.parser.BouncyCastleDigestingParserTest.java

private void testMulti(Path tmp, int fileLength, int markLimit, boolean useTikaInputStream) throws IOException {

    OutputStream os = new BufferedOutputStream(Files.newOutputStream(tmp, StandardOpenOption.CREATE));

    for (int i = 0; i < fileLength; i++) {
        os.write(random.nextInt());
    }
    os.flush();
    os.close();

    Metadata truth = new Metadata();
    addTruth(tmp, "MD5", truth);
    addTruth(tmp, "SHA1", truth);
    addTruth(tmp, "SHA512", truth);

    checkMulti(truth, tmp, fileLength, markLimit, useTikaInputStream, "SHA512", "SHA1", "MD5");
    checkMulti(truth, tmp, fileLength, markLimit, useTikaInputStream, "MD5", "SHA1");

    checkMulti(truth, tmp, fileLength, markLimit, useTikaInputStream, "SHA1", "SHA512", "MD5");
    checkMulti(truth, tmp, fileLength, markLimit, useTikaInputStream, "SHA1");

    checkMulti(truth, tmp, fileLength, markLimit, useTikaInputStream, "MD5");

}

From source file:cloudeventbus.pki.CertificateUtils.java

public static void savePrivateKey(PrivateKey privateKey, String fileName) throws IOException {
    try (final OutputStream outputStream = Files.newOutputStream(Paths.get(fileName),
            StandardOpenOption.CREATE_NEW)) {
        outputStream.write(privateKey.getEncoded());
    }
}

From source file:edu.cmu.tetrad.cli.search.FgsDiscrete.java

private static void writeOutGraphML(Graph graph, Path outputFile) {
    if (graph == null) {
        return;
    }

    try (PrintStream graphWriter = new PrintStream(
            new BufferedOutputStream(Files.newOutputStream(outputFile, StandardOpenOption.CREATE)))) {
        String fileName = outputFile.getFileName().toString();

        String msg = String.format("Writing out GraphML file '%s'.", fileName);
        System.out.printf("%s: %s%n", DateTime.printNow(), msg);
        LOGGER.info(msg);
        XmlPrint.printPretty(GraphmlSerializer.serialize(graph, outputPrefix), graphWriter);
        msg = String.format("Finished writing out GraphML file '%s'.", fileName);
        System.out.printf("%s: %s%n", DateTime.printNow(), msg);
        LOGGER.info(msg);
    } catch (Throwable throwable) {
        String errMsg = String.format("Failed when writting out GraphML file '%s'.",
                outputFile.getFileName().toString());
        System.err.println(errMsg);
        LOGGER.error(errMsg, throwable);
    }
}

From source file:com.facebook.buck.cxx.CxxLibraryIntegrationTest.java

@Test
public void thinArchivesDoNotContainAbsolutePaths() throws IOException {
    CxxPlatform cxxPlatform = CxxPlatformUtils.build(new CxxBuckConfig(FakeBuckConfig.builder().build()));
    assumeTrue(cxxPlatform.getAr().supportsThinArchives());
    ProjectWorkspace workspace = TestDataHelper.createProjectWorkspaceForScenario(this, "cxx_library", tmp);
    workspace.setUp();
    Path archive = workspace.buildAndReturnOutput("-c", "cxx.archive_contents=thin", "//:foo#default,static");

    // NOTE: Replace the thin header with a normal header just so the commons compress parser
    // can parse the archive contents.
    try (OutputStream outputStream = Files.newOutputStream(workspace.getPath(archive),
            StandardOpenOption.WRITE)) {
        outputStream.write(ObjectFileScrubbers.GLOBAL_HEADER);
    }

    // Now iterate the archive and verify it contains no absolute paths.
    try (ArArchiveInputStream stream = new ArArchiveInputStream(
            new FileInputStream(workspace.getPath(archive).toFile()))) {
        ArArchiveEntry entry;
        while ((entry = stream.getNextArEntry()) != null) {
            if (!entry.getName().isEmpty()) {
                assertFalse("found absolute path: " + entry.getName(),
                        workspace.getDestPath().getFileSystem().getPath(entry.getName()).isAbsolute());
            }
        }
    }
}