Example usage for java.nio.file Path getParent

List of usage examples for java.nio.file Path getParent

Introduction

This page lists example usages of java.nio.file.Path.getParent(), collected from open source projects.

Prototype

Path getParent();

Document

Returns the parent path, or null if this path does not have a parent.
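
getParent() is purely syntactic: it never touches the file system, and it returns null when a path has no parent component, such as a single-element relative path. A minimal sketch of this behavior (class name and paths are illustrative):

import java.nio.file.Path;
import java.nio.file.Paths;

public class GetParentDemo {
    public static void main(String[] args) {
        Path nested = Paths.get("reports", "2024", "summary.json");
        System.out.println(nested.getParent()); // reports/2024 (reports\2024 on Windows)

        // A single-element relative path has no parent, so getParent() returns null.
        Path bare = Paths.get("summary.json");
        System.out.println(bare.getParent()); // null

        // Guard against null before chaining calls such as toFile() or toString().
        Path parent = bare.getParent();
        if (parent != null) {
            System.out.println(parent.toAbsolutePath());
        }
    }
}

Several of the examples below rely on paths that are known to have a parent; the SSHServiceImpl example checks the result of getParent() for null explicitly.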

Usage

From source file: objective.taskboard.utils.ZipUtilsTest.java

@Test
public void whenZipAndUnzipToADestinyWithNonexistentParentFolder_BothShouldBeGenerated()
        throws IOException, URISyntaxException {
    Path source = Paths.get(getClass().getResource("zipUtilsTest.json").toURI());
    Path destiny = Paths.get("parent", "zipAndUnzipFile.zip");
    Path assertDir = Paths.get("parentAssert", "zipAndUnzipFile");

    try {
        zip(source, destiny);

        assertTrue("Zip file should be generated", exists(destiny));
        assertThat(size(destiny), greaterThan(0L));

        unzip(destiny.toFile(), assertDir);

        File file = assertDir.resolve(source.getFileName()).toFile();
        assertTrue("JSON file should be generated", file.exists());

        String actual = IOUtils.toString(asResource(file).getInputStream(), ENCODE_UTF_8);
        String expected = IOUtils.toString(asResource(source).getInputStream(), ENCODE_UTF_8);

        assertEquals("JSON", expected, actual);
    } finally {
        deleteQuietly(destiny.getParent().toFile());
        deleteQuietly(assertDir.getParent().toFile());
    }
}

From source file: org.codice.ddf.catalog.content.impl.FileSystemStorageProvider.java

private void commitDeletes(StorageRequest request) throws StorageException {
    List<Metacard> itemsToBeDeleted = deletionMap.get(request.getId());
    try {
        for (Metacard metacard : itemsToBeDeleted) {
            LOGGER.debug("File to be deleted: {}", metacard.getId());

            String metacardId = metacard.getId();

            List<String> parts = getContentFilePathParts(metacardId, "");

            Path contentIdDir = Paths.get(baseContentDirectory.toAbsolutePath().toString(),
                    parts.toArray(new String[parts.size()]));

            if (!Files.exists(contentIdDir)) {
                throw new StorageException("File doesn't exist for id: " + metacard.getId());
            }

            try {
                FileUtils.deleteDirectory(contentIdDir.toFile());

                Path part1 = contentIdDir.getParent();
                if (Files.isDirectory(part1) && isDirectoryEmpty(part1)) {
                    FileUtils.deleteDirectory(part1.toFile());
                    Path part0 = part1.getParent();
                    if (Files.isDirectory(part0) && isDirectoryEmpty(part0)) {
                        FileUtils.deleteDirectory(part0.toFile());
                    }
                }

            } catch (IOException e) {
                throw new StorageException("Could not delete file: " + metacard.getId(), e);
            }
        }
    } finally {
        rollback(request);
    }
}

From source file: org.opencb.cellbase.app.cli.VariantAnnotationCommandExecutor.java

private boolean runAnnotation() throws Exception {

    // Build indexes for custom files and/or population frequencies file
    getIndexes();

    if (variantAnnotationCommandOptions.variant != null && !variantAnnotationCommandOptions.variant.isEmpty()) {
        List<Variant> variants = Variant.parseVariants(variantAnnotationCommandOptions.variant);
        if (local) {
            DBAdaptorFactory dbAdaptorFactory = new MongoDBAdaptorFactory(configuration);
            VariantAnnotationCalculator variantAnnotationCalculator = new VariantAnnotationCalculator(
                    this.species, this.assembly, dbAdaptorFactory);
            List<QueryResult<VariantAnnotation>> annotationByVariantList = variantAnnotationCalculator
                    .getAnnotationByVariantList(variants, queryOptions);

            ObjectMapper jsonObjectMapper = new ObjectMapper();
            jsonObjectMapper.setSerializationInclusion(JsonInclude.Include.NON_NULL);
            jsonObjectMapper.configure(MapperFeature.REQUIRE_SETTERS_FOR_GETTERS, true);
            ObjectWriter objectWriter = jsonObjectMapper.writer();

            Path outPath = Paths.get(variantAnnotationCommandOptions.output);
            FileUtils.checkDirectory(outPath.getParent());
            BufferedWriter bufferedWriter = FileUtils.newBufferedWriter(outPath);
            for (QueryResult queryResult : annotationByVariantList) {
                bufferedWriter.write(objectWriter.writeValueAsString(queryResult.getResult()));
                bufferedWriter.newLine();
            }
            bufferedWriter.close();
        }
        return true;
    }

    // If a variant file is provided then we annotate it. Lines in the input file can be computationally
    // expensive to parse, e.g. a multi-sample VCF with thousands of samples. A specific task is created
    // to enable parallel parsing of these lines.
    if (input != null) {
        DataReader dataReader = new StringDataReader(input);
        List<ParallelTaskRunner.TaskWithException<String, Variant, Exception>> variantAnnotatorTaskList = getStringTaskList();
        DataWriter dataWriter = getDataWriter(output.toString());

        ParallelTaskRunner.Config config = new ParallelTaskRunner.Config(numThreads, batchSize, QUEUE_CAPACITY,
                false);
        ParallelTaskRunner<String, Variant> runner = new ParallelTaskRunner<>(dataReader,
                variantAnnotatorTaskList, dataWriter, config);
        runner.run();
        // For internal use only - will only be run when -Dpopulation-frequencies is activated
        writeRemainingPopFrequencies();
    } else {
        // This will annotate the CellBase Variation collection
        if (cellBaseAnnotation) {
            // TODO: enable this query in the parseQuery method within VariantMongoDBAdaptor
            //                    Query query = new Query("$match",
            //                            new Document("annotation.consequenceTypes", new Document("$exists", 0)));
            //                    Query query = new Query();
            QueryOptions options = new QueryOptions("include", "chromosome,start,reference,alternate,type");
            List<ParallelTaskRunner.TaskWithException<Variant, Variant, Exception>> variantAnnotatorTaskList = getVariantTaskList();
            ParallelTaskRunner.Config config = new ParallelTaskRunner.Config(numThreads, batchSize,
                    QUEUE_CAPACITY, false);

            for (String chromosome : chromosomeList) {
                logger.info("Annotating chromosome {}", chromosome);
                Query query = new Query("chromosome", chromosome);
                DataReader dataReader = new VariationDataReader(dbAdaptorFactory.getVariationDBAdaptor(species),
                        query, options);
                DataWriter dataWriter = getDataWriter(
                        output.toString() + "/" + VARIATION_ANNOTATION_FILE_PREFIX + chromosome + ".json.gz");
                ParallelTaskRunner<Variant, Variant> runner = new ParallelTaskRunner<Variant, Variant>(
                        dataReader, variantAnnotatorTaskList, dataWriter, config);
                runner.run();
            }
        }
    }

    if (customFiles != null || populationFrequenciesFile != null) {
        closeIndexes();
    }

    logger.info("Variant annotation finished.");
    return false;
}

From source file: de.teamgrit.grit.checking.compile.HaskellCompileChecker.java

/**
 * checkProgram invokes the Haskell compiler on a given file and reports
 * the output.
 * 
 * @param pathToProgramFile
 *            Specifies the file or folder that should be compiled
 *            (accepts .lhs and .hs files).
 * @param compilerName
 *            The compiler to be used (usually ghc).
 * @param compilerFlags
 *            Additional flags to be passed to the compiler.
 * @throws FileNotFoundException
 *             Is thrown when the file in pathToProgramFile cannot be
 *             opened
 * @throws BadCompilerSpecifiedException
 *             Is thrown when the given compiler cannot be called
 * @return A {@link CompilerOutput} that contains all compiler messages and
 *         flags on how the compile run went.
 * @throws BadFlagException
 *             When ghc doesn't recognize a flag, this exception is thrown.
 */
@Override
public CompilerOutput checkProgram(Path pathToProgramFile, String compilerName, List<String> compilerFlags)
        throws FileNotFoundException, BadCompilerSpecifiedException, BadFlagException {

    Process compilerProcess = null;

    try {
        // create compiler invocation.
        List<String> compilerInvocation = createCompilerInvocation(pathToProgramFile, compilerName,
                compilerFlags);

        ProcessBuilder compilerProcessBuilder = new ProcessBuilder(compilerInvocation);

        // make sure the compiler stays in its directory.
        compilerProcessBuilder.directory(pathToProgramFile.getParent().toFile());

        compilerProcess = compilerProcessBuilder.start();
        // This can never happen because createCompilerInvocation never
        // throws this exception. The throws declaration is required by
        // the implemented interface, even though the HaskellCompileChecker
        // never uses it.
    } catch (CompilerOutputFolderExistsException e) {
        LOGGER.severe("A problem while compiling, which never should happen, occured" + e.getMessage());
    } catch (BadCompilerSpecifiedException e) {
        throw new BadCompilerSpecifiedException(e.getMessage());
    } catch (IOException e) {

        // If we cannot call the compiler we return a CompilerOutput
        // initialized with false, false, indicating
        // that the compiler wasn't invoked properly and that there was no
        // clean Compile.
        CompilerOutput compilerInvokeError = new CompilerOutput();
        compilerInvokeError.setClean(false);
        compilerInvokeError.setCompilerInvoked(false);
        return compilerInvokeError;
    }

    // Now we read compiler output. If everything is ok ghc reports
    // nothing in the errorStream.
    InputStream compilerOutputStream = compilerProcess.getErrorStream();
    InputStreamReader compilerStreamReader = new InputStreamReader(compilerOutputStream);
    BufferedReader compilerOutputBuffer = new BufferedReader(compilerStreamReader);
    String line;

    CompilerOutput compilerOutput = new CompilerOutput();
    compilerOutput.setCompilerInvoked(true);

    List<String> compilerOutputLines = new LinkedList<>();

    try {
        while ((line = compilerOutputBuffer.readLine()) != null) {
            compilerOutputLines.add(line);
        }
        // Errors are separated via an empty line (""). But after the
        // last error the OutputBuffer has nothing more to write.
        // In order to recognize the last error we insert an empty String
        // at the end of the list.
        // This only needs to be done when there are errors.
        if (compilerOutputLines.size() != 0) {
            line = "";
            compilerOutputLines.add(line);
        }

        compilerOutputStream.close();
        compilerStreamReader.close();
        compilerOutputBuffer.close();
        compilerProcess.destroy();

    } catch (IOException e) {

        // Reading might go wrong here if ghc should unexpectedly die
        LOGGER.severe("Error while reading from compiler stream.");
        compilerOutput.setClean(false);
        compilerOutput.setCompileStreamBroken(true);
        return compilerOutput;
    }

    // ghc -c generates a .o (object) and a .hi (Haskell interface) file.
    // But we don't need those files, so they can be deleted.
    // The generated files have the same name as our input file, so we
    // can just exchange the file endings in order to get the
    // correct file paths for deletion.
    if (Files.isDirectory(pathToProgramFile, LinkOption.NOFOLLOW_LINKS)) {

        // we use a file walker in order to find all files in the folder
        // and its subfolders
        RegexDirectoryWalker dirWalker = new RegexDirectoryWalker(".+\\.([Ll])?[Hh][Ss]");
        try {
            Files.walkFileTree(pathToProgramFile, dirWalker);
        } catch (IOException e) {
            LOGGER.severe("Could not walk submission " + pathToProgramFile.toString()
                    + " while building copiler invocation: " + e.getMessage());
        }

        for (Path candidatePath : dirWalker.getFoundFiles()) {
            File candidateFile = candidatePath.toFile();
            if (!candidateFile.isDirectory()) {
                String extension = FilenameUtils.getExtension(candidateFile.toString());
                if (extension.matches("[Ll]?[Hh][Ss]")) {
                    File ghcGeneratedObject = new File(
                            FilenameUtils.removeExtension(candidateFile.toString()) + ".o");
                    File ghcGeneratedInterface = new File(
                            FilenameUtils.removeExtension(candidateFile.toString()) + ".hi");
                    ghcGeneratedObject.delete();
                    ghcGeneratedInterface.delete();
                }
            }
        }
    } else {
        String extension = FilenameUtils.getExtension(pathToProgramFile.toString());
        if (extension.matches("[Ll]?[Hh][Ss]")) {
            File ghcGeneratedObject = new File(
                    FilenameUtils.removeExtension(pathToProgramFile.toString()) + ".o");
            File ghcGeneratedInterface = new File(
                    FilenameUtils.removeExtension(pathToProgramFile.toString()) + ".hi");
            ghcGeneratedObject.delete();
            ghcGeneratedInterface.delete();
        }

    }

    // If there are no errors there is no output to handle.
    if (compilerOutputLines.size() != 0) {
        compilerOutput = splitCompilerOutput(compilerOutputLines, compilerOutput);
    } else {
        compilerOutput.setClean(true);
    }
    return compilerOutput;
}

From source file: org.fao.geonet.api.records.attachments.FilesystemStore.java

@Override
public Path getResource(ServiceContext context, String metadataUuid, String resourceId) throws Exception {
    // These character sequences should not be allowed by the URL structure.
    if (resourceId.contains("..") || resourceId.startsWith("/") || resourceId.startsWith("file:/")) {
        throw new SecurityException(String.format("Invalid resource identifier '%s'.", resourceId));
    }
    ApplicationContext _appContext = ApplicationContextHolder.get();
    AccessManager accessManager = _appContext.getBean(AccessManager.class);
    GeonetworkDataDirectory dataDirectory = _appContext.getBean(GeonetworkDataDirectory.class);
    String metadataId = getAndCheckMetadataId(metadataUuid);
    Path metadataDir = Lib.resource.getMetadataDir(dataDirectory, metadataId);

    Path resourceFile = null;

    boolean canDownload = accessManager.canDownload(context, metadataId);
    for (MetadataResourceVisibility r : MetadataResourceVisibility.values()) {
        try (DirectoryStream<Path> directoryStream = Files.newDirectoryStream(metadataDir.resolve(r.toString()),
                resourceId)) {
            for (Path path : directoryStream) {
                if (Files.isRegularFile(path)) {
                    resourceFile = path;
                }
            }
        } catch (IOException ignored) {
        }
    }

    if (resourceFile != null && Files.exists(resourceFile)) {
        if (resourceFile.getParent().getFileName().toString()
                .equals(MetadataResourceVisibility.PRIVATE.toString()) && !canDownload) {
            throw new SecurityException(String.format(
                    "Current user can't download resources for metadata '%s' and as such can't access the requested resource '%s'.",
                    metadataUuid, resourceId));
        }
        return resourceFile;
    } else {
        throw new ResourceNotFoundException(
                String.format("Metadata resource '%s' not found for metadata '%s'", resourceId, metadataUuid));
    }
}

From source file: com.liferay.sync.engine.file.system.SyncWatchEventProcessor.java

protected void addFolder(SyncWatchEvent syncWatchEvent) throws Exception {
    Path targetFilePath = Paths.get(syncWatchEvent.getFilePathName());

    if (sanitizeFileName(targetFilePath) || isInErrorState(targetFilePath)) {

        return;
    }

    Path parentTargetFilePath = targetFilePath.getParent();

    SyncFile parentSyncFile = SyncFileService.fetchSyncFile(parentTargetFilePath.toString());

    if ((parentSyncFile == null) || (!parentSyncFile.isSystem() && (parentSyncFile.getTypePK() == 0))) {

        queueSyncWatchEvent(parentTargetFilePath.toString(), syncWatchEvent);

        return;
    }

    SyncFile syncFile = SyncFileService.fetchSyncFile(targetFilePath.toString());

    if (syncFile == null) {
        syncFile = SyncFileService.fetchSyncFile(FileKeyUtil.getFileKey(targetFilePath));

        if (!verifySite(syncFile, parentSyncFile)) {
            syncFile = null;
        }
    }

    if (syncFile == null) {
        SyncFileService.addFolderSyncFile(targetFilePath, parentSyncFile.getTypePK(),
                parentSyncFile.getRepositoryId(), _syncAccountId);

        return;
    }

    Path sourceFilePath = Paths.get(syncFile.getFilePathName());

    if (targetFilePath.equals(sourceFilePath)) {
        if (isPendingTypePK(syncFile)) {
            queueSyncWatchEvent(syncFile.getFilePathName(), syncWatchEvent);

            return;
        }

        FileKeyUtil.writeFileKey(targetFilePath, String.valueOf(syncFile.getSyncFileId()), true);
    } else if (FileUtil.exists(sourceFilePath)) {
        SyncFileService.addFolderSyncFile(targetFilePath, parentSyncFile.getTypePK(),
                parentSyncFile.getRepositoryId(), _syncAccountId);

        return;
    } else if (parentTargetFilePath.equals(sourceFilePath.getParent())) {
        if (isPendingTypePK(syncFile)) {
            queueSyncWatchEvent(syncFile.getFilePathName(), syncWatchEvent);

            return;
        }

        SyncFileService.updateFolderSyncFile(targetFilePath, _syncAccountId, syncFile);
    } else {
        if (isPendingTypePK(syncFile)) {
            queueSyncWatchEvent(syncFile.getFilePathName(), syncWatchEvent);

            return;
        }

        SyncFileService.moveFolderSyncFile(targetFilePath, parentSyncFile.getTypePK(), _syncAccountId,
                syncFile);

        Path sourceFileNameFilePath = sourceFilePath.getFileName();

        if (!sourceFileNameFilePath.equals(targetFilePath.getFileName())) {
            SyncFileService.updateFolderSyncFile(targetFilePath, _syncAccountId, syncFile);
        }
    }

    SyncAccount syncAccount = SyncAccountService.fetchSyncAccount(_syncAccountId);

    if (syncAccount.getState() == SyncAccount.STATE_CONNECTED) {
        SyncWatchEvent relatedSyncWatchEvent = SyncWatchEventService.fetchSyncWatchEvent(
                SyncWatchEvent.EVENT_TYPE_DELETE, syncWatchEvent.getFilePathName(),
                syncWatchEvent.getTimestamp());

        if (relatedSyncWatchEvent != null) {
            _processedSyncWatchEventIds.add(relatedSyncWatchEvent.getSyncWatchEventId());
        }
    }
}

From source file: org.roda.core.storage.fs.FileStorageService.java

@Override
public Binary createBinary(StoragePath storagePath, ContentPayload payload, boolean asReference)
        throws GenericException, AlreadyExistsException {
    if (asReference) {
        throw new GenericException("Method not yet implemented");
    } else {
        Path binPath = FSUtils.getEntityPath(basePath, storagePath);
        if (FSUtils.exists(binPath)) {
            throw new AlreadyExistsException("Binary already exists: " + binPath);
        } else {

            try {
                // ensuring parent exists
                Path parent = binPath.getParent();
                if (!FSUtils.exists(parent)) {
                    Files.createDirectories(parent);
                }

                // writing file
                payload.writeToPath(binPath);
                ContentPayload newPayload = new FSPathContentPayload(binPath);
                Long sizeInBytes = Files.size(binPath);
                boolean isReference = false;
                Map<String, String> contentDigest = null;

                return new DefaultBinary(storagePath, newPayload, sizeInBytes, isReference, contentDigest);
            } catch (FileAlreadyExistsException e) {
                throw new AlreadyExistsException("Binary already exists: " + binPath);
            } catch (IOException e) {
                throw new GenericException("Could not create binary", e);
            }
        }
    }
}

From source file: org.opennms.test.system.api.NewTestEnvironment.java

/**
 * Spawns the OpenNMS container, linked to PostgreSQL.
 */
private void spawnOpenNMS() throws DockerException, InterruptedException, IOException {
    final ContainerAlias alias = ContainerAlias.OPENNMS;
    if (!(isEnabled(alias) && isSpawned(alias))) {
        return;
    }

    final Path overlayRoot = initializeOverlayRoot();

    final Path opennmsOverlay = overlayRoot.resolve("opennms-overlay");
    final Path opennmsLogs = overlayRoot.resolve("opennms-logs");
    final Path opennmsKarafLogs = overlayRoot.resolve("opennms-karaf-logs");

    Files.createDirectories(opennmsOverlay);
    Files.createDirectories(opennmsLogs);
    Files.createDirectories(opennmsKarafLogs);

    if (this.overlayDirectory != null) {
        Files.find(this.overlayDirectory, 10, (path, attr) -> {
            return path.toFile().isFile();
        }).forEach(path -> {
            final Path relative = Paths
                    .get(this.overlayDirectory.toFile().toURI().relativize(path.toFile().toURI()).getPath());
            final Path to = Paths.get(opennmsOverlay.toString(), relative.toString());
            LOG.debug("Copying {} to {}", path.toAbsolutePath(), to.toAbsolutePath());
            try {
                Files.createDirectories(to.getParent());
                Files.copy(path.toAbsolutePath(), to.toAbsolutePath());
            } catch (final Exception e) {
                throw new RuntimeException(e);
            }
        });
    }

    final List<String> binds = new ArrayList<>();
    binds.add(opennmsOverlay.toString() + ":/opennms-docker-overlay");
    binds.add(opennmsLogs.toString() + ":/var/log/opennms");
    binds.add(opennmsKarafLogs.toString() + ":/opt/opennms/data/log");

    final List<String> links = new ArrayList<>();
    links.add(String.format("%s:postgres", containerInfoByAlias.get(ContainerAlias.POSTGRES).name()));

    // Link to the Elasticsearch container, if enabled
    if (isEnabled(ContainerAlias.ELASTICSEARCH_1)) {
        links.add(String.format("%s:elasticsearch",
                containerInfoByAlias.get(ContainerAlias.ELASTICSEARCH_1).name()));
    } else if (isEnabled(ContainerAlias.ELASTICSEARCH_2)) {
        links.add(String.format("%s:elasticsearch",
                containerInfoByAlias.get(ContainerAlias.ELASTICSEARCH_2).name()));
    } else if (isEnabled(ContainerAlias.ELASTICSEARCH_5)) {
        links.add(String.format("%s:elasticsearch",
                containerInfoByAlias.get(ContainerAlias.ELASTICSEARCH_5).name()));
    }

    Builder builder = HostConfig.builder().privileged(true).publishAllPorts(true).links(links).binds(binds);

    spawnContainer(alias, builder, Collections.emptyList());
}

From source file: org.apache.storm.localizer.LocallyCachedTopologyBlob.java

@Override
public long downloadToTempLocation(ClientBlobStore store)
        throws IOException, KeyNotFoundException, AuthorizationException {
    if (isLocalMode && type == TopologyBlobType.TOPO_JAR) {
        LOG.debug("DOWNLOADING LOCAL JAR to TEMP LOCATION... {}", topologyId);
        //This is a special case where the jar was not uploaded so we will not download it (it is already on the classpath)
        ClassLoader classloader = Thread.currentThread().getContextClassLoader();
        String resourcesJar = resourcesJar();
        URL url = classloader.getResource(ServerConfigUtils.RESOURCES_SUBDIR);
        Path extractionDest = topologyBasicBlobsRootDir
                .resolve(type.getTempExtractionDir(LOCAL_MODE_JAR_VERSION));
        if (resourcesJar != null) {
            LOG.info("Extracting resources from jar at {} to {}", resourcesJar, extractionDest);
            extractDirFromJar(resourcesJar, ServerConfigUtils.RESOURCES_SUBDIR, extractionDest);
        } else if (url != null) {
            LOG.info("Copying resources at {} to {}", url, extractionDest);
            if ("jar".equals(url.getProtocol())) {
                JarURLConnection urlConnection = (JarURLConnection) url.openConnection();
                extractDirFromJar(urlConnection.getJarFileURL().getFile(), ServerConfigUtils.RESOURCES_SUBDIR,
                        extractionDest);
            } else {
                fsOps.copyDirectory(new File(url.getFile()), extractionDest.toFile());
            }
        }
        return LOCAL_MODE_JAR_VERSION;
    }

    long newVersion;
    Path tmpLocation;
    String key = type.getKey(topologyId);
    try (InputStreamWithMeta in = store.getBlob(key)) {
        newVersion = in.getVersion();
        long expectedSize = in.getFileLength();
        if (newVersion == version) {
            throw new RuntimeException(
                    "The version did not change, but we tried to download it. " + version + " " + key);
        }
        tmpLocation = topologyBasicBlobsRootDir.resolve(type.getTempFileName(newVersion));
        long totalRead = 0;
        //Make sure the parent directory is there and ready to go
        fsOps.forceMkdir(tmpLocation.getParent());
        try (OutputStream outStream = fsOps.getOutputStream(tmpLocation.toFile())) {
            byte[] buffer = new byte[4096];
            int read = 0;
            while ((read = in.read(buffer)) > 0) {
                outStream.write(buffer, 0, read);
                totalRead += read;
            }
        }
        if (totalRead != expectedSize) {
            throw new IOException(
                    "We expected to download " + expectedSize + " bytes but found we got " + totalRead);
        }
    }

    if (type.needsExtraction()) {
        Path extractionDest = topologyBasicBlobsRootDir.resolve(type.getTempExtractionDir(newVersion));
        extractDirFromJar(tmpLocation.toAbsolutePath().toString(), ServerConfigUtils.RESOURCES_SUBDIR,
                extractionDest);
    }
    return newVersion;
}

From source file: io.cloudslang.content.ssh.services.impl.SSHServiceImpl.java

/**
 * Open SSH session.
 *
 * @param details                     The connection details.
 * @param identityKey                 The private key file or string.
 * @param knownHostsFile              The known_hosts file and policy.
 * @param connectTimeout              The open SSH session timeout.
 * @param keepContextForExpectCommand Use the same channel for the expect command.
 * @param proxyHTTP                   The proxy settings; pass null if no proxy is required.
 * @param allowedCiphers              The list of allowed ciphers. If not empty, it overrides the default list.
 */
public SSHServiceImpl(ConnectionDetails details, IdentityKey identityKey, KnownHostsFile knownHostsFile,
        int connectTimeout, boolean keepContextForExpectCommand, ProxyHTTP proxyHTTP, String allowedCiphers)
        throws SSHException {
    JSch jsch = new JSch();
    String finalListOfAllowedCiphers = StringUtilities.isNotBlank(allowedCiphers) ? allowedCiphers
            : ALLOWED_CIPHERS;
    JSch.setConfig("cipher.s2c", finalListOfAllowedCiphers);
    JSch.setConfig("cipher.c2s", finalListOfAllowedCiphers);
    JSch.setConfig("PreferredAuthentications", "publickey,password,keyboard-interactive");

    try {
        session = jsch.getSession(details.getUsername(), details.getHost(), details.getPort());
    } catch (JSchException e) {
        throw new SSHException(e);
    }

    try {
        String policy = knownHostsFile.getPolicy();
        Path knownHostsFilePath = knownHostsFile.getPath();
        switch (policy.toLowerCase(Locale.ENGLISH)) {
        case KNOWN_HOSTS_ALLOW:
            session.setConfig("StrictHostKeyChecking", "no");
            break;
        case KNOWN_HOSTS_STRICT:
            jsch.setKnownHosts(knownHostsFilePath.toString());
            session.setConfig("StrictHostKeyChecking", "yes");
            break;
        case KNOWN_HOSTS_ADD:
            if (!knownHostsFilePath.isAbsolute()) {
                throw new SSHException("The known_hosts file path should be absolute.");
            }
            if (!Files.exists(knownHostsFilePath)) {
                Path parent = knownHostsFilePath.getParent();
                if (parent != null) {
                    Files.createDirectories(parent);
                }
                Files.createFile(knownHostsFilePath);
            }
            jsch.setKnownHosts(knownHostsFilePath.toString());
            session.setConfig("StrictHostKeyChecking", "no");
            break;
        default:
            throw new SSHException("Unknown known_hosts file policy.");
        }
    } catch (JSchException e) {
        throw new SSHException("The known_hosts file couldn't be set.", e);
    } catch (IOException e) {
        throw new SSHException("The known_hosts file couldn't be created.", e);
    }

    if (identityKey == null) {
        // use the password
        session.setPassword(details.getPassword());
    } else {
        // or use the OpenSSH private key file or string
        IdentityKeyUtils.setIdentity(jsch, identityKey);
    }

    if (proxyHTTP != null) {
        session.setProxy(proxyHTTP);
    }

    try {
        session.connect(connectTimeout);

        if (keepContextForExpectCommand) {
            // create exec channel
            execChannel = session.openChannel(EXEC_CHANNEL);

            // connect to the channel and run the command(s)
            execChannel.connect(connectTimeout);
        }
    } catch (JSchException e) {
        throw new SSHException(e);
    }
}