List of usage examples for java.nio.file.Files.copy
public static long copy(Path source, OutputStream out) throws IOException
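The examples below come from real projects and exercise several copy overloads (Path to Path, InputStream to Path, Path to OutputStream). As a starting point, here is a minimal, self-contained sketch of the overload documented above; the file name is an illustrative assumption, not taken from any of the sources below.

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;

public class FilesCopyToStreamExample {
    public static void main(String[] args) throws IOException {
        // Hypothetical source file; substitute any existing file on your system.
        Path source = Paths.get("example.txt");
        // Files.copy(Path, OutputStream) streams the file's bytes into the given
        // OutputStream and returns the number of bytes that were copied.
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        long bytesCopied = Files.copy(source, out);
        System.out.println("Copied " + bytesCopied + " bytes");
    }
}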
From source file:de.tudarmstadt.ukp.dkpro.core.api.datasets.DatasetFactory.java
/**
 * Verify/download/update artifact in cache. Execute post-download actions.
 */
private void materialize(DatasetDescription aDataset) throws IOException {
    Path root = resolve(aDataset);
    Collection<ArtifactDescription> artifacts = aDataset.getArtifacts().values();

    // First validate if local copies are still up-to-date
    boolean reload = false;
    packageValidationLoop: for (ArtifactDescription artifact : artifacts) {
        Path cachedFile = resolve(aDataset, artifact);

        if (!Files.exists(cachedFile)) {
            continue;
        }

        if (artifact.getSha1() != null) {
            String actual = getDigest(cachedFile, "SHA1");
            if (!artifact.getSha1().equals(actual)) {
                LOG.info("Local SHA1 hash mismatch on [" + cachedFile + "] - expected ["
                        + artifact.getSha1() + "] - actual [" + actual + "]");
                reload = true;
                break packageValidationLoop;
            } else {
                LOG.info("Local SHA1 hash verified on [" + cachedFile + "] - [" + actual + "]");
            }
        }
    }

    // If any of the packages are outdated, clear the cache and download again
    if (reload) {
        LOG.info("Clearing local cache for [" + root + "]");
        FileUtils.deleteQuietly(root.toFile());
    }

    for (ArtifactDescription artifact : artifacts) {
        Path cachedFile = resolve(aDataset, artifact);

        if (Files.exists(cachedFile)) {
            continue;
        }

        if (artifact.getText() != null) {
            Files.createDirectories(cachedFile.getParent());
            LOG.info("Creating [" + cachedFile + "]");
            try (Writer out = Files.newBufferedWriter(cachedFile, StandardCharsets.UTF_8)) {
                out.write(artifact.getText());
            }
        }

        if (artifact.getUrl() != null) {
            Files.createDirectories(cachedFile.getParent());

            MessageDigest sha1;
            try {
                sha1 = MessageDigest.getInstance("SHA1");
            } catch (NoSuchAlgorithmException e) {
                throw new IOException(e);
            }

            URL source = new URL(artifact.getUrl());

            LOG.info("Fetching [" + cachedFile + "]");
            URLConnection connection = source.openConnection();
            connection.setRequestProperty("User-Agent", "Java");
            try (InputStream is = connection.getInputStream()) {
                DigestInputStream sha1Filter = new DigestInputStream(is, sha1);
                Files.copy(sha1Filter, cachedFile);

                if (artifact.getSha1() != null) {
                    String sha1Hex = new String(Hex.encodeHex(sha1Filter.getMessageDigest().digest()));
                    if (!artifact.getSha1().equals(sha1Hex)) {
                        String message = "SHA1 mismatch. Expected [" + artifact.getSha1()
                                + "] but got [" + sha1Hex + "].";
                        LOG.error(message);
                        throw new IOException(message);
                    }
                }
            }
        }
    }

    // Perform a post-fetch action such as unpacking
    Path postActionCompleteMarker = resolve(aDataset).resolve(".postComplete");
    if (!Files.exists(postActionCompleteMarker)) {
        for (ArtifactDescription artifact : artifacts) {
            Path cachedFile = resolve(aDataset, artifact);

            List<ActionDescription> actions = artifact.getActions();
            if (actions != null && !actions.isEmpty()) {
                try {
                    for (ActionDescription action : actions) {
                        LOG.info("Post-download action [" + action.getAction() + "]");
                        Class<? extends Action_ImplBase> implClass = actionRegistry.get(action.getAction());

                        if (implClass == null) {
                            throw new IllegalStateException(
                                    "Unknown or unsupported action [" + action.getAction() + "]");
                        }

                        Action_ImplBase impl = implClass.newInstance();
                        impl.apply(action, aDataset, artifact, cachedFile);
                    }
                } catch (IllegalStateException e) {
                    throw e;
                } catch (IOException e) {
                    throw e;
                } catch (Exception e) {
                    throw new IllegalStateException(e);
                }
            }
        }
        Files.createFile(postActionCompleteMarker);
    }
}
From source file:at.ac.tuwien.ims.latex2mobiformulaconv.converter.Converter.java
/**
 * Saves the html document to a file with .html extension
 *
 * @param document JDOM Document representing the HTML
 * @return written HTML File object
 */
private File saveHtmlFile(Document document) {
    Path tempFilepath = null;
    Path tempDirPath = formulaConverter.getTempDirPath();

    ClassLoader classLoader = getClass().getClassLoader();
    InputStream mainCssIs = classLoader.getResourceAsStream(MAIN_CSS_FILENAME);

    logger.debug("Copying main.css file to temp dir: " + tempDirPath.toAbsolutePath().toString());
    try {
        Files.copy(mainCssIs, tempDirPath.resolve(MAIN_CSS_FILENAME));
    } catch (FileAlreadyExistsException e) {
        // do nothing
    } catch (IOException e) {
        logger.error("could not copy main.css file to temp dir!");
    }

    tempFilepath = tempDirPath.resolve("latex2mobi.html");
    logger.debug("tempFile created at: " + tempFilepath.toAbsolutePath().toString());

    try {
        Files.write(tempFilepath,
                new XMLOutputter().outputString(document).getBytes(Charset.forName("UTF-8")));

        if (debugMarkupOutput) {
            logger.info("Debug markup will be generated.");
        }
    } catch (IOException e) {
        logger.error("Error writing HTML to temp dir!");
        logger.error(e.getMessage(), e);
    }

    return tempFilepath.toFile();
}
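The example above deliberately ignores FileAlreadyExistsException when copying main.css. If overwriting a stale copy is preferable, the Files.copy(InputStream, Path, CopyOption...) overload accepts StandardCopyOption.REPLACE_EXISTING. The following minimal sketch shows that alternative; the temp directory and the "main.css" resource name are illustrative assumptions, not values from the project above.

import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;

public class CopyResourceOverwrite {
    public static void main(String[] args) throws IOException {
        Path tempDirPath = Files.createTempDirectory("latex2mobi");
        // "main.css" is an illustrative classpath resource name; it must be on the classpath.
        try (InputStream mainCssIs = CopyResourceOverwrite.class.getClassLoader()
                .getResourceAsStream("main.css")) {
            // REPLACE_EXISTING overwrites a stale copy instead of throwing FileAlreadyExistsException.
            Files.copy(mainCssIs, tempDirPath.resolve("main.css"), StandardCopyOption.REPLACE_EXISTING);
        }
    }
}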
From source file:org.elasticsearch.xpack.core.ssl.SSLConfigurationReloaderTests.java
/**
 * Tests the reloading of SSLContext when the trust store is modified. The same store is used as a TrustStore (for the
 * reloadable SSLContext used in the HTTPClient) and as a KeyStore for the MockWebServer
 */
public void testReloadingTrustStore() throws Exception {
    Path tempDir = createTempDir();
    Path trustStorePath = tempDir.resolve("testnode.jks");
    Path updatedTruststorePath = tempDir.resolve("testnode_updated.jks");
    Files.copy(getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks"),
            trustStorePath);
    Files.copy(getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_updated.jks"),
            updatedTruststorePath);
    MockSecureSettings secureSettings = new MockSecureSettings();
    secureSettings.setString("xpack.ssl.truststore.secure_password", "testnode");
    Settings settings = Settings.builder().put("xpack.ssl.truststore.path", trustStorePath)
            .put("path.home", createTempDir()).setSecureSettings(secureSettings).build();
    Environment env = randomBoolean() ? null : TestEnvironment.newEnvironment(settings);
    // Create the MockWebServer once for both pre and post checks
    try (MockWebServer server = getSslServer(trustStorePath, "testnode")) {
        final Consumer<SSLContext> trustMaterialPreChecks = (context) -> {
            try (CloseableHttpClient client = HttpClients.custom().setSSLContext(context).build()) {
                privilegedConnect(
                        () -> client.execute(new HttpGet("https://localhost:" + server.getPort())).close());
            } catch (Exception e) {
                throw new RuntimeException("Error connecting to the mock server", e);
            }
        };
        final Runnable modifier = () -> {
            try {
                atomicMoveIfPossible(updatedTruststorePath, trustStorePath);
            } catch (Exception e) {
                throw new RuntimeException("failed to modify file", e);
            }
        };
        // Client's truststore doesn't contain the server's certificate anymore so SSLHandshake should fail
        final Consumer<SSLContext> trustMaterialPostChecks = (updatedContext) -> {
            try (CloseableHttpClient client = HttpClients.custom().setSSLContext(updatedContext).build()) {
                SSLHandshakeException sslException = expectThrows(SSLHandshakeException.class,
                        () -> privilegedConnect(() -> client
                                .execute(new HttpGet("https://localhost:" + server.getPort())).close()));
                assertThat(sslException.getCause().getMessage(), containsString("PKIX path building failed"));
            } catch (Exception e) {
                throw new RuntimeException("Error closing CloseableHttpClient", e);
            }
        };
        validateSSLConfigurationIsReloaded(settings, env, trustMaterialPreChecks, modifier,
                trustMaterialPostChecks);
    }
}
From source file:org.apache.karaf.tooling.ArchiveMojo.java
private void addFileToZip(ZipArchiveOutputStream tOut, Path f, String base) throws IOException {
    if (Files.isDirectory(f)) {
        String entryName = base + f.getFileName().toString() + "/";
        ZipArchiveEntry zipEntry = new ZipArchiveEntry(entryName);
        tOut.putArchiveEntry(zipEntry);
        tOut.closeArchiveEntry();
        try (DirectoryStream<Path> children = Files.newDirectoryStream(f)) {
            for (Path child : children) {
                addFileToZip(tOut, child, entryName);
            }
        }
    } else if (useSymLinks && Files.isSymbolicLink(f)) {
        String entryName = base + f.getFileName().toString();
        ZipArchiveEntry zipEntry = new ZipArchiveEntry(entryName);
        zipEntry.setUnixMode(UnixStat.LINK_FLAG | UnixStat.DEFAULT_FILE_PERM);
        tOut.putArchiveEntry(zipEntry);
        tOut.write(Files.readSymbolicLink(f).toString().getBytes());
        tOut.closeArchiveEntry();
    } else {
        String entryName = base + f.getFileName().toString();
        ZipArchiveEntry zipEntry = new ZipArchiveEntry(entryName);
        zipEntry.setSize(Files.size(f));
        if (entryName.contains("/bin/") || (!usePathPrefix && entryName.startsWith("bin"))) {
            if (!entryName.endsWith(".bat")) {
                zipEntry.setUnixMode(0755);
            } else {
                zipEntry.setUnixMode(0644);
            }
        }
        tOut.putArchiveEntry(zipEntry);
        Files.copy(f, tOut);
        tOut.closeArchiveEntry();
    }
}
From source file:ch.rasc.embeddedtc.plugin.PackageTcWarMojo.java
@Override
public void execute() throws MojoExecutionException {
    Path warExecFile = Paths.get(this.buildDirectory, this.finalName);
    try {
        Files.deleteIfExists(warExecFile);
        Files.createDirectories(warExecFile.getParent());

        try (OutputStream os = Files.newOutputStream(warExecFile);
                ArchiveOutputStream aos = new ArchiveStreamFactory()
                        .createArchiveOutputStream(ArchiveStreamFactory.JAR, os)) {

            // If project is a war project add the war to the project
            if ("war".equalsIgnoreCase(this.project.getPackaging())) {
                File projectArtifact = this.project.getArtifact().getFile();
                if (projectArtifact != null && Files.exists(projectArtifact.toPath())) {
                    aos.putArchiveEntry(new JarArchiveEntry(projectArtifact.getName()));
                    try (InputStream is = Files.newInputStream(projectArtifact.toPath())) {
                        IOUtils.copy(is, aos);
                    }
                    aos.closeArchiveEntry();
                }
            }

            // Add extraWars into the jar
            if (this.extraWars != null) {
                for (Dependency extraWarDependency : this.extraWars) {
                    ArtifactRequest request = new ArtifactRequest();
                    request.setArtifact(new DefaultArtifact(extraWarDependency.getGroupId(),
                            extraWarDependency.getArtifactId(), extraWarDependency.getType(),
                            extraWarDependency.getVersion()));
                    request.setRepositories(this.projectRepos);

                    ArtifactResult result;
                    try {
                        result = this.repoSystem.resolveArtifact(this.repoSession, request);
                    } catch (ArtifactResolutionException e) {
                        throw new MojoExecutionException(e.getMessage(), e);
                    }

                    File extraWarFile = result.getArtifact().getFile();
                    aos.putArchiveEntry(new JarArchiveEntry(extraWarFile.getName()));
                    try (InputStream is = Files.newInputStream(extraWarFile.toPath())) {
                        IOUtils.copy(is, aos);
                    }
                    aos.closeArchiveEntry();
                }
            }

            // Add extraResources into the jar. Folder /extra
            if (this.extraResources != null) {
                for (Resource extraResource : this.extraResources) {
                    DirectoryScanner directoryScanner = new DirectoryScanner();
                    directoryScanner.setBasedir(extraResource.getDirectory());
                    directoryScanner.setExcludes(extraResource.getExcludes()
                            .toArray(new String[extraResource.getExcludes().size()]));

                    if (!extraResource.getIncludes().isEmpty()) {
                        directoryScanner.setIncludes(extraResource.getIncludes()
                                .toArray(new String[extraResource.getIncludes().size()]));
                    } else {
                        // include everything by default
                        directoryScanner.setIncludes(new String[] { "**" });
                    }

                    directoryScanner.scan();
                    for (String includeFile : directoryScanner.getIncludedFiles()) {
                        aos.putArchiveEntry(
                                new JarArchiveEntry(Runner.EXTRA_RESOURCES_DIR + "/" + includeFile));

                        Path extraFile = Paths.get(extraResource.getDirectory(), includeFile);
                        try (InputStream is = Files.newInputStream(extraFile)) {
                            IOUtils.copy(is, aos);
                        }
                        aos.closeArchiveEntry();
                    }
                }
            }

            Set<String> includeArtifacts = new HashSet<>();
            includeArtifacts.add("org.apache.tomcat:tomcat-jdbc");
            includeArtifacts.add("org.apache.tomcat.embed:tomcat-embed-core");
            includeArtifacts.add("org.apache.tomcat.embed:tomcat-embed-websocket");
            includeArtifacts.add("org.apache.tomcat.embed:tomcat-embed-logging-juli");
            includeArtifacts.add("org.yaml:snakeyaml");
            includeArtifacts.add("com.beust:jcommander");

            if (this.includeJSPSupport) {
                includeArtifacts.add("org.apache.tomcat.embed:tomcat-embed-jasper");
                includeArtifacts.add("org.apache.tomcat.embed:tomcat-embed-el");
                includeArtifacts.add("org.eclipse.jdt.core.compiler:ecj");
            }

            for (Artifact pluginArtifact : this.pluginArtifacts) {
                String artifactName = pluginArtifact.getGroupId() + ":" + pluginArtifact.getArtifactId();
                if (includeArtifacts.contains(artifactName)) {
                    try (JarFile jarFile = new JarFile(pluginArtifact.getFile())) {
                        extractJarToArchive(jarFile, aos);
                    }
                }
            }

            if (this.extraDependencies != null) {
                for (Dependency dependency : this.extraDependencies) {
                    ArtifactRequest request = new ArtifactRequest();
                    request.setArtifact(new DefaultArtifact(dependency.getGroupId(),
                            dependency.getArtifactId(), dependency.getType(), dependency.getVersion()));
                    request.setRepositories(this.projectRepos);

                    ArtifactResult result;
                    try {
                        result = this.repoSystem.resolveArtifact(this.repoSession, request);
                    } catch (ArtifactResolutionException e) {
                        throw new MojoExecutionException(e.getMessage(), e);
                    }

                    try (JarFile jarFile = new JarFile(result.getArtifact().getFile())) {
                        extractJarToArchive(jarFile, aos);
                    }
                }
            }

            if (this.includeJSPSupport) {
                addFile(aos, "/conf/web.xml", "conf/web.xml");
            } else {
                addFile(aos, "/conf/web_wo_jsp.xml", "conf/web.xml");
            }
            addFile(aos, "/conf/logging.properties", "conf/logging.properties");

            if (this.includeTcNativeWin32 != null) {
                aos.putArchiveEntry(new JarArchiveEntry("tcnative-1.dll.32"));
                Files.copy(Paths.get(this.includeTcNativeWin32), aos);
                aos.closeArchiveEntry();
            }

            if (this.includeTcNativeWin64 != null) {
                aos.putArchiveEntry(new JarArchiveEntry("tcnative-1.dll.64"));
                Files.copy(Paths.get(this.includeTcNativeWin64), aos);
                aos.closeArchiveEntry();
            }

            String[] runnerClasses = { "ch.rasc.embeddedtc.runner.CheckConfig$CheckConfigOptions",
                    "ch.rasc.embeddedtc.runner.CheckConfig", "ch.rasc.embeddedtc.runner.Config",
                    "ch.rasc.embeddedtc.runner.Shutdown", "ch.rasc.embeddedtc.runner.Context",
                    "ch.rasc.embeddedtc.runner.DeleteDirectory",
                    "ch.rasc.embeddedtc.runner.ObfuscateUtil$ObfuscateOptions",
                    "ch.rasc.embeddedtc.runner.ObfuscateUtil", "ch.rasc.embeddedtc.runner.Runner$1",
                    "ch.rasc.embeddedtc.runner.Runner$2", "ch.rasc.embeddedtc.runner.Runner$StartOptions",
                    "ch.rasc.embeddedtc.runner.Runner$StopOptions",
                    "ch.rasc.embeddedtc.runner.Runner$RunnerShutdownHook",
                    "ch.rasc.embeddedtc.runner.Runner" };

            for (String rc : runnerClasses) {
                String classAsPath = rc.replace('.', '/') + ".class";
                try (InputStream is = getClass().getResourceAsStream("/" + classAsPath)) {
                    aos.putArchiveEntry(new JarArchiveEntry(classAsPath));
                    IOUtils.copy(is, aos);
                    aos.closeArchiveEntry();
                }
            }

            Manifest manifest = new Manifest();

            Manifest.Attribute mainClassAtt = new Manifest.Attribute();
            mainClassAtt.setName("Main-Class");
            mainClassAtt.setValue(Runner.class.getName());
            manifest.addConfiguredAttribute(mainClassAtt);

            aos.putArchiveEntry(new JarArchiveEntry("META-INF/MANIFEST.MF"));
            manifest.write(aos);
            aos.closeArchiveEntry();

            aos.putArchiveEntry(new JarArchiveEntry(Runner.TIMESTAMP_FILENAME));
            aos.write(String.valueOf(System.currentTimeMillis()).getBytes(StandardCharsets.UTF_8));
            aos.closeArchiveEntry();
        }
    } catch (IOException | ArchiveException | ManifestException e) {
        throw new MojoExecutionException(e.getMessage(), e);
    }
}
From source file:edu.mit.lib.bagit.Filler.java
/**
 * Adds the contents of the passed stream to a tag (metadata) file at the
 * specified relative path in the bag directory tree.
 *
 * @param relPath
 *            the relative path of the file
 * @param is
 *            the input stream to read.
 * @return Filler this Filler
 * @throws IOException
 */
public Filler tag(String relPath, InputStream is) throws IOException {
    // make sure tag files not written to payload directory
    if (relPath.startsWith(DATA_PATH)) {
        throw new IOException("Tag files not allowed in payload directory");
    }
    if (bagFile(relPath).exists()) {
        throw new IllegalStateException("Tag file already exists at: " + relPath);
    }
    // wrap stream in digest stream
    try (DigestInputStream dis = new DigestInputStream(is, MessageDigest.getInstance(csAlg))) {
        Files.copy(dis, tagFile(relPath).toPath());
        // record checksum
        tagWriter.writeLine(toHex(dis.getMessageDigest().digest()) + " " + relPath);
    } catch (NoSuchAlgorithmException nsaE) {
        throw new IOException("no algorithm: " + csAlg);
    }
    return this;
}
From source file:io.fabric8.docker.client.impl.BuildImage.java
@Override
public OutputHandle fromFolder(String path) {
    try {
        final Path root = Paths.get(path);
        final Path dockerIgnore = root.resolve(DOCKER_IGNORE);
        final List<String> ignorePatterns = new ArrayList<>();
        if (dockerIgnore.toFile().exists()) {
            for (String p : Files.readAllLines(dockerIgnore, UTF_8)) {
                ignorePatterns.add(path.endsWith(File.separator) ? path + p : path + File.separator + p);
            }
        }

        final DockerIgnorePathMatcher dockerIgnorePathMatcher = new DockerIgnorePathMatcher(ignorePatterns);

        File tempFile = Files.createTempFile(Paths.get(DEFAULT_TEMP_DIR), DOCKER_PREFIX, BZIP2_SUFFIX).toFile();

        try (FileOutputStream fout = new FileOutputStream(tempFile);
                BufferedOutputStream bout = new BufferedOutputStream(fout);
                BZip2CompressorOutputStream bzout = new BZip2CompressorOutputStream(bout);
                final TarArchiveOutputStream tout = new TarArchiveOutputStream(bzout)) {

            Files.walkFileTree(root, new SimpleFileVisitor<Path>() {

                @Override
                public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs)
                        throws IOException {
                    if (dockerIgnorePathMatcher.matches(dir)) {
                        return FileVisitResult.SKIP_SUBTREE;
                    }
                    return FileVisitResult.CONTINUE;
                }

                @Override
                public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
                    if (dockerIgnorePathMatcher.matches(file)) {
                        return FileVisitResult.SKIP_SUBTREE;
                    }

                    final Path relativePath = root.relativize(file);
                    final TarArchiveEntry entry = new TarArchiveEntry(file.toFile());
                    entry.setName(relativePath.toString());
                    entry.setMode(TarArchiveEntry.DEFAULT_FILE_MODE);
                    entry.setSize(attrs.size());
                    tout.putArchiveEntry(entry);
                    Files.copy(file, tout);
                    tout.closeArchiveEntry();
                    return FileVisitResult.CONTINUE;
                }
            });
            fout.flush();
        }

        return fromTar(tempFile.getAbsolutePath());
    } catch (IOException e) {
        throw DockerClientException.launderThrowable(e);
    }
}
From source file:edu.mit.lib.bagit.Loader.java
private void inflate(InputStream in, String fmt) throws IOException {
    switch (fmt) {
    case "zip":
        ZipInputStream zin = new ZipInputStream(in);
        ZipEntry entry = null;
        while ((entry = zin.getNextEntry()) != null) {
            File outFile = new File(base.getParent(), entry.getName());
            outFile.getParentFile().mkdirs();
            Files.copy(zin, outFile.toPath());
        }
        zin.close();
        break;
    case "tgz":
        TarArchiveInputStream tin = new TarArchiveInputStream(new GzipCompressorInputStream(in));
        TarArchiveEntry tentry = null;
        while ((tentry = tin.getNextTarEntry()) != null) {
            File outFile = new File(base.getParent(), tentry.getName());
            outFile.getParentFile().mkdirs();
            Files.copy(tin, outFile.toPath());
        }
        tin.close();
        break;
    default:
        throw new IOException("Unsupported archive format: " + fmt);
    }
}
From source file:de.phillme.PhotoSorter.java
private void moveRelevantFiles(String targetParent, PhotoFile photoFile) throws IOException {
    //TODO is this all safe?
    List<String> list = photoFile.getSupportedMetaDataFileExtensions();

    //Move original file
    String fileName = photoFile.getFilePath().getFileName().toString();
    if (!this.noRename) {
        //use the date provided for the photo files as a new name
        String newFileName = generateNewFileName(photoFile);
        if (newFileName != null) {
            fileName = newFileName;
        }
    }
    Path targetPath = Paths.get(targetParent + File.separator + fileName);

    if (targetPath != null) {
        if (this.write) {
            LOGGER.info(this.actionName + "-ing to " + targetPath);
            if (this.moveInsteadCopy) {
                Files.move((photoFile.getFilePath()), targetPath);
            } else {
                Files.copy((photoFile.getFilePath()), targetPath);
            }
        } else {
            LOGGER.info("Would " + this.actionName + " to " + targetPath);
        }

        //Move metadata files
        for (String ext : list) {
            /* This does not work as additional sidecar files usually include the full file name
               and the sidecar extension (e.g. filename.arw.xmp and not only filename.xmp).
            String tmpFileBase = getFileBase(photoFile.getFilePath().getFileName().toString());
            */
            String tmpFileBase = photoFile.getFilePath().getFileName().toString();
            if (fileName != null && tmpFileBase != null) {
                File movableFile = new File(photoFile.getFilePath().getParent().toString() + File.separator
                        + tmpFileBase + "." + ext);
                if (movableFile.exists()) {
                    targetPath = Paths.get(targetParent + File.separator + fileName + "." + ext);
                    if (this.write) {
                        LOGGER.info(this.actionName + "-ing meta file to " + targetPath);
                        if (this.moveInsteadCopy) {
                            Files.move((movableFile.toPath()), targetPath);
                        } else {
                            Files.copy((movableFile.toPath()), targetPath);
                        }
                    } else {
                        LOGGER.info("Would " + this.actionName + " meta file to " + targetPath);
                    }
                }
            } else {
                LOGGER.info("Filebase of " + photoFile.getFilePath().getFileName().toString()
                        + " could not be determined. Skipping...");
            }
        }
    }
}
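The example above uses the two-argument Files.copy(Path, Path), which throws FileAlreadyExistsException if the target exists and does not carry over file timestamps. When sorting photos it can be useful to keep the original last-modified time; the following is a minimal sketch using StandardCopyOption.COPY_ATTRIBUTES, with file names that are illustrative assumptions rather than values from PhotoSorter.

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;

public class CopyPhotoWithAttributes {
    public static void main(String[] args) throws IOException {
        // Illustrative paths; substitute real source and target locations.
        Path source = Paths.get("DSC00001.jpg");
        Path target = Paths.get("sorted", "2024-01-15_DSC00001.jpg");
        Files.createDirectories(target.getParent());
        // COPY_ATTRIBUTES preserves the last-modified time (and other supported attributes)
        // on the copied file; without it only the file contents are copied.
        Files.copy(source, target, StandardCopyOption.COPY_ATTRIBUTES);
    }
}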
From source file:org.elasticsearch.xpack.core.ssl.SSLConfigurationReloaderTests.java
/**
 * Test the reloading of SSLContext whose trust config is backed by PEM certificate files.
 */
public void testReloadingPEMTrustConfig() throws Exception {
    Path tempDir = createTempDir();
    Path clientCertPath = tempDir.resolve("testnode.crt");
    Path keyStorePath = tempDir.resolve("testnode.jks");
    Files.copy(getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks"),
            keyStorePath);
    // Our keystore contains two certificates it can present: one built from the RSA keypair and one from the EC keypair.
    // EC is used since the keyManager presents the first one in alias alphabetical order (and testnode_ec comes before testnode_rsa).
    Files.copy(getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_ec.crt"),
            clientCertPath);
    Settings settings = Settings.builder()
            .putList("xpack.ssl.certificate_authorities", clientCertPath.toString())
            .put("path.home", createTempDir()).build();
    Environment env = randomBoolean() ? null : TestEnvironment.newEnvironment(settings);
    // Create the MockWebServer once for both pre and post checks
    try (MockWebServer server = getSslServer(keyStorePath, "testnode")) {
        final Consumer<SSLContext> trustMaterialPreChecks = (context) -> {
            try (CloseableHttpClient client = HttpClients.custom().setSSLContext(context).build()) {
                privilegedConnect(
                        () -> client.execute(new HttpGet("https://localhost:" + server.getPort())).close());
            } catch (Exception e) {
                throw new RuntimeException("Exception connecting to the mock server", e);
            }
        };
        final Runnable modifier = () -> {
            try {
                Path updatedCert = tempDir.resolve("updated.crt");
                Files.copy(getDataPath(
                        "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_updated.crt"),
                        updatedCert, StandardCopyOption.REPLACE_EXISTING);
                atomicMoveIfPossible(updatedCert, clientCertPath);
            } catch (Exception e) {
                throw new RuntimeException("failed to modify file", e);
            }
        };
        // Client doesn't trust the Server certificate anymore so SSLHandshake should fail
        final Consumer<SSLContext> trustMaterialPostChecks = (updatedContext) -> {
            try (CloseableHttpClient client = HttpClients.custom().setSSLContext(updatedContext).build()) {
                SSLHandshakeException sslException = expectThrows(SSLHandshakeException.class,
                        () -> privilegedConnect(() -> client
                                .execute(new HttpGet("https://localhost:" + server.getPort())).close()));
                assertThat(sslException.getCause().getMessage(), containsString("PKIX path building failed"));
            } catch (Exception e) {
                throw new RuntimeException("Error closing CloseableHttpClient", e);
            }
        };
        validateSSLConfigurationIsReloaded(settings, env, trustMaterialPreChecks, modifier,
                trustMaterialPostChecks);
    }
}