List of usage examples for java.nio.file Files deleteIfExists
public static boolean deleteIfExists(Path path) throws IOException
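Before the project-specific examples, a minimal, self-contained sketch (not taken from any of the source files below; the path name is illustrative) shows the method's basic contract: it returns true when the file was removed, false when nothing existed at the path, and it throws IOException subclasses such as DirectoryNotEmptyException for a non-empty directory.

import java.io.IOException;
import java.nio.file.DirectoryNotEmptyException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;

public class DeleteIfExistsExample {
    public static void main(String[] args) {
        // Illustrative path; replace with a real file in your project.
        Path path = Paths.get("build/output/report.tmp");
        try {
            boolean deleted = Files.deleteIfExists(path);
            System.out.println(deleted ? "Deleted " + path : "Nothing to delete at " + path);
        } catch (DirectoryNotEmptyException e) {
            // Thrown when the path is a directory that still has entries.
            System.err.println("Directory not empty: " + path);
        } catch (IOException e) {
            // Other I/O failures, e.g. missing permissions.
            System.err.println("Could not delete " + path + ": " + e.getMessage());
        }
    }
}

Unlike Files.delete, the boolean return value makes a "delete if present" step idempotent without a separate existence check.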
From source file:org.onehippo.cms7.essentials.dashboard.instruction.FileInstruction.java
private InstructionStatus delete() {
    try {
        Path path = new File(target).toPath();
        final boolean deleted = Files.deleteIfExists(path);
        if (deleted) {
            sendEvents();
            log.debug("Deleted file {}", target);
            return InstructionStatus.SUCCESS;
        } else {
            log.debug("File not deleted {}", target);
            eventBus.post(new InstructionEvent(this));
            return InstructionStatus.SKIPPED;
        }
    } catch (IOException e) {
        log.error("Error deleting file", e);
    }
    eventBus.post(new InstructionEvent(this));
    return InstructionStatus.FAILED;
}
From source file:rapture.repo.file.FileDataStore.java
private void createSymLink(String k, String filePath) {
    Path originalFile = Paths.get(filePath);
    File fromFile = FileRepoUtils.makeGenericFile(parentDir, convertKeyToPathWithExtension(k));
    Path fromFilePath = Paths.get(fromFile.getAbsolutePath());
    try {
        Files.deleteIfExists(fromFilePath);
        FileUtils.forceMkdir(fromFilePath.getParent().toFile());
        Files.createSymbolicLink(fromFilePath, originalFile);
    } catch (IOException e) {
        throw RaptureExceptionFactory.create(HttpURLConnection.HTTP_INTERNAL_ERROR,
                "Error creating sym link", e);
    }
}
From source file:com.arpnetworking.metrics.impl.TsdQueryLogSinkTest.java
@Test
public void testEmptySerialization() throws IOException, InterruptedException {
    final File actualFile = new File("./target/TsdQueryLogSinkTest/testEmptySerialization-Query.log");
    Files.deleteIfExists(actualFile.toPath());
    final Sink sink = new TsdQueryLogSink.Builder()
            .setPath("./target/TsdQueryLogSinkTest")
            .setName("testEmptySerialization-Query")
            .setImmediateFlush(Boolean.TRUE)
            .build();
    sink.record(ANNOTATIONS, TEST_EMPTY_SERIALIZATION_TIMERS, TEST_EMPTY_SERIALIZATION_COUNTERS,
            TEST_EMPTY_SERIALIZATION_GAUGES);
    // TODO(vkoskela): Add protected option to disable async [MAI-181].
    Thread.sleep(100);
    final String actualOriginalJson = fileToString(actualFile);
    assertMatchesJsonSchema(actualOriginalJson);
    final String actualComparableJson = actualOriginalJson
            .replaceAll("\"time\":\"[^\"]*\"", "\"time\":\"<TIME>\"")
            .replaceAll("\"host\":\"[^\"]*\"", "\"host\":\"<HOST>\"")
            .replaceAll("\"processId\":\"[^\"]*\"", "\"processId\":\"<PROCESSID>\"")
            .replaceAll("\"threadId\":\"[^\"]*\"", "\"threadId\":\"<THREADID>\"")
            .replaceAll("\"id\":\"[^\"]*\"", "\"id\":\"<ID>\"");
    final JsonNode actual = OBJECT_MAPPER.readTree(actualComparableJson);
    final JsonNode expected = OBJECT_MAPPER.readTree(EXPECTED_EMPTY_METRICS_JSON);
    Assert.assertEquals("expectedJson=" + OBJECT_MAPPER.writeValueAsString(expected)
            + " vs actualJson=" + OBJECT_MAPPER.writeValueAsString(actual), expected, actual);
}
From source file:org.opencastproject.staticfiles.impl.StaticFileServiceImpl.java
@Override
public void deleteFile(String uuid) throws NotFoundException, IOException {
    final String org = securityService.getOrganization().getId();
    Path file = getFile(org, uuid);
    Files.deleteIfExists(file);
}
From source file:org.craftercms.studio.impl.v1.deployment.EnvironmentStoreGitBranchDeployer.java
private boolean createEnvironmentStoreRepository(String site) {
    boolean success = true;
    Path siteEnvironmentStoreRepoPath = Paths.get(environmentsStoreRootPath, site, environment);
    try {
        Files.deleteIfExists(siteEnvironmentStoreRepoPath);
        siteEnvironmentStoreRepoPath = Paths.get(environmentsStoreRootPath, site, environment, ".git");
        Repository repository = FileRepositoryBuilder.create(siteEnvironmentStoreRepoPath.toFile());
        repository.create();
        Git git = new Git(repository);
        git.add().addFilepattern(".").call();
        RevCommit commit = git.commit().setMessage("initial content").setAllowEmpty(true).call();
    } catch (IOException | GitAPIException e) {
        logger.error("Error while creating repository for site " + site, e);
        success = false;
    }
    return success;
}
From source file:io.sledge.core.impl.installer.SledgePackageConfigurer.java
private void deleteZipFile(Path directory, String packageName) throws IOException {
    Path currentDir = directory.resolve(packageName);
    Files.deleteIfExists(currentDir);
}
From source file:org.egov.infra.filestore.service.impl.LocalDiskFileStoreService.java
@Override
public void delete(String fileStoreId, String moduleName) {
    Path fileDirPath = this.getFileDirectoryPath(moduleName);
    if (fileDirPath.toFile().exists()) {
        Path filePath = this.getFilePath(fileDirPath, fileStoreId);
        try {
            Files.deleteIfExists(filePath);
        } catch (IOException e) {
            throw new ApplicationRuntimeException(
                    String.format("Could not remove document %s", filePath.getFileName()), e);
        }
    }
}
From source file:org.digidoc4j.main.DigiDoc4JTest.java
@Test
public void createsContainerWithSignatureProfileIsTMForDDoc() throws Exception {
    String fileName = "test1.bdoc";
    Files.deleteIfExists(Paths.get(fileName));
    String[] params = new String[] { "-in", fileName, "-type", "DDOC", "-add", "testFiles/test.txt",
            "text/plain", "-pkcs12", "testFiles/signout.p12", "test", "-profile", "LT_TM" };
    callMainWithoutSystemExit(params);
    Container container = ContainerOpener.open(fileName);
    assertEquals(SignatureProfile.LT_TM, container.getSignature(0).getProfile());
}
From source file:io.fabric8.vertx.maven.plugin.utils.PackageHelper.java
/**
 * This method will perform the service provider combination by combining contents of the same SPI
 * across the dependencies.
 *
 * @param project       - the Maven project (must not be {@code null})
 * @param backupDir     - the {@link File} path that can be used to perform backups
 * @param targetJarFile - the vertx fat jar file where the SPI files will be updated - typically remove and add
 * @throws MojoExecutionException - any error that might occur while doing relocation
 */
public void combineServiceProviders(MavenProject project, Path backupDir, File targetJarFile)
        throws MojoExecutionException {
    try {
        Path vertxJarOriginalFile = FileUtils.backup(targetJarFile, backupDir.toFile());
        JavaArchive targetJar = ShrinkWrap.createFromZipFile(JavaArchive.class, vertxJarOriginalFile.toFile());

        List<JavaArchive> archives = Stream.concat(compileAndRuntimeDeps.stream(), transitiveDeps.stream())
                .filter(Optional::isPresent)
                .map(f -> ShrinkWrap.createFromZipFile(JavaArchive.class, f.get()))
                .collect(Collectors.toList());

        JavaArchive serviceCombinedArchive = new ServiceCombinerUtil().withLog(log)
                .withProject(project.getArtifactId(), project.getVersion())
                .withClassesDirectory(new File(project.getBuild().getOutputDirectory()))
                .combine(archives);

        serviceCombinedArchive.get("/META-INF/services").getChildren().forEach(n -> {
            Asset asset = n.getAsset();
            ArchivePath archivePath = n.getPath();
            if (log.isDebugEnabled()) {
                try {
                    log.debug("Asset Content: " + FileUtils.read(asset.openStream()));
                    log.debug("Adding asset:" + n.getPath());
                } catch (IOException e) {
                    // Ignore it.
                }
            }
            targetJar.delete(archivePath);
            targetJar.add(asset, archivePath);
        });

        // Delete old vertx jar file.
        Files.deleteIfExists(Paths.get(targetJarFile.toURI()));

        // Create new fat jar with merged SPI.
        ZipExporter zipExporter = targetJar.as(ZipExporter.class);
        try (FileOutputStream jarOut = new FileOutputStream(targetJarFile)) {
            zipExporter.exportTo(jarOut);
        }
        org.apache.commons.io.FileUtils.deleteQuietly(vertxJarOriginalFile.toFile());
    } catch (Exception e) {
        throw new MojoExecutionException("Unable to combine SPI files for " + project.getArtifactId(), e);
    }
}
From source file:com.mweagle.tereus.commands.evaluation.common.LambdaUtils.java
public String createFunction(final String logicalResourceName, final String lambdaSourceRoot,
        final String bucketName, final String s3KeyName)
        throws IOException, InterruptedException, NoSuchAlgorithmException {
    // Build it, zip it, and upload it. Return:
    /*
    {
        "S3Bucket" : String,
        "S3Key" : String,
        "S3ObjectVersion" : "TODO - not yet implemented"
    }
    */
    this.logger.info("Looking for source {} relative to {}", lambdaSourceRoot, templateRoot);
    final String lambdaDir = this.templateRoot.resolve(lambdaSourceRoot).normalize().toAbsolutePath().toString();
    final Path lambdaPath = Paths.get(lambdaDir);

    // Build command?
    final Optional<String> buildCommand = lambdaBuildCommand(lambdaDir);
    if (buildCommand.isPresent()) {
        this.logger.info("{} Lambda source: {}", buildCommand.get(), lambdaDir);
        try {
            Runtime rt = Runtime.getRuntime();
            Process pr = rt.exec(buildCommand.get(), null, new File(lambdaDir));
            this.logger.info("Waiting for `{}` to complete", buildCommand.get());
            final int buildExitCode = pr.waitFor();
            if (0 != buildExitCode) {
                logger.error("Failed to `{}`: {}", buildCommand.get(), buildExitCode);
                throw new IOException(buildCommand.get() + " failed for: " + lambdaDir);
            }
        } catch (Exception ex) {
            final String processPath = System.getenv("PATH");
            this.logger.error("`{}` failed. Confirm that PATH contains the required executable.",
                    buildCommand.get());
            this.logger.error("$PATH: {}", processPath);
            throw ex;
        }
    } else {
        this.logger.debug("No additional Lambda build file detected");
    }

    Path lambdaSource = null;
    boolean cleanupLambdaSource = false;
    MessageDigest md = MessageDigest.getInstance("SHA-256");
    try {
        final BiPredicate<Path, java.nio.file.attribute.BasicFileAttributes> matcher = (path, fileAttrs) -> {
            final String fileExtension = com.google.common.io.Files.getFileExtension(path.toString());
            return (fileExtension.toLowerCase().compareTo("jar") == 0);
        };

        // Find/compress the Lambda source.
        // If there is a JAR file in the source root, then use that for the upload.
        List<Path> jarFiles = Files.find(lambdaPath, 1, matcher).collect(Collectors.toList());
        if (!jarFiles.isEmpty()) {
            Preconditions.checkArgument(jarFiles.size() == 1,
                    "More than 1 JAR file detected in directory: {}", lambdaDir);
            lambdaSource = jarFiles.get(0);
            md.update(Files.readAllBytes(lambdaSource));
        } else {
            lambdaSource = Files.createTempFile("lambda-", ".zip");
            this.logger.info("Zipping lambda source code: {}", lambdaSource.toString());
            final FileOutputStream os = new FileOutputStream(lambdaSource.toFile());
            final ZipOutputStream zipOS = new ZipOutputStream(os);
            createStableZip(zipOS, lambdaPath, lambdaPath, md);
            zipOS.close();
            this.logger.info("Compressed filesize: {} bytes", lambdaSource.toFile().length());
            cleanupLambdaSource = true;
        }

        // Upload it.
        final String sourceHash = Hex.encodeHexString(md.digest());
        this.logger.info("Lambda source hash: {}", sourceHash);
        if (!s3KeyName.isEmpty()) {
            this.logger.warn(
                    "User supplied S3 keyname overrides content-addressable name. Automatic updates disabled.");
        }
        final String keyName = !s3KeyName.isEmpty() ? s3KeyName
                : String.format("%s-lambda-%s.%s", logicalResourceName, sourceHash,
                        com.google.common.io.Files.getFileExtension(lambdaSource.toString()));

        JsonObject jsonObject = new JsonObject();
        jsonObject.add("S3Bucket", new JsonPrimitive(bucketName));
        jsonObject.add("S3Key", new JsonPrimitive(keyName));

        // Upload it to s3...
        final FileInputStream fis = new FileInputStream(lambdaSource.toFile());
        try (S3Resource resource = new S3Resource(bucketName, keyName, fis,
                Optional.of(lambdaSource.toFile().length()))) {
            this.logger.info("Source payload S3 URL: {}", resource.getS3Path());
            if (resource.exists()) {
                this.logger.info("Source {} already uploaded to S3", keyName);
            } else if (!this.dryRun) {
                Optional<String> result = resource.upload();
                this.logger.info("Uploaded Lambda source to: {}", result.get());
                resource.setReleased(true);
            } else {
                this.logger.info("Dry run requested (-n/--noop). Lambda payload upload bypassed.");
            }
        }
        final Gson serializer = new GsonBuilder().disableHtmlEscaping().enableComplexMapKeySerialization().create();
        return serializer.toJson(jsonObject);
    } finally {
        if (cleanupLambdaSource) {
            this.logger.debug("Deleting temporary file: {}", lambdaSource.toString());
            Files.deleteIfExists(lambdaSource);
        }
    }
}