List of usage examples for java.nio.file.Files.delete
public static void delete(Path path) throws IOException
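Before the project examples below, here is a minimal, self-contained sketch of the basic call. The path is made up for illustration. Files.delete throws NoSuchFileException if the target is missing and DirectoryNotEmptyException if it is a non-empty directory, whereas Files.deleteIfExists simply returns false when there is nothing to delete.

import java.io.IOException;
import java.nio.file.DirectoryNotEmptyException;
import java.nio.file.Files;
import java.nio.file.NoSuchFileException;
import java.nio.file.Path;
import java.nio.file.Paths;

public class DeleteExample {
    public static void main(String[] args) {
        Path file = Paths.get("build", "tmp", "report.txt"); // hypothetical path
        try {
            Files.delete(file); // fails if the file does not exist
        } catch (NoSuchFileException e) {
            System.err.println("No such file: " + e.getFile());
        } catch (DirectoryNotEmptyException e) {
            System.err.println("Directory not empty: " + file);
        } catch (IOException e) {
            System.err.println("Delete failed: " + e);
        }

        try {
            boolean deleted = Files.deleteIfExists(file); // quieter variant: false if already gone
            System.out.println("deleteIfExists returned " + deleted);
        } catch (IOException e) {
            System.err.println("Delete failed: " + e);
        }
    }
}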
From source file:misc.FileHandler.java
/**
 * Returns a temporary file path that is on the same file store as the given
 * file. The temporary file is created without content, if the given file's
 * file store is identical to the system's default temporary directory file
 * store.
 *
 * @param target
 *            the file which determines the file store.
 * @return the path of the temporary file or <code>null</code>, if an error
 *         occurred.
 */
public static Path getTempFile(Path target) {
    Path tempFile = null;
    boolean success = false;
    target = target.normalize();
    try {
        Path targetDirectory = target.toAbsolutePath().getParent();
        tempFile = Files.createTempFile(target.getFileName().toString(), TEMP_FILE_SUFFIX);
        if (!Files.getFileStore(tempFile).equals(Files.getFileStore(targetDirectory))) {
            // the temporary file should be in the target directory.
            Files.delete(tempFile);
            tempFile = Paths.get(targetDirectory.toString(), tempFile.getFileName().toString());
            success = true;
        } else {
            success = true;
        }
    } catch (IOException e) {
        Logger.logError(e);
    } finally {
        if (!success && (tempFile != null)) {
            try {
                Files.deleteIfExists(tempFile);
            } catch (IOException innerE) {
                Logger.logError(innerE);
            }
        }
    }
    return success ? tempFile : null;
}
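The point of keeping the temporary file on the same file store as the target is that a follow-up move is then a cheap rename rather than a copy. A minimal sketch of that write-then-move pattern, assuming a helper with the semantics of getTempFile above; the class name, target path, and content are illustrative assumptions, not part of the original source:

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;

public class SafeWrite {
    static void replaceContent(Path target, byte[] content) throws IOException {
        Path temp = FileHandler.getTempFile(target); // helper shown above (assumed available)
        if (temp == null) {
            throw new IOException("Could not allocate a temporary file for " + target);
        }
        try {
            Files.write(temp, content);
            // Same file store, so the move is a rename; ATOMIC_MOVE can be added where supported.
            Files.move(temp, target, StandardCopyOption.REPLACE_EXISTING);
        } catch (IOException e) {
            Files.deleteIfExists(temp); // do not leave the partial file behind
            throw e;
        }
    }
}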
From source file:org.apache.openaz.xacml.std.pap.StdEngine.java
@Override
public void removeGroup(PDPGroup group, PDPGroup newGroup) throws PAPException, NullPointerException {
    if (group == null) {
        throw new NullPointerException();
    }
    //
    // Does this group exist?
    //
    if (!this.groups.contains(group)) {
        logger.error("This group doesn't exist.");
        throw new PAPException("The group '" + group.getId() + "' does not exist");
    }
    //
    // Is it the default group?
    //
    if (group.isDefaultGroup()) {
        throw new PAPException("You cannot delete the default group.");
    }
    Set<PDP> pdps = group.getPdps();
    //
    // Are there PDPs? If so, then we need a target group
    //
    if (!pdps.isEmpty() && newGroup == null) {
        throw new NullPointerException(
                "Group targeted for deletion has PDPs, you must provide a new group for them.");
    }
    //
    // Move the PDPs
    //
    if (!pdps.isEmpty()) {
        if (!(newGroup instanceof StdPDPGroup)) {
            throw new PAPException("Unexpected class for newGroup: " + newGroup.getClass().getCanonicalName());
        }
        // The movePDP function will modify the set of PDPs in the group.
        // To avoid concurrent modification exceptions we need to duplicate the list before calling that
        // function.
        List<PDP> pdpList = new ArrayList<PDP>();
        for (PDP pdp : pdps) {
            pdpList.add(pdp);
        }
        // Now we can iterate over the copied list without risking a ConcurrentModificationException.
        for (PDP pdp : pdpList) {
            this.movePDP(pdp, newGroup);
        }
    }
    //
    // Remove the directory for the group
    //
    String id = group.getId();
    Path groupPath = Paths.get(this.repository.toString(), id);
    //
    // If it exists already
    //
    if (!Files.exists(groupPath)) {
        logger.warn("removeGroup " + id + " directory does not exist: " + groupPath.toString());
    } else {
        try {
            Files.walkFileTree(groupPath, new SimpleFileVisitor<Path>() {
                @Override
                public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
                    Files.delete(file);
                    return super.visitFile(file, attrs);
                }
            });
            //
            // delete the directory
            //
            Files.delete(groupPath);
        } catch (IOException e) {
            logger.error("Failed to delete " + groupPath + ": " + e);
            throw new PAPException("Failed to delete " + id);
        }
    }
    // remove the group from the set of all groups
    groups.remove(group);
    //
    // Save changes
    //
    changed();
    this.doSave();
}
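The visitor above only deletes regular files, so the final Files.delete(groupPath) relies on the group directory containing no subdirectories. For a fully recursive delete, the usual walkFileTree idiom also overrides postVisitDirectory. A minimal sketch of that idiom (the class and method names are made up):

import java.io.IOException;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;

final class RecursiveDelete {
    static void deleteTree(Path root) throws IOException {
        Files.walkFileTree(root, new SimpleFileVisitor<Path>() {
            @Override
            public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
                Files.delete(file);
                return FileVisitResult.CONTINUE;
            }

            @Override
            public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException {
                if (exc != null) {
                    throw exc; // something below this directory could not be visited
                }
                Files.delete(dir); // children are already gone; root is deleted last
                return FileVisitResult.CONTINUE;
            }
        });
    }
}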
From source file:de.blizzy.backup.backup.BackupRun.java
private int backupFileContents(final IFile file, final File backupFile, String backupFilePath)
        throws IOException {
    FileUtils.forceMkdir(backupFile.getParentFile());

    final MessageDigest[] digest = new MessageDigest[1];
    IOutputStreamProvider outputStreamProvider = new IOutputStreamProvider() {
        @Override
        public OutputStream getOutputStream() throws IOException {
            try {
                digest[0] = MessageDigest.getInstance("SHA-256"); //$NON-NLS-1$
                OutputStream fileOut = new BufferedOutputStream(new FileOutputStream(backupFile));
                OutputStream interceptOut = fileOut;
                for (IStorageInterceptor interceptor : storageInterceptors) {
                    interceptOut = interceptor.interceptOutputStream(interceptOut, file.getLength());
                }
                OutputStream compressOut = Compression.BZIP2.getOutputStream(interceptOut);
                OutputStream digestOut = new DigestOutputStream(compressOut, digest[0]);
                return digestOut;
            } catch (GeneralSecurityException e) {
                throw new RuntimeException(e);
            }
        }
    };

    boolean fileCopied = false;
    try {
        file.copy(outputStreamProvider);
        fileCopied = true;
    } finally {
        if (!fileCopied) {
            try {
                Files.delete(backupFile.toPath());
            } catch (IOException e) {
                BackupPlugin.getDefault().logError("error while deleting file: " + //$NON-NLS-1$
                        backupFile.getAbsolutePath(), e);
                fireBackupErrorOccurred(e, BackupErrorEvent.Severity.WARNING);
            }
            removeFoldersIfEmpty(backupFile.getParentFile());
        }
    }

    String checksum = toHexString(digest[0]);
    database.factory().insertInto(Tables.FILES)
            .set(Tables.FILES.BACKUP_PATH, backupFilePath)
            .set(Tables.FILES.CHECKSUM, checksum)
            .set(Tables.FILES.LENGTH, Long.valueOf(file.getLength()))
            .set(Tables.FILES.COMPRESSION, Byte.valueOf((byte) Compression.BZIP2.getValue()))
            .execute();
    return database.factory().lastID().intValue();
}
From source file:org.codice.ddf.security.migratable.impl.SecurityMigratableTest.java
/** Verifies that when no policy file exists, the version upgrade import succeeds. */
@Test
public void testDoVersionUpgradeImportWhenNoPolicyFileExists() throws IOException {
    // Setup export
    Path exportDir = tempDir.getRoot().toPath().toRealPath();

    // Remove policy files
    for (final String file : POLICY_FILES) {
        Path policy = ddfHome.resolve(SECURITY_POLICIES_DIR).resolve(file);
        Files.delete(policy);
    }

    // Perform export
    doExport(exportDir);

    // Setup import
    setup(DDF_IMPORTED_HOME, DDF_IMPORTED_TAG, IMPORTING_PRODUCT_VERSION);
    SecurityMigratable iSecurityMigratable = spy(new SecurityMigratable());
    when(iSecurityMigratable.getVersion()).thenReturn("3.0");
    List<Migratable> iMigratables = Arrays.asList(iSecurityMigratable);
    ConfigurationMigrationManager iConfigurationMigrationManager =
            new ConfigurationMigrationManager(iMigratables, systemService);
    MigrationReport importReport = iConfigurationMigrationManager.doImport(exportDir, this::print);

    // Verify import
    verify(iSecurityMigratable).doVersionUpgradeImport(any(ImportMigrationContext.class));
    assertThat("The import report has errors.", importReport.hasErrors(), is(false));
    assertThat("The import report has warnings.", importReport.hasWarnings(), is(false));
    assertThat("Import was not successful.", importReport.wasSuccessful(), is(true));
    verifyPdpFilesImported();
    verifyCrlImported();
}
From source file:org.niord.core.batch.BatchService.java
/**
 * Called every minute to monitor the batch job "[jobName]/in" folders. If a file has been
 * placed in one of these folders, it will cause the "jobName" batch job to be started.
 */
@Schedule(persistent = false, second = "48", minute = "*/1", hour = "*/1")
protected void monitorBatchJobInFolderInitiation() {

    // Resolve the list of batch job "in" folders
    List<Path> executionFolders = getBatchJobSubFolders("in");

    // Check for new batch-initiating files in each folder
    for (Path dir : executionFolders) {
        for (Path file : getDirectoryFiles(dir)) {
            String jobName = file.getParent().getParent().getFileName().toString();
            log.info("Found file " + file.getFileName() + " for batch job " + jobName);

            try {
                startBatchJobWithDataFile(jobName, file, new HashMap<>());
            } catch (IOException e) {
                log.error("Failed starting batch job " + jobName + " with file " + file.getFileName());
            } finally {
                // Delete the file
                // Note to self: Move to error folder?
                try {
                    Files.delete(file);
                } catch (IOException ignored) {
                }
            }
        }
    }
}
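The in-code note about moving failed files to an error folder instead of deleting them could look roughly like the sketch below. The "[jobName]/error" sibling folder and the overwrite policy are assumptions for illustration, not part of the Niord source:

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;

final class BatchFileCleanup {
    /** Hypothetical variant: park a failed batch file in "[jobName]/error" instead of deleting it. */
    static void moveToErrorFolder(Path file) throws IOException {
        Path errorDir = file.getParent().getParent().resolve("error");
        Files.createDirectories(errorDir); // no-op if the folder already exists
        Files.move(file, errorDir.resolve(file.getFileName()), StandardCopyOption.REPLACE_EXISTING);
    }
}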
From source file:org.opencb.opencga.server.rest.FileWSServer.java
@POST @Path("/upload") @Consumes(MediaType.MULTIPART_FORM_DATA) @ApiOperation(httpMethod = "POST", position = 4, value = "Resource to upload a file by chunks", response = File.class) public Response upload(@FormDataParam("chunk_content") byte[] chunkBytes, @FormDataParam("chunk_content") FormDataContentDisposition contentDisposition, @FormDataParam("file") InputStream fileInputStream, @FormDataParam("file") FormDataContentDisposition fileMetaData, @DefaultValue("") @FormDataParam("chunk_id") String chunk_id, @DefaultValue("false") @FormDataParam("last_chunk") String last_chunk, @DefaultValue("") @FormDataParam("chunk_total") String chunk_total, @DefaultValue("") @FormDataParam("chunk_size") String chunk_size, @DefaultValue("") @FormDataParam("chunk_hash") String chunkHash, @DefaultValue("false") @FormDataParam("resume_upload") String resume_upload, @ApiParam(value = "filename", required = false) @FormDataParam("filename") String filename, @ApiParam(value = "fileFormat", required = true) @DefaultValue("") @FormDataParam("fileFormat") String fileFormat, @ApiParam(value = "bioformat", required = true) @DefaultValue("") @FormDataParam("bioformat") String bioformat, // @ApiParam(value = "userId", required = true) @DefaultValue("") @FormDataParam("userId") String userId, // @ApiParam(defaultValue = "projectId", required = true) @DefaultValue("") @FormDataParam("projectId") String projectId, @ApiParam(value = "studyId", required = true) @FormDataParam("studyId") String studyIdStr, @ApiParam(value = "Path within catalog where the file will be located (default: root folder)", required = false) @DefaultValue(".") @FormDataParam("relativeFilePath") String relativeFilePath, @ApiParam(value = "description", required = false) @DefaultValue("") @FormDataParam("description") String description, @ApiParam(value = "Create the parent directories if they do not exist", required = false) @DefaultValue("true") @FormDataParam("parents") boolean parents) { long t = System.currentTimeMillis(); if (relativeFilePath.endsWith("/")) { relativeFilePath = relativeFilePath.substring(0, relativeFilePath.length() - 1); }//from w w w . jav a 2s . 
c o m if (relativeFilePath.startsWith("/")) { return createErrorResponse(new CatalogException("The path cannot be absolute")); } java.nio.file.Path filePath = null; final long studyId; try { studyId = catalogManager.getStudyId(studyIdStr, sessionId); } catch (Exception e) { return createErrorResponse(e); } try { filePath = Paths.get(catalogManager.getFileUri(studyId, relativeFilePath)); System.out.println(filePath); } catch (CatalogIOException e) { System.out.println("catalogManager.getFilePath"); e.printStackTrace(); } catch (CatalogException e) { e.printStackTrace(); } if (chunkBytes != null) { java.nio.file.Path completedFilePath = filePath.getParent().resolve("_" + filename); java.nio.file.Path folderPath = filePath.getParent().resolve("__" + filename); logger.info(relativeFilePath + ""); logger.info(folderPath + ""); logger.info(filePath + ""); boolean resume = Boolean.parseBoolean(resume_upload); try { logger.info("---resume is: " + resume); if (resume) { logger.info("Resume ms :" + (System.currentTimeMillis() - t)); return createOkResponse(getResumeFileJSON(folderPath)); } int chunkId = Integer.parseInt(chunk_id); int chunkSize = Integer.parseInt(chunk_size); boolean lastChunk = Boolean.parseBoolean(last_chunk); logger.info("---saving chunk: " + chunkId); logger.info("lastChunk: " + lastChunk); // WRITE CHUNK TYPE_FILE if (!Files.exists(folderPath)) { logger.info("createDirectory(): " + folderPath); Files.createDirectory(folderPath); } logger.info("check dir " + Files.exists(folderPath)); // String hash = StringUtils.sha1(new String(chunkBytes)); // logger.info("bytesHash: " + hash); // logger.info("chunkHash: " + chunkHash); // hash = chunkHash; if (chunkBytes.length == chunkSize) { Files.write(folderPath.resolve(chunkId + "_" + chunkBytes.length + "_partial"), chunkBytes); } else { String errorMessage = "Chunk content size (" + chunkBytes.length + ") " + "!= chunk_size (" + chunk_size + ")."; logger.error(errorMessage); return createErrorResponse(new IOException(errorMessage)); } if (lastChunk) { logger.info("lastChunk is true..."); Files.deleteIfExists(completedFilePath); Files.createFile(completedFilePath); List<java.nio.file.Path> chunks = getSortedChunkList(folderPath); logger.info("----ordered chunks length: " + chunks.size()); for (java.nio.file.Path partPath : chunks) { logger.info(partPath.getFileName().toString()); Files.write(completedFilePath, Files.readAllBytes(partPath), StandardOpenOption.APPEND); } IOUtils.deleteDirectory(folderPath); try { QueryResult<File> queryResult = catalogManager.createFile(studyId, File.Format.valueOf(fileFormat.toUpperCase()), File.Bioformat.valueOf(bioformat.toUpperCase()), relativeFilePath, completedFilePath.toUri(), description, parents, sessionId); File file = new FileMetadataReader(catalogManager).setMetadataInformation( queryResult.first(), null, new QueryOptions(queryOptions), sessionId, false); queryResult.setResult(Collections.singletonList(file)); return createOkResponse(queryResult); } catch (Exception e) { logger.error(e.toString()); return createErrorResponse(e); } } } catch (IOException e) { System.out.println("e = " + e); // TODO Auto-generated catch block e.printStackTrace(); } logger.info("chunk saved ms :" + (System.currentTimeMillis() - t)); return createOkResponse("ok"); } else if (fileInputStream != null) { logger.info("filePath: {}", filePath.toString()); // We obtain the basic studyPath where we will upload the file temporarily java.nio.file.Path studyPath = null; try { studyPath = 
Paths.get(catalogManager.getStudyUri(studyId)); } catch (CatalogException e) { e.printStackTrace(); return createErrorResponse("Upload file", e.getMessage()); } if (filename == null) { filename = fileMetaData.getFileName(); } java.nio.file.Path tempFilePath = studyPath.resolve("tmp_" + filename).resolve(filename); logger.info("tempFilePath: {}", tempFilePath.toString()); logger.info("tempParent: {}", tempFilePath.getParent().toString()); // Create the temporal directory and upload the file try { if (!Files.exists(tempFilePath.getParent())) { logger.info("createDirectory(): " + tempFilePath.getParent()); Files.createDirectory(tempFilePath.getParent()); } logger.info("check dir " + Files.exists(tempFilePath.getParent())); // Start uploading the file to the temporal directory int read; byte[] bytes = new byte[1024]; // Upload the file to a temporary folder OutputStream out = new FileOutputStream(new java.io.File(tempFilePath.toString())); while ((read = fileInputStream.read(bytes)) != -1) { out.write(bytes, 0, read); } out.flush(); out.close(); } catch (IOException e) { e.printStackTrace(); } // Register the file in catalog try { String destinationPath; // Check if the relativeFilePath is not the root folder if (relativeFilePath.length() > 1 && !relativeFilePath.equals("./")) { try { // Create parents directory if necessary catalogManager.createFolder(studyId, Paths.get(relativeFilePath), parents, null, sessionId); } catch (CatalogException e) { logger.debug("The folder {} already exists", relativeFilePath); } destinationPath = Paths.get(relativeFilePath).resolve(filename).toString(); } else { destinationPath = filename; } logger.debug("Relative path: {}", relativeFilePath); logger.debug("Destination path: {}", destinationPath); logger.debug("File name {}", filename); // Register the file and move it to the proper directory QueryResult<File> queryResult = catalogManager.createFile(studyId, File.Format.valueOf(fileFormat.toUpperCase()), File.Bioformat.valueOf(bioformat.toUpperCase()), destinationPath, tempFilePath.toUri(), description, parents, sessionId); File file = new FileMetadataReader(catalogManager).setMetadataInformation(queryResult.first(), null, new QueryOptions(queryOptions), sessionId, false); queryResult.setResult(Collections.singletonList(file)); // Remove the temporal directory Files.delete(tempFilePath.getParent()); return createOkResponse(queryResult); } catch (CatalogException e) { e.printStackTrace(); return createErrorResponse("Upload file", e.getMessage()); } catch (IOException e) { e.printStackTrace(); return createErrorResponse("Upload file", e.getMessage()); } } else { return createErrorResponse("Upload file", "No file or chunk found"); } }
From source file:org.apache.geode.management.internal.cli.commands.QueueCommandsDUnitTest.java
@Override
protected final void preTearDownCliCommandTestBase() throws Exception {
    for (String path : this.filesToBeDeleted) {
        try {
            final File fileToDelete = new File(path);
            if (fileToDelete.isDirectory()) {
                FileUtils.deleteDirectory(fileToDelete);
            } else {
                Files.delete(fileToDelete.toPath());
            }
            if (path.endsWith(".jar")) {
                executeCommand("undeploy --jar=" + fileToDelete.getName());
            }
        } catch (IOException e) {
            getLogWriter().error("Unable to delete file", e);
        }
    }
    this.filesToBeDeleted.clear();
}
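The teardown above mixes commons-io (FileUtils.deleteDirectory) with NIO. A stream-based, NIO-only equivalent for the directory case is sketched below (Java 8+); it is an alternative idiom, not what the Geode test actually uses, and the class name is made up:

import java.io.IOException;
import java.io.UncheckedIOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Comparator;
import java.util.stream.Stream;

final class NioDeleteDirectory {
    static void deleteRecursively(Path dir) throws IOException {
        try (Stream<Path> walk = Files.walk(dir)) {
            // Deepest paths first, so each directory is empty by the time it is deleted.
            walk.sorted(Comparator.reverseOrder()).forEach(p -> {
                try {
                    Files.delete(p);
                } catch (IOException e) {
                    throw new UncheckedIOException(e);
                }
            });
        } catch (UncheckedIOException e) {
            throw e.getCause();
        }
    }
}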
From source file:org.codice.ddf.platform.migratable.impl.PlatformMigratableTest.java
/**
 * Verify that when the keystore and truststore are located outside of the system home directory,
 * warnings are recorded on export but not on import. Both the export and import will still be
 * successful.
 */
@Test
public void testDoExportAndDoImportKeystoresOutsideOfDdfHome() throws IOException {
    // Setup export
    Path exportDir = tempDir.getRoot().toPath().toRealPath();

    // For export, move keystore and truststore into tempDir and reset system properties
    for (Map.Entry<String, Path> entry : KEYSTORES_MAP.entrySet()) {
        Path source = ddfHome.resolve(entry.getValue()).toRealPath();
        Files.move(source, tempDir.getRoot().toPath().toRealPath().resolve(entry.getValue().getFileName()));
        if ("keystore".equals(entry.getKey())) {
            System.setProperty(KEYSTORE_SYSTEM_PROP_KEY,
                    tempDir.getRoot().toPath().resolve(entry.getValue().getFileName()).toRealPath().toString());
        } else if ("truststore".equals(entry.getKey())) {
            System.setProperty(TRUSTSTORE_SYSTEM_PROP_KEY,
                    tempDir.getRoot().toPath().resolve(entry.getValue().getFileName()).toRealPath().toString());
        }
    }

    MigrationReport exportReport = doExport(exportDir);

    // Verify export
    assertThat("The export report has errors.", exportReport.hasErrors(), is(false));
    assertThat("The export report does not have warnings.", exportReport.hasWarnings(), is(true));
    assertThat("Export was not successful.", exportReport.wasSuccessful(), is(true));

    String exportedZipBaseName = String.format("%s-%s.dar", SUPPORTED_BRANDING, SUPPORTED_VERSION);
    Path exportedZip = exportDir.resolve(exportedZipBaseName).toRealPath();
    assertThat(String.format("Export zip [%s] does not exist.", exportedZip), exportedZip.toFile().exists(),
            is(true));
    assertThat(String.format("Exported zip [%s] is empty.", exportedZip), exportedZip.toFile().length(),
            greaterThan(0L));

    // Setup import
    setup(DDF_IMPORTED_HOME, DDF_IMPORTED_TAG, SUPPORTED_VERSION);

    // For import, delete the keystore and truststore since they are already in tempDir, and reset
    // system properties. Since these are outside of ddf.home, they should not be imported. A checksum
    // should be computed to verify that they are the same as the exported files.
    for (Map.Entry<String, Path> entry : KEYSTORES_MAP.entrySet()) {
        Path keystore = ddfHome.resolve(entry.getValue()).toRealPath();
        Files.delete(ddfHome.resolve(keystore));
        if ("keystore".equals(entry.getKey())) {
            System.setProperty(KEYSTORE_SYSTEM_PROP_KEY,
                    tempDir.getRoot().toPath().resolve(entry.getValue().getFileName()).toRealPath().toString());
        } else if ("truststore".equals(entry.getKey())) {
            System.setProperty(TRUSTSTORE_SYSTEM_PROP_KEY,
                    tempDir.getRoot().toPath().resolve(entry.getValue().getFileName()).toRealPath().toString());
        }
    }

    PlatformMigratable iPlatformMigratable = new PlatformMigratable();
    List<Migratable> iMigratables = Arrays.asList(iPlatformMigratable);
    ConfigurationMigrationManager iConfigurationMigrationManager =
            new ConfigurationMigrationManager(iMigratables, systemService);
    MigrationReport importReport = iConfigurationMigrationManager.doImport(exportDir, this::print);

    // Verify import
    assertThat("The import report has errors.", importReport.hasErrors(), is(false));
    assertThat("The import report does have warnings.", importReport.hasWarnings(), is(false));
    assertThat("Import was not successful.", importReport.wasSuccessful(), is(true));
    verifyRequiredSystemFilesImported();
    verifyOptionalSystemFilesImported();
    verifyWsSecurityFilesImported();
    verifyServiceWrapperImported();
}
From source file:me.ryanhamshire.griefprevention.FlatFileDataStore.java
@Override
public void deleteClaimFromSecondaryStorage(GPClaim claim) {
    try {
        Files.delete(claim.getClaimStorage().filePath);
    } catch (IOException e) {
        e.printStackTrace();
        GriefPreventionPlugin.addLogEntry(
                "Error: Unable to delete claim file \"" + claim.getClaimStorage().filePath + "\".");
    }
}
From source file:org.cryptomator.cryptofs.CryptoFileSystemImpl.java
void move(CryptoPath cleartextSource, CryptoPath cleartextTarget, CopyOption... options) throws IOException {
    if (cleartextSource.equals(cleartextTarget)) {
        return;
    }
    Path ciphertextSourceFile = cryptoPathMapper.getCiphertextFilePath(cleartextSource, CiphertextFileType.FILE);
    Path ciphertextSourceDirFile = cryptoPathMapper.getCiphertextFilePath(cleartextSource,
            CiphertextFileType.DIRECTORY);
    if (Files.exists(ciphertextSourceFile)) {
        // FILE:
        Path ciphertextTargetFile = cryptoPathMapper.getCiphertextFilePath(cleartextTarget,
                CiphertextFileType.FILE);
        Files.move(ciphertextSourceFile, ciphertextTargetFile, options);
    } else if (Files.exists(ciphertextSourceDirFile)) {
        // DIRECTORY:
        Path ciphertextTargetDirFile = cryptoPathMapper.getCiphertextFilePath(cleartextTarget,
                CiphertextFileType.DIRECTORY);
        if (!ArrayUtils.contains(options, StandardCopyOption.REPLACE_EXISTING)) {
            // try to move, don't replace:
            Files.move(ciphertextSourceDirFile, ciphertextTargetDirFile, options);
        } else if (ArrayUtils.contains(options, StandardCopyOption.ATOMIC_MOVE)) {
            // replace atomically (impossible):
            assert ArrayUtils.contains(options, StandardCopyOption.REPLACE_EXISTING);
            throw new AtomicMoveNotSupportedException(cleartextSource.toString(), cleartextTarget.toString(),
                    "Replacing directories during move requires non-atomic status checks.");
        } else {
            // move and replace (if dir is empty):
            assert ArrayUtils.contains(options, StandardCopyOption.REPLACE_EXISTING);
            assert !ArrayUtils.contains(options, StandardCopyOption.ATOMIC_MOVE);
            if (Files.exists(ciphertextTargetDirFile)) {
                Path ciphertextTargetDir = cryptoPathMapper.getCiphertextDirPath(cleartextTarget);
                try (DirectoryStream<Path> ds = Files.newDirectoryStream(ciphertextTargetDir)) {
                    if (ds.iterator().hasNext()) {
                        throw new DirectoryNotEmptyException(cleartextTarget.toString());
                    }
                }
                Files.delete(ciphertextTargetDir);
            }
            Files.move(ciphertextSourceDirFile, ciphertextTargetDirFile, options);
        }
        dirIdProvider.move(ciphertextSourceDirFile, ciphertextTargetDirFile);
    } else {
        throw new NoSuchFileException(cleartextSource.toString());
    }
}