List of usage examples for java.nio.file.Files.delete(Path)
public static void delete(Path path) throws IOException
From source file:com.clust4j.data.TestDataSet.java
/**
 * Writes the IRIS dataset to a pipe-delimited flat file and cleans the file up afterwards.
 * Uses {@link Files#deleteIfExists} in the finally block: if {@code toFlatFile} throws
 * before the file is created, a plain {@code Files.delete} would raise
 * {@code NoSuchFileException} and mask the original test failure.
 */
@Test
public void testWrite4() throws IOException {
    String path = "iris.csv";
    final File file = new File(path);
    Path ppath = FileSystems.getDefault().getPath(path);
    try {
        TestSuite.IRIS_DATASET.toFlatFile(false, file, '|');
    } finally {
        // Tolerate a missing file so cleanup never hides the real exception.
        Files.deleteIfExists(ppath);
    }
}
From source file:org.hawk.orientdb.OrientDatabase.java
private static void deleteRecursively(File f) throws IOException { if (!f.exists()) return;// w w w .j av a 2s . c om Files.walkFileTree(f.toPath(), new SimpleFileVisitor<Path>() { @Override public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { Files.delete(file); return FileVisitResult.CONTINUE; } @Override public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException { Files.delete(dir); return FileVisitResult.CONTINUE; } }); }
From source file:io.personium.core.model.impl.fs.DavCmpFsImpl.java
/** * Overwrite resources../*from w w w.j a va2 s .c o m*/ * @param contentType ContentType of the update file * @param inputStream Stream of update file * @param etag Etag * @return ResponseBuilder */ private ResponseBuilder doPutForUpdate(final String contentType, final InputStream inputStream, String etag) { // ?? long now = new Date().getTime(); // // TODO ?????????????? this.load(); // ?(???)?WebDav??????? // WebDav???????????404?? if (!this.exists()) { throw getNotFoundException().params(getUrl()); } // etag????????*?????????????? if (etag != null && !"*".equals(etag) && !matchesETag(etag)) { throw PersoniumCoreException.Dav.ETAG_NOT_MATCH; } try { // Update Content InputStream input = inputStream; if (PersoniumUnitConfig.isDavEncryptEnabled()) { // Perform encryption. DataCryptor cryptor = new DataCryptor(getCellId()); input = cryptor.encode(inputStream); } BufferedInputStream bufferedInput = new BufferedInputStream(input); File tmpFile = new File(getTempContentFilePath()); File contentFile = new File(getContentFilePath()); Files.copy(bufferedInput, tmpFile.toPath()); Files.delete(contentFile.toPath()); Files.move(tmpFile.toPath(), contentFile.toPath()); long writtenBytes = contentFile.length(); String encryptionType = DataCryptor.ENCRYPTION_TYPE_NONE; if (PersoniumUnitConfig.isDavEncryptEnabled()) { writtenBytes = ((CipherInputStream) input).getReadLengthBeforEncryption(); encryptionType = DataCryptor.ENCRYPTION_TYPE_AES; } // Update Metadata this.metaFile.setUpdated(now); this.metaFile.setContentType(contentType); this.metaFile.setContentLength(writtenBytes); this.metaFile.setEncryptionType(encryptionType); this.metaFile.save(); } catch (IOException ex) { throw PersoniumCoreException.Dav.FS_INCONSISTENCY_FOUND.reason(ex); } // response return javax.ws.rs.core.Response.ok().status(HttpStatus.SC_NO_CONTENT).header(HttpHeaders.ETAG, getEtag()); }
From source file:org.apache.openaz.xacml.admin.components.PolicyWorkspace.java
/**
 * Asks the user to confirm, then recursively deletes the given subdomain
 * directory, keeping the workspace tree widget in sync with the filesystem.
 * Deletion stops at the first failure (the walk is terminated).
 *
 * @param subdomain directory of the subdomain to remove
 */
protected void deleteSubDomain(final File subdomain) {
    String message = "Are you sure you want to delete subdomain\n" + subdomain.getName()
            + "\nThis will remove <B>ALL</B> of its subdomains and policy files.";
    ConfirmDialog dialog = ConfirmDialog.getFactory().create("Confirm SubDomain Deletion", message, "Delete",
            "Cancel");
    // HTML mode so the <B> markup in the message renders as bold.
    dialog.setContentMode(ContentMode.HTML);
    dialog.show(getUI(), new ConfirmDialog.Listener() {
        private static final long serialVersionUID = 1L;

        @Override
        public void onClose(ConfirmDialog dialog) {
            if (dialog.isConfirmed()) {
                //
                // Walk the subdomain depth-first, deleting files then their directories.
                //
                try {
                    Files.walkFileTree(Paths.get(subdomain.getAbsolutePath()), new SimpleFileVisitor<Path>() {
                        @Override
                        public FileVisitResult visitFile(Path deleteFile, BasicFileAttributes attrs)
                                throws IOException {
                            try {
                                // Only .xml policy files are tracked in the workspace tree,
                                // so note before deleting whether this one must be removed from it.
                                boolean removeFromTree = deleteFile.getFileName().toString().endsWith(".xml");
                                Files.delete(deleteFile);
                                if (removeFromTree) {
                                    self.treeWorkspace.removeItem(deleteFile.toFile());
                                }
                                if (logger.isDebugEnabled()) {
                                    logger.debug("Deleted file: " + deleteFile.toString());
                                }
                            } catch (IOException e) {
                                // Abort the walk on the first failure rather than compounding it.
                                logger.error("Failed to delete file: " + deleteFile.toString(), e);
                                return FileVisitResult.TERMINATE;
                            }
                            return super.visitFile(deleteFile, attrs);
                        }

                        @Override
                        public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException {
                            try {
                                // Children were deleted in visitFile, so the directory is empty now.
                                Files.delete(dir);
                                self.treeWorkspace.removeItem(dir.toFile());
                                if (logger.isDebugEnabled()) {
                                    logger.debug("Deleted dir: " + dir.toString());
                                }
                            } catch (IOException e) {
                                logger.error("Failed to delete directory: " + dir.toString(), e);
                                return FileVisitResult.TERMINATE;
                            }
                            return super.postVisitDirectory(dir, exc);
                        }
                    });
                } catch (IOException e) {
                    logger.error("Failed to walk subdomain: " + subdomain.getAbsolutePath(), e);
                }
            }
        }
    }, true);
}
From source file:com.twosigma.beaker.core.rest.PluginServiceLocatorRest.java
private void writePrivateFile(java.nio.file.Path path, String contents) throws IOException, InterruptedException { if (windows()) { String p = path.toString(); Thread.sleep(1000); // XXX unknown race condition try (PrintWriter out = new PrintWriter(p)) { out.print(contents);// w w w . ja v a2s . c o m } return; } if (Files.exists(path)) { Files.delete(path); } try (PrintWriter out = new PrintWriter(path.toFile())) { out.print(""); } Set<PosixFilePermission> perms = EnumSet.of(PosixFilePermission.OWNER_READ, PosixFilePermission.OWNER_WRITE); Files.setPosixFilePermissions(path, perms); // XXX why is this in a try block? try (PrintWriter out = new PrintWriter(path.toFile())) { out.print(contents); } }
From source file:org.codice.ddf.platform.migratable.impl.PlatformMigratableTest.java
/** * Verify that when the keystore and truststore are located outside of they system home directory, * warnings are not recorded on import.//from w w w .j av a 2 s .c o m */ @Test public void testDoVersionUpgradeImportKeystoresOutsideOfDdfHome() throws IOException { // Setup export Path exportDir = tempDir.getRoot().toPath().toRealPath(); // For export, move keystore and truststore into tempDir and reset system properties for (Map.Entry<String, Path> entry : KEYSTORES_MAP.entrySet()) { Path source = ddfHome.resolve(entry.getValue()).toRealPath(); Files.move(source, tempDir.getRoot().toPath().toRealPath().resolve(entry.getValue().getFileName())); if ("keystore".equals(entry.getKey())) { System.setProperty(KEYSTORE_SYSTEM_PROP_KEY, tempDir.getRoot().toPath().resolve(entry.getValue().getFileName()).toRealPath().toString()); } else if ("truststore".equals(entry.getKey())) { System.setProperty(TRUSTSTORE_SYSTEM_PROP_KEY, tempDir.getRoot().toPath().resolve(entry.getValue().getFileName()).toRealPath().toString()); } } MigrationReport exportReport = doExport(exportDir); // Setup import setup(DDF_IMPORTED_HOME, DDF_IMPORTED_TAG, IMPORTING_PRODUCT_VERSION); // For import, delete keystore and truststore since they are already in tempDir and reset system // properties. // Since these are outside of ddf.home, they should not be imported. A checksum should be // computed // to verify that they are the same as the exported files. 
for (Map.Entry<String, Path> entry : KEYSTORES_MAP.entrySet()) { Path keystore = ddfHome.resolve(entry.getValue()).toRealPath(); Files.delete(ddfHome.resolve(keystore)); if ("keystore".equals(entry.getKey())) { System.setProperty(KEYSTORE_SYSTEM_PROP_KEY, tempDir.getRoot().toPath().resolve(entry.getValue().getFileName()).toRealPath().toString()); } else if ("truststore".equals(entry.getKey())) { System.setProperty(TRUSTSTORE_SYSTEM_PROP_KEY, tempDir.getRoot().toPath().resolve(entry.getValue().getFileName()).toRealPath().toString()); } } PlatformMigratable iPlatformMigratable = spy(new PlatformMigratable()); when(iPlatformMigratable.getVersion()).thenReturn("3.0"); List<Migratable> iMigratables = Arrays.asList(iPlatformMigratable); ConfigurationMigrationManager iConfigurationMigrationManager = new ConfigurationMigrationManager( iMigratables, systemService); MigrationReport importReport = iConfigurationMigrationManager.doImport(exportDir, this::print); // Verify import assertThat("The import report has errors.", importReport.hasErrors(), is(false)); assertThat("The import report does have warnings.", importReport.hasWarnings(), is(false)); assertThat("Import was not successful.", importReport.wasSuccessful(), is(true)); verifyUpgradeableSystemFilesImported(); verifyServiceWrapperImported(); }
From source file:dotaSoundEditor.Controls.EditorPanel.java
protected void revertAllButtonActionPerformed(ActionEvent evt) { //Delete existing script file String scriptFilePath = getCurrentScriptString(); File scriptFileToDelete = new File(scriptFilePath); if (scriptFileToDelete.isFile()) { try {/*from www.j av a2s . c o m*/ Files.delete(Paths.get(scriptFilePath)); } catch (NoSuchFileException | DirectoryNotEmptyException | SecurityException ex) { ex.printStackTrace(); } catch (IOException ex) { System.err.println("IOException in delete."); } } else { System.err.println("Unable to delete script file at " + scriptFileToDelete.getAbsolutePath()); } //Repopulate soundtree populateSoundList(); }
From source file:de.decoit.visa.rdf.RDFManager.java
/**
 * Restore the contents of the RDF model to a saved state. Only information
 * stored in uploaded files will be restored at this time.
 *
 * The restore runs inside a WRITE transaction: the first retained source file
 * replaces the model, the rest are merged in, and source files newer than the
 * requested state are deleted along with their named models.
 *
 * @param pState ID number of the state which will be restored
 * @throws RDFSourceException if a source file contains invalid RDF information
 * @throws IOException if an obsolete source file cannot be deleted
 * @throws IllegalArgumentException if {@code pState} is not a valid state index
 */
public void restore(int pState) throws RDFSourceException, IOException {
    // Check if the provided state ID points to a valid list index
    if (pState >= 0 && pState < source.size()) {
        ds.begin(ReadWrite.WRITE);
        try {
            // Backup the old source file list
            List<Path> oldSource = source;
            // Get a sub list with only the needed source files
            source = new ArrayList<>(oldSource.subList(0, pState + 1));
            // Iterate over the temporary source file list and add all files
            // to the model; the first load replaces, later loads merge.
            boolean replace = true;
            for (Path f : source) {
                this.loadRDF(f, replace);
                replace = false;
            }
            // Delete all source files that are no longer needed, and drop
            // their corresponding named models from the dataset.
            for (int i = pState + 1; i < oldSource.size(); i++) {
                Files.delete(oldSource.get(i));
                ds.removeNamedModel(VISA.createModelURI(oldSource.get(i).getFileName().toString()));
            }
            ds.commit();
        } catch (Throwable ex) {
            // Roll back on any failure, then rethrow the original cause.
            ds.abort();
            throw ex;
        } finally {
            ds.end();
            TDB.sync(ds);
        }
    } else {
        throw new IllegalArgumentException("Invalid state ID provided");
    }
}
From source file:backtype.storm.localizer.Localizer.java
/**
 * Downloads a blob from the Nimbus blob store into {@code localFile}, with
 * retries, versioned file naming, and an atomic "key.current" symlink swap.
 *
 * Sequence per attempt: download (optionally uncompress) -> write version file
 * -> set permissions -> atomically repoint the current symlink -> remove the
 * old versioned file. If the symlink swap fails after the version file was
 * written, the old version number is restored before rethrowing.
 *
 * @param key        blob key in the blob store
 * @param localFile  base path for the localized blob
 * @param user       user requiring READ access to the blob
 * @param uncompress whether the blob must be unpacked after download
 * @param isUpdate   true when refreshing an existing blob (keeps the version file on failure)
 * @return the localized resource descriptor
 */
private LocalizedResource downloadBlob(Map conf, String key, File localFile, String user, boolean uncompress,
        boolean isUpdate) throws AuthorizationException, KeyNotFoundException, IOException {
    ClientBlobStore blobstore = null;
    try {
        blobstore = getClientBlobStore();
        long nimbusBlobVersion = Utils.nimbusVersionOfBlob(key, blobstore);
        long oldVersion = Utils.localVersionOfBlob(localFile.toString());
        FileOutputStream out = null;
        PrintWriter writer = null;
        int numTries = 0;
        String localizedPath = localFile.toString();
        String localFileWithVersion = Utils.constructBlobWithVersionFileName(localFile.toString(),
                nimbusBlobVersion);
        String localVersionFile = Utils.constructVersionFileName(localFile.toString());
        String downloadFile = localFileWithVersion;
        if (uncompress) {
            // we need to download to temp file and then unpack into the one requested
            downloadFile = new File(localFile.getParent(), TO_UNCOMPRESS + localFile.getName()).toString();
        }
        while (numTries < _blobDownloadRetries) {
            out = new FileOutputStream(downloadFile);
            numTries++;
            try {
                if (!Utils.canUserReadBlob(blobstore.getBlobMeta(key), user)) {
                    throw new AuthorizationException(user + " does not have READ access to " + key);
                }
                // Stream the blob contents to the download file.
                InputStreamWithMeta in = blobstore.getBlob(key);
                byte[] buffer = new byte[1024];
                int len;
                while ((len = in.read(buffer)) >= 0) {
                    out.write(buffer, 0, len);
                }
                out.close();
                in.close();
                if (uncompress) {
                    Utils.unpack(new File(downloadFile), new File(localFileWithVersion));
                    LOG.debug("uncompressed " + downloadFile + " to: " + localFileWithVersion);
                }
                // Next write the version.
                LOG.info("Blob: " + key + " updated with new Nimbus-provided version: " + nimbusBlobVersion
                        + " local version was: " + oldVersion);
                // The false parameter ensures overwriting the version file, not appending
                writer = new PrintWriter(new BufferedWriter(new FileWriter(localVersionFile, false)));
                writer.println(nimbusBlobVersion);
                writer.close();
                try {
                    setBlobPermissions(conf, user, localFileWithVersion);
                    setBlobPermissions(conf, user, localVersionFile);
                    // Update the key.current symlink. First create tmp symlink and do
                    // move of tmp to current so that the operation is atomic.
                    String tmp_uuid_local = java.util.UUID.randomUUID().toString();
                    LOG.debug("Creating a symlink @" + localFile + "." + tmp_uuid_local + " , " + "linking to: "
                            + localFile + "." + nimbusBlobVersion);
                    File uuid_symlink = new File(localFile + "." + tmp_uuid_local);
                    Files.createSymbolicLink(uuid_symlink.toPath(), Paths.get(
                            Utils.constructBlobWithVersionFileName(localFile.toString(), nimbusBlobVersion)));
                    File current_symlink = new File(
                            Utils.constructBlobCurrentSymlinkName(localFile.toString()));
                    Files.move(uuid_symlink.toPath(), current_symlink.toPath(), ATOMIC_MOVE);
                } catch (IOException e) {
                    // if we fail after writing the version file but before we move current link we need to
                    // restore the old version to the file
                    try {
                        PrintWriter restoreWriter = new PrintWriter(
                                new BufferedWriter(new FileWriter(localVersionFile, false)));
                        restoreWriter.println(oldVersion);
                        restoreWriter.close();
                    } catch (IOException ignore) {
                        // best effort: the original IOException below is the one that matters
                    }
                    throw e;
                }
                String oldBlobFile = localFile + "." + oldVersion;
                try {
                    // Remove the old version. Note that if a number of processes have that file open,
                    // the OS will keep the old blob file around until they all close the handle and only
                    // then deletes it. No new process will open the old blob, since the users will open the
                    // blob through the "blob.current" symlink, which always points to the latest version of
                    // a blob. Remove the old version after the current symlink is updated as to not affect
                    // anyone trying to read it.
                    if ((oldVersion != -1) && (oldVersion != nimbusBlobVersion)) {
                        LOG.info("Removing an old blob file:" + oldBlobFile);
                        Files.delete(Paths.get(oldBlobFile));
                    }
                } catch (IOException e) {
                    // At this point we have downloaded everything and moved symlinks. If the remove of
                    // old fails just log an error
                    LOG.error("Exception removing old blob version: " + oldBlobFile);
                }
                break;
            } catch (AuthorizationException ae) {
                // we consider this non-retriable exceptions
                if (out != null) {
                    out.close();
                }
                new File(downloadFile).delete();
                throw ae;
            } catch (IOException | KeyNotFoundException e) {
                // Retriable: clean up partial artifacts before trying again.
                if (out != null) {
                    out.close();
                }
                if (writer != null) {
                    writer.close();
                }
                new File(downloadFile).delete();
                if (uncompress) {
                    try {
                        FileUtils.deleteDirectory(new File(localFileWithVersion));
                    } catch (IOException ignore) {
                        // partial unpack directory may not exist; safe to ignore
                    }
                }
                if (!isUpdate) {
                    // don't want to remove existing version file if its an update
                    new File(localVersionFile).delete();
                }
                if (numTries < _blobDownloadRetries) {
                    LOG.error("Failed to download blob, retrying", e);
                } else {
                    throw e;
                }
            }
        }
        return new LocalizedResource(key, localizedPath, uncompress);
    } finally {
        if (blobstore != null) {
            blobstore.shutdown();
        }
    }
}
From source file:org.apache.geode.internal.cache.BackupDUnitTest.java
/** * Recursively delete a file or directory. A description of any files or directories that can not * be deleted will be added to failures if failures is non-null. This method tries to delete as * much as possible./*from w w w .java 2 s .c om*/ */ public static void delete(File file, StringBuilder failures) { if (!file.exists()) return; if (file.isDirectory()) { File[] fileList = file.listFiles(); if (fileList != null) { for (File child : fileList) { delete(child, failures); } } } try { Files.delete(file.toPath()); } catch (IOException e) { if (failures != null) { failures.append("Could not delete ").append(file).append(" due to ").append(e.getMessage()) .append('\n'); } } }