List of usage examples for java.nio.file.Files.walkFileTree
public static Path walkFileTree(Path start, FileVisitor<? super Path> visitor) throws IOException
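For orientation, here is a minimal, self-contained sketch (not taken from any of the projects below) that walks a directory tree and prints every regular file it visits:

import java.io.IOException;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;

public class WalkFileTreeDemo {
    public static void main(String[] args) throws IOException {
        Path start = Paths.get(args.length > 0 ? args[0] : ".");
        // Walk the tree rooted at 'start' and print each regular file.
        Files.walkFileTree(start, new SimpleFileVisitor<Path>() {
            @Override
            public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) {
                System.out.println(file);
                return FileVisitResult.CONTINUE;
            }
        });
    }
}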
From source file:org.tinymediamanager.core.Utils.java
/**
 * Copies a complete directory recursively, using Java NIO.
 *
 * @param from
 *          source
 * @param to
 *          destination
 * @throws IOException
 */
public static void copyDirectoryRecursive(Path from, Path to) throws IOException {
    LOGGER.info("Copying complete directory from " + from + " to " + to);
    Files.walkFileTree(from, new CopyFileVisitor(to));
}
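The CopyFileVisitor used above (and in several of the other examples) is a project-specific class that is not shown here. A typical implementation of such a "copy everything" visitor looks roughly like the following sketch; it mirrors the source tree under a target directory, creating each directory before copying the files it contains. This is illustrative only, not the tinyMediaManager source.

import java.io.IOException;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.StandardCopyOption;
import java.nio.file.attribute.BasicFileAttributes;

// Sketch of a copy visitor: mirrors the walked tree under targetPath.
public class CopyFileVisitor extends SimpleFileVisitor<Path> {
    private final Path targetPath;
    private Path sourcePath = null;

    public CopyFileVisitor(Path targetPath) {
        this.targetPath = targetPath;
    }

    @Override
    public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) throws IOException {
        if (sourcePath == null) {
            sourcePath = dir; // remember the walk root so paths can be relativized against it
        }
        Files.createDirectories(targetPath.resolve(sourcePath.relativize(dir)));
        return FileVisitResult.CONTINUE;
    }

    @Override
    public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
        Files.copy(file, targetPath.resolve(sourcePath.relativize(file)), StandardCopyOption.REPLACE_EXISTING);
        return FileVisitResult.CONTINUE;
    }
}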
From source file:com.dell.asm.asmcore.asmmanager.util.ServiceTemplateUtil.java
/**
 * Decrypt, unzip and copy attachments to template folder.
 * @param encryptedTemplateFileData encrypted template file data
 * @param password
 * @return
 * @throws AsmManagerRuntimeException
 */
public static ServiceTemplate importTemplate(byte[] encryptedTemplateFileData, String password)
        throws AsmManagerRuntimeException {
    ServiceTemplate svc = null;
    if (encryptedTemplateFileData != null && encryptedTemplateFileData.length > 0) {
        ensureTemplateAttachmentsFolderExists();

        // This is the tar file name.
        long currenttime = System.currentTimeMillis();
        String filename = "template_" + currenttime + ".gpg";
        String restoredFile = "template_" + currenttime + ".zip";
        String tempFolder = "/tmp/template_" + currenttime;
        writeToFile("/tmp", filename, encryptedTemplateFileData);

        CommandResponse cmdresponse = null;
        ExecuteSystemCommands cmdRunner = ExecuteSystemCommands.getInstance();
        String[] command = new String[] { "/bin/bash", "-c",
                "echo " + ExecuteSystemCommands.sanitizeShellArgument(password)
                        + " | gpg --batch -q --utf8-strings --output "
                        + ExecuteSystemCommands.sanitizeShellArgument("/tmp/" + restoredFile)
                        + " --passphrase-fd 0 --decrypt "
                        + ExecuteSystemCommands.sanitizeShellArgument("/tmp/" + filename) };
        try {
            cmdresponse = cmdRunner.callCommand(command);
        } catch (Exception e) {
            LOGGER.error("GPG execution failed", e);
            throw new AsmManagerRuntimeException(e);
        } finally {
            File file = new File("/tmp/" + filename);
            file.delete();
        }
        LOGGER.debug("GPG return code: " + cmdresponse.getReturnCode() + " Return message: "
                + cmdresponse.getReturnMessage());
        if (!cmdresponse.getReturnCode().equals("0")) {
            throw new AsmManagerRuntimeException("Decryption of template failed, invalid file or password");
        }

        String encFil = "/tmp/" + restoredFile;
        // unzip all attachments
        command = new String[] { "/usr/bin/unzip", encFil, "-d" + tempFolder };
        try {
            cmdresponse = cmdRunner.runCommandWithConsoleOutput(command);
        } catch (Exception e) {
            LOGGER.error("Command execution failed: " + command, e);
            throw new AsmManagerRuntimeException(e);
        }
        if (!cmdresponse.getReturnCode().equals("0")) {
            throw new AsmManagerRuntimeException("UnZipping of template attachments failed");
        }
        try {
            Files.delete(Paths.get(encFil));
        } catch (IOException e) {
            LOGGER.error("Cannot delete file " + encFil, e);
        }

        // read template content
        File file = new File(tempFolder + "/template.tmp");
        String content = null;
        InputStream is = null;
        try {
            is = new FileInputStream(file);
            content = IOUtils.toString(is);
        } catch (IOException e) {
            throw new AsmManagerRuntimeException(e);
        } finally {
            if (is != null)
                IOUtils.closeQuietly(is);
        }
        file.delete();

        svc = MarshalUtil.unmarshal(ServiceTemplate.class, content);
        if (svc == null) {
            throw new AsmManagerRuntimeException("Template content is null. Bad import file.");
        }

        // copy
        Path attachPath = Paths.get(TEMPLATE_ATTACHMENT_DIR + svc.getId());
        if (Files.exists(attachPath)) {
            svc.setId(UUID.randomUUID().toString());
            attachPath = Paths.get(TEMPLATE_ATTACHMENT_DIR + svc.getId());
        }
        try {
            Files.createDirectory(attachPath);
        } catch (IOException e) {
            LOGGER.error(e);
            throw new AsmManagerRuntimeException("Cannot create directory: " + attachPath);
        }

        File tmpDir = new File(tempFolder);
        if (tmpDir.exists() && tmpDir.isDirectory()) {
            if (tmpDir.list().length > 0) {
                try {
                    Files.walkFileTree(tmpDir.toPath(), new CopyFileVisitor(attachPath));
                } catch (Exception e) {
                    LOGGER.error(e);
                    throw new AsmManagerRuntimeException(
                            "Cannot copy attachments to template folder: " + attachPath);
                }
            }
            // cleanup
            try {
                FileUtils.deleteDirectory(tmpDir);
            } catch (IOException e) {
                LOGGER.error("Cannot delete temp folder: " + tempFolder, e);
            }
        }
    }
    return svc;
}
From source file:org.roda.core.RodaCoreFactory.java
public static Set<String> getFilenamesInsideConfigFolder(String folder) throws IOException {
    Set<String> fileNames = new HashSet<>();

    // get from external config
    Set<String> externalFileNames = new HashSet<>();
    Path configPath = RodaCoreFactory.getConfigPath().resolve(folder);
    Files.walkFileTree(configPath, new SimpleFileVisitor<Path>() {
        @Override
        public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
            externalFileNames.add(file.getFileName().toString());
            return FileVisitResult.CONTINUE;
        }
    });
    fileNames.addAll(externalFileNames);

    // get from internal config
    List<ClassLoader> classLoadersList = new LinkedList<>();
    classLoadersList.add(ClasspathHelper.contextClassLoader());
    Set<String> internalFilesPath = new Reflections(new ConfigurationBuilder()
        .filterInputsBy(new FilterBuilder()
            .include(FilterBuilder.prefix("" + RodaConstants.CORE_CONFIG_FOLDER + "/" + folder)))
        .setScanners(new ResourcesScanner())
        .setUrls(ClasspathHelper.forClassLoader(classLoadersList.toArray(new ClassLoader[0]))))
        .getResources(Pattern.compile(".*"));
    for (String internalFilePath : internalFilesPath) {
        fileNames.add(Paths.get(internalFilePath).getFileName().toString());
    }

    return fileNames;
}
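The visitor above collects file names from the whole tree below the config folder. When only the entries directly inside a folder are of interest, the four-argument overload of walkFileTree can bound the traversal depth. The sketch below is illustrative (not part of the RODA source; names are made up):

import java.io.IOException;
import java.nio.file.FileVisitOption;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.EnumSet;
import java.util.HashSet;
import java.util.Set;

public class ListTopLevelFiles {
    public static Set<String> topLevelFileNames(Path folder) throws IOException {
        Set<String> names = new HashSet<>();
        // maxDepth = 1: visit only the folder itself and its immediate children.
        Files.walkFileTree(folder, EnumSet.noneOf(FileVisitOption.class), 1, new SimpleFileVisitor<Path>() {
            @Override
            public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) {
                names.add(file.getFileName().toString());
                return FileVisitResult.CONTINUE;
            }
        });
        return names;
    }

    public static void main(String[] args) throws IOException {
        topLevelFileNames(Paths.get(args.length > 0 ? args[0] : ".")).forEach(System.out::println);
    }
}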
From source file:playRepository.GitRepository.java
/**
 * Clones a local repository.
 *
 * This doesn't copy Git objects but hardlinks them to save disk space.
 *
 * @param originalProject
 * @param forkProject
 * @throws IOException
 */
protected static void cloneHardLinkedRepository(Project originalProject, Project forkProject)
        throws IOException {
    Repository origin = GitRepository.buildGitRepository(originalProject);
    Repository forked = GitRepository.buildGitRepository(forkProject);
    forked.create();

    final Path originObjectsPath = Paths.get(new File(origin.getDirectory(), "objects").getAbsolutePath());
    final Path forkedObjectsPath = Paths.get(new File(forked.getDirectory(), "objects").getAbsolutePath());

    // Hardlink files in the .git/objects/ directory to save disk space,
    // but copy .git/info/alternates because the file can be modified.
    SimpleFileVisitor<Path> visitor = new SimpleFileVisitor<Path>() {
        @Override
        public FileVisitResult visitFile(Path file, BasicFileAttributes attr) throws IOException {
            Path newPath = forkedObjectsPath.resolve(originObjectsPath.relativize(file.toAbsolutePath()));
            if (file.equals(forkedObjectsPath.resolve("/info/alternates"))) {
                Files.copy(file, newPath);
            } else {
                FileUtils.mkdirs(newPath.getParent().toFile(), true);
                Files.createLink(newPath, file);
            }
            return java.nio.file.FileVisitResult.CONTINUE;
        }
    };
    Files.walkFileTree(originObjectsPath, visitor);

    // Import refs.
    for (Map.Entry<String, Ref> entry : origin.getAllRefs().entrySet()) {
        RefUpdate updateRef = forked.updateRef(entry.getKey());
        Ref ref = entry.getValue();
        if (ref.isSymbolic()) {
            updateRef.link(ref.getTarget().getName());
        } else {
            updateRef.setNewObjectId(ref.getObjectId());
            updateRef.update();
        }
    }
}
From source file:com.dell.asm.asmcore.asmmanager.util.ServiceTemplateUtil.java
public static void copyAttachments(String oldId, String id) throws AsmManagerRuntimeException, IOException {
    validateId(oldId);
    validateId(id);

    Path newAttachPath = Paths.get(TEMPLATE_ATTACHMENT_DIR + id);
    if (!Files.exists(newAttachPath)) {
        try {
            Files.createDirectory(newAttachPath);
        } catch (IOException e) {
            LOGGER.error(e);
            throw new AsmManagerRuntimeException("Cannot create directory: " + newAttachPath.toString());
        }
    }

    File oldDir = new File(TEMPLATE_ATTACHMENT_DIR + oldId);
    if (oldDir.isDirectory() && oldDir.list().length > 0) {
        try {
            Files.walkFileTree(oldDir.toPath(), new CopyFileVisitor(newAttachPath));
        } catch (Exception e) {
            LOGGER.error(e);
            throw new AsmManagerRuntimeException(
                    "Cannot copy attachments to template folder: " + newAttachPath.toString());
        }
    }
}
From source file:com.dell.asm.asmcore.asmmanager.util.ServiceTemplateUtil.java
/**
 * Move template attachments from one location to another.
 *
 * @param oldId
 * @param id
 */
public static void moveAttachments(String oldId, String id) throws AsmManagerRuntimeException, IOException {
    validateId(oldId);
    validateId(id);

    Path attachPath = Paths.get(TEMPLATE_ATTACHMENT_DIR + id);
    if (!Files.exists(attachPath)) {
        try {
            Files.createDirectory(attachPath);
        } catch (IOException e) {
            LOGGER.error(e);
            throw new AsmManagerRuntimeException("Cannot create directory: " + attachPath);
        }
    }

    File oldDir = new File(TEMPLATE_ATTACHMENT_DIR + oldId);
    if (oldDir.isDirectory() && oldDir.list().length > 0) {
        try {
            Files.walkFileTree(oldDir.toPath(), new CopyFileVisitor(attachPath));
        } catch (Exception e) {
            LOGGER.error(e);
            throw new AsmManagerRuntimeException("Cannot move attachments to template folder: " + attachPath);
        }
    }

    // cleanup
    try {
        FileUtils.deleteDirectory(oldDir);
    } catch (IOException e) {
        LOGGER.error("Cannot delete template folder: " + TEMPLATE_ATTACHMENT_DIR + oldId, e);
    }
}
From source file:org.apache.nifi.controller.repository.FileSystemRepository.java
private long destroyExpiredArchives(final String containerName, final Path container) throws IOException {
    archiveExpirationLog.debug("Destroying Expired Archives for Container {}", containerName);

    final List<ArchiveInfo> notYetExceedingThreshold = new ArrayList<>();
    long removalTimeThreshold = System.currentTimeMillis() - maxArchiveMillis;
    long oldestArchiveDateFound = System.currentTimeMillis();

    // determine how much space we must have in order to stop deleting old data
    final Long minRequiredSpace = minUsableContainerBytesForArchive.get(containerName);
    if (minRequiredSpace == null) {
        archiveExpirationLog
                .debug("Could not determine minimum required space so will not destroy any archived data");
        return -1L;
    }

    final long usableSpace = getContainerUsableSpace(containerName);
    final ContainerState containerState = containerStateMap.get(containerName);

    // First, delete files from our queue
    final long startNanos = System.nanoTime();
    final long toFree = minRequiredSpace - usableSpace;
    final BlockingQueue<ArchiveInfo> fileQueue = archivedFiles.get(containerName);
    if (archiveExpirationLog.isDebugEnabled()) {
        if (toFree < 0) {
            archiveExpirationLog.debug(
                    "Currently {} bytes free for Container {}; requirement is {} byte free, so no need to free space until an additional {} bytes are used",
                    usableSpace, containerName, minRequiredSpace, Math.abs(toFree));
        } else {
            archiveExpirationLog.debug(
                    "Currently {} bytes free for Container {}; requirement is {} byte free, so need to free {} bytes",
                    usableSpace, containerName, minRequiredSpace, toFree);
        }
    }

    ArchiveInfo toDelete;
    int deleteCount = 0;
    long freed = 0L;
    while ((toDelete = fileQueue.peek()) != null) {
        try {
            final long fileSize = toDelete.getSize();
            removalTimeThreshold = System.currentTimeMillis() - maxArchiveMillis;

            // we use fileQueue.peek above instead of fileQueue.poll() because we don't always want to
            // remove the head of the queue. Instead, we want to remove it only if we plan to delete it.
            // In order to accomplish this, we just peek at the head and check if it should be deleted.
            // If so, then we call poll() to remove it
            if (freed < toFree || getLastModTime(toDelete.toPath()) < removalTimeThreshold) {
                toDelete = fileQueue.poll(); // remove the head of the queue, which is already stored in 'toDelete'
                Files.deleteIfExists(toDelete.toPath());
                containerState.decrementArchiveCount();
                LOG.debug(
                        "Deleted archived ContentClaim with ID {} from Container {} because the archival size was exceeding the max configured size",
                        toDelete.getName(), containerName);
                freed += fileSize;
                deleteCount++;
            }

            // If we'd freed up enough space, we're done... unless the next file needs to be destroyed based on time.
            if (freed >= toFree) {
                // If the last mod time indicates that it should be removed, just continue loop.
                if (deleteBasedOnTimestamp(fileQueue, removalTimeThreshold)) {
                    archiveExpirationLog.debug(
                            "Freed enough space ({} bytes freed, needed to free {} bytes) but will continue to expire data based on timestamp",
                            freed, toFree);
                    continue;
                }

                archiveExpirationLog.debug(
                        "Freed enough space ({} bytes freed, needed to free {} bytes). Finished expiring data",
                        freed, toFree);

                final ArchiveInfo archiveInfo = fileQueue.peek();
                final long oldestArchiveDate = archiveInfo == null ? System.currentTimeMillis()
                        : getLastModTime(archiveInfo.toPath());

                // Otherwise, we're done. Return the last mod time of the oldest file in the container's archive.
                final long millis = TimeUnit.NANOSECONDS.toMillis(System.nanoTime() - startNanos);
                if (deleteCount > 0) {
                    LOG.info(
                            "Deleted {} files from archive for Container {}; oldest Archive Date is now {}; container cleanup took {} millis",
                            deleteCount, containerName, new Date(oldestArchiveDate), millis);
                } else {
                    LOG.debug(
                            "Deleted {} files from archive for Container {}; oldest Archive Date is now {}; container cleanup took {} millis",
                            deleteCount, containerName, new Date(oldestArchiveDate), millis);
                }

                return oldestArchiveDate;
            }
        } catch (final IOException ioe) {
            LOG.warn("Failed to delete {} from archive due to {}", toDelete, ioe.toString());
            if (LOG.isDebugEnabled()) {
                LOG.warn("", ioe);
            }
        }
    }

    // Go through each container and grab the archived data into a List
    archiveExpirationLog.debug("Searching for more archived data to expire");
    final StopWatch stopWatch = new StopWatch(true);
    for (int i = 0; i < SECTIONS_PER_CONTAINER; i++) {
        final Path sectionContainer = container.resolve(String.valueOf(i));
        final Path archive = sectionContainer.resolve("archive");
        if (!Files.exists(archive)) {
            continue;
        }

        try {
            final long timestampThreshold = removalTimeThreshold;
            Files.walkFileTree(archive, new SimpleFileVisitor<Path>() {
                @Override
                public FileVisitResult visitFile(final Path file, final BasicFileAttributes attrs) throws IOException {
                    if (attrs.isDirectory()) {
                        return FileVisitResult.CONTINUE;
                    }

                    final long lastModTime = getLastModTime(file);
                    if (lastModTime < timestampThreshold) {
                        try {
                            Files.deleteIfExists(file);
                            containerState.decrementArchiveCount();
                            LOG.debug(
                                    "Deleted archived ContentClaim with ID {} from Container {} because it was older than the configured max archival duration",
                                    file.toFile().getName(), containerName);
                        } catch (final IOException ioe) {
                            LOG.warn(
                                    "Failed to remove archived ContentClaim with ID {} from Container {} due to {}",
                                    file.toFile().getName(), containerName, ioe.toString());
                            if (LOG.isDebugEnabled()) {
                                LOG.warn("", ioe);
                            }
                        }
                    } else if (usableSpace < minRequiredSpace) {
                        notYetExceedingThreshold.add(new ArchiveInfo(container, file, attrs.size(), lastModTime));
                    }

                    return FileVisitResult.CONTINUE;
                }
            });
        } catch (final IOException ioe) {
            LOG.warn("Failed to cleanup archived files in {} due to {}", archive, ioe.toString());
            if (LOG.isDebugEnabled()) {
                LOG.warn("", ioe);
            }
        }
    }
    final long deleteExpiredMillis = stopWatch.getElapsed(TimeUnit.MILLISECONDS);

    // Sort the list according to last modified time
    Collections.sort(notYetExceedingThreshold, new Comparator<ArchiveInfo>() {
        @Override
        public int compare(final ArchiveInfo o1, final ArchiveInfo o2) {
            return Long.compare(o1.getLastModTime(), o2.getLastModTime());
        }
    });
    final long sortRemainingMillis = stopWatch.getElapsed(TimeUnit.MILLISECONDS) - deleteExpiredMillis;

    // Delete the oldest data
    archiveExpirationLog.debug("Deleting data based on timestamp");
    final Iterator<ArchiveInfo> itr = notYetExceedingThreshold.iterator();
    int counter = 0;
    while (itr.hasNext()) {
        final ArchiveInfo archiveInfo = itr.next();

        try {
            final Path path = archiveInfo.toPath();
            Files.deleteIfExists(path);
            containerState.decrementArchiveCount();
            LOG.debug(
                    "Deleted archived ContentClaim with ID {} from Container {} because the archival size was exceeding the max configured size",
                    archiveInfo.getName(), containerName);

            // Check if we've freed enough space every 25 files that we destroy
            if (++counter % 25 == 0) {
                if (getContainerUsableSpace(containerName) > minRequiredSpace) { // check if we can stop now
                    LOG.debug("Finished cleaning up archive for Container {}", containerName);
                    break;
                }
            }
        } catch (final IOException ioe) {
            LOG.warn("Failed to delete {} from archive due to {}", archiveInfo, ioe.toString());
            if (LOG.isDebugEnabled()) {
                LOG.warn("", ioe);
            }
        }

        itr.remove();
    }
    final long deleteOldestMillis = stopWatch.getElapsed(TimeUnit.MILLISECONDS) - sortRemainingMillis - deleteExpiredMillis;

    long oldestContainerArchive;
    if (notYetExceedingThreshold.isEmpty()) {
        oldestContainerArchive = System.currentTimeMillis();
    } else {
        oldestContainerArchive = notYetExceedingThreshold.get(0).getLastModTime();
    }
    if (oldestContainerArchive < oldestArchiveDateFound) {
        oldestArchiveDateFound = oldestContainerArchive;
    }

    // Queue up the files in the order that they should be destroyed so that we don't have to scan the directories for a while.
    for (final ArchiveInfo toEnqueue : notYetExceedingThreshold.subList(0,
            Math.min(100000, notYetExceedingThreshold.size()))) {
        fileQueue.offer(toEnqueue);
    }

    final long cleanupMillis = stopWatch.getElapsed(TimeUnit.MILLISECONDS) - deleteOldestMillis - sortRemainingMillis - deleteExpiredMillis;
    LOG.debug(
            "Oldest Archive Date for Container {} is {}; delete expired = {} ms, sort remaining = {} ms, delete oldest = {} ms, cleanup = {} ms",
            containerName, new Date(oldestContainerArchive), deleteExpiredMillis, sortRemainingMillis, deleteOldestMillis, cleanupMillis);

    return oldestContainerArchive;
}
From source file:com.dell.asm.asmcore.asmmanager.util.ServiceTemplateUtil.java
public static void deleteAttachments(String id) {
    if (id != null) {
        validateId(id);
        ensureTemplateAttachmentsFolderExists();
        try {
            Path directory = Paths.get(TEMPLATE_ATTACHMENT_DIR + id);
            if (Files.exists(directory) && Files.isDirectory(directory)) {
                Files.walkFileTree(directory, new DeleteDirectoryVisitor());
            }
        } catch (Exception e) {
            LOGGER.error("Exception occurred deleting directory during cleanup.", e);
        }
    }
}
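DeleteDirectoryVisitor is another project-specific class that is not shown here. A recursive-delete visitor is commonly written along the lines of the following sketch (illustrative, not the ASM Manager source): files are removed in visitFile, and each directory is removed in postVisitDirectory once its contents are gone.

import java.io.IOException;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;

// Sketch: deletes a directory tree bottom-up.
public class DeleteDirectoryVisitor extends SimpleFileVisitor<Path> {
    @Override
    public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
        Files.delete(file);
        return FileVisitResult.CONTINUE;
    }

    @Override
    public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException {
        if (exc != null) {
            throw exc; // propagate errors encountered while visiting the directory's children
        }
        Files.delete(dir); // the directory is empty by the time postVisitDirectory runs
        return FileVisitResult.CONTINUE;
    }
}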
From source file:ch.puzzle.itc.mobiliar.business.deploy.boundary.DeploymentBoundary.java
public void cleanupDeploymentLogs() {
    int cleanupAge = ConfigurationService.getPropertyAsInt(ConfigKey.LOGS_CLEANUP_AGE);
    String logsPathName = ConfigurationService.getProperty(ConfigKey.LOGS_PATH);
    Path logsDir = Paths.get(logsPathName);
    log.fine("Cleaning logs folder " + logsDir);
    FileVisitor<Path> fileVisitor = new ClenaupFileVisitor(logsDir, cleanupAge);
    try {
        Files.walkFileTree(logsDir, fileVisitor);
    } catch (IOException e) {
        log.severe("Log cleanup failed: " + e.getMessage());
        e.printStackTrace();
    }
}
From source file:ch.puzzle.itc.mobiliar.business.deploy.boundary.DeploymentBoundary.java
/**
 * Deletes all files that are older than DEPLOYMENT_CLEANUP_AGE in the GENERATOR_PATH* directories.
 *
 * @throws IOException
 */
public void cleanupDeploymentFiles() {
    final int cleanupAge = ConfigurationService.getPropertyAsInt(ConfigKey.DEPLOYMENT_CLEANUP_AGE);
    HashSet<String> pathsToCheck = new HashSet<>();
    ConfigKey[] keys = { ConfigKey.GENERATOR_PATH, ConfigKey.GENERATOR_PATH_SIMULATION,
            ConfigKey.GENERATOR_PATH_TEST };

    // get all generator paths and merge them
    for (ConfigKey key : keys) {
        String path = ConfigurationService.getProperty(key);
        if (path != null) {
            pathsToCheck.add(path);
        }
    }

    for (String basePathName : pathsToCheck) {
        final Path basePath = Paths.get(basePathName);
        log.fine("Cleaning generator folder " + basePath);
        FileVisitor<Path> fileVisitor = new ClenaupFileVisitor(basePath, cleanupAge);
        try {
            Files.walkFileTree(basePath, fileVisitor);
        } catch (IOException e) {
            log.severe("Deployment cleanup failed: " + e.getMessage());
        }
    }
}
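The ClenaupFileVisitor used by both methods above belongs to the mobiliar project and is not reproduced here. An age-based cleanup visitor typically looks roughly like the sketch below; it is illustrative only, and the unit of the age parameter (days) as well as the empty-directory handling are assumptions rather than details taken from the original class.

import java.io.IOException;
import java.nio.file.DirectoryStream;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.concurrent.TimeUnit;

// Illustrative age-based cleanup visitor; the real ClenaupFileVisitor may differ.
public class AgeBasedCleanupVisitor extends SimpleFileVisitor<Path> {
    private final Path root;
    private final long cutoffMillis;

    public AgeBasedCleanupVisitor(Path root, int maxAgeDays) {
        this.root = root;
        this.cutoffMillis = System.currentTimeMillis() - TimeUnit.DAYS.toMillis(maxAgeDays);
    }

    @Override
    public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
        // Delete regular files whose last-modified time is older than the cutoff.
        if (attrs.lastModifiedTime().toMillis() < cutoffMillis) {
            Files.deleteIfExists(file);
        }
        return FileVisitResult.CONTINUE;
    }

    @Override
    public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException {
        // Remove directories that became empty, but never the walk root itself.
        if (exc == null && !dir.equals(root)) {
            try (DirectoryStream<Path> entries = Files.newDirectoryStream(dir)) {
                if (!entries.iterator().hasNext()) {
                    Files.deleteIfExists(dir);
                }
            }
        }
        return FileVisitResult.CONTINUE;
    }
}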