List of usage examples for `java.nio.file.Path#getParent`.
Method signature: `Path getParent();`
From source file:company.gonapps.loghut.dao.TagDao.java
/**
 * Renders the month-index page for the given tag and year and writes it to
 * {@code <tags.directory>/<tagName>/<yyyy>/index.html}.
 *
 * @param tagName tag whose month index is generated
 * @param year    four-digit year used for the directory name
 * @throws IOException       if the directory or file cannot be written
 * @throws TemplateException if FreeMarker fails to process the template
 */
public void createMonthIndex(String tagName, int year) throws IOException, TemplateException {
    // Lazily load and cache the FreeMarker template on first use.
    if (monthIndexTemplate == null)
        monthIndexTemplate = freeMarkerConfigurer.getConfiguration().getTemplate("blog/month_index.ftl");
    List<String> months = getMonths(tagName, year);
    // Render into memory first so the file is only written with complete content.
    StringWriter temporaryBuffer = new StringWriter();
    Map<String, Object> modelMap = new HashMap<>();
    modelMap.put("settings", settingDao);
    modelMap.put("months", months);
    monthIndexTemplate.process(modelMap, temporaryBuffer);
    Path monthIndexPath = Paths.get(settingDao.getSetting("tags.directory") + "/" + tagName + "/"
            + String.format("%04d", year) + "/index.html");
    rrwl.writeLock().lock();
    try {
        // FIX: createDirectories() previously ran after lock() but OUTSIDE the
        // try/finally, so a failure there left the write lock held forever.
        Files.createDirectories(monthIndexPath.getParent());
        // NOTE(review): FileWriter uses the platform default charset; consider
        // Files.newBufferedWriter(monthIndexPath, StandardCharsets.UTF_8) — verify
        // no existing index files depend on the platform encoding before changing.
        try (BufferedWriter bufferedWriter = new BufferedWriter(new FileWriter(monthIndexPath.toFile()))) {
            bufferedWriter.write(temporaryBuffer.toString());
        }
    } finally {
        rrwl.writeLock().unlock();
    }
}
From source file:fr.duminy.jbackup.core.ConfigurationManagerTest.java
/**
 * Saves the given configuration and asserts it lands in the expected
 * directory with the expected XML content.
 *
 * @param config configuration to persist
 * @return the path of the written configuration file
 */
private Path testSaveBackupConfiguration(BackupConfiguration config) throws Exception {
    final Path written = manager.saveBackupConfiguration(config);
    // The file must be created directly inside the configured directory.
    assertThat(written.getParent()).isEqualTo(configDir);
    // And its content must match the reference XML fixture.
    Assertions.assertThat(written.toFile()).hasContent(CONFIG_XML);
    return written;
}
From source file:org.apache.taverna.robundle.manifest.Manifest.java
/** * Write as an RO Bundle JSON-LD manifest * //from w w w . j a v a2 s . c om * @return The path of the written manifest (e.g. ".ro/manifest.json") * @throws IOException */ public Path writeAsJsonLD() throws IOException { Path jsonld = bundle.getFileSystem().getPath(RO, MANIFEST_JSON); createDirectories(jsonld.getParent()); // Files.createFile(jsonld); if (!getManifest().contains(jsonld)) getManifest().add(0, jsonld); ObjectMapper om = new ObjectMapper(); om.addMixInAnnotations(Path.class, PathMixin.class); om.addMixInAnnotations(FileTime.class, FileTimeMixin.class); om.enable(INDENT_OUTPUT); om.disable(WRITE_EMPTY_JSON_ARRAYS); om.disable(FAIL_ON_EMPTY_BEANS); om.disable(WRITE_NULL_MAP_VALUES); om.setSerializationInclusion(Include.NON_NULL); try (Writer w = newBufferedWriter(jsonld, Charset.forName("UTF-8"), WRITE, TRUNCATE_EXISTING, CREATE)) { om.writeValue(w, this); } return jsonld; }
From source file:org.fim.Fim.java
/**
 * Locates the repository root by walking up from the current directory
 * until a ".fim" directory is found, then records it on the context.
 * Does nothing if no ancestor contains a ".fim" directory.
 */
private void findRepositoryRootDir(Context context) {
    // The flag becomes true once we have moved at least one level up,
    // i.e. the command was invoked from a sub-directory of the repository.
    boolean startedBelowRoot = false;
    for (Path dir = context.getAbsoluteCurrentDirectory(); dir != null; dir = dir.getParent()) {
        if (Files.exists(dir.resolve(Context.DOT_FIM_DIR))) {
            setRepositoryRootDir(context, dir, startedBelowRoot);
            return;
        }
        startedBelowRoot = true;
    }
}
From source file:dk.dma.msiproxy.common.provider.AbstractProviderService.java
/**
 * May be called periodically to clean up the message repo folder associated
 * with the provider.
 * <p>
 * The procedure will determine which repository message ID's are still active,
 * and delete folders associated with message ID's that are not active anymore.
 */
public void cleanUpMessageRepoFolder() {
    long t0 = System.currentTimeMillis();

    // Compute the ID's for message repository folders to keep
    Set<Integer> ids = computeReferencedMessageIds(messages);

    // Build a lookup map of all the paths that are still active
    Set<Path> paths = new HashSet<>();
    ids.forEach(id -> {
        try {
            Path path = getMessageRepoFolder(id);
            // Add the path and the hashed sub-folders above it.
            // NOTE(review): this assumes exactly two levels of hashed parent
            // folders between the repo root and a message folder — confirm
            // against getMessageRepoFolder()'s layout.
            paths.add(path);
            paths.add(path.getParent());
            paths.add(path.getParent().getParent());
        } catch (IOException e) {
            log.error("Failed computing " + getProviderId() + " message repo paths for id " + id + ": "
                    + e.getMessage());
        }
    });

    // Scan all sub-folders and delete those not in the keep-set
    Path messageRepoRoot = getRepositoryService().getRepoRoot().resolve(MESSAGE_REPO_ROOT_FOLDER)
            .resolve(getProviderId());
    // Never delete the provider root itself.
    paths.add(messageRepoRoot);
    try {
        Files.walkFileTree(messageRepoRoot, new SimpleFileVisitor<Path>() {
            @Override
            public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException {
                // Post-order visit: children are handled first, so an inactive
                // directory is empty by the time it is deleted here.
                if (!paths.contains(dir)) {
                    log.info("Deleting message repo directory :" + dir);
                    Files.delete(dir);
                }
                return FileVisitResult.CONTINUE;
            }

            @Override
            public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
                // A file is stale when its parent folder is not an active message folder.
                if (!paths.contains(file.getParent())) {
                    log.info("Deleting message repo file :" + file);
                    Files.delete(file);
                }
                return FileVisitResult.CONTINUE;
            }
        });
    } catch (IOException e) {
        log.error("Failed cleaning up " + getProviderId() + " message repo: " + e.getMessage());
    }

    log.info(String.format("Cleaned up %s message repo in %d ms", getProviderId(),
            System.currentTimeMillis() - t0));
}
From source file:com.streamsets.pipeline.lib.io.FileContext.java
/**
 * Releases the current reader and records the starting position for the next
 * invocation: either the next offset in the same file (reader still has data)
 * or an end-of-file marker plus post-processing (NONE/DELETE/ARCHIVE).
 *
 * @param inErrorDiscardReader when true, the file is treated as failed:
 *                             post-processing is skipped and an ERROR event is published
 * @throws IOException if post-processing (delete/archive) of the finished file fails
 */
public void releaseReader(boolean inErrorDiscardReader) throws IOException {
    Utils.checkState(open, "FileContext is closed");
    // update starting offsets for next invocation either cold (no reader) or hot (reader)
    boolean hasNext;
    try {
        hasNext = reader != null && reader.hasNext();
    } catch (IOException ex) {
        // A failing reader is treated as exhausted; discard it quietly.
        IOUtils.closeQuietly(reader);
        reader = null;
        hasNext = false;
    }
    boolean doneWithFile = !hasNext || inErrorDiscardReader;
    if (doneWithFile) {
        IOUtils.closeQuietly(reader);
        reader = null;
        // Using Long.MAX_VALUE to signal we reach the end of the file and next iteration should get the next file.
        setStartingCurrentFileName(currentFile);
        setStartingOffset(Long.MAX_VALUE);

        // If we failed to open the file in first place, it will be null and hence we won't do anything with it.
        if (currentFile == null) {
            return;
        }

        // File end event: refresh resolves renames that happened while reading.
        LiveFile file = currentFile.refresh();
        if (file == null) {
            // File disappeared since we read it; nothing to post-process.
            return;
        }

        if (inErrorDiscardReader) {
            LOG.warn("Processing file '{}' produced an error, skipping '{}' post processing on that file", file,
                    postProcessing);
            eventPublisher.publish(new FileEvent(file, FileEvent.Action.ERROR));
        } else {
            eventPublisher.publish(new FileEvent(file, FileEvent.Action.END));
            switch (postProcessing) {
            case NONE:
                LOG.debug("File '{}' processing completed, post processing action 'NONE'", file);
                break;
            case DELETE:
                try {
                    Files.delete(file.getPath());
                    LOG.debug("File '{}' processing completed, post processing action 'DELETED'", file);
                } catch (IOException ex) {
                    throw new IOException(Utils.format("Could not delete '{}': {}", file, ex.toString()), ex);
                }
                break;
            case ARCHIVE:
                // Archive path mirrors the file's full original path under archiveDir.
                Path fileArchive = Paths.get(archiveDir, file.getPath().toString());
                // NOTE(review): Paths.get() never returns null, so this guard is
                // dead code — candidate for removal.
                if (fileArchive == null) {
                    throw new IOException("Could not find archive file");
                }
                try {
                    Files.createDirectories(fileArchive.getParent());
                    Files.move(file.getPath(), fileArchive);
                    LOG.debug("File '{}' processing completed, post processing action 'ARCHIVED' as", file);
                } catch (IOException ex) {
                    throw new IOException(Utils.format("Could not archive '{}': {}", file, ex.toString()), ex);
                }
                break;
            }
        }
    } else {
        // Reader still has data: resume from the current offset next time.
        setStartingCurrentFileName(currentFile);
        setStartingOffset(getReader().getOffset());
    }
}
From source file:org.olat.course.assessment.bulk.DataStepForm.java
private void processReturnFiles(VFSLeaf target, List<BulkAssessmentRow> rows) { Map<String, BulkAssessmentRow> assessedIdToRow = new HashMap<>(); for (BulkAssessmentRow row : rows) { assessedIdToRow.put(row.getAssessedId(), row); }/*w w w .ja v a2 s . co m*/ if (target.exists()) { InputStream is = target.getInputStream(); File parentTarget = ((LocalImpl) target).getBasefile().getParentFile(); ZipInputStream zis = new ZipInputStream(is); ZipEntry entry; try { byte[] b = new byte[FileUtils.BSIZE]; while ((entry = zis.getNextEntry()) != null) { if (!entry.isDirectory()) { while (zis.read(b) > 0) { //continue } Path op = new File(parentTarget, entry.getName()).toPath(); if (!Files.isHidden(op) && !Files.isDirectory(op)) { Path parentDir = op.getParent(); String assessedId = parentDir.getFileName().toString(); String filename = op.getFileName().toString(); BulkAssessmentRow row; if (assessedIdToRow.containsKey(assessedId)) { row = assessedIdToRow.get(assessedId); } else { row = new BulkAssessmentRow(); row.setAssessedId(assessedId); assessedIdToRow.put(assessedId, row); rows.add(row); } if (row.getReturnFiles() == null) { row.setReturnFiles(new ArrayList<String>(2)); } row.getReturnFiles().add(filename); } } } } catch (Exception e) { logError("", e); } finally { IOUtils.closeQuietly(is); IOUtils.closeQuietly(zis); } } }
From source file:org.fcrepo.http.api.ExternalContentPathValidator.java
/** * Starts up monitoring of the allowed list configuration for changes. *//*from w w w. j ava 2 s . com*/ private void monitorForChanges() { if (monitorRunning) { return; } final Path path = Paths.get(configPath); if (!path.toFile().exists()) { LOGGER.debug("Allow list configuration {} does not exist, disabling monitoring", configPath); return; } final Path directoryPath = path.getParent(); try { final WatchService watchService = FileSystems.getDefault().newWatchService(); directoryPath.register(watchService, ENTRY_MODIFY); monitorThread = new Thread(new Runnable() { @Override public void run() { try { for (;;) { WatchKey key; try { key = watchService.take(); } catch (final InterruptedException e) { LOGGER.debug("Interrupted the configuration monitor thread."); break; } for (final WatchEvent<?> event : key.pollEvents()) { final WatchEvent.Kind<?> kind = event.kind(); if (kind == OVERFLOW) { continue; } // If the configuration file triggered this event, reload it final Path changed = (Path) event.context(); if (changed.equals(path.getFileName())) { LOGGER.info("External binary configuration {} has been updated, reloading.", path); try { loadAllowedPaths(); } catch (final IOException e) { LOGGER.error("Failed to reload external locations configuration", e); } } // reset the key final boolean valid = key.reset(); if (!valid) { LOGGER.debug("Monitor of {} is no longer valid", path); break; } } } } finally { try { watchService.close(); } catch (final IOException e) { LOGGER.error("Failed to stop configuration monitor", e); } } monitorRunning = false; } }); } catch (final IOException e) { LOGGER.error("Failed to start configuration monitor", e); } monitorThread.start(); monitorRunning = true; }
From source file:org.apache.taverna.robundle.manifest.TestManifestJSON.java
/**
 * Verifies that a bundle's history (".ro/evolution.ttl") is absent by default,
 * can be created and registered in the manifest, and survives a save/reopen
 * round-trip.
 */
@Test
public void testHistory() throws IOException {
    Path tmpBundle = Files.createTempFile("testbundle", "history");
    // create history
    try (Bundle bundle = Bundles.createBundle()) {
        Bundles.closeAndSaveBundle(bundle, tmpBundle);
    } catch (IOException e) {
        fail("failed to create bundle for history test: " + e.getMessage());
    }

    // make sure it doesn't fail if there is no history
    try (Bundle bundle = Bundles.openBundle(tmpBundle)) {
        Manifest manifest = bundle.getManifest();
        Path evolutionPath = bundle.getPath(".ro/evolution.ttl");

        assertFalse("did not expect a history file", Files.exists(evolutionPath));
        assertEquals("did not expect a history", 0, manifest.getHistory().size());

        // Create the history file and register it on the manifest.
        Files.createDirectories(evolutionPath.getParent());
        Bundles.setStringValue(evolutionPath, "<manifest.json> < http://purl.org/pav/retrievedFrom> "
                + "<http://wf4ever.github.io/ro/bundle/2013-05-21/example/.ro/manifest.json> .");
        manifest.getHistory().add(evolutionPath);

        assertTrue("expected a history file", Files.exists(evolutionPath));
        assertTrue("expected a history", manifest.getHistory().size() > 0);

        Bundles.closeBundle(bundle);
    } catch (IOException e) {
        fail("failed to read bundle for history test: " + e.getMessage());
    }

    // check if history is still there
    try (Bundle bundle = Bundles.openBundleReadOnly(tmpBundle)) {
        Manifest manifest = bundle.getManifest();
        Path evolutionPath = bundle.getPath(".ro/evolution.ttl");

        assertTrue("expected a history file", Files.exists(evolutionPath));
        assertEquals("expected exactly one history", 1, manifest.getHistory().size());

        Bundles.closeBundle(bundle);
    } catch (IOException e) {
        fail("failed to read bundle for history test: " + e.getMessage());
    }

    Files.delete(tmpBundle);
}
From source file:fr.ortolang.diffusion.client.cmd.CheckBagCommand.java
/**
 * Walks the snapshot's "metadata" tree and checks that every metadata file
 * has a corresponding entry under "objects". Orphaned metadata is reported
 * (and deleted when fix mode is on); "ortolang-item-json" descriptors get an
 * additional content check.
 *
 * @param root root directory of the snapshot being checked
 */
private void checkSnapshotMetadata(Path root) {
    Path metadataDir = Paths.get(root.toString(), "metadata");
    try {
        Files.walkFileTree(metadataDir, new FileVisitor<Path>() {
            @Override
            public FileVisitResult visitFile(Path metadataFile, BasicFileAttributes attrs) throws IOException {
                // The matching object lives at objects/<same relative parent path>.
                Path objectPath = Paths.get(root.toString(), "objects",
                        metadataDir.relativize(metadataFile.getParent()).toString());
                if (Files.exists(objectPath)) {
                    // Item descriptors get a deeper content check.
                    if (metadataFile.endsWith("ortolang-item-json")) {
                        checkOrtolangItemJson(metadataFile);
                    }
                } else {
                    errors.append("-> unexisting target for metadata: ").append(metadataFile).append("\r\n");
                    if (fix) {
                        try {
                            Files.delete(metadataFile);
                            fixed.append("-> deleted metadata: ").append(metadataFile).append("\r\n");
                        } catch (IOException e) {
                            errors.append("-> unable to fix: ").append(e.getMessage()).append("\r\n");
                        }
                    }
                }
                return FileVisitResult.CONTINUE;
            }

            @Override
            public FileVisitResult preVisitDirectory(Path dir, BasicFileAttributes attrs) throws IOException {
                return FileVisitResult.CONTINUE;
            }

            @Override
            public FileVisitResult visitFileFailed(Path metadataFile, IOException exc) throws IOException {
                return FileVisitResult.CONTINUE;
            }

            @Override
            public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException {
                return FileVisitResult.CONTINUE;
            }
        });
    } catch (IOException e) {
        System.out.println("Unable to walk file tree: " + e.getMessage());
    }
}