List of usage examples for java.nio.file.Path.getParent
Path getParent();
From source file:org.apache.tika.batch.fs.FSOutputStreamFactory.java
/** * This tries to create a file based on the {@link org.apache.tika.batch.fs.FSUtil.HANDLE_EXISTING} * value that was passed in during initialization. * <p>//from w w w. j a v a 2 s. c om * If {@link #handleExisting} is set to "SKIP" and the output file already exists, * this will return null. * <p> * If an output file can be found, this will try to mkdirs for that output file. * If mkdirs() fails, this will throw an IOException. * <p> * Finally, this will open an output stream for the appropriate output file. * @param metadata must have a value set for FSMetadataProperties.FS_ABSOLUTE_PATH or * else NullPointerException will be thrown! * @return OutputStream * @throws java.io.IOException, NullPointerException */ @Override public OutputStream getOutputStream(Metadata metadata) throws IOException { String initialRelativePath = metadata.get(FSProperties.FS_REL_PATH); Path outputPath = FSUtil.getOutputPath(outputRoot, initialRelativePath, handleExisting, suffix); if (outputPath == null) { return null; } if (!Files.isDirectory(outputPath.getParent())) { Files.createDirectories(outputPath.getParent()); //TODO: shouldn't need this any more in java 7, right? if (!Files.isDirectory(outputPath.getParent())) { throw new IOException("Couldn't create parent directory for:" + outputPath.toAbsolutePath()); } } OutputStream os = Files.newOutputStream(outputPath); switch (compression) { case BZIP2: os = new BZip2CompressorOutputStream(os); break; case GZIP: os = new GZIPOutputStream(os); break; case ZIP: os = new ZipArchiveOutputStream(os); break; } return new BufferedOutputStream(os); }
From source file:com.facebook.buck.cxx.ArchiveStepIntegrationTest.java
@Test public void inputDirs() throws IOException, InterruptedException { assumeTrue(Platform.detect() == Platform.MACOS || Platform.detect() == Platform.LINUX); ProjectFilesystem filesystem = new ProjectFilesystem(tmp.getRoot()); CxxPlatform platform = CxxPlatformUtils.build(new CxxBuckConfig(FakeBuckConfig.builder().build())); // Build up the paths to various files the archive step will use. SourcePathResolver sourcePathResolver = new SourcePathResolver(new SourcePathRuleFinder( new BuildRuleResolver(TargetGraph.EMPTY, new DefaultTargetNodeToBuildRuleTransformer()))); Archiver archiver = platform.getAr(); Path output = filesystem.getPath("output.a"); Path input = filesystem.getPath("foo/blah.dat"); filesystem.mkdirs(input.getParent()); filesystem.writeContentsToPath("blah", input); // Build an archive step. ArchiveStep archiveStep = new ArchiveStep(filesystem, archiver.getEnvironment(), archiver.getCommandPrefix(sourcePathResolver), ImmutableList.of(), getArchiveOptions(false), output, ImmutableList.of(input.getParent()), archiver); // Execute the archive step and verify it ran successfully. ExecutionContext executionContext = TestExecutionContext.newInstance(); TestConsole console = (TestConsole) executionContext.getConsole(); int exitCode = archiveStep.execute(executionContext).getExitCode(); assertEquals("archive step failed: " + console.getTextWrittenToStdErr(), 0, exitCode); // Now read the archive entries and verify that the timestamp, UID, and GID fields are // zero'd out. try (ArArchiveInputStream stream = new ArArchiveInputStream( new FileInputStream(filesystem.resolve(output).toFile()))) { ArArchiveEntry entry = stream.getNextArEntry(); assertThat(entry.getName(), Matchers.equalTo("blah.dat")); }//from ww w . j a v a 2s .co m }
From source file:ru.histone.staticrender.StaticRender.java
/**
 * Renders a static site: copies/parses layout templates from {@code srcDir/layouts/}
 * and evaluates every content file from {@code srcDir/content/} against its layout,
 * writing results under {@code dstDir} with the same relative paths.
 *
 * @param srcDir site source root (must contain {@code layouts/} and {@code content/})
 * @param dstDir destination root for the rendered output
 */
public void renderSite(final Path srcDir, final Path dstDir) {
    log.info("Running StaticRender for srcDir={}, dstDir={}", srcDir.toString(), dstDir.toString());
    Path contentDir = srcDir.resolve("content/");
    final Path layoutDir = srcDir.resolve("layouts/");

    // Pass 1: walk layouts/. Template files are parsed to an AST and registered by id;
    // every other file (assets, etc.) is copied verbatim into dstDir.
    FileVisitor<Path> layoutVisitor = new SimpleFileVisitor<Path>() {
        @Override
        public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
            if (file.toString().endsWith("." + TEMPLATE_FILE_EXTENSION)) {
                ArrayNode ast = null;
                try {
                    // NOTE(review): FileReader uses the platform default charset — templates
                    // in another encoding may be mis-read; confirm intended.
                    ast = histone.parseTemplateToAST(new FileReader(file.toFile()));
                } catch (HistoneException e) {
                    throw new RuntimeException("Error parsing histone template:" + e.getMessage(), e);
                }
                // Layout id = file name minus ".<extension>".
                final String fileName = file.getFileName().toString();
                String layoutId = fileName.substring(0,
                        fileName.length() - TEMPLATE_FILE_EXTENSION.length() - 1);
                layouts.put(layoutId, ast);
                if (log.isDebugEnabled()) {
                    log.debug("Layout found id='{}', file={}", layoutId, file);
                } else {
                    log.info("Layout found id='{}'", layoutId);
                }
            } else {
                // Non-template file: mirror it into dstDir, creating parent dirs on demand.
                final Path relativeFileName = srcDir.resolve("layouts").relativize(Paths.get(file.toUri()));
                final Path resolvedFile = dstDir.resolve(relativeFileName);
                if (!resolvedFile.getParent().toFile().exists()) {
                    Files.createDirectories(resolvedFile.getParent());
                }
                Files.copy(Paths.get(file.toUri()), resolvedFile, StandardCopyOption.REPLACE_EXISTING,
                        LinkOption.NOFOLLOW_LINKS);
            }
            return FileVisitResult.CONTINUE;
        }
    };

    // Pass 2: walk content/. Each content file is "-----"-delimited: a YAML meta
    // block first, then the body. The body is re-assembled preserving newlines.
    FileVisitor<Path> contentVisitor = new SimpleFileVisitor<Path>() {
        @Override
        public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException {
            Scanner scanner = new Scanner(file, "UTF-8");
            scanner.useDelimiter("-----");
            String meta = null;
            StringBuilder content = new StringBuilder();
            if (!scanner.hasNext()) {
                throw new RuntimeException("Wrong format #1:" + file.toString());
            }
            if (scanner.hasNext()) {
                meta = scanner.next();
            }
            if (scanner.hasNext()) {
                content.append(scanner.next());
                // Switch delimiter so the rest of the file is consumed line-by-line;
                // the loop below re-inserts the "\n" separators scanner strips.
                scanner.useDelimiter("\n");
            }
            while (scanner.hasNext()) {
                final String next = scanner.next();
                content.append(next);
                if (scanner.hasNext()) {
                    content.append("\n");
                }
            }
            // Meta block must name an already-registered layout via the "layout" key.
            Map<String, String> metaYaml = (Map<String, String>) yaml.load(meta);
            String layoutId = metaYaml.get("layout");
            if (!layouts.containsKey(layoutId)) {
                throw new RuntimeException(MessageFormat.format("No layout with id='{0}' found", layoutId));
            }
            final Path relativeFileName = srcDir.resolve("content").relativize(Paths.get(file.toUri()));
            final Path resolvedFile = dstDir.resolve(relativeFileName);
            if (!resolvedFile.getParent().toFile().exists()) {
                Files.createDirectories(resolvedFile.getParent());
            }
            Writer output = new FileWriter(resolvedFile.toFile());
            // Evaluation context: {"content": <body>, "meta": {<yaml keys except "content">}}.
            ObjectNode context = jackson.createObjectNode();
            ObjectNode metaNode = jackson.createObjectNode();
            context.put("content", content.toString());
            context.put("meta", metaNode);
            for (String key : metaYaml.keySet()) {
                if (!key.equalsIgnoreCase("content")) {
                    metaNode.put(key, metaYaml.get(key));
                }
            }
            try {
                histone.evaluateAST(layoutDir.toUri().toString(), layouts.get(layoutId), context, output);
                output.flush();
            } catch (HistoneException e) {
                throw new RuntimeException("Error evaluating content: " + e.getMessage(), e);
            } finally {
                output.close();
            }
            return FileVisitResult.CONTINUE;
        }
    };

    // Layouts must be walked first so content files can resolve their layout ids.
    try {
        Files.walkFileTree(layoutDir, layoutVisitor);
        Files.walkFileTree(contentDir, contentVisitor);
    } catch (Exception e) {
        throw new RuntimeException("Error during site render", e);
    }
}
From source file:joachimeichborn.geotag.io.jpeg.PictureMetadataWriter.java
/**
 * Copies the picture's file into a backup directory whose name is derived from
 * the MD5 hash of the source's parent path combined with the backup id, keeping
 * the original file name inside that directory.
 *
 * @throws IOException if the backup directory cannot be created or the copy fails
 */
private void backupFile() throws IOException {
    final Path original = picture.getFile();
    // Bucket backups by (parent directory + backup id) so files from the same
    // folder and run land together without path-length/character issues.
    final String bucketName = DigestUtils.md5Hex(original.getParent().toString() + backupId);
    final Path bucketDir = BACKUP_DIR.resolve(bucketName);
    Files.createDirectories(bucketDir);
    Files.copy(original, bucketDir.resolve(original.getFileName()));
}
From source file:org.mycore.common.MCRUtils.java
/** * Extracts files in a tar archive. Currently works only on uncompressed tar files. * //from w w w. j a va 2 s . c o m * @param source * the uncompressed tar to extract * @param expandToDirectory * the directory to extract the tar file to * @throws IOException * if the source file does not exists */ public static void untar(Path source, Path expandToDirectory) throws IOException { try (TarArchiveInputStream tain = new TarArchiveInputStream(Files.newInputStream(source))) { TarArchiveEntry tarEntry; FileSystem targetFS = expandToDirectory.getFileSystem(); HashMap<Path, FileTime> directoryTimes = new HashMap<>(); while ((tarEntry = tain.getNextTarEntry()) != null) { Path target = MCRPathUtils.getPath(targetFS, tarEntry.getName()); Path absoluteTarget = expandToDirectory.resolve(target).normalize().toAbsolutePath(); if (tarEntry.isDirectory()) { Files.createDirectories(expandToDirectory.resolve(absoluteTarget)); directoryTimes.put(absoluteTarget, FileTime.fromMillis(tarEntry.getLastModifiedDate().getTime())); } else { if (Files.notExists(absoluteTarget.getParent())) { Files.createDirectories(absoluteTarget.getParent()); } Files.copy(tain, absoluteTarget, StandardCopyOption.REPLACE_EXISTING); Files.setLastModifiedTime(absoluteTarget, FileTime.fromMillis(tarEntry.getLastModifiedDate().getTime())); } } //restore directory dates Files.walkFileTree(expandToDirectory, new SimpleFileVisitor<Path>() { @Override public FileVisitResult postVisitDirectory(Path dir, IOException exc) throws IOException { Path absolutePath = dir.normalize().toAbsolutePath(); Files.setLastModifiedTime(absolutePath, directoryTimes.get(absolutePath)); return super.postVisitDirectory(dir, exc); } }); } }
From source file:org.eclipse.che.api.fs.server.impl.FsOperations.java
void createFileWithParents(Path fsPath) throws ServerException { try {//from ww w . j a v a 2 s. c o m Files.createDirectories(fsPath.getParent()); Files.createFile(fsPath); } catch (IOException e) { throw new ServerException("Failed to create file: " + fsPath, e); } }
From source file:org.eclipse.che.api.fs.server.impl.FsOperations.java
/**
 * Creates the directory at {@code fsPath}, first ensuring every missing ancestor
 * directory exists.
 *
 * <p>The final segment is created with {@link Files#createDirectory} (not
 * {@code createDirectories}) so that an already-existing target is reported as a
 * failure rather than silently accepted.
 *
 * @param fsPath absolute filesystem path of the directory to create
 * @throws ServerException if directory creation fails
 */
void createDirWithParents(Path fsPath) throws ServerException {
    try {
        final Path parentDir = fsPath.getParent();
        Files.createDirectories(parentDir);
        Files.createDirectory(fsPath);
    } catch (IOException e) {
        throw new ServerException("Failed to create item: " + fsPath, e);
    }
}
From source file:at.tfr.securefs.client.TestWebSocket.java
@Ignore
@Test
public void testSendFile() throws Exception {
    // Locate the bundled test fixture; its parent directory is handed to the
    // websocket handler as the base path.
    final Path fixture = Paths.get(getClass().getResource("/test.txt").toURI());
    final Message openMessage = new Message(MessageType.OPEN, "test.txt");
    final WebsocketHandler handler = new WebsocketHandler(new URI(localhost), openMessage, fixture.getParent());
    handler.connectBlocking();
    // Wait (up to 100s) for the handler to be notified of completion.
    synchronized (handler) {
        handler.wait(100000L);
    }
}
From source file:org.apache.hadoop.hive.ql.log.TestSlidingFilenameRolloverStrategy.java
/**
 * Verifies the sliding-filename rollover strategy: forces several log rollovers and
 * checks that one timestamp-suffixed file is produced per rollover, each containing
 * the expected error text, and that no un-suffixed file is left behind.
 */
@Test
public void testSlidingLogFiles() throws Exception {
    // The log4j2 configuration under test must be the one wired in by PROPERTIES_FILE.
    assertEquals("bad props file", PROPERTIES_FILE, System.getProperty("log4j.configurationFile"));

    // Where the log files will be written.
    Path logTemplate = FileSystems.getDefault().getPath(FILE_PATTERN);
    String fileName = logTemplate.getFileName().toString();
    Path parent = logTemplate.getParent();
    try {
        Files.createDirectory(parent);
    } catch (FileAlreadyExistsException e) {
        // OK, fall through.
    }

    // Delete any stale log files left around from previous failed tests.
    deleteLogFiles(parent, fileName);

    Logger logger = LogManager.getLogger(LineageLogger.class);

    // Does the logger config look correct? The "sliding" appender must be attached.
    org.apache.logging.log4j.core.Logger coreLogger = (org.apache.logging.log4j.core.Logger) logger;
    LoggerConfig loggerConfig = coreLogger.get();
    Map<String, Appender> appenders = loggerConfig.getAppenders();
    assertNotNull("sliding appender is missing", appenders.get("sliding"));

    // Do some logging and force log rollover.
    int NUM_LOGS = 7;
    logger.debug("Debug Message Logged !!!");
    logger.info("Info Message Logged !!!");
    String errorString = "Error Message Logged ";
    for (int i = 0; i < NUM_LOGS; i++) {
        // Sleep so consecutive rollovers get distinct timestamp suffixes.
        TimeUnit.MILLISECONDS.sleep(100);
        // log an exception - this produces enough text to force a new logfile
        // (as appender.sliding.policies.size.size=1KB)
        logger.error(errorString + i, new RuntimeException("part of a test"));
    }

    // Check log files look OK.
    DirectoryStream<Path> stream = Files.newDirectoryStream(parent, fileName + ".*");
    int count = 0;
    for (Path path : stream) {
        count++;
        String contents = new String(Files.readAllBytes(path), "UTF-8");
        // There should be one exception message per file.
        assertTrue("File " + path + " did not have expected content", contents.contains(errorString));
        String suffix = StringUtils.substringAfterLast(path.toString(), ".");
        // suffix should be a timestamp; the parsed value itself is unused — only
        // parseability is being asserted here.
        try {
            long timestamp = Long.parseLong(suffix);
        } catch (NumberFormatException e) {
            fail("Suffix " + suffix + " is not a long");
        }
    }
    assertEquals("bad count of log files", NUM_LOGS, count);

    // Check there is no log file without the suffix.
    assertFalse("file should not exist:" + logTemplate, Files.exists(logTemplate));

    // Clean up.
    deleteLogFiles(parent, fileName);
}
From source file:org.eclipse.che.api.fs.server.impl.FsOperations.java
/**
 * Copies {@code srcFsPath} to {@code dstFsPath}, creating any missing ancestor
 * directories of the destination first. Directories are copied recursively;
 * regular files are copied as-is.
 *
 * @param srcFsPath source file or directory
 * @param dstFsPath destination path
 * @throws ServerException if directory creation or the copy fails
 */
void copyWithParents(Path srcFsPath, Path dstFsPath) throws ServerException {
    try {
        Files.createDirectories(dstFsPath.getParent());
        final File source = srcFsPath.toFile();
        final File destination = dstFsPath.toFile();
        if (Files.isDirectory(srcFsPath)) {
            FileUtils.copyDirectory(source, destination);
        } else {
            FileUtils.copyFile(source, destination);
        }
    } catch (IOException e) {
        throw new ServerException("Failed to copy item " + srcFsPath + " to " + dstFsPath, e);
    }
}