List of usage examples for java.nio.file.Path.getFileName()

Path getFileName();
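Before the project examples, a minimal standalone sketch of what getFileName() returns (the paths here are hypothetical, chosen only for illustration):

import java.nio.file.Path;
import java.nio.file.Paths;

public class GetFileNameDemo {
    public static void main(String[] args) {
        // getFileName() returns the farthest element from the root, as a Path
        Path path = Paths.get("/var/log/app/server.log"); // illustrative path
        System.out.println(path.getFileName()); // prints "server.log"

        // A root path has no name elements, so getFileName() returns null
        System.out.println(Paths.get("/").getFileName()); // prints "null"
    }
}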
From source file: com.spectralogic.ds3client.helpers.FileObjectPutter_Test.java
A test helper that passes a file's simple name, obtained via getFileName(), to a channel builder and then verifies the channel's contents.
private void getFileWithPutter(final Path dir, final Path file) throws IOException {
    try {
        final FileObjectPutter putter = new FileObjectPutter(dir);
        try (final SeekableByteChannel newChannel = putter.buildChannel(file.getFileName().toString())) {
            assertThat(newChannel, is(notNullValue()));
            final ByteBuffer buff = ByteBuffer.allocate(testData.length);
            assertThat(newChannel.read(buff), is(testData.length));
            assertThat(new String(buff.array(), Charset.forName("UTF-8")), is(testString));
        }
    } finally {
        Files.deleteIfExists(file);
    }
}
From source file: com.qwazr.extractor.ExtractorServiceImpl.java
Derives the file extension from getFileName() to pick a parser, falling back to MIME-type detection when no parser matches the extension.
private ParserResult putMagicPath(UriInfo uriInfo, String filePath, String mimeType) throws Exception {
    final MultivaluedMap<String, String> queryParameters = getQueryParameters(uriInfo);
    final Path path = getFilePath(filePath);

    // Find a parser with the extension
    final String extension = FilenameUtils.getExtension(path.getFileName().toString());
    Class<? extends ParserInterface> parserClass = getClassParserExtension(extension);

    // Find a parser with the mimeType
    if (parserClass == null) {
        if (StringUtils.isEmpty(mimeType))
            mimeType = getMimeMagic(path);
        if (!StringUtils.isEmpty(mimeType))
            parserClass = getClassParserMimeType(mimeType);
    }

    // Do the extraction
    final ParserInterface parser = getParser(parserClass);
    final ParserResultBuilder result = new ParserResultBuilder(parser);
    parser.parseContent(queryParameters, path, extension, mimeType, result);
    return result.build();
}
From source file: com.reactive.hzdfs.dll.JarModuleLoader.java
Builds an absolute file path from each WatchService event by appending the event path's getFileName() to the watched directory.
private Set<File> filesFromEvents() throws InterruptedException {
    WatchKey key = watcher.take();
    Set<File> files = new LinkedHashSet<File>();
    if (key != null && key.isValid()) {
        for (WatchEvent<?> event : key.pollEvents()) {
            if (event.kind() == StandardWatchEventKinds.ENTRY_CREATE
                    || event.kind() == StandardWatchEventKinds.ENTRY_MODIFY) {
                Path item = (Path) event.context();
                File file = new File(
                        ((Path) key.watchable()).toAbsolutePath() + File.separator + item.getFileName());
                if (log.isDebugEnabled()) {
                    log.debug("Watch Event: " + event.kind() + ": " + file);
                }
                if (isJarFile(file)) {
                    files.add(file);
                } else {
                    log.warn("[JAR Loader] Ignoring file: " + file);
                }
            }
        }
        key.reset();
    }
    return files;
}
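As an aside, the same absolute path can be built without string concatenation; a minimal sketch of the equivalent using Path.resolve(), assuming the key and event variables from the loop above (the helper and its name are hypothetical):

import java.io.File;
import java.nio.file.Path;
import java.nio.file.WatchEvent;
import java.nio.file.WatchKey;

final class WatchEventPaths {
    // Hypothetical helper: same result as the File construction above,
    // resolving the event's relative path against the watched directory.
    static File fileFromEvent(WatchKey key, WatchEvent<?> event) {
        Path watched = (Path) key.watchable(); // directory the key was registered on
        Path item = (Path) event.context();    // path relative to that directory
        return watched.toAbsolutePath().resolve(item).toFile();
    }
}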
From source file: de.tiqsolutions.hdfs.HadoopFileSystemPath.java
Implements Path.getName(int) by walking up the parent chain and returning getFileName() of what remains.
@Override
public Path getName(int index) {
    int c = getNameCount();
    if (index >= c || index < 0)
        return null;
    Path p = this;
    // Walk up until only the first (index + 1) elements remain;
    // the last element of that prefix is the requested name.
    for (int i = 0; i < c - index - 1; i++) {
        p = p.getParent();
    }
    return p.getFileName();
}
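For context, a quick standalone check of the Path.getName(int) contract that the loop above reproduces (hypothetical path, standard java.nio.file API):

import java.nio.file.Path;
import java.nio.file.Paths;

public class GetNameDemo {
    public static void main(String[] args) {
        Path p = Paths.get("/usr/local/bin"); // illustrative path
        // Name elements are indexed starting at the element closest to the root
        System.out.println(p.getName(0)); // prints "usr"
        System.out.println(p.getName(1)); // prints "local"
        // The last name element is exactly what getFileName() returns
        System.out.println(p.getName(p.getNameCount() - 1)); // prints "bin"
        System.out.println(p.getFileName());                 // prints "bin"
    }
}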
From source file: com.gwac.job.FileTransferServiceImpl.java
Registers a directory with a WatchService and logs the directory's getFileName().
void test() {
    try {
        System.out.println("123");
        watcher = FileSystems.getDefault().newWatchService();
        Path dir = Paths.get("E:/TestData/gwacTest");
        dir.register(watcher, ENTRY_CREATE, ENTRY_MODIFY);
        System.out.println("Watch Service registered for dir: " + dir.getFileName());
        isSuccess = true;
    } catch (IOException ex) {
        isSuccess = false;
        ex.printStackTrace();
    }
}
From source file: br.com.thiaguten.archive.AbstractArchive.java
Uses getFileName() to derive each input path's simple name, which names the archive when a single path is compressed.
/**
 * Generic compress implementation
 */
@Override
public Path compress(Path... paths) throws IOException {
    Path compress = null;
    ArchiveOutputStream archiveOutputStream = null;

    for (Path path : paths) {
        // get path infos
        final Path parent = path.getParent();
        final String name = path.getFileName().toString();
        final boolean isDirectory = isDirectory(path);

        if (compress == null) {
            // create compress file
            String compressName = (paths.length == 1 ? name : getName());
            compress = Paths.get(parent.toString(), compressName + getExtension());

            // creates a new compress file to not override if already exists
            // if you do not want this behavior, just comment this line
            compress = createFile(ArchiveAction.COMPRESS, parent, compress);

            // open compress file stream
            archiveOutputStream = createArchiveOutputStream(
                    new BufferedOutputStream(newOutputStream(compress)));

            logger.debug("creating the archive file " + compressName);
        }

        logger.debug("reading path " + path);

        if (isDirectory) {
            compressDirectory(parent, path, archiveOutputStream);
        } else {
            compressFile(parent, path, archiveOutputStream);
        }
    }

    // closing streams
    if (archiveOutputStream != null) {
        archiveOutputStream.finish();
        archiveOutputStream.close();
    }

    logger.debug("finishing the archive file: " + compress);
    return compress;
}
From source file: de.dentrassi.rpm.builder.YumMojo.java
Uses getFileName() inside a FileVisitor to collect files whose names end in ".rpm" when assembling a yum repository.
@Override
public void execute() throws MojoExecutionException, MojoFailureException {
    this.logger = new Logger(getLog());
    try {
        final Builder builder = new RepositoryCreator.Builder();
        builder.setTarget(new FileSystemSpoolOutTarget(this.outputDirectory.toPath()));

        if (!this.skipSigning) {
            final PGPPrivateKey privateKey = SigningHelper.loadKey(this.signature, this.logger);
            if (privateKey != null) {
                final int digestAlgorithm = HashAlgorithm.from(this.signature.getHashAlgorithm()).getValue();
                builder.setSigning(output -> new SigningStream(output, privateKey, digestAlgorithm, false,
                        "RPM builder Mojo - de.dentrassi.maven:rpm"));
            }
        }

        final RepositoryCreator creator = builder.build();

        this.packagesPath = new File(this.outputDirectory, "packages");
        Files.createDirectories(this.packagesPath.toPath());

        final Collection<Path> paths = Lists.newArrayList();

        if (!this.skipDependencies) {
            final Set<Artifact> deps = this.project.getArtifacts();
            if (deps != null) {
                paths.addAll(deps.stream() //
                        .filter(d -> d.getType().equalsIgnoreCase("rpm")) //
                        .map(d -> d.getFile().toPath()) //
                        .collect(Collectors.toList()));
            }
        } else {
            this.logger.debug("Skipped RPM artifacts from maven dependencies");
        }

        if (this.files != null) {
            paths.addAll(this.files.stream().map(f -> f.toPath()).collect(Collectors.toList()));
        }

        if (this.directories != null) {
            for (final File dir : this.directories) {
                Files.walkFileTree(dir.toPath(), new SimpleFileVisitor<Path>() {
                    @Override
                    public FileVisitResult visitFile(final Path file, final BasicFileAttributes attrs)
                            throws IOException {
                        if (file.getFileName().toString().toLowerCase().endsWith(".rpm")) {
                            paths.add(file);
                        }
                        return FileVisitResult.CONTINUE;
                    }
                });
            }
        }

        addPackageList(creator, paths);
    } catch (final IOException e) {
        throw new MojoExecutionException("Failed to write repository", e);
    }
}
From source file: com.reactivetechnologies.platform.rest.dll.JarModuleLoader.java
A close variant of the JarModuleLoader example above, from a different project; getFileName() is used the same way.
private Set<File> filesFromEvents() throws InterruptedException {
    WatchKey key = watcher.take();
    Set<File> files = new LinkedHashSet<File>();
    if (key != null && key.isValid()) {
        for (WatchEvent<?> event : key.pollEvents()) {
            if (event.kind() == StandardWatchEventKinds.ENTRY_CREATE
                    || event.kind() == StandardWatchEventKinds.ENTRY_MODIFY) {
                Path item = (Path) event.context();
                File file = new File(
                        ((Path) key.watchable()).toAbsolutePath() + File.separator + item.getFileName());
                if (log.isDebugEnabled()) {
                    log.debug("Watch Event: " + event.kind() + ": " + file);
                }
                if (isJarFile(file)) {
                    files.add(file);
                } else {
                    log.warn("[JAR Loader] Ignoring file " + file);
                }
            }
        }
        key.reset();
    }
    return files;
}
From source file: ru.xxlabaza.popa.pack.PackingService.java
Checks getFileName() for the ".min.css" suffix so that already minified stylesheets are not compressed again before being inlined.
private void processCss(Document document) {
    document.select("link[rel=stylesheet]:not([href^=http])").forEach(link -> {
        Path path = build.resolve(createPath(link.attr("href")));
        log.info("Processing style '{}'", path);
        String content = commentRemoveService.removeComments(path);
        content = correctURLs(path, content);
        if (!path.getFileName().toString().endsWith(".min.css")) {
            content = compressService.compress(content, CSS);
        }
        Element style = document.createElement("style");
        style.html(content);
        link.after(style);
        link.remove();
    });
}
From source file: com.facebook.buck.rules.HttpArtifactCache.java
Uses getFileName() as the prefix for a temporary file created next to the final destination while fetching a cached artifact.
public CacheResult fetchImpl(RuleKey ruleKey, File file) throws IOException {
    Request request = createRequestBuilder(ruleKey.toString()).get().build();
    Response response = fetchCall(request);

    if (response.code() == HttpURLConnection.HTTP_NOT_FOUND) {
        LOGGER.info("fetch(%s): cache miss", ruleKey);
        return CacheResult.MISS;
    }

    if (response.code() != HttpURLConnection.HTTP_OK) {
        LOGGER.warn("fetch(%s): unexpected response: %d", ruleKey, response.code());
        return CacheResult.MISS;
    }

    // The hash code shipped with the artifact to/from the cache.
    HashCode expectedHashCode, actualHashCode;

    // Set up a temporary file, which sits next to the destination, to write to and
    // make sure all parent dirs exist.
    Path path = file.toPath();
    projectFilesystem.createParentDirs(path);
    Path temp = projectFilesystem.createTempFile(path.getParent(), path.getFileName().toString(), ".tmp");

    // Open the stream to the server just long enough to read the hash code and artifact.
    try (DataInputStream input = new DataInputStream(response.body().byteStream())) {

        // First, extract the size of the file data portion, which we put in the beginning of
        // the artifact.
        long length = input.readLong();

        // Now, write the remaining response data to the temp file, while grabbing the hash.
        try (BoundedInputStream boundedInput = new BoundedInputStream(input, length);
                HashingInputStream hashingInput = new HashingInputStream(hashFunction, boundedInput);
                OutputStream output = projectFilesystem.newFileOutputStream(temp)) {
            ByteStreams.copy(hashingInput, output);
            actualHashCode = hashingInput.hash();
        }

        // Lastly, extract the hash code from the end of the request data.
        byte[] hashCodeBytes = new byte[hashFunction.bits() / Byte.SIZE];
        ByteStreams.readFully(input, hashCodeBytes);
        expectedHashCode = HashCode.fromBytes(hashCodeBytes);

        // We should be at the end of output -- verify this. Also, we could just try to read a
        // single byte here, instead of all remaining input, but some network stack implementations
        // require that we exhaust the input stream before the connection can be reusable.
        try (OutputStream output = ByteStreams.nullOutputStream()) {
            if (ByteStreams.copy(input, output) != 0) {
                LOGGER.warn("fetch(%s): unexpected end of input", ruleKey);
                return CacheResult.MISS;
            }
        }
    }

    // Now form the checksum on the file we got and compare it to the checksum from the
    // HTTP header. If it's incorrect, log this and return a miss.
    if (!expectedHashCode.equals(actualHashCode)) {
        LOGGER.warn("fetch(%s): artifact had invalid checksum", ruleKey);
        projectFilesystem.deleteFileAtPath(temp);
        return CacheResult.MISS;
    }

    // Finally, move the temp file into its final place.
    projectFilesystem.move(temp, path, StandardCopyOption.REPLACE_EXISTING);

    LOGGER.info("fetch(%s): cache hit", ruleKey);
    return CacheResult.HTTP_HIT;
}