List of usage examples for java.nio.file.Files.readSymbolicLink
public static Path readSymbolicLink(Path link) throws IOException
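Before the project examples below, a minimal, self-contained warm-up sketch of the call itself. The class and file names here are made up for illustration; the point is that readSymbolicLink returns the stored target verbatim (possibly relative, possibly dangling), and that calling it on something that is not a symbolic link typically fails with NotLinkException.

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.NotLinkException;
import java.nio.file.Path;

public class ReadSymbolicLinkDemo {
    public static void main(String[] args) throws IOException {
        // Requires a file system and account allowed to create symlinks
        // (on Windows this usually means an elevated prompt or developer mode).
        Path dir = Files.createTempDirectory("readlink-demo");
        Path target = Files.createFile(dir.resolve("target.txt"));
        Path link = Files.createSymbolicLink(dir.resolve("link.txt"), target.getFileName());

        // Returns the stored target as-is; here that is the relative path "target.txt".
        // Nothing is resolved and the target is not required to exist.
        Path stored = Files.readSymbolicLink(link);
        System.out.println("link points to: " + stored);

        // On a path that is not a symbolic link the call fails.
        try {
            Files.readSymbolicLink(target);
        } catch (NotLinkException e) {
            System.out.println("not a link: " + target);
        }
    }
}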
From source file: org.apache.storm.localizer.LocalizedResource.java
@Override
public void cleanupOrphanedData() throws IOException {
    //There are a few possible files that we would want to clean up
    //baseDir + "/" + "_tmp_" + baseName
    //baseDir + "/" + "_tmp_" + baseName + ".current"
    //baseDir + "/" + baseName.<VERSION>
    //baseDir + "/" + baseName.current
    //baseDir + "/" + baseName.version
    //In general we always want to delete the _tmp_ files if they are there.
    Path tmpOutput = tmpOutputLocation();
    Files.deleteIfExists(tmpOutput);
    Path tmpSym = tmpSymlinkLocation();
    Files.deleteIfExists(tmpSym);
    try {
        String baseName = getKey();
        long version = getLocalVersion();
        Path current = getCurrentSymlinkPath();
        //If .current and .version do not match, we roll back the .version file to match
        //what .current is pointing to.
        if (Files.exists(current) && Files.isSymbolicLink(current)) {
            Path versionFile = Files.readSymbolicLink(current);
            Matcher m = VERSION_FILE_PATTERN.matcher(versionFile.getFileName().toString());
            if (m.matches()) {
                long foundVersion = Long.valueOf(m.group(2));
                if (foundVersion != version) {
                    LOG.error("{} does not match the version file so fix the version file", current);
                    //The versions are different so roll back to whatever current is
                    try (PrintWriter restoreWriter = new PrintWriter(
                            new BufferedWriter(new FileWriter(versionFilePath.toFile(), false)))) {
                        restoreWriter.println(foundVersion);
                    }
                    version = foundVersion;
                }
            }
        }
        // Finally delete any baseName.<VERSION> files that are not pointed to by the current version
        final long finalVersion = version;
        LOG.debug("Looking to clean up after {} in {}", getKey(), baseDir);
        try (DirectoryStream<Path> ds = fsOps.newDirectoryStream(baseDir, (path) -> {
            Matcher m = VERSION_FILE_PATTERN.matcher(path.getFileName().toString());
            if (m.matches()) {
                long foundVersion = Long.valueOf(m.group(2));
                return m.group(1).equals(baseName) && foundVersion != finalVersion;
            }
            return false;
        })) {
            for (Path p : ds) {
                LOG.info("Cleaning up old localized resource file {}", p);
                if (Files.isDirectory(p)) {
                    FileUtils.deleteDirectory(p.toFile());
                } else {
                    fsOps.deleteIfExists(p.toFile());
                }
            }
        }
    } catch (NoSuchFileException e) {
        LOG.warn("Nothing to cleanup with baseDir {} even though we expected there to be something there",
                baseDir);
    }
}
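The Storm method above uses readSymbolicLink to discover which versioned file the ".current" link points at before reconciling its bookkeeping. A reduced sketch of just that pattern, with a hypothetical file-name pattern and helper class standing in for Storm's internals:

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

final class CurrentVersionReader {
    // Hypothetical naming scheme: "<baseName>.<version>", e.g. "resource.7".
    private static final Pattern VERSION_FILE = Pattern.compile("(.+)\\.(\\d+)");

    /** Version number the ".current" symlink points at, or -1 if it cannot be determined. */
    static long readCurrentVersion(Path currentLink) throws IOException {
        if (!Files.exists(currentLink) || !Files.isSymbolicLink(currentLink)) {
            return -1;
        }
        // The stored target may be relative; only its file name matters here.
        Path target = Files.readSymbolicLink(currentLink);
        Matcher m = VERSION_FILE.matcher(target.getFileName().toString());
        return m.matches() ? Long.parseLong(m.group(2)) : -1;
    }
}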
From source file: org.panbox.desktop.common.vfs.PanboxFSLinux.java
public synchronized void readlink(final String path, final ByteBuffer buffer, final long size)
        throws IOException {
    try {
        // TODO: Here, we parse 3 times for the Share that manages the File
        String fullpath = FilenameUtils.getFullPath(path);
        String sharePath = FilenameUtils.normalize(fullpath.substring(0, fullpath.indexOf('/', 1) + 1));
        VirtualFile vpath = getVirtualFileForFileName(path);
        String target = Files.readSymbolicLink(vpath.getFile().toPath()).toString();
        String shareloc = FilenameUtils.concat(fullpath, target.toString());
        if (FilenameUtils.directoryContains(sharePath, shareloc)
                || FilenameUtils.equals(sharePath, shareloc)) {
            SecretKey sk = backingStorage.getObfuscationKeyForFile(shareloc);
            Obfuscator obfuscator = backingStorage.getObfuscator(shareloc);
            // create deobfuscated symlink target
            String[] targetparts = target.split("/");
            StringBuffer res = new StringBuffer();
            if (target.startsWith(File.separator)) {
                res.append(File.separator);
            }
            for (int i = 0; i < targetparts.length; i++) {
                String cur = targetparts[i];
                if (cur.equals(".") || cur.equals("..")) {
                    res.append(cur);
                } else {
                    // append obfuscated part of path
                    res.append(obfuscator.deObfuscate(cur, sk));
                }
                // append intermediary separators
                if (i < targetparts.length - 1) {
                    res.append(File.separator);
                }
            }
            if (target.endsWith(File.separator)) {
                res.append(File.separator);
            }
            byte[] ret = res.toString().getBytes();
            int realsize = Math.min(ret.length, (int) size);
            buffer.put(ret, 0, realsize);
            logger.debug("readlink, Link : " + path + ", Target: " + res.toString());
        } else {
            throw new IOException("Symlinks outside of shares are not supported.");
        }
    } catch (ObfuscationException e) {
        throw new IOException("Error deobfuscating symlink contents!", e);
    }
}
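The Panbox readlink above relies on the fact that readSymbolicLink returns the target exactly as stored, so a relative target has to be interpreted against the directory containing the link before asking whether it escapes the share. A stripped-down sketch of that containment check (class and method names are mine; plain java.nio.file resolution stands in for the commons-io calls of the original):

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;

final class SymlinkContainment {
    /** True if the link's (possibly relative) stored target resolves to a location inside baseDir. */
    static boolean targetStaysInside(Path link, Path baseDir) throws IOException {
        Path target = Files.readSymbolicLink(link);
        // A relative target is interpreted against the directory containing the link.
        Path resolved = link.toAbsolutePath().getParent().resolve(target).normalize();
        return resolved.startsWith(baseDir.toAbsolutePath().normalize());
    }
}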
From source file: com.android.repository.util.InstallerUtilTest.java
public void testUnzip() throws Exception {
    if (new MockFileOp().isWindows()) {
        // can't run on windows.
        return;
    }
    // zip needs a real file, so no MockFileOp for us.
    FileOp fop = FileOpUtils.create();
    Path root = Files.createTempDirectory("InstallerUtilTest");
    Path outRoot = Files.createTempDirectory("InstallerUtilTest");
    try {
        Path file1 = root.resolve("foo");
        Files.write(file1, "content".getBytes());
        Path dir1 = root.resolve("bar");
        Files.createDirectories(dir1);
        Path file2 = dir1.resolve("baz");
        Files.write(file2, "content2".getBytes());
        Files.createSymbolicLink(root.resolve("link1"), dir1);
        Files.createSymbolicLink(root.resolve("link2"), file2);

        Path outZip = outRoot.resolve("out.zip");
        try (ZipArchiveOutputStream out = new ZipArchiveOutputStream(outZip.toFile());
                Stream<Path> listing = Files.walk(root)) {
            listing.forEach(path -> {
                try {
                    ZipArchiveEntry archiveEntry = (ZipArchiveEntry) out.createArchiveEntry(path.toFile(),
                            root.relativize(path).toString());
                    out.putArchiveEntry(archiveEntry);
                    if (Files.isSymbolicLink(path)) {
                        archiveEntry.setUnixMode(UnixStat.LINK_FLAG | archiveEntry.getUnixMode());
                        out.write(path.getParent().relativize(Files.readSymbolicLink(path)).toString()
                                .getBytes());
                    } else if (!Files.isDirectory(path)) {
                        out.write(Files.readAllBytes(path));
                    }
                    out.closeArchiveEntry();
                } catch (Exception e) {
                    fail();
                }
            });
        }
        Path unzipped = outRoot.resolve("unzipped");
        Files.createDirectories(unzipped);
        InstallerUtil.unzip(outZip.toFile(), unzipped.toFile(), fop, 1, new FakeProgressIndicator());
        assertEquals("content", new String(Files.readAllBytes(unzipped.resolve("foo"))));
        Path resultDir = unzipped.resolve("bar");
        Path resultFile2 = resultDir.resolve("baz");
        assertEquals("content2", new String(Files.readAllBytes(resultFile2)));
        Path resultLink = unzipped.resolve("link1");
        assertTrue(Files.isDirectory(resultLink));
        assertTrue(Files.isSymbolicLink(resultLink));
        assertTrue(Files.isSameFile(resultLink, resultDir));
        Path resultLink2 = unzipped.resolve("link2");
        assertEquals("content2", new String(Files.readAllBytes(resultLink2)));
        assertTrue(Files.isSymbolicLink(resultLink2));
        assertTrue(Files.isSameFile(resultLink2, resultFile2));
    } finally {
        fop.deleteFileOrFolder(root.toFile());
        fop.deleteFileOrFolder(outRoot.toFile());
    }
}
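In the test above, the zip entry body for a symlink is the link target relativized against the link's parent directory, so the archive can be unpacked anywhere. The hypothetical helper below isolates that one expression; like the test, it assumes the stored target is an absolute path, since Path.relativize cannot mix absolute and relative paths:

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;

final class ArchiveSymlinks {
    /**
     * The string written as the zip entry body for a symlink: the target expressed
     * relative to the directory holding the link. Assumes the stored target is
     * absolute (as in the test); a relative target would have to be resolved
     * against link.getParent() first, or relativize() will throw.
     */
    static String relativeTargetFor(Path link) throws IOException {
        Path storedTarget = Files.readSymbolicLink(link);
        return link.getParent().relativize(storedTarget).toString();
    }
}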
From source file: org.opendedup.sdfs.filestore.cloud.BatchAwsS3ChunkStore.java
@Override
public void uploadFile(File f, String to, String pp) throws IOException {
    this.s3clientLock.readLock().lock();
    try {
        InputStream in = null;
        while (to.startsWith(File.separator))
            to = to.substring(1);
        String pth = pp + "/" + EncyptUtils.encString(to, Main.chunkStoreEncryptionEnabled);
        SDFSLogger.getLog().info("uploading " + f.getPath() + " to " + to + " pth " + pth);
        boolean isDir = false;
        boolean isSymlink = false;
        if (!OSValidator.isWindows()) {
            isDir = Files.readAttributes(f.toPath(), PosixFileAttributes.class, LinkOption.NOFOLLOW_LINKS)
                    .isDirectory();
            isSymlink = Files.readAttributes(f.toPath(), PosixFileAttributes.class, LinkOption.NOFOLLOW_LINKS)
                    .isSymbolicLink();
        } else {
            isDir = f.isDirectory();
        }
        if (isSymlink) {
            try {
                HashMap<String, String> metaData = new HashMap<String, String>();
                metaData.put("encrypt", Boolean.toString(Main.chunkStoreEncryptionEnabled));
                metaData.put("lastmodified", Long.toString(f.lastModified()));
                String slp = EncyptUtils.encString(Files.readSymbolicLink(f.toPath()).toFile().getPath(),
                        Main.chunkStoreEncryptionEnabled);
                metaData.put("symlink", slp);
                ObjectMetadata md = new ObjectMetadata();
                md.setContentType("binary/octet-stream");
                md.setContentLength(pth.getBytes().length);
                md.setUserMetadata(metaData);
                PutObjectRequest req = new PutObjectRequest(this.name, pth,
                        new ByteArrayInputStream(pth.getBytes()), md);
                s3Service.putObject(req);
                if (this.isClustered())
                    this.checkoutFile(pth);
            } catch (Exception e1) {
                throw new IOException(e1);
            }
        } else if (isDir) {
            HashMap<String, String> metaData = FileUtils.getFileMetaData(f, Main.chunkStoreEncryptionEnabled);
            metaData.put("encrypt", Boolean.toString(Main.chunkStoreEncryptionEnabled));
            metaData.put("lastmodified", Long.toString(f.lastModified()));
            metaData.put("directory", "true");
            ObjectMetadata md = new ObjectMetadata();
            md.setContentType("binary/octet-stream");
            md.setContentLength(pth.getBytes().length);
            md.setUserMetadata(metaData);
            try {
                PutObjectRequest req = new PutObjectRequest(this.name, pth,
                        new ByteArrayInputStream(pth.getBytes()), md);
                s3Service.putObject(req);
                if (this.isClustered())
                    this.checkoutFile(pth);
            } catch (Exception e1) {
                SDFSLogger.getLog().error("error uploading", e1);
                throw new IOException(e1);
            }
        } else {
            String rnd = RandomGUID.getGuid();
            File p = new File(this.staged_sync_location, rnd);
            File z = new File(this.staged_sync_location, rnd + ".z");
            File e = new File(this.staged_sync_location, rnd + ".e");
            while (z.exists()) {
                rnd = RandomGUID.getGuid();
                p = new File(this.staged_sync_location, rnd);
                z = new File(this.staged_sync_location, rnd + ".z");
                e = new File(this.staged_sync_location, rnd + ".e");
            }
            try {
                BufferedInputStream is = new BufferedInputStream(new FileInputStream(f));
                BufferedOutputStream os = new BufferedOutputStream(new FileOutputStream(p));
                IOUtils.copy(is, os);
                os.flush();
                os.close();
                is.close();
                if (Main.compress) {
                    CompressionUtils.compressFile(p, z);
                    p.delete();
                    p = z;
                }
                byte[] ivb = null;
                if (Main.chunkStoreEncryptionEnabled) {
                    try {
                        ivb = PassPhrase.getByteIV();
                        EncryptUtils.encryptFile(p, e, new IvParameterSpec(ivb));
                    } catch (Exception e1) {
                        throw new IOException(e1);
                    }
                    p.delete();
                    p = e;
                }
                String objName = pth;
                ObjectMetadata md = new ObjectMetadata();
                Map<String, String> umd = FileUtils.getFileMetaData(f, Main.chunkStoreEncryptionEnabled);
                md.setUserMetadata(umd);
                md.addUserMetadata("lz4compress", Boolean.toString(Main.compress));
                md.addUserMetadata("encrypt", Boolean.toString(Main.chunkStoreEncryptionEnabled));
                if (ivb != null)
                    md.addUserMetadata("ivspec", BaseEncoding.base64().encode(ivb));
                md.addUserMetadata("lastmodified", Long.toString(f.lastModified()));
                if (simpleS3) {
                    md.setContentType("binary/octet-stream");
                    in = new BufferedInputStream(new FileInputStream(p), 32768);
                    try {
                        if (md5sum) {
                            byte[] md5Hash = ServiceUtils.computeMD5Hash(in);
                            in.close();
                            String mds = BaseEncoding.base64().encode(md5Hash);
                            md.setContentMD5(mds);
                            md.addUserMetadata("md5sum", mds);
                        }
                    } catch (NoSuchAlgorithmException e2) {
                        SDFSLogger.getLog().error("while hashing", e2);
                        throw new IOException(e2);
                    }
                    in = new FileInputStream(p);
                    md.setContentLength(p.length());
                    try {
                        PutObjectRequest req = new PutObjectRequest(this.name, objName, in, md);
                        s3Service.putObject(req);
                        if (this.isClustered())
                            this.checkoutFile(pth);
                        SDFSLogger.getLog().debug(
                                "uploaded=" + f.getPath() + " lm=" + md.getUserMetadata().get("lastmodified"));
                    } catch (AmazonS3Exception e1) {
                        if (e1.getStatusCode() == 409) {
                            try {
                                s3Service.deleteObject(this.name, objName);
                                this.uploadFile(f, to, pp);
                                return;
                            } catch (Exception e2) {
                                throw new IOException(e2);
                            }
                        } else {
                            throw new IOException(e1);
                        }
                    } catch (Exception e1) {
                        // SDFSLogger.getLog().error("error uploading", e1);
                        throw new IOException(e1);
                    }
                } else {
                    try {
                        md.setContentType("binary/octet-stream");
                        in = new BufferedInputStream(new FileInputStream(p), 32768);
                        byte[] md5Hash = ServiceUtils.computeMD5Hash(in);
                        in.close();
                        String mds = BaseEncoding.base64().encode(md5Hash);
                        md.setContentMD5(mds);
                        md.addUserMetadata("md5sum", mds);
                        in = new BufferedInputStream(new FileInputStream(p), 32768);
                        md.setContentLength(p.length());
                        PutObjectRequest req = new PutObjectRequest(this.name, objName, in, md);
                        multiPartUpload(req);
                        if (this.isClustered())
                            this.checkoutFile(pth);
                    } catch (AmazonS3Exception e1) {
                        if (e1.getStatusCode() == 409) {
                            try {
                                s3Service.deleteObject(this.name, objName);
                                this.uploadFile(f, to, pp);
                                return;
                            } catch (Exception e2) {
                                throw new IOException(e2);
                            }
                        } else {
                            throw new IOException(e1);
                        }
                    } catch (Exception e1) {
                        // SDFSLogger.getLog().error("error uploading", e1);
                        throw new IOException(e1);
                    }
                }
            } finally {
                try {
                    if (in != null)
                        in.close();
                } finally {
                    p.delete();
                    z.delete();
                    e.delete();
                }
            }
        }
    } finally {
        this.s3clientLock.readLock().unlock();
    }
}