List of usage examples for java.io File getCanonicalFile
public File getCanonicalFile() throws IOException
From source file:org.apache.tools.ant.taskdefs.optional.net.FTP.java
/** * auto find the time difference between local and remote * @param ftp handle to ftp client/*from w ww.java2 s. c om*/ * @return number of millis to add to remote time to make it comparable to local time * @since ant 1.6 */ private long getTimeDiff(FTPClient ftp) { long returnValue = 0; File tempFile = findFileName(ftp); try { // create a local temporary file FILE_UTILS.createNewFile(tempFile); long localTimeStamp = tempFile.lastModified(); BufferedInputStream instream = new BufferedInputStream(new FileInputStream(tempFile)); ftp.storeFile(tempFile.getName(), instream); instream.close(); boolean success = FTPReply.isPositiveCompletion(ftp.getReplyCode()); if (success) { FTPFile[] ftpFiles = ftp.listFiles(tempFile.getName()); if (ftpFiles.length == 1) { long remoteTimeStamp = ftpFiles[0].getTimestamp().getTime().getTime(); returnValue = localTimeStamp - remoteTimeStamp; } ftp.deleteFile(ftpFiles[0].getName()); } // delegate the deletion of the local temp file to the delete task // because of race conditions occurring on Windows Delete mydelete = new Delete(); mydelete.bindToOwner(this); mydelete.setFile(tempFile.getCanonicalFile()); mydelete.execute(); } catch (Exception e) { throw new BuildException(e, getLocation()); } return returnValue; }
From source file:com.github.carlosrubio.org.apache.tools.ant.taskdefs.optional.net.FTP.java
/** * auto find the time difference between local and remote * @param ftp handle to ftp client//w w w .j a v a 2 s .c om * @return number of millis to add to remote time to make it comparable to local time * @since ant 1.6 */ private long getTimeDiff(FTPClient ftp) { long returnValue = 0; File tempFile = findFileName(ftp); try { // create a local temporary file FILE_UTILS.createNewFile(tempFile); long localTimeStamp = tempFile.lastModified(); BufferedInputStream instream = new BufferedInputStream(new FileInputStream(tempFile)); ftp.storeFile(tempFile.getName(), instream); instream.close(); boolean success = FTPReply.isPositiveCompletion(ftp.getReplyCode()); if (success) { FTPFile[] ftpFiles = ftp.listFiles(tempFile.getName()); if (ftpFiles.length == 1) { long remoteTimeStamp = ftpFiles[0].getTimestamp().getTime().getTime(); returnValue = localTimeStamp - remoteTimeStamp; } ftp.deleteFile(ftpFiles[0].getName()); } // delegate the deletion of the local temp file to the delete task // because of race conditions occuring on Windows Delete mydelete = new Delete(); mydelete.bindToOwner(this); mydelete.setFile(tempFile.getCanonicalFile()); mydelete.execute(); } catch (Exception e) { throw new BuildException(e, getLocation()); } return returnValue; }
From source file:com.kodemore.utility.Kmu.java
/**
 * Ensure that the specified path exists as a directory. If it does not exist
 * then attempt to create it, creating missing parents recursively. The entire
 * path is assumed to name a directory.
 *
 * @param f the directory path to ensure
 * @return true if the directory already existed or was successfully created;
 *         false otherwise (including when the path exists as a regular file)
 * @throws RuntimeException (via toRuntime) if the canonical path cannot be
 *         resolved
 */
public static boolean createFolder(File f) {
    try {
        // canonicalize so "."/".." segments and symlinks are resolved once up front
        f = f.getCanonicalFile();
        if (f.exists())
            // FIX: the original returned true for any existing path, even a
            // regular file; only an existing *directory* counts as success
            return f.isDirectory();
        File p = f.getParentFile();
        // FIX: p == null means we recursed past the filesystem root; the
        // original would have thrown a NullPointerException in the recursive call
        if (p != null && !createFolder(p))
            return false;
        return f.mkdir();
    } catch (IOException ex) {
        throw toRuntime(ex);
    }
}
From source file:edu.ku.brc.specify.extras.ViewToSchemaReview.java
/**
 * Dumps every form (and the named fields on each form) known to the current
 * Specify application context as an HTML table written to "FormFields.html",
 * then either opens the file in a browser or shows its location in a dialog.
 *
 * @param doShowInBrowser if true open the generated HTML in a browser,
 *                        otherwise show a message dialog with the file path
 */
public static void dumpFormFieldList(final boolean doShowInBrowser) {
    List<ViewIFace> viewList = ((SpecifyAppContextMgr) AppContextMgr.getInstance()).getEntirelyAllViews();
    Hashtable<String, ViewIFace> hash = new Hashtable<String, ViewIFace>();
    for (ViewIFace view : viewList) {
        hash.put(view.getName(), view);
    }
    // sort the view names so the report is alphabetical and stable
    Vector<String> names = new Vector<String>(hash.keySet());
    Collections.sort(names);
    try {
        File file = new File("FormFields.html");
        PrintWriter pw = new PrintWriter(file);
        pw.println(
                "<HTML><HEAD><TITLE>Form Fields</TITLE><link rel=\"stylesheet\" href=\"http://specify6.specifysoftware.org/schema/specify6.css\" type=\"text/css\"/></HEAD><BODY>");
        pw.println("<center>");
        pw.println("<H2>Forms and Fields</H2>");
        pw.println("<center><table class=\"brdr\" border=\"0\" cellspacing=\"0\">");
        int formCnt = 0;
        int fieldCnt = 0;
        for (String name : names) {
            ViewIFace view = hash.get(name);
            // NOTE(review): this flag is set when a mode is NOT EDIT, yet it is
            // named "hasEdit" -- the condition looks inverted; confirm intent
            boolean hasEdit = false;
            for (AltViewIFace altView : view.getAltViews()) {
                if (altView.getMode() != AltViewIFace.CreationMode.EDIT) {
                    hasEdit = true;
                    break;
                }
            }
            //int numViews = view.getAltViews().size();
            for (AltViewIFace altView : view.getAltViews()) {
                //AltView av = (AltView)altView;
                // only report the VIEW-mode variant of each form definition
                if ((hasEdit && altView.getMode() == AltViewIFace.CreationMode.VIEW)) {
                    ViewDefIFace vd = altView.getViewDef();
                    if (vd instanceof FormViewDef) {
                        formCnt++;
                        FormViewDef fvd = (FormViewDef) vd;
                        pw.println("<tr><td class=\"brdrodd\">");
                        pw.println(fvd.getName());
                        pw.println("</td></tr>");
                        // NOTE(review): 'r' is intended to alternate row CSS
                        // classes but is never incremented, so every field row
                        // gets "brdreven" -- confirm whether that is intended
                        int r = 1;
                        for (FormRowIFace fri : fvd.getRows()) {
                            FormRow fr = (FormRow) fri;
                            for (FormCellIFace cell : fr.getCells()) {
                                if (StringUtils.isNotEmpty(cell.getName())) {
                                    if (cell.getType() == FormCellIFace.CellType.panel) {
                                        // a panel cell contributes one row per contained field name
                                        FormCellPanelIFace panelCell = (FormCellPanelIFace) cell;
                                        for (String fieldName : panelCell.getFieldNames()) {
                                            pw.print("<tr><td ");
                                            pw.print("class=\"");
                                            pw.print(r % 2 == 0 ? "brdrodd" : "brdreven");
                                            pw.print("\"> " + fieldName);
                                            pw.println("</td></tr>");
                                            fieldCnt++;
                                        }
                                    } else if (cell.getType() == FormCellIFace.CellType.field
                                            || cell.getType() == FormCellIFace.CellType.subview) {
                                        pw.print("<tr><td ");
                                        pw.print("class=\"");
                                        pw.print(r % 2 == 0 ? "brdrodd" : "brdreven");
                                        pw.print("\"> " + cell.getName());
                                        pw.println("</td></tr>");
                                        fieldCnt++;
                                    }
                                }
                            }
                        }
                    }
                }
            }
        }
        pw.println("</table></center><br>");
        pw.println("Number of Forms: " + formCnt + "<br>");
        pw.println("Number of Fields: " + fieldCnt + "<br>");
        pw.println("</body></html>");
        pw.close();
        try {
            if (doShowInBrowser) {
                AttachmentUtils.openURI(file.toURI());
            } else {
                JOptionPane.showMessageDialog(getTopWindow(),
                        String.format(getResourceString("FormDisplayer.OUTPUT"), file.getCanonicalFile()));
            }
        } catch (Exception ex) {
            ex.printStackTrace();
        }
    } catch (Exception ex) {
        ex.printStackTrace();
    }
}
From source file:com.kodemore.utility.Kmu.java
/**
 * Check the case of a file name: compares the last segment of the path
 * exactly as given against the canonical (filesystem-reported) name, so a
 * case mismatch on a case-insensitive filesystem is detected.
 *
 * @param path the path to check
 * @return true if the given name matches the canonical name; false on any
 *         mismatch or if the path cannot be resolved
 */
public static boolean checkFileNameCase(String path) {
    try {
        File given = new File(path);
        File canonical = given.getCanonicalFile();
        return isEqual(given.getName(), canonical.getName());
    } catch (Exception ex) {
        // unresolvable path: report a mismatch rather than propagate
        return false;
    }
}
From source file:org.servalproject.maps.osmbboxsplit.BBoxSplit.java
/**
 * Read an OSM PBF file and optionally write an osmosis script using the
 * supplied template. When the file is at least {@code minFileSize} MB its
 * bounding box is split into four quadrants (A-D, split at the midpoint of
 * the lat/lng range) and a shell script is generated from the template that
 * extracts each quadrant.
 *
 * @param inputFile path to the input file
 * @param outputDir path to the output directory
 * @param template the contents of the template file (null = no script written)
 * @param minFileSize the minimum file size, in MB, of files to process
 * @throws IOException if the specified file cannot be read
 */
public static void readFile(File inputFile, File outputDir, String template, int minFileSize)
        throws IOException {
    // check the parameters
    try {
        if (FileUtils.isFileAccessible(inputFile.getCanonicalPath()) == false) {
            throw new IOException("unable to access the required file");
        }
        System.out.println("Processing file: " + inputFile.getCanonicalPath());
    } catch (IOException e) {
        throw new IOException("unable to access the required file", e);
    }
    // check to see if this file should be ignored
    if (OsmBBoxSplit.ignoreList.size() > 0) {
        if (OsmBBoxSplit.ignoreList.contains(inputFile.getCanonicalPath())) {
            System.out.println("WARNING: File specified in the ignore list, skipping...");
            return;
        }
    }
    // read the data in the file; the parser collects the bounding box as a side effect
    BlockInputStream blockinput;
    BinaryDataParser dataParser = new BinaryDataParser();
    try {
        blockinput = (new BlockInputStream(new FileInputStream(inputFile), dataParser));
    } catch (FileNotFoundException e) {
        throw new IOException("unable to access the required file", e);
    }
    // output some information
    try {
        blockinput.process();
        System.out.println("File Size: " + FileUtils.humanReadableByteCount(inputFile.length(), true));
    } catch (IOException e) {
        throw new IOException("unable to process the required file", e);
    } finally {
        blockinput.close();
    }
    // determine if we need to split the file
    if (inputFile.length() >= (minFileSize * 1000 * 1000)) {
        // file is over the minimum file size so try to split;
        // geo coordinates are ordered [minLat, minLng, maxLat, maxLng]
        double minLat = dataParser.getGeoCoordinates()[0];
        double minLng = dataParser.getGeoCoordinates()[1];
        double maxLat = dataParser.getGeoCoordinates()[2];
        double maxLng = dataParser.getGeoCoordinates()[3];
        // (removed: an earlier commented-out approach that refused to split
        // bounding boxes spanning the equator or prime meridian and computed
        // the split point via half-differences; superseded by the simple
        // midpoint calculation below)
        // calculate the new latitude and longitude (midpoints of the bbox)
        double newLat = (minLat + maxLat) / 2;
        double newLng = (minLng + maxLng) / 2;
        // output the new definitions for the four quadrants
        System.out.println("BBox A lat/lng: " + newLat + ", " + minLng + " - " + maxLat + ", " + newLng);
        System.out
                .println("URL: " + String.format(BinaryDataParser.URL_FORMAT, minLng, newLat, newLng, maxLat));
        System.out.println("BBox B lat/lng: " + newLat + ", " + newLng + " - " + maxLat + ", " + maxLng);
        System.out
                .println("URL: " + String.format(BinaryDataParser.URL_FORMAT, newLng, newLat, maxLng, maxLat));
        System.out.println("BBox C lat/lng: " + minLat + ", " + minLng + " - " + newLat + ", " + newLng);
        System.out
                .println("URL: " + String.format(BinaryDataParser.URL_FORMAT, minLng, minLat, newLng, newLat));
        System.out.println("BBox D lat/lng: " + minLat + ", " + newLng + " - " + newLat + ", " + maxLng);
        System.out
                .println("URL: " + String.format(BinaryDataParser.URL_FORMAT, newLng, minLat, maxLng, newLat));
        // create a new script by filling in the template placeholders
        if (template != null) {
            String scriptContents = new String(template);
            // add missing information
            scriptContents = scriptContents.replace("{{INPUT_PATH}}", inputFile.getCanonicalPath());
            scriptContents = scriptContents.replace("{{OUTPUT_PATH}}",
                    inputFile.getCanonicalFile().getParent());
            // replace all of the A quadrant variables
            scriptContents = scriptContents.replace("{{BBOX_A_BOTTOM}}", Double.toString(newLat));
            scriptContents = scriptContents.replace("{{BBOX_A_LEFT}}", Double.toString(minLng));
            scriptContents = scriptContents.replace("{{BBOX_A_TOP}}", Double.toString(maxLat));
            scriptContents = scriptContents.replace("{{BBOX_A_RIGHT}}", Double.toString(newLng));
            scriptContents = scriptContents.replace("{{BBOX_A_FILE}}",
                    inputFile.getName().replace(".osm.pbf", "_a.osm.pbf"));
            // B quadrant
            scriptContents = scriptContents.replace("{{BBOX_B_BOTTOM}}", Double.toString(newLat));
            scriptContents = scriptContents.replace("{{BBOX_B_LEFT}}", Double.toString(newLng));
            scriptContents = scriptContents.replace("{{BBOX_B_TOP}}", Double.toString(maxLat));
            scriptContents = scriptContents.replace("{{BBOX_B_RIGHT}}", Double.toString(maxLng));
            scriptContents = scriptContents.replace("{{BBOX_B_FILE}}",
                    inputFile.getName().replace(".osm.pbf", "_b.osm.pbf"));
            // C quadrant
            scriptContents = scriptContents.replace("{{BBOX_C_BOTTOM}}", Double.toString(minLat));
            scriptContents = scriptContents.replace("{{BBOX_C_LEFT}}", Double.toString(minLng));
            scriptContents = scriptContents.replace("{{BBOX_C_TOP}}", Double.toString(newLat));
            scriptContents = scriptContents.replace("{{BBOX_C_RIGHT}}", Double.toString(newLng));
            scriptContents = scriptContents.replace("{{BBOX_C_FILE}}",
                    inputFile.getName().replace(".osm.pbf", "_c.osm.pbf"));
            // D quadrant
            scriptContents = scriptContents.replace("{{BBOX_D_BOTTOM}}", Double.toString(minLat));
            scriptContents = scriptContents.replace("{{BBOX_D_LEFT}}", Double.toString(newLng));
            scriptContents = scriptContents.replace("{{BBOX_D_TOP}}", Double.toString(newLat));
            scriptContents = scriptContents.replace("{{BBOX_D_RIGHT}}", Double.toString(maxLng));
            scriptContents = scriptContents.replace("{{BBOX_D_FILE}}",
                    inputFile.getName().replace(".osm.pbf", "_d.osm.pbf"));
            // write the file
            try {
                File newFile = new File(
                        outputDir.getCanonicalPath() + File.separator + inputFile.getName() + ".sh");
                System.out.println("writing script file:\n" + newFile.getCanonicalPath());
                org.apache.commons.io.FileUtils.writeStringToFile(newFile, scriptContents);
                newFile.setExecutable(true);
            } catch (IOException e) {
                throw new IOException("unable to write the script file.", e);
            }
        }
    }
}
From source file:jenkins.model.Jenkins.java
/**
 * Builds the task graph that loads Jenkins state from disk: the global
 * config file, every job under {@code $JENKINS_HOME/jobs}, and the
 * finalization steps (view/security/label initialization). The returned
 * graph is executed by the caller; nothing is loaded here directly.
 *
 * @return the task graph describing the load sequence
 * @throws IOException if the jobs directory is missing and cannot be created
 */
private synchronized TaskBuilder loadTasks() throws IOException {
    File projectsDir = new File(root, "jobs");
    // ensure the jobs directory exists (canonicalized to follow symlinks)
    if (!projectsDir.getCanonicalFile().isDirectory() && !projectsDir.mkdirs()) {
        if (projectsDir.exists())
            throw new IOException(projectsDir + " is not a directory");
        throw new IOException("Unable to create " + projectsDir
                + "\nPermission issue? Please create this directory manually.");
    }
    // NOTE(review): listFiles() can return null on I/O error, which would NPE
    // in the for-loop below -- confirm whether the mkdirs check above makes
    // that impossible here
    File[] subdirs = projectsDir.listFiles();
    // job-loading tasks run concurrently, so the name set must be synchronized
    final Set<String> loadedNames = Collections.synchronizedSet(new HashSet<String>());
    TaskGraphBuilder g = new TaskGraphBuilder();
    Handle loadJenkins = g.requires(EXTENSIONS_AUGMENTED).attains(JOB_LOADED).add("Loading global config",
            new Executable() {
                public void run(Reactor session) throws Exception {
                    XmlFile cfg = getConfigFile();
                    if (cfg.exists()) {
                        // reset some data that may not exist in the disk file
                        // so that we can take a proper compensation action later.
                        primaryView = null;
                        views.clear();
                        // load from disk
                        cfg.unmarshal(Jenkins.this);
                    }
                    // if we are loading old data that doesn't have this field
                    if (slaves != null && !slaves.isEmpty() && nodes.isLegacy()) {
                        nodes.setNodes(slaves);
                        slaves = null;
                    } else {
                        nodes.load();
                    }
                    clouds.setOwner(Jenkins.this);
                }
            });
    // one (non-fatal) load task per job subdirectory, all gated on the global config
    for (final File subdir : subdirs) {
        g.requires(loadJenkins).attains(JOB_LOADED).notFatal().add("Loading job " + subdir.getName(),
                new Executable() {
                    public void run(Reactor session) throws Exception {
                        if (!Items.getConfigFile(subdir).exists()) {
                            //Does not have job config file, so it is not a jenkins job hence skip it
                            return;
                        }
                        TopLevelItem item = (TopLevelItem) Items.load(Jenkins.this, subdir);
                        items.put(item.getName(), item);
                        loadedNames.add(item.getName());
                    }
                });
    }
    g.requires(JOB_LOADED).add("Cleaning up old builds", new Executable() {
        public void run(Reactor reactor) throws Exception {
            // anything we didn't load from disk, throw them away.
            // doing this after loading from disk allows newly loaded items
            // to inspect what already existed in memory (in case of reloading)
            // retainAll doesn't work well because of CopyOnWriteMap implementation, so remove one by one
            // hopefully there shouldn't be too many of them.
            for (String name : items.keySet()) {
                if (!loadedNames.contains(name))
                    items.remove(name);
            }
        }
    });
    g.requires(JOB_LOADED).add("Finalizing set up", new Executable() {
        public void run(Reactor session) throws Exception {
            rebuildDependencyGraph();
            { // recompute label objects - populates the labels mapping.
                for (Node slave : nodes.getNodes())
                    // Note that not all labels are visible until the slaves have connected.
                    slave.getAssignedLabels();
                getAssignedLabels();
            }
            // initialize views by inserting the default view if necessary
            // this is both for clean Jenkins and for backward compatibility.
            if (views.size() == 0 || primaryView == null) {
                View v = new AllView(Messages.Hudson_ViewName());
                setViewOwner(v);
                views.add(0, v);
                primaryView = v.getViewName();
            }
            if (useSecurity != null && !useSecurity) {
                // forced reset to the unsecure mode.
                // this works as an escape hatch for people who locked themselves out.
                authorizationStrategy = AuthorizationStrategy.UNSECURED;
                setSecurityRealm(SecurityRealm.NO_AUTHENTICATION);
            } else {
                // read in old data that doesn't have the security field set
                if (authorizationStrategy == null) {
                    if (useSecurity == null)
                        authorizationStrategy = AuthorizationStrategy.UNSECURED;
                    else
                        authorizationStrategy = new LegacyAuthorizationStrategy();
                }
                if (securityRealm == null) {
                    if (useSecurity == null)
                        setSecurityRealm(SecurityRealm.NO_AUTHENTICATION);
                    else
                        setSecurityRealm(new LegacySecurityRealm());
                } else {
                    // force the set to proxy
                    setSecurityRealm(securityRealm);
                }
            }
            // Initialize the filter with the crumb issuer
            setCrumbIssuer(crumbIssuer);
            // auto register root actions
            for (Action a : getExtensionList(RootAction.class))
                if (!actions.contains(a))
                    actions.add(a);
        }
    });
    return g;
}
From source file:com.facebook.buck.util.unarchive.Unzip.java
/**
 * Writes the contents of one zip entry to {@code target}: symlink entries
 * become symlinks, regular entries are copied byte-for-byte. The entry's
 * mtime is restored and, if the entry's POSIX mode bits mark it executable,
 * the unpacked file is made executable.
 */
private void writeZipContents(ZipFile zip, ZipArchiveEntry entry, ProjectFilesystem filesystem, Path target)
        throws IOException {
    // Write file
    try (InputStream is = zip.getInputStream(entry)) {
        if (entry.isUnixSymlink()) {
            // the entry body of a symlink is the link target path
            filesystem.createSymLink(target,
                    filesystem.getPath(new String(ByteStreams.toByteArray(is), Charsets.UTF_8)),
                    /* force */ true);
        } else {
            try (OutputStream out = filesystem.newFileOutputStream(target)) {
                ByteStreams.copy(is, out);
            }
        }
    }
    Path filePath = filesystem.resolve(target);
    File file = filePath.toFile();
    // restore mtime for the file
    file.setLastModified(entry.getTime());
    // TODO(simons): Implement what the comment below says we should do.
    //
    // (Condensed from the original long discussion.) Zip entries may carry
    // file permissions in the extra data field; per
    // http://www.opensource.apple.com/source/zip/zip-6/unzip/unzip/proginfo/extra.fld
    // the ASi UNIX extra block (tag 0x756e "nu") holds a standard Unix
    // st_mode whose low bits are the usual S_I* permission masks (e.g.
    // S_IXUSR 00100 = owner execute). InfoZIP's "unzip", however, stores the
    // UNIX attributes in the upper 16 bits of the entry's external
    // attributes in the central directory (falling back to an MS-DOS path
    // when the creating OS is unknown). We support neither approach fully:
    // we only read the external-attributes bits where zip implementations
    // conventionally keep POSIX permissions, and if the owner-execute bit
    // is set we make the unpacked file executable via the platform-
    // independent Java API. Symlink permission data is deliberately ignored.
    Set<PosixFilePermission> permissions = MorePosixFilePermissions
            .fromMode(entry.getExternalAttributes() >> 16);
    // NOTE(review): the exists() check goes through getCanonicalFile(), so a
    // dangling symlink target would fail it -- confirm that is intended
    if (permissions.contains(PosixFilePermission.OWNER_EXECUTE) && file.getCanonicalFile().exists()) {
        MostFiles.makeExecutable(filePath);
    }
}
From source file:org.rhq.core.util.updater.DeployerCanonicalPathTest.java
/**
 * Verifies that an initial deployment of raw files whose destination paths
 * contain ".." segments is handled via canonicalized paths: a relative
 * destination that stays under the dest dir is tracked by its relative name,
 * while one that escapes the dest dir (and a fully absolute one) are tracked
 * by their absolute canonical paths. All three files are "realized"
 * (template-processed), so their hashes must differ from the sources.
 */
public void testInitialDeployRawFilesWithCanonicalPaths() throws Exception {
    File tmpDirDest = FileUtil.createTempDirectory("DeployerCanonicalPathTest", ".dest", null);
    File tmpDirSrc = FileUtil.createTempDirectory("DeployerCanonicalPathTest", ".src", null);
    File rawFileRelativeDest = new File("dir-does-not-existA/../rawA.txt"); // relative to "tmpDirDest" that we just created above
    File rawFileRelativeDest2 = new File("dir-does-not-existA/../../rawA.txt"); // relative to "tmpDirDest" but it takes us above it
    File rawFileAbsoluteDest = new File(System.getProperty("java.io.tmpdir"), "dir-does-not-existB/../rawB.txt");
    try {
        // put some source files in our tmpDirSrc location
        File testRawFileA = new File(tmpDirSrc, "updater-testA.txt");
        File testRawFileA2 = new File(tmpDirSrc, "updater-testA2.txt");
        File testRawFileB = new File(tmpDirSrc, "updater-testB.txt");
        FileUtil.copyFile(new File("target/test-classes/updater-testA.txt"), testRawFileA);
        FileUtil.copyFile(new File("target/test-classes/updater-testA.txt"), testRawFileA2);
        FileUtil.copyFile(new File("target/test-classes/updater-testB.txt"), testRawFileB);
        DeploymentProperties deploymentProps = new DeploymentProperties(0, "testbundle", "1.0.test", null,
                DestinationComplianceMode.full);
        HashMap<File, File> zipFiles = null;
        Map<File, File> rawFiles = new HashMap<File, File>(3);
        rawFiles.put(testRawFileA, rawFileRelativeDest); // we will realize this one ...
        rawFiles.put(testRawFileA2, rawFileRelativeDest2); // and this one ...
        rawFiles.put(testRawFileB, rawFileAbsoluteDest); // and we will realize this one, too
        File destDir = tmpDirDest;
        Pattern ignoreRegex = null;
        Set<File> realizeRawFiles = new HashSet<File>(3);
        realizeRawFiles.add(testRawFileA);
        realizeRawFiles.add(testRawFileA2);
        realizeRawFiles.add(testRawFileB);
        DeploymentData dd = new DeploymentData(deploymentProps, tmpDirSrc, destDir, rawFiles, realizeRawFiles,
                zipFiles, null, templateEngine, ignoreRegex, null);
        Deployer deployer = new Deployer(dd);
        DeployDifferences diff = new DeployDifferences();
        FileHashcodeMap map = deployer.deploy(diff);
        System.out.println("map-->\n" + map);
        System.out.println("diff->\n" + diff);
        // compute the canonical absolute forms of each destination for the assertions below
        String rawFileRelativeDestAbsolute = FileUtil
                .normalizePath(new File(tmpDirDest, rawFileRelativeDest.getPath())).getAbsolutePath();
        String rawFileRelativeDestAbsolute2 = FileUtil
                .normalizePath(new File(tmpDirDest, rawFileRelativeDest2.getPath())).getAbsolutePath();
        String rawFileAbsoluteDestAbsolute = FileUtil.normalizePath(rawFileAbsoluteDest).getAbsolutePath();
        assert map.size() == 3 : map;
        // the in-dest-dir file is keyed by its relative name
        assert map.containsKey("rawA.txt") : map;
        assert new File(rawFileRelativeDestAbsolute).exists();
        assert new File(rawFileRelativeDestAbsolute2).exists();
        assert MessageDigestGenerator.getDigestString(new File(rawFileRelativeDestAbsolute))
                .equals(map.get("rawA.txt"));
        // rawFileRelativeDestAbsolute2 should be treated just like an absolute, external file
        assert MessageDigestGenerator.getDigestString(new File(rawFileRelativeDestAbsolute2))
                .equals(map.get(rawFileRelativeDestAbsolute2));
        assert !MessageDigestGenerator.getDigestString(testRawFileA)
                .equals(map.get("rawA.txt")) : "should have different hash, we realize this one!";
        assert map.containsKey(rawFileAbsoluteDestAbsolute) : map;
        assert new File(rawFileAbsoluteDestAbsolute).exists();
        assert MessageDigestGenerator.getDigestString(new File(rawFileAbsoluteDestAbsolute))
                .equals(map.get(rawFileAbsoluteDestAbsolute));
        assert !MessageDigestGenerator.getDigestString(testRawFileB).equals(
                map.get(rawFileAbsoluteDestAbsolute)) : "should have different hash, we realized this one";
        assert diff.getAddedFiles().size() == 3 : diff;
        assert diff.getAddedFiles().contains(diff.convertPath("rawA.txt")) : diff;
        assert diff.getAddedFiles().contains(diff.convertPath(rawFileRelativeDestAbsolute2)) : diff;
        assert diff.getAddedFiles().contains(diff.convertPath(rawFileAbsoluteDestAbsolute)) : diff;
        assert diff.getRealizedFiles().size() == 3 : diff;
        assert diff.getRealizedFiles().keySet().contains(diff.convertPath("rawA.txt")) : diff;
        assert diff.getRealizedFiles().keySet().contains(diff.convertPath(rawFileRelativeDestAbsolute2)) : diff;
        assert diff.getRealizedFiles().keySet().contains(diff.convertPath(rawFileAbsoluteDestAbsolute)) : diff;
    } finally {
        // clean up everything we created, including the file written outside the dest dir
        FileUtil.purge(tmpDirDest, true);
        FileUtil.purge(tmpDirSrc, true);
        rawFileAbsoluteDest.getCanonicalFile().delete();
    }
}
From source file:com.google.bitcoin.core.Wallet.java
/**
 * Saves the wallet first to the given temp file, then renames to the dest file.
 * The stream is flushed and fsync'd before the rename so the on-disk copy is
 * complete; on Windows the existing destination is deleted first because
 * rename-over-existing is not supported there.
 *
 * @param temp the temporary file to write the wallet to
 * @param destFile the final destination the temp file is renamed to
 * @throws IOException if writing, syncing, or renaming fails
 */
public void saveToFile(File temp, File destFile) throws IOException {
    FileOutputStream stream = null;
    // serialize saves against concurrent wallet mutation
    lock.lock();
    try {
        stream = new FileOutputStream(temp);
        saveToFileStream(stream);
        // Attempt to force the bits to hit the disk. In reality the OS or hard disk itself may still decide
        // to not write through to physical media for at least a few seconds, but this is the best we can do.
        stream.flush();
        stream.getFD().sync();
        stream.close();
        // null out so the finally block does not attempt a double close
        stream = null;
        if (Utils.isWindows()) {
            // Work around an issue on Windows whereby you can't rename over existing files.
            File canonical = destFile.getCanonicalFile();
            canonical.delete();
            if (temp.renameTo(canonical))
                return; // else fall through.
            throw new IOException("Failed to rename " + temp + " to " + canonical);
        } else if (!temp.renameTo(destFile)) {
            throw new IOException("Failed to rename " + temp + " to " + destFile);
        }
    } catch (RuntimeException e) {
        log.error("Failed whilst saving wallet", e);
        throw e;
    } finally {
        lock.unlock();
        if (stream != null) {
            stream.close();
        }
    }
}