List of usage examples for java.io.File.setLastModified
public boolean setLastModified(long time)
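The method takes the new timestamp as milliseconds since the epoch (January 1, 1970 UTC) and returns true only if the change succeeded; the underlying file system may truncate the value (often to whole seconds) or refuse the change entirely, so the return value should be checked. A minimal sketch of the call, using a hypothetical path chosen only for illustration:

import java.io.File;

public class SetLastModifiedExample {
    public static void main(String[] args) {
        // hypothetical file path, used only for illustration
        File file = new File("example.txt");
        long timestamp = System.currentTimeMillis();
        // setLastModified returns false when the timestamp could not be changed
        if (!file.setLastModified(timestamp)) {
            System.err.println("Could not update last-modified time of " + file);
        }
        System.out.println("Last modified is now " + file.lastModified());
    }
}

The examples below show the same call as used in real projects, typically to preserve a source timestamp after copying, extracting or downloading a file.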
From source file:org.jenkinsci.modules.optpluginhelper.PluginHelper.java
/**
 * Refreshes the list of plugins that should be loaded. This will re-examine the full list of plugins provided
 * by all the {@link PluginSource} extensions and filter them through all the {@link PluginWrapperFilter}
 * extensions to see if there are any plugins that can be installed. An attempt will be made to dynamically load
 * the plugins.
 *
 * @return {@code true} if a restart is required to complete activation, {@code false} if either nothing changed
 *         or the additional plugins were successfully dynamically loaded.
 */
public boolean refresh() {
    final Jenkins jenkins = Jenkins.getInstance();
    if (jenkins == null) {
        return false;
    }
    PluginManager pm = jenkins.getPluginManager();
    PluginStrategy ps = pm.getPluginStrategy();
    // now figure out which plugins are included
    LOGGER.log(Level.FINE,
            "Enumerating available optional plugins and filtering to determine set for activation");
    Map<PluginWrapper, File> wrapperToFile = new HashMap<PluginWrapper, File>();
    Map<PluginWrapper, PluginWrapperFilter.Decision> wrapperToDecision =
            new HashMap<PluginWrapper, PluginWrapperFilter.Decision>();
    for (File plugin : listPlugins()) {
        try {
            PluginWrapper wrapper = ps.createPluginWrapper(plugin);
            final PluginWrapper existing = pm.getPlugin(wrapper.getShortName());
            if (existing != null && (existing.isEnabled() || existing.isActive())
                    && !(wrapper.getVersionNumber().isNewerThan(existing.getVersionNumber()))) {
                LOGGER.log(Level.FINER, "Excluding {0} version {1} as version {2} is already installed",
                        new Object[] { wrapper.getShortName(), wrapper.getVersion(), existing.getVersion() });
                continue;
            }
            final PluginWrapperFilter.Decision decision = PluginWrapperFilter.decide(wrapper, plugin);
            if (decision == PluginWrapperFilter.Decision.EXCLUDE) {
                LOGGER.log(Level.FINER, "Excluding {0} version {1} based on decision from filters",
                        new Object[] { wrapper.getShortName(), wrapper.getVersion() });
            } else {
                wrapperToFile.put(wrapper, plugin);
                wrapperToDecision.put(wrapper, decision);
            }
        } catch (IOException e) {
            LOGGER.log(Level.WARNING, "IO exception processing " + plugin, e);
        }
    }
    LOGGER.log(Level.FINE, "Initial filtered set determined: {0}", wrapperToDecision);
    // now any non-optional dependencies of an included plugin get upped to included
    boolean changed = true;
    while (changed) {
        changed = false;
        Set<String> upscale = new HashSet<String>();
        for (Map.Entry<PluginWrapper, PluginWrapperFilter.Decision> entry : wrapperToDecision.entrySet()) {
            if (entry.getValue() != PluginWrapperFilter.Decision.INCLUDE) {
                continue;
            }
            for (PluginWrapper.Dependency d : entry.getKey().getDependencies()) {
                // we need all non-optional dependencies
                upscale.add(d.shortName);
            }
            for (PluginWrapper.Dependency d : entry.getKey().getOptionalDependencies()) {
                // we only need optional dependencies if they are already installed and are an incompatible version
                final PluginWrapper existing = pm.getPlugin(d.shortName);
                if (existing != null && (existing.isEnabled() || existing.isActive())) {
                    if (existing.isOlderThan(new VersionNumber(d.version))) {
                        upscale.add(d.shortName);
                    }
                }
            }
        }
        for (Map.Entry<PluginWrapper, PluginWrapperFilter.Decision> entry : wrapperToDecision.entrySet()) {
            if (entry.getValue() == PluginWrapperFilter.Decision.INCLUDE) {
                continue;
            }
            if (upscale.contains(entry.getKey().getShortName())) {
                changed = true;
                entry.setValue(PluginWrapperFilter.Decision.INCLUDE);
            }
        }
    }
    for (Iterator<Map.Entry<PluginWrapper, PluginWrapperFilter.Decision>> iterator = wrapperToDecision
            .entrySet().iterator(); iterator.hasNext();) {
        Map.Entry<PluginWrapper, PluginWrapperFilter.Decision> entry = iterator.next();
        if (entry.getValue() == PluginWrapperFilter.Decision.INCLUDE) {
            continue;
        }
        wrapperToFile.remove(entry.getKey());
        iterator.remove();
    }
    LOGGER.log(Level.FINE, "After adding required dependencies: {0}", wrapperToDecision.keySet());
    if (wrapperToFile.isEmpty()) {
        // bail early if the list is empty
        LOGGER.log(Level.FINE, "No new optional plugins to install");
        return false;
    }
    LOGGER.log(Level.FINE, "Checking if dynamic loading of plugins is possible...");
    boolean cannotDynamicLoad = false;
    for (PluginWrapper wrapper : wrapperToFile.keySet()) {
        final PluginWrapper existing = pm.getPlugin(wrapper.getShortName());
        if (existing != null && (existing.isActive() || existing.isEnabled()) && !existing.isPinned()) {
            LOGGER.log(Level.INFO, "Cannot dynamically load optional plugins because {0} is already installed",
                    existing.getShortName());
            cannotDynamicLoad = true;
        } else if (YesNoMaybe.NO == wrapper.supportsDynamicLoad()) {
            LOGGER.log(Level.INFO,
                    "Cannot dynamically load optional plugins because {0} does not support dynamic load",
                    wrapper.getShortName());
            cannotDynamicLoad = true;
        }
    }
    Map<String, VersionNumber> finalVersions = new HashMap<String, VersionNumber>();
    // start with the active/enabled plugins that are currently installed
    for (PluginWrapper w : pm.getPlugins()) {
        if (w.isActive() || w.isEnabled()) {
            finalVersions.put(w.getShortName(), w.getVersionNumber());
        }
    }
    // now add any new versions
    for (PluginWrapper w : wrapperToFile.keySet()) {
        VersionNumber existing = finalVersions.get(w.getShortName());
        if (existing == null || w.getVersionNumber().isNewerThan(existing)) {
            finalVersions.put(w.getShortName(), w.getVersionNumber());
        }
    }
    LOGGER.log(Level.FINE, "Expected final plugin version map: {0}", finalVersions);
    Set<String> pluginsToEnable = new HashSet<String>();
    for (PluginWrapper w : wrapperToFile.keySet()) {
        LOGGER.log(Level.FINE, "Checking if {0} can be enabled, i.e. all dependencies can be satisfied",
                w.getShortName());
        boolean missingDependency = false;
        for (PluginWrapper.Dependency d : w.getDependencies()) {
            VersionNumber v = finalVersions.get(d.shortName);
            if (v == null || v.isOlderThan(new VersionNumber(d.version))) {
                missingDependency = true;
                LOGGER.log(Level.FINER, "{0} is missing a dependency on {1} version {2}",
                        new Object[] { w.getShortName(), d.shortName, d.version });
            }
        }
        for (PluginWrapper.Dependency d : w.getOptionalDependencies()) {
            VersionNumber v = finalVersions.get(d.shortName);
            if (v != null && v.isOlderThan(new VersionNumber(d.version))) {
                missingDependency = true;
                LOGGER.log(Level.FINER, "{0} is missing a dependency on {1} version {2}",
                        new Object[] { w.getShortName(), d.shortName, d.version });
            }
        }
        if (missingDependency) {
            LOGGER.log(Level.FINE, "{0} cannot be enabled due to missing dependencies", w.getShortName());
        } else {
            LOGGER.log(Level.FINE, "{0} can be enabled", w.getShortName());
            pluginsToEnable.add(w.getShortName());
        }
    }
    Map<String, File> newPlugins = new HashMap<String, File>();
    for (Map.Entry<PluginWrapper, File> entry : wrapperToFile.entrySet()) {
        final String shortName = entry.getKey().getShortName();
        final PluginWrapper existing = pm.getPlugin(shortName);
        final PluginWrapper proposed = entry.getKey();
        if (existing != null && existing.isActive()) {
            if (existing.getVersionNumber().equals(proposed.getVersionNumber())) {
                LOGGER.log(Level.FINE, "Ignoring installing plugin {0} as current version is desired",
                        shortName);
                // ignore as we are fine
                continue;
            }
            if (existing.getVersionNumber().isNewerThan(proposed.getVersionNumber())) {
                LOGGER.log(Level.INFO,
                        "Ignoring installing plugin {0} as current version {1} is newer than bundled "
                                + "version {2}",
                        new Object[] { shortName, existing.getVersion(), proposed.getVersion() });
                continue;
            }
            if (existing.isPinned()) {
                LOGGER.log(Level.INFO,
                        "Ignoring installing plugin {0} as it is pinned. You might want to unpin this plugin.",
                        new Object[] { shortName });
                continue;
            }
            LOGGER.log(Level.INFO, "Restart required as plugin {0} is already installed", shortName);
            cannotDynamicLoad = true;
        }
        String fileName = shortName + ".jpi";
        String legacyName = fileName.replace(".jpi", ".hpi");
        File file = new File(pm.rootDir, fileName);
        File pinFile = new File(pm.rootDir, fileName + ".pinned");
        File disableFile = new File(pm.rootDir, fileName + ".disabled");
        // normalization first, if the old file exists.
        try {
            rename(new File(pm.rootDir, legacyName), file);
        } catch (IOException e) {
            LOGGER.log(Level.WARNING,
                    String.format("Could not move legacy %s.hpi to %s.jpi", shortName, shortName), e);
        }
        try {
            rename(new File(pm.rootDir, legacyName + ".pinned"), pinFile);
        } catch (IOException e) {
            LOGGER.log(Level.WARNING,
                    String.format("Could not move legacy %s.hpi.pinned to %s.jpi.pinned", shortName, shortName),
                    e);
        }
        try {
            rename(new File(pm.rootDir, legacyName + ".disabled"), disableFile);
        } catch (IOException e) {
            LOGGER.log(Level.WARNING,
                    String.format("Could not move legacy %s.hpi.disabled to %s.jpi.disabled", shortName,
                            shortName),
                    e);
        }
        // update file if:
        //  - no file exists today
        //  - bundled version and current version differs (by timestamp), and the file isn't pinned.
        final long lastModified = entry.getValue().lastModified();
        if (!file.exists() || (file.lastModified() != lastModified && !pinFile.exists())) {
            try {
                FileUtils.copyFile(entry.getValue(), file);
                if (lastModified != -1 && !file.setLastModified(lastModified)) {
                    LOGGER.log(Level.WARNING, "Could not set last modified timestamp on {0}.jpi", shortName);
                }
                // lastModified is set for two reasons:
                //  - to avoid unpacking as much as possible, but still do it on both upgrade and downgrade
                //  - to make sure the value is not changed after each restart, so we can avoid
                //    unpacking the plugin itself in ClassicPluginStrategy.explode
                newPlugins.put(shortName, file);
            } catch (IOException e) {
                LOGGER.log(Level.WARNING, String.format("Could not write %s.jpi", shortName), e);
            }
        }
        if (!pluginsToEnable.contains(shortName)) {
            try {
                new FileOutputStream(disableFile).close();
            } catch (IOException e) {
                LOGGER.log(Level.WARNING, String.format("Could not flag %s as a disabled plugin", shortName),
                        e);
            }
        }
    }
    if (cannotDynamicLoad) {
        return true;
    }
    LOGGER.log(Level.FINE, "Sorting plugins to determine loading order...");
    // now we need to sort plugins and try and dynamically load them
    final List<PluginWrapper> plugins = new ArrayList<PluginWrapper>(newPlugins.size());
    for (File p : newPlugins.values()) {
        try {
            plugins.add(ps.createPluginWrapper(p));
        } catch (IOException e) {
            LOGGER.log(Level.WARNING, "IO exception processing " + p, e);
            cannotDynamicLoad = true;
        }
    }
    if (cannotDynamicLoad) {
        return true;
    }
    CyclicGraphDetector<PluginWrapper> cgd = new CyclicGraphDetector<PluginWrapper>() {
        @Override
        protected List<PluginWrapper> getEdges(PluginWrapper p) {
            List<PluginWrapper> next = new ArrayList<PluginWrapper>();
            addTo(p.getDependencies(), next);
            addTo(p.getOptionalDependencies(), next);
            return next;
        }

        private void addTo(List<PluginWrapper.Dependency> dependencies, List<PluginWrapper> r) {
            for (PluginWrapper.Dependency d : dependencies) {
                for (PluginWrapper p : plugins) {
                    if (p.getShortName().equals(d.shortName)) {
                        r.add(p);
                    }
                }
            }
        }
    };
    try {
        cgd.run(plugins);
    } catch (CyclicGraphDetector.CycleDetectedException e) {
        LOGGER.log(Level.WARNING, "Cyclic reference detected amongst bundled plugins: " + plugins, e);
        cannotDynamicLoad = true;
    }
    LOGGER.log(Level.FINE, "Sorted plugin load order: {0}", cgd.getSorted());
    LOGGER.log(Level.INFO, "Starting dynamic loading of optional bundled plugins");
    for (PluginWrapper plugin : cgd.getSorted()) {
        File archive = newPlugins.get(plugin.getShortName());
        if (archive == null) {
            // cannot happen, we put only plugins from newPlugins into the list and sorting should never
            // add, so the sorting should be a 1:1 mapping. We have this NPE check for safety only.
            continue;
        }
        try {
            pm.dynamicLoad(archive);
        } catch (IOException e) {
            LOGGER.log(Level.WARNING, String.format("Failed to dynamic load plugin %s version %s",
                    plugin.getShortName(), plugin.getVersion()), e);
            cannotDynamicLoad = true;
            break;
        } catch (InterruptedException e) {
            LOGGER.log(Level.WARNING,
                    String.format("Interrupted while trying to dynamic load plugin %s version %s",
                            plugin.getShortName(), plugin.getVersion()),
                    e);
            cannotDynamicLoad = true;
            break;
        } catch (RestartRequiredException e) {
            LOGGER.log(Level.WARNING, String.format("Plugin %s version %s does not support dynamic loading",
                    plugin.getShortName(), plugin.getVersion()), e);
            cannotDynamicLoad = true;
            break;
        }
    }
    LOGGER.log(Level.INFO, "Finished dynamic loading of optional bundled plugins, restart required {0}",
            cannotDynamicLoad);
    return cannotDynamicLoad;
}
From source file:org.latticesoft.util.common.FileUtil.java
private static void copyFileBytes(File fSrc, File fTgt) {
    if (fSrc == null || fTgt == null) {
        return;
    }
    if (fSrc.isDirectory() || fTgt.isDirectory()) {
        return;
    }
    InputStream is = null;
    OutputStream os = null;
    byte[] b = new byte[BUFFER_SIZE];
    long len = fSrc.length();
    int cnt = (int) len / (BUFFER_SIZE);
    int readCnt = 0;
    boolean failedLastPart = false;
    try {
        is = new FileInputStream(fSrc);
        os = new FileOutputStream(fTgt);
        for (readCnt = 0; readCnt < cnt; readCnt++) {
            // read() may return fewer bytes than BUFFER_SIZE, so only write what was actually read
            int n = is.read(b);
            if (n < 0) {
                break;
            }
            os.write(b, 0, n);
        }
        /*
        int readSize = readCnt * BUFFER_SIZE;
        int finalChuckSize = (int) len - readSize;
        try {
            is.read(b, readSize, finalChuckSize);
            os.write(b, 0, finalChuckSize);
        } catch (Exception e) {
            failedLastPart = true;
        }
        //*/
        failedLastPart = true;
        if (failedLastPart) {
            // copy any remaining bytes one at a time
            int data = is.read();
            while (data > -1) {
                os.write(data);
                data = is.read();
            }
        }
    } catch (Exception e) {
        if (log.isErrorEnabled()) {
            log.error("Copy fail", e);
        }
    } finally {
        try { is.close(); } catch (Exception e) { }
        try { os.close(); } catch (Exception e) { }
        // preserve the source file's last-modified timestamp on the copy
        try { fTgt.setLastModified(fSrc.lastModified()); } catch (Exception e) { }
    }
}
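The example above preserves the source file's timestamp by calling setLastModified after the copy completes. On Java 7 and later the same effect can be obtained with java.nio.file; the following is a minimal sketch of that alternative (the paths are hypothetical and this is not part of the original FileUtil class):

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;

public class NioCopyExample {
    public static void main(String[] args) throws IOException {
        // hypothetical source and target paths
        Path src = Paths.get("source.dat");
        Path tgt = Paths.get("target.dat");
        // COPY_ATTRIBUTES asks the provider to carry over the last-modified time
        // (best effort; not every file system supports it)
        Files.copy(src, tgt, StandardCopyOption.REPLACE_EXISTING, StandardCopyOption.COPY_ATTRIBUTES);
        // or set it explicitly from the source file's timestamp
        Files.setLastModifiedTime(tgt, Files.getLastModifiedTime(src));
    }
}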
From source file:org.jvnet.hudson.update_center.MavenRepositoryImpl.java
/**
 * Loads a remote repository index (.zip or .gz), converts it to a Lucene index and returns it.
 */
private File loadIndex(String id, URL url) throws IOException, UnsupportedExistingLuceneIndexException {
    File dir = new File(new File(System.getProperty("java.io.tmpdir")), "maven-index/" + id);
    File local = new File(dir, "index" + getExtension(url));
    File expanded = new File(dir, "expanded");

    URLConnection con = url.openConnection();
    if (url.getUserInfo() != null) {
        con.setRequestProperty("Authorization",
                "Basic " + new sun.misc.BASE64Encoder().encode(url.getUserInfo().getBytes()));
    }

    if (!expanded.exists() || !local.exists()
            || (local.lastModified() != con.getLastModified() && !offlineIndex)) {
        System.out.println("Downloading " + url);
        // if the download fail in the middle, only leave a broken tmp file
        dir.mkdirs();
        File tmp = new File(dir, "index_" + getExtension(url));
        FileOutputStream o = new FileOutputStream(tmp);
        IOUtils.copy(con.getInputStream(), o);
        o.close();

        if (expanded.exists())
            FileUtils.deleteDirectory(expanded);
        expanded.mkdirs();

        if (url.toExternalForm().endsWith(".gz")) {
            System.out.println("Reconstructing index from " + url);
            FSDirectory directory = FSDirectory.getDirectory(expanded);
            NexusIndexWriter w = new NexusIndexWriter(directory, new NexusAnalyzer(), true);
            FileInputStream in = new FileInputStream(tmp);
            try {
                IndexDataReader dr = new IndexDataReader(in);
                IndexDataReadResult result = dr.readIndex(w, new DefaultIndexingContext(id, id, null, expanded,
                        null, null, NexusIndexer.DEFAULT_INDEX, true));
            } finally {
                IndexUtils.close(w);
                IOUtils.closeQuietly(in);
                directory.close();
            }
        } else if (url.toExternalForm().endsWith(".zip")) {
            Expand e = new Expand();
            e.setSrc(tmp);
            e.setDest(expanded);
            e.execute();
        } else {
            throw new UnsupportedOperationException("Unsupported index format: " + url);
        }

        // as a proof that the expansion was properly completed
        tmp.renameTo(local);
        local.setLastModified(con.getLastModified());
    } else {
        System.out.println("Reusing the locally cached " + url + " at " + local);
    }

    return expanded;
}
From source file:io.sloeber.core.managers.Manager.java
public static IStatus extract(ArchiveInputStream in, File destFolder, int stripPath, boolean overwrite,
        IProgressMonitor pMonitor) throws IOException, InterruptedException {

    // Folders timestamps must be set at the end of archive extraction
    // (because creating a file in a folder alters the folder's timestamp)
    Map<File, Long> foldersTimestamps = new HashMap<>();

    String pathPrefix = ""; //$NON-NLS-1$

    Map<File, File> hardLinks = new HashMap<>();
    Map<File, Integer> hardLinksMode = new HashMap<>();
    Map<File, String> symLinks = new HashMap<>();
    Map<File, Long> symLinksModifiedTimes = new HashMap<>();

    // Cycle through all the archive entries
    while (true) {
        ArchiveEntry entry = in.getNextEntry();
        if (entry == null) {
            break;
        }

        // Extract entry info
        long size = entry.getSize();
        String name = entry.getName();
        boolean isDirectory = entry.isDirectory();
        boolean isLink = false;
        boolean isSymLink = false;
        String linkName = null;
        Integer mode = null;
        Long modifiedTime = new Long(entry.getLastModifiedDate().getTime());

        pMonitor.subTask("Processing " + name); //$NON-NLS-1$

        {
            // Skip MacOSX metadata
            // http://superuser.com/questions/61185/why-do-i-get-files-like-foo-in-my-tarball-on-os-x
            int slash = name.lastIndexOf('/');
            if (slash == -1) {
                if (name.startsWith("._")) { //$NON-NLS-1$
                    continue;
                }
            } else {
                if (name.substring(slash + 1).startsWith("._")) { //$NON-NLS-1$
                    continue;
                }
            }
        }

        // Skip git metadata
        // http://www.unix.com/unix-for-dummies-questions-and-answers/124958-file-pax_global_header-means-what.html
        if (name.contains("pax_global_header")) { //$NON-NLS-1$
            continue;
        }

        if (entry instanceof TarArchiveEntry) {
            TarArchiveEntry tarEntry = (TarArchiveEntry) entry;
            mode = new Integer(tarEntry.getMode());
            isLink = tarEntry.isLink();
            isSymLink = tarEntry.isSymbolicLink();
            linkName = tarEntry.getLinkName();
        }

        // On the first archive entry, if requested, detect the common path
        // prefix to be stripped from filenames
        int localstripPath = stripPath;
        if (localstripPath > 0 && pathPrefix.isEmpty()) {
            int slash = 0;
            while (localstripPath > 0) {
                slash = name.indexOf("/", slash); //$NON-NLS-1$
                if (slash == -1) {
                    throw new IOException(Messages.Manager_no_single_root_folder);
                }
                slash++;
                localstripPath--;
            }
            pathPrefix = name.substring(0, slash);
        }

        // Strip the common path prefix when requested
        if (!name.startsWith(pathPrefix)) {
            throw new IOException(Messages.Manager_no_single_root_folder_while_file + name
                    + Messages.Manager_is_outside + pathPrefix);
        }
        name = name.substring(pathPrefix.length());
        if (name.isEmpty()) {
            continue;
        }
        File outputFile = new File(destFolder, name);

        File outputLinkedFile = null;
        if (isLink && linkName != null) {
            if (!linkName.startsWith(pathPrefix)) {
                throw new IOException(Messages.Manager_no_single_root_folder_while_file + linkName
                        + Messages.Manager_is_outside + pathPrefix);
            }
            linkName = linkName.substring(pathPrefix.length());
            outputLinkedFile = new File(destFolder, linkName);
        }
        if (isSymLink) {
            // Symbolic links are referenced with relative paths
            outputLinkedFile = new File(linkName);
            if (outputLinkedFile.isAbsolute()) {
                System.err.println(Messages.Manager_Warning_file + outputFile
                        + Messages.Manager_links_to_absolute_path + outputLinkedFile);
                System.err.println();
            }
        }

        // Safety check
        if (isDirectory) {
            if (outputFile.isFile() && !overwrite) {
                throw new IOException(
                        Messages.Manager_Cant_create_folder + outputFile + Messages.Manager_File_exists);
            }
        } else {
            // - isLink
            // - isSymLink
            // - anything else
            if (outputFile.exists() && !overwrite) {
                throw new IOException(
                        Messages.Manager_Cant_extract_file + outputFile + Messages.Manager_File_already_exists);
            }
        }

        // Extract the entry
        if (isDirectory) {
            if (!outputFile.exists() && !outputFile.mkdirs()) {
                throw new IOException(Messages.Manager_Cant_create_folder + outputFile);
            }
            foldersTimestamps.put(outputFile, modifiedTime);
        } else if (isLink) {
            hardLinks.put(outputFile, outputLinkedFile);
            hardLinksMode.put(outputFile, mode);
        } else if (isSymLink) {
            symLinks.put(outputFile, linkName);
            symLinksModifiedTimes.put(outputFile, modifiedTime);
        } else {
            // Create the containing folder if not exists
            if (!outputFile.getParentFile().isDirectory()) {
                outputFile.getParentFile().mkdirs();
            }
            copyStreamToFile(in, size, outputFile);
            outputFile.setLastModified(modifiedTime.longValue());
        }

        // Set file/folder permission
        if (mode != null && !isSymLink && outputFile.exists()) {
            chmod(outputFile, mode.intValue());
        }
    }

    for (Map.Entry<File, File> entry : hardLinks.entrySet()) {
        if (entry.getKey().exists() && overwrite) {
            entry.getKey().delete();
        }
        link(entry.getValue(), entry.getKey());
        Integer mode = hardLinksMode.get(entry.getKey());
        if (mode != null) {
            chmod(entry.getKey(), mode.intValue());
        }
    }

    for (Map.Entry<File, String> entry : symLinks.entrySet()) {
        if (entry.getKey().exists() && overwrite) {
            entry.getKey().delete();
        }
        symlink(entry.getValue(), entry.getKey());
        entry.getKey().setLastModified(symLinksModifiedTimes.get(entry.getKey()).longValue());
    }

    // Set folders timestamps
    for (Map.Entry<File, Long> entry : foldersTimestamps.entrySet()) {
        entry.getKey().setLastModified(entry.getValue().longValue());
    }

    return Status.OK_STATUS;
}
From source file:hudson.FilePath.java
private void unzip(File dir, InputStream in) throws IOException {
    dir = dir.getAbsoluteFile(); // without absolutization, getParentFile below seems to fail
    ZipInputStream zip = new ZipInputStream(new BufferedInputStream(in));
    java.util.zip.ZipEntry e;
    try {
        while ((e = zip.getNextEntry()) != null) {
            File f = new File(dir, e.getName());
            if (e.isDirectory()) {
                f.mkdirs();
            } else {
                File p = f.getParentFile();
                if (p != null)
                    p.mkdirs();
                FileOutputStream out = new FileOutputStream(f);
                try {
                    IOUtils.copy(zip, out);
                } finally {
                    out.close();
                }
                f.setLastModified(e.getTime());
                zip.closeEntry();
            }
        }
    } finally {
        zip.close();
    }
}
From source file:nl.opengeogroep.filesetsync.client.FilesetSyncer.java
/**
 * Removes files which are locally up-to-date from the list of files to
 * transfer. Updates lastModified date.
 */
private void compareFilesetList() throws IOException {
    MutableLong hashTime = new MutableLong();
    long hashBytes = 0;

    long startTime = System.currentTimeMillis();
    long progressTime = startTime;
    int processed = 0;
    int newerLocalFiles = 0;

    boolean setLastModifiedToServer = "true".equals(fs.getProperty("setLastModifiedToServer"));

    for (int index = 0; index < fileList.size(); index++) {
        FileRecord fr = fileList.get(index);
        if (Shutdown.isHappening()) {
            return;
        }

        File localFile;
        if (fileList.size() == 1 && fr.getType() == TYPE_FILE) {
            localFile = new File(fs.getLocal());
        } else {
            localFile = new File(fs.getLocal() + File.separator + fr.getName());
        }

        if (fr.getType() == TYPE_DIRECTORY && localFile.exists()) {
            if (!localFile.isDirectory()) {
                log.error("Local file is in the way for remote directory: " + localFile.getCanonicalPath());
            }
            if (fr.getLastModified() != localFile.lastModified()) {
                log.trace(String.format("later updating last modified for directory %s",
                        localFile.getCanonicalPath()));
                directoriesLastModifiedTimes.add(Pair.of(localFile, fr.getLastModified()));
            }
            fileList.set(index, null);
            alreadyLocal++;
        }

        if (fr.getType() == TYPE_FILE && localFile.exists()) {
            if (!localFile.isFile()) {
                log.error("Local non-file is in the way for remote file: " + localFile.getCanonicalPath());
            }

            if (fs.isHash()) {
                try {
                    String hash = FileRecord.calculateHash(localFile, hashTime);
                    //localFilesByHash.put(hash, localFile.getCanonicalPath());
                    hashBytes += localFile.length();
                    if (hash.equals(fr.getHash())) {
                        if (log.isTraceEnabled()) {
                            log.trace("Same hash for " + fr.getName());
                        }
                        if (fr.getLastModified() > localFile.lastModified()) {
                            if (log.isTraceEnabled()) {
                                log.trace("Same hash, updating last modified for " + fr.getName());
                            }
                            localFile.setLastModified(fr.getLastModified());
                        }
                        fileList.set(index, null);
                        alreadyLocal++;
                    } else {
                        if (log.isTraceEnabled()) {
                            log.trace("Hash mismatch for " + fr.getName());
                        }
                    }
                } catch (Exception e) {
                    log.error("Error hashing " + localFile.getCanonicalPath() + ": "
                            + ExceptionUtils.getMessage(e));
                }
            } else {
                if (fr.getLastModified() > localFile.lastModified()) {
                    if (log.isTraceEnabled()) {
                        log.trace("Remote file newer: " + fr.getName());
                    }
                } else if (fr.getLastModified() < localFile.lastModified()) {
                    if (setLastModifiedToServer) {
                        localFile.setLastModified(fr.getLastModified());
                    } else {
                        if (log.isTraceEnabled()) {
                            log.trace(String.format(
                                    "Keeping local file last modified at %s, later than remote file at %s: %s",
                                    dateToString(new Date(localFile.lastModified())),
                                    dateToString(new Date(fr.getLastModified())), fr.getName()));
                        }
                    }
                    newerLocalFiles++;
                    fileList.set(index, null);
                    alreadyLocal++;
                } else {
                    if (log.isTraceEnabled()) {
                        log.trace("Local file unmodified: " + fr.getName());
                    }
                    fileList.set(index, null);
                    alreadyLocal++;
                }
            }
        }

        processed++;
        long time = System.currentTimeMillis();
        if (time - progressTime > 30000) {
            log.info(String.format("Still comparing files, processed %d files", processed));
            progressTime = time;
        }
    }

    // TODO: if file in file list already in localFilesByHash OR, remove them
    // Also remove duplicate hashes in fileList

    String hashInfo;
    if (fs.isHash()) {
        hashInfo = String.format(", hashed %d KB, hash speed %s", hashBytes / 1024,
                (hashTime.getValue() < 100 ? "n/a"
                        : Math.round(hashBytes / 1024.0 / (hashTime.getValue() / 1000.0)) + " KB/s"));
    } else {
        hashInfo = "";
    }
    log.info(String.format("Compared file list to local files in %s, %d files up-to-date%s",
            DurationFormatUtils.formatDurationWords(System.currentTimeMillis() - startTime, true, false),
            alreadyLocal, hashInfo));
    if (newerLocalFiles != 0) {
        log.warn(String.format(
                "Not overwriting %d local files with newer local last modified date compared to files on server",
                newerLocalFiles));
    }
}
From source file:madkitgroupextension.export.Export.java
public static void updateFTP(FTPClient ftpClient, String _directory_dst, File _directory_src,
        File _current_file_transfert) throws IOException, TransfertException {
    ftpClient.changeWorkingDirectory("./");
    FTPListParseEngine ftplpe = ftpClient.initiateListParsing(_directory_dst);
    FTPFile files[] = ftplpe.getFiles();

    File current_file_transfert = _current_file_transfert;
    try {
        for (File f : _directory_src.listFiles()) {
            if (f.isDirectory()) {
                if (!f.getName().equals("./") && !f.getName().equals("../")) {
                    if (_current_file_transfert != null) {
                        if (!_current_file_transfert.getCanonicalPath().startsWith(f.getCanonicalPath()))
                            continue;
                        else
                            _current_file_transfert = null;
                    }
                    boolean found = false;
                    for (FTPFile ff : files) {
                        if (f.getName().equals(ff.getName())) {
                            if (ff.isFile()) {
                                ftpClient.deleteFile(_directory_dst + ff.getName());
                            } else
                                found = true;
                            break;
                        }
                    }
                    if (!found) {
                        ftpClient.changeWorkingDirectory("./");
                        if (!ftpClient.makeDirectory(_directory_dst + f.getName() + "/"))
                            System.err.println(
                                    "Impossible to create directory " + _directory_dst + f.getName() + "/");
                    }
                    updateFTP(ftpClient, _directory_dst + f.getName() + "/", f, _current_file_transfert);
                }
            } else {
                if (_current_file_transfert != null) {
                    if (!_current_file_transfert.equals(f.getCanonicalPath()))
                        continue;
                    else
                        _current_file_transfert = null;
                }
                current_file_transfert = _current_file_transfert;
                FTPFile found = null;
                for (FTPFile ff : files) {
                    if (f.getName().equals(ff.getName())) {
                        if (ff.isDirectory()) {
                            FileTools.removeDirectory(ftpClient, _directory_dst + ff.getName());
                        } else
                            found = ff;
                        break;
                    }
                }
                if (found == null || (found.getTimestamp().getTimeInMillis() - f.lastModified()) < 0
                        || found.getSize() != f.length()) {
                    FileInputStream fis = new FileInputStream(f);
                    ftpClient.setFileType(FTP.BINARY_FILE_TYPE);
                    if (!ftpClient.storeFile(_directory_dst + f.getName(), fis))
                        System.err.println("Impossible to send file: " + _directory_dst + f.getName());
                    fis.close();
                    for (FTPFile ff : ftplpe.getFiles()) {
                        if (f.getName().equals(ff.getName())) {
                            f.setLastModified(ff.getTimestamp().getTimeInMillis());
                            break;
                        }
                    }
                }
            }
        }
    } catch (IOException e) {
        throw new TransfertException(current_file_transfert, null, e);
    }
    for (FTPFile ff : files) {
        if (!ff.getName().equals(".") && !ff.getName().equals("..")) {
            boolean found = false;
            for (File f : _directory_src.listFiles()) {
                if (f.getName().equals(ff.getName()) && f.isDirectory() == ff.isDirectory()) {
                    found = true;
                    break;
                }
            }
            if (!found) {
                if (ff.isDirectory()) {
                    FileTools.removeDirectory(ftpClient, _directory_dst + ff.getName());
                } else {
                    ftpClient.deleteFile(_directory_dst + ff.getName());
                }
            }
        }
    }
}
From source file:org.apache.cocoon.servlet.CocoonServlet.java
private File extractLibraries() {
    try {
        URL manifestURL = this.servletContext.getResource("/META-INF/MANIFEST.MF");
        if (manifestURL == null) {
            this.getLogger().fatalError("Unable to get Manifest");
            return null;
        }

        Manifest mf = new Manifest(manifestURL.openStream());
        Attributes attr = mf.getMainAttributes();
        String libValue = attr.getValue("Cocoon-Libs");
        if (libValue == null) {
            this.getLogger().fatalError("Unable to get 'Cocoon-Libs' attribute from the Manifest");
            return null;
        }

        List libList = new ArrayList();
        for (StringTokenizer st = new StringTokenizer(libValue, " "); st.hasMoreTokens();) {
            libList.add(st.nextToken());
        }

        File root = new File(this.workDir, "lib");
        root.mkdirs();

        File[] oldLibs = root.listFiles();
        for (int i = 0; i < oldLibs.length; i++) {
            String oldLib = oldLibs[i].getName();
            if (!libList.contains(oldLib)) {
                this.getLogger().debug("Removing old library " + oldLibs[i]);
                oldLibs[i].delete();
            }
        }

        this.getLogger().warn("Extracting libraries into " + root);
        byte[] buffer = new byte[65536];

        for (Iterator i = libList.iterator(); i.hasNext();) {
            String libName = (String) i.next();

            long lastModified = -1;
            try {
                lastModified = Long.parseLong(attr.getValue("Cocoon-Lib-" + libName.replace('.', '_')));
            } catch (Exception e) {
                this.getLogger().debug("Failed to parse lastModified: "
                        + attr.getValue("Cocoon-Lib-" + libName.replace('.', '_')));
            }

            File lib = new File(root, libName);
            if (lib.exists() && lib.lastModified() != lastModified) {
                this.getLogger().debug("Removing modified library " + lib);
                lib.delete();
            }

            InputStream is = null;
            OutputStream os = null;
            try {
                is = this.servletContext.getResourceAsStream("/WEB-INF/lib/" + libName);
                if (is != null) {
                    this.getLogger().debug("Extracting " + libName);
                    os = new FileOutputStream(lib);
                    int count;
                    while ((count = is.read(buffer)) > 0) {
                        os.write(buffer, 0, count);
                    }
                } else {
                    this.getLogger().warn("Skipping " + libName);
                }
            } finally {
                if (os != null)
                    os.close();
                if (is != null)
                    is.close();
            }

            if (lastModified != -1) {
                lib.setLastModified(lastModified);
            }
        }

        return root;
    } catch (IOException e) {
        this.getLogger().fatalError("Exception while processing Manifest file", e);
        return null;
    }
}
From source file:org.codehaus.mojo.webstart.JnlpMojo.java
/**
 * @param relativeName
 * @throws IOException
 */
private void signJar(File jarFile, String relativeName) throws MojoExecutionException, IOException {
    getLog().debug("signing: " + relativeName);

    JarSignMojo signJar = new JarSignMojo();
    signJar.setSkipAttachSignedArtifact(true);
    signJar.setLog(getLog());
    signJar.setAlias(sign.getAlias());
    signJar.setBasedir(basedir);
    signJar.setKeypass(sign.getKeypass());
    signJar.setKeystore(sign.getKeystore());
    signJar.setSigFile(sign.getSigfile());
    signJar.setStorepass(sign.getStorepass());
    signJar.setType(sign.getStoretype());
    signJar.setVerbose(this.verbose);
    signJar.setWorkingDir(getWorkDirectory());
    signJar.setTsa(sign.getTsa());
    // we do our own verification because the jarsignmojo doesn't pass
    // the log object, to the jarsignverifymojo, so lot
    signJar.setVerify(false);
    signJar.setJarPath(jarFile);
    File signedJar = new File(jarFile.getParentFile(), jarFile.getName() + ".signed");
    // If the signedJar is set to null then the jar is signed in place.
    signJar.setSignedJar(signedJar);
    long lastModified = jarFile.lastModified();
    signJar.execute();
    FileUtils.rename(signedJar, jarFile);
    jarFile.setLastModified(lastModified);
}
From source file:org.opencms.test.OpenCmsTestCase.java
/**
 * Copies the configuration files from the given folder to the "config" folder.
 *
 * @param newConfig the folder with the configuration files to copy
 */
private static void copyConfiguration(String newConfig) {
    File configDir = new File(
            getTestDataPath("WEB-INF" + File.separatorChar + CmsSystemInfo.FOLDER_CONFIG_DEFAULT));
    File configOriDir = new File(newConfig);

    FileFilter filter = FileFilterUtils.orFileFilter(FileFilterUtils.suffixFileFilter(".xml"),
            FileFilterUtils.suffixFileFilter(".properties"));

    if (configOriDir.exists()) {
        File[] oriFiles = configOriDir.listFiles(filter);
        boolean initConfigDates = false;
        if (m_dateConfigFiles == null) {
            m_dateConfigFiles = new long[oriFiles.length];
            initConfigDates = true;
        }
        for (int i = 0; i < oriFiles.length; i++) {
            File source = oriFiles[i];
            if (source.isFile()) {
                // only copy files
                String sourceName = source.getAbsolutePath();
                File target = new File(configDir, source.getName());
                if (initConfigDates) {
                    m_dateConfigFiles[i] = target.lastModified();
                }
                String targetName = target.getAbsolutePath();
                try {
                    CmsFileUtil.copy(sourceName, targetName);
                    target.setLastModified(m_dateConfigFiles[i]);
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }
    }
}