List of usage examples for java.io.File.equals
public boolean equals(Object obj)
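java.io.File.equals compares abstract pathnames the way the underlying file system compares them (case-sensitively on most Unix systems, case-insensitively on Windows); it does not resolve relative segments or symbolic links, which is why several of the examples below compare canonical files instead. A minimal sketch of the difference, with made-up file names:

import java.io.File;
import java.io.IOException;

public class FileEqualsDemo {
    public static void main(String[] args) throws IOException {
        File a = new File("data/report.txt");     // hypothetical relative path
        File b = new File("./data/report.txt");   // same target, different path text

        // equals() compares the abstract pathname text, so these differ
        System.out.println(a.equals(b));          // false

        // getCanonicalFile() resolves "." and "..", so the canonical forms match
        System.out.println(a.getCanonicalFile().equals(b.getCanonicalFile())); // true
    }
}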
From source file:org.apache.maven.plugin.resources.remote.ProcessRemoteResourcesMojo.java
protected boolean copyResourceIfExists(File file, String relFileName, VelocityContext context)
        throws IOException, MojoExecutionException {
    for (Resource resource : resources) {
        File resourceDirectory = new File(resource.getDirectory());

        if (!resourceDirectory.exists()) {
            continue;
        }

        // TODO - really should use the resource includes/excludes and name mapping
        File source = new File(resourceDirectory, relFileName);
        File templateSource = new File(resourceDirectory, relFileName + TEMPLATE_SUFFIX);

        if (!source.exists() && templateSource.exists()) {
            source = templateSource;
        }

        if (source.exists() && !source.equals(file)) {
            if (source == templateSource) {
                Reader reader = null;
                Writer writer = null;
                DeferredFileOutputStream os =
                        new DeferredFileOutputStream(velocityFilterInMemoryThreshold, file);
                try {
                    if (encoding != null) {
                        reader = new InputStreamReader(new FileInputStream(source), encoding);
                        writer = new OutputStreamWriter(os, encoding);
                    } else {
                        reader = ReaderFactory.newPlatformReader(source);
                        writer = WriterFactory.newPlatformWriter(os);
                    }

                    velocity.evaluate(context, writer, "", reader);
                } catch (ParseErrorException e) {
                    throw new MojoExecutionException("Error rendering velocity resource: " + source, e);
                } catch (MethodInvocationException e) {
                    throw new MojoExecutionException("Error rendering velocity resource: " + source, e);
                } catch (ResourceNotFoundException e) {
                    throw new MojoExecutionException("Error rendering velocity resource: " + source, e);
                } finally {
                    IOUtil.close(writer);
                    IOUtil.close(reader);
                }
                fileWriteIfDiffers(os);
            } else if (resource.isFiltering()) {
                MavenFileFilterRequest req = setupRequest(resource, source, file);

                try {
                    fileFilter.copyFile(req);
                } catch (MavenFilteringException e) {
                    throw new MojoExecutionException("Error filtering resource: " + source, e);
                }
            } else {
                FileUtils.copyFile(source, file);
            }

            // exclude the original (so eclipse doesn't complain about duplicate resources)
            resource.addExclude(relFileName);

            return true;
        }
    }
    return false;
}
From source file:org.atomserver.core.filestore.FileBasedContentStorage.java
private void cleanUpToCollection(EntryDescriptor descriptor, File cleanDir) throws IOException {
    // under no circumstances do we want to clean the collection directory.
    File stopDir = pathFromRoot(descriptor.getWorkspace(), descriptor.getCollection());
    if (log.isTraceEnabled()) {
        log.trace("cleaning " + cleanDir + ", stopping at " + stopDir);
    }

    // if the stop dir is not an ancestor of the clean dir, we are in trouble.
    if (!cleanDir.getAbsolutePath().startsWith(stopDir.getAbsolutePath())) {
        throw new AtomServerException("the directory to clean (" + cleanDir + ") is not "
                + "within the collection of the provided entry (" + descriptor + ").");
    }

    // as long as we are underneath the stop dir, and we are pointing at a directory that has
    // no files at all, we should delete the directory and walk up to our parent.
    while (!cleanDir.equals(stopDir) && cleanDir.isDirectory() && cleanDir.listFiles().length == 0) {
        if (log.isTraceEnabled()) {
            log.trace("deleting empty directory " + cleanDir);
        }
        FileUtils.deleteDirectory(cleanDir);
        cleanDir = cleanDir.getParentFile();
    }
}
From source file:madkitgroupextension.export.Export.java
public static void updateFTP(FTPClient ftpClient, String _directory_dst, File _directory_src,
        File _current_file_transfert) throws IOException, TransfertException {
    ftpClient.changeWorkingDirectory("./");
    FTPListParseEngine ftplpe = ftpClient.initiateListParsing(_directory_dst);
    FTPFile files[] = ftplpe.getFiles();

    File current_file_transfert = _current_file_transfert;
    try {
        for (File f : _directory_src.listFiles()) {
            if (f.isDirectory()) {
                if (!f.getName().equals("./") && !f.getName().equals("../")) {
                    if (_current_file_transfert != null) {
                        if (!_current_file_transfert.getCanonicalPath().startsWith(f.getCanonicalPath()))
                            continue;
                        else
                            _current_file_transfert = null;
                    }
                    boolean found = false;
                    for (FTPFile ff : files) {
                        if (f.getName().equals(ff.getName())) {
                            if (ff.isFile()) {
                                ftpClient.deleteFile(_directory_dst + ff.getName());
                            } else
                                found = true;
                            break;
                        }
                    }
                    if (!found) {
                        ftpClient.changeWorkingDirectory("./");
                        if (!ftpClient.makeDirectory(_directory_dst + f.getName() + "/"))
                            System.err.println(
                                    "Impossible to create directory " + _directory_dst + f.getName() + "/");
                    }
                    updateFTP(ftpClient, _directory_dst + f.getName() + "/", f, _current_file_transfert);
                }
            } else {
                if (_current_file_transfert != null) {
                    // note: _current_file_transfert is a File and getCanonicalPath() returns a String,
                    // so this File.equals(String) comparison is always false
                    if (!_current_file_transfert.equals(f.getCanonicalPath()))
                        continue;
                    else
                        _current_file_transfert = null;
                }
                current_file_transfert = _current_file_transfert;
                FTPFile found = null;
                for (FTPFile ff : files) {
                    if (f.getName().equals(ff.getName())) {
                        if (ff.isDirectory()) {
                            FileTools.removeDirectory(ftpClient, _directory_dst + ff.getName());
                        } else
                            found = ff;
                        break;
                    }
                }
                if (found == null || (found.getTimestamp().getTimeInMillis() - f.lastModified()) < 0
                        || found.getSize() != f.length()) {
                    FileInputStream fis = new FileInputStream(f);
                    ftpClient.setFileType(FTP.BINARY_FILE_TYPE);
                    if (!ftpClient.storeFile(_directory_dst + f.getName(), fis))
                        System.err.println("Impossible to send file: " + _directory_dst + f.getName());
                    fis.close();
                    for (FTPFile ff : ftplpe.getFiles()) {
                        if (f.getName().equals(ff.getName())) {
                            f.setLastModified(ff.getTimestamp().getTimeInMillis());
                            break;
                        }
                    }
                }
            }
        }
    } catch (IOException e) {
        throw new TransfertException(current_file_transfert, null, e);
    }
    for (FTPFile ff : files) {
        if (!ff.getName().equals(".") && !ff.getName().equals("..")) {
            boolean found = false;
            for (File f : _directory_src.listFiles()) {
                if (f.getName().equals(ff.getName()) && f.isDirectory() == ff.isDirectory()) {
                    found = true;
                    break;
                }
            }
            if (!found) {
                if (ff.isDirectory()) {
                    FileTools.removeDirectory(ftpClient, _directory_dst + ff.getName());
                } else {
                    ftpClient.deleteFile(_directory_dst + ff.getName());
                }
            }
        }
    }
}
From source file:org.commonjava.maven.ext.manip.io.PomIO.java
/**
 * Read {@link Model} instances by parsing the POM directly. This is useful to escape some post-processing that happens when the
 * {@link MavenProject#getOriginalModel()} instance is set.
 *
 * @param executionRoot the top level pom file.
 * @param peeked a collection of poms resolved from the top level file.
 * @return a collection of Projects
 * @throws ManipulationException if an error occurs.
 */
private List<Project> readModelsForManipulation(File executionRoot, final List<PomPeek> peeked)
        throws ManipulationException {
    final List<Project> projects = new ArrayList<>();

    for (final PomPeek peek : peeked) {
        final File pom = peek.getPom();

        // Sucks, but we have to brute-force reading in the raw model.
        // The effective-model building, below, has a tantalizing getRawModel()
        // method on the result, BUT this seems to return models that have
        // the plugin versions set inside profiles...so they're not entirely
        // raw.
        Model raw = null;
        InputStream in = null;
        try {
            in = new FileInputStream(pom);
            raw = new MavenXpp3Reader().read(in);
        } catch (final IOException | XmlPullParserException e) {
            throw new ManipulationException("Failed to build model for POM: %s.\n--> %s", e, pom,
                    e.getMessage());
        } finally {
            closeQuietly(in);
        }

        if (raw == null) {
            continue;
        }

        final Project project = new Project(pom, raw);
        project.setInheritanceRoot(peek.isInheritanceRoot());

        if (executionRoot.equals(pom)) {
            logger.debug("Setting execution root to {} with file {}"
                    + (project.isInheritanceRoot() ? " and is the inheritance root. " : ""), project, pom);
            project.setExecutionRoot();

            try {
                if (FileUtils.readFileToString(pom).contains(MODIFIED_BY)) {
                    project.setIncrementalPME(true);
                }
            } catch (final IOException e) {
                throw new ManipulationException("Failed to read POM: %s", e, pom);
            }
        }

        projects.add(project);
    }

    return projects;
}
From source file:de.ailis.xadrian.frames.MainFrame.java
/**
 * Returns the complex editor for the specified file. If no complex editor
 * is open for this file then null is returned.
 *
 * @param file
 *            The file
 * @return The complex editor or null if none is open for this file.
 */
public ComplexEditor getEditor(final File file) {
    for (int i = this.tabs.getTabCount() - 1; i >= 0; i -= 1) {
        final Component component = this.tabs.getComponentAt(i);
        if (!(component instanceof ComplexEditor))
            continue;
        final ComplexEditor editor = (ComplexEditor) component;
        final File editorFile = editor.getFile();
        if (editorFile == null)
            continue;
        try {
            if (file.getCanonicalFile().equals(editorFile.getCanonicalFile()))
                return editor;
        } catch (final IOException e) {
            if (file.equals(editorFile))
                return editor;
        }
    }
    return null;
}
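The try/catch pattern above (prefer canonical files, fall back to the plain File.equals comparison when canonicalization fails) recurs in several of these examples and can be pulled into a small utility. A minimal sketch using only java.io; the class and method names here are chosen for illustration:

import java.io.File;
import java.io.IOException;

final class FileCompare {

    /** True if both File objects refer to the same path, resolving "." / ".." and symlinks when possible. */
    static boolean samePath(File a, File b) {
        try {
            return a.getCanonicalFile().equals(b.getCanonicalFile());
        } catch (IOException e) {
            // canonicalization can fail (e.g. an unreachable drive); fall back to the lexical comparison
            return a.equals(b);
        }
    }
}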
From source file:com.vladium.emma.report.html.ReportGenerator.java
public void process(final IMetaData mdata, final ICoverageData cdata, final SourcePathCache cache,
        final IProperties properties) throws EMMARuntimeException {
    initialize(mdata, cdata, cache, properties);

    m_pageTitle = null;
    m_footerBottom = null;

    File outDir = m_settings.getOutDir();
    if ((outDir == null) /* this should never happen */
            || (outDir.equals(new File(Property.getSystemProperty("user.dir", ""))))) {
        outDir = new File("coverage");
        m_settings.setOutDir(outDir);
    }

    long start = 0, end;
    final boolean trace1 = m_log.atTRACE1();

    if (trace1)
        start = System.currentTimeMillis();

    {
        m_queue = new LinkedList();
        m_reportIDNamespace = new IDGenerator(mdata.size());

        for (m_queue.add(m_view.getRoot()); !m_queue.isEmpty();) {
            final IItem head = (IItem) m_queue.removeFirst();
            head.accept(this, null);
        }

        m_reportIDNamespace = null;
    }

    if (trace1) {
        end = System.currentTimeMillis();
        m_log.trace1("process", "[" + getType() + "] report generated in " + (end - start) + " ms");
    }
}
From source file:com.sikulix.core.SX.java
static void sxinit(String[] args) {
    if (null == sxInstance) {
        sxInstance = "SX INIT DONE";

        //<editor-fold desc="*** shutdown hook">
        Runtime.getRuntime().addShutdownHook(new Thread() {
            @Override
            public void run() {
                if (shouldLock && isSet(isRunningFile)) {
                    try {
                        isRunningFile.close();
                    } catch (IOException ex) {
                    }
                }
                for (File f : getFile(getSYSTEMP()).listFiles(new FilenameFilter() {
                    @Override
                    public boolean accept(File dir, String name) {
                        File aFile = new File(dir, name);
                        boolean isObsolete = false;
                        long lastTime = aFile.lastModified();
                        if (lastTime == 0) {
                            return false;
                        }
                        if (lastTime < ((new Date().getTime()) - 7 * 24 * 60 * 60 * 1000)) {
                            isObsolete = true;
                        }
                        if (name.contains("BridJExtractedLibraries") && isObsolete) {
                            return true;
                        }
                        if (name.toLowerCase().contains("sikuli")) {
                            if (name.contains("Sikulix_")) {
                                if (isObsolete || aFile.equals(getFile(getSXTEMP()))) {
                                    return true;
                                }
                            } else {
                                return true;
                            }
                        }
                        return false;
                    }
                })) {
                    trace("cleanTemp: " + f.getName());
                    Content.deleteFileOrFolder("#" + f.getAbsolutePath());
                }
            }
        });
        //</editor-fold>

        // TODO Content class must be initialized for use in shutdown
        Content.start();

        //<editor-fold desc="*** sx lock (not active)">
        if (shouldLock) {
            File fLock = new File(getSYSTEMP(), "SikuliX2-i-s-r-u-n-n-i-n-g");
            String shouldTerminate = "";
            try {
                fLock.createNewFile();
                isRunningFile = new FileOutputStream(fLock);
                if (isNull(isRunningFile.getChannel().tryLock())) {
                    shouldTerminate = "SikuliX2 already running";
                    isRunningFile = null;
                }
            } catch (Exception ex) {
                shouldTerminate = "cannot access SX2 lock: " + ex.toString();
                isRunningFile = null;
            }
            if (isSet(shouldTerminate)) {
                terminate(1, shouldTerminate);
            }
        }
        //</editor-fold>

        // *** command line args
        if (!isNull(args)) {
            checkArgs(args);
        }

        trace("!sxinit: entry");

        // *** get SX options
        loadOptions();

        // *** get the version info
        getSXVERSION();

        // *** check how we are running
        sxRunningAs();

        // *** get monitor setup
        globalGetMonitors();

        //TODO i18n SXGlobal_sxinit_complete=complete %.3f
        trace("!sxinit: exit %.3f", (new Date().getTime() - startTime) / 1000.0f);
    }
}
From source file:org.geoserver.config.GeoServerLoader.java
/**
 * Reads the legacy (1.x) catalog from disk.
 */
Catalog readLegacyCatalog(File f, XStreamPersister xp) throws Exception {
    Catalog catalog2 = new CatalogImpl();
    catalog2.setResourceLoader(resourceLoader);

    // add listener now as a converter which will convert from the old style
    // data directory to the new
    GeoServerPersister p = new GeoServerPersister(resourceLoader, xp);
    if (!legacy) {
        catalog2.addListener(p);
    }

    LegacyCatalogImporter importer = new LegacyCatalogImporter(catalog2);
    importer.setResourceLoader(resourceLoader);
    importer.imprt(resourceLoader.getBaseDirectory());

    if (!legacy) {
        catalog2.removeListener(p);
    }

    if (!legacy) {
        // copy files from old feature type directories to new
        File featureTypesDir = resourceLoader.find("featureTypes");
        if (featureTypesDir != null) {
            LegacyCatalogReader creader = new LegacyCatalogReader();
            creader.read(f);
            Map<String, Map<String, Object>> dataStores = creader.dataStores();

            for (File featureTypeDir : featureTypesDir.listFiles()) {
                if (!featureTypeDir.isDirectory()) {
                    continue;
                }

                File featureTypeInfo = new File(featureTypeDir, "info.xml");
                if (!featureTypeInfo.exists()) {
                    continue;
                }

                LegacyFeatureTypeInfoReader reader = new LegacyFeatureTypeInfoReader();
                reader.read(featureTypeInfo);

                Map<String, Object> dataStore = dataStores.get(reader.dataStore());
                if (dataStore == null) {
                    continue;
                }

                String namespace = (String) dataStore.get("namespace");
                File destFeatureTypeDir = resourceLoader.find("workspaces", namespace, reader.dataStore(),
                        reader.name());
                if (destFeatureTypeDir != null) {
                    // copy all the files over
                    for (File file : featureTypeDir.listFiles()) {
                        if (file.isFile() && !featureTypeInfo.equals(file)) {
                            FileUtils.copyFile(file, new File(destFeatureTypeDir, file.getName()));
                        }
                    }
                }
            }
        }

        // rename catalog.xml
        f.renameTo(new File(f.getParentFile(), "catalog.xml.old"));
    }

    return catalog2;
}
From source file:github.daneren2005.dsub.service.DownloadServiceImpl.java
private synchronized void doPlay(final DownloadFile downloadFile, final int position, final boolean start) {
    try {
        downloadFile.setPlaying(true);
        final File file = downloadFile.isCompleteFileAvailable() ? downloadFile.getCompleteFile()
                : downloadFile.getPartialFile();
        isPartial = file.equals(downloadFile.getPartialFile());
        downloadFile.updateModificationDate();

        mediaPlayer.setOnCompletionListener(null);
        mediaPlayer.reset();
        setPlayerState(IDLE);
        mediaPlayer.setAudioStreamType(AudioManager.STREAM_MUSIC);
        String dataSource = file.getPath();
        if (isPartial) {
            if (proxy == null) {
                proxy = new StreamProxy(this);
                proxy.start();
            }
            dataSource = String.format("http://127.0.0.1:%d/%s", proxy.getPort(),
                    URLEncoder.encode(dataSource, Constants.UTF_8));
            Log.i(TAG, "Data Source: " + dataSource);
        } else if (proxy != null) {
            proxy.stop();
            proxy = null;
        }
        mediaPlayer.setDataSource(dataSource);
        setPlayerState(PREPARING);
        mediaPlayer.setOnBufferingUpdateListener(new MediaPlayer.OnBufferingUpdateListener() {
            public void onBufferingUpdate(MediaPlayer mp, int percent) {
                Log.i(TAG, "Buffered " + percent + "%");
                if (percent == 100) {
                    mediaPlayer.setOnBufferingUpdateListener(null);
                }
            }
        });
        mediaPlayer.setOnPreparedListener(new MediaPlayer.OnPreparedListener() {
            public void onPrepared(MediaPlayer mediaPlayer) {
                try {
                    setPlayerState(PREPARED);

                    synchronized (DownloadServiceImpl.this) {
                        if (position != 0) {
                            Log.i(TAG, "Restarting player from position " + position);
                            mediaPlayer.seekTo(position);
                        }
                        cachedPosition = position;

                        if (start) {
                            mediaPlayer.start();
                            setPlayerState(STARTED);
                        } else {
                            setPlayerState(PAUSED);
                        }
                    }

                    lifecycleSupport.serializeDownloadQueue();
                } catch (Exception x) {
                    handleError(x);
                }
            }
        });

        setupHandlers(downloadFile, isPartial);

        mediaPlayer.prepareAsync();
    } catch (Exception x) {
        handleError(x);
    }
}