List of usage examples for java.io.FilenameFilter
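The project-specific examples below all follow the same basic pattern: pass a FilenameFilter to File.listFiles() or File.list(), and the runtime calls accept(dir, name) once per directory entry, keeping only the entries for which it returns true. The following minimal sketch (the directory path and extension are placeholders) shows the anonymous-class form used in most examples and the equivalent lambda form available since Java 8, when FilenameFilter became a functional interface.

import java.io.File;
import java.io.FilenameFilter;

public class FilenameFilterSketch {
    public static void main(String[] args) {
        File dir = new File("/tmp");                   // placeholder directory
        // Anonymous-class form, as used in most examples on this page
        File[] xmlFiles = dir.listFiles(new FilenameFilter() {
            @Override
            public boolean accept(File d, String name) {
                return name.endsWith(".xml");          // keep only *.xml entries
            }
        });
        // Lambda form: FilenameFilter is a functional interface since Java 8
        File[] sameFiles = dir.listFiles((d, name) -> name.endsWith(".xml"));
        // listFiles returns null if dir is not a directory or an I/O error occurs
        System.out.println(xmlFiles == null ? 0 : xmlFiles.length);
        System.out.println(sameFiles == null ? 0 : sameFiles.length);
    }
}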
From source file:com.mendhak.gpslogger.common.PrefsIO.java
public Dialog ChooseFileDialog() {
    File myDir = new File(defPath);
    if (!myDir.exists())
        return null;
    Utilities.LogDebug("Asking user the file to use for import of settings");
    File[] enumeratedFiles = myDir.listFiles(new FilenameFilter() {
        @Override
        public boolean accept(File dir, String name) {
            return name.endsWith("." + extension);
        }
    });
    final int len = enumeratedFiles.length;
    List<String> fileList = new ArrayList<String>(len);
    for (File f : enumeratedFiles) {
        fileList.add(f.getName());
    }
    fileList.add(context.getString(R.string.Browse));
    final String[] files = fileList.toArray(new String[fileList.size()]);
    AlertDialog.Builder builder = new AlertDialog.Builder(context);
    builder.setTitle(context.getString(R.string.SelectFile));
    builder.setItems(files, new DialogInterface.OnClickListener() {
        @Override
        public void onClick(DialogInterface dialog, int item) {
            if (item < len) {
                curFileName = defPath + File.separator + files[item];
                ImportFile();
            } else
                BrowseFile();
        }
    });
    builder.setCancelable(true);
    return builder.create();
}
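One caveat with the snippet above: File.listFiles() can return null even when the directory exists (for example if defPath points to a regular file, or an I/O error occurs), so enumeratedFiles.length could throw a NullPointerException. The sketch below is not part of PrefsIO; it only shows a null-safe variant of the same extension filter, with a hypothetical class name.

import java.io.File;

// Hypothetical helper: a null-safe variant of the extension filtering above.
// "extension" is a placeholder for the caller-supplied suffix.
final class SettingsFiles {
    static File[] listByExtension(File dir, String extension) {
        File[] matches = dir.listFiles((d, name) -> name.endsWith("." + extension));
        // listFiles returns null if dir is not a directory or an I/O error occurs,
        // so normalize to an empty array before callers iterate over it
        return matches != null ? matches : new File[0];
    }
}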
From source file:com.openedit.users.filesystem.FileSystemUserManager.java
public List listUserNames() {
    List all = new ArrayList();
    ContentItem item = getPageManager().getRepository().get(getUserDirectory());
    File users = new File(item.getAbsolutePath());
    File[] files = users.listFiles(new FilenameFilter() {
        public boolean accept(File dir, String name) {
            return name.endsWith(".xml");
        }
    });
    if (files != null) {
        for (int i = 0; i < files.length; i++) {
            String username = PathUtilities.extractPageName(files[i].getName());
            all.add(username);
        }
    }
    // Temporary
    item = getPageManager().getRepository().get("/WEB-INF/users");
    users = new File(item.getAbsolutePath());
    files = users.listFiles(new FilenameFilter() {
        public boolean accept(File dir, String name) {
            return name.endsWith(".xml");
        }
    });
    if (files != null) {
        for (int i = 0; i < files.length; i++) {
            String username = PathUtilities.extractPageName(files[i].getName());
            if (!all.contains(username)) {
                all.add(username);
            }
        }
    }
    return all;
}
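The method above builds the same stateless ".xml" filter twice. As a design note only (the class and field names below are invented, not part of OpenEdit), such a filter can be defined once and shared between both calls:

import java.io.File;
import java.io.FilenameFilter;

// Sketch only: the ".xml" filter above is stateless, so it can be shared
// instead of instantiated twice. The class and field names are hypothetical.
final class UserFileFilters {
    static final FilenameFilter XML_FILES = new FilenameFilter() {
        public boolean accept(File dir, String name) {
            return name.endsWith(".xml");
        }
    };
}

// usage: File[] files = users.listFiles(UserFileFilters.XML_FILES);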
From source file:com.aegiswallet.utils.BasicUtils.java
public static String[] loadFileList() {
    String[] fileList = null;
    File filesPath = Constants.WALLET_BACKUP_DIRECTORY;
    if (Constants.WALLET_BACKUP_DIRECTORY.exists() && Constants.WALLET_BACKUP_DIRECTORY.isDirectory()) {
        final String filePrefix = "AegisWalletBackup";
        try {
            filesPath.mkdirs();
        } catch (SecurityException e) {
            Log.e(TAG, "Could not write to SD card - " + e.toString());
        }
        if (filesPath.exists()) {
            FilenameFilter filter = new FilenameFilter() {
                public boolean accept(File dir, String filename) {
                    File sel = new File(dir, filename);
                    return filename.contains(filePrefix) || sel.isDirectory();
                }
            };
            fileList = filesPath.list(filter);
        } else {
            fileList = new String[0];
        }
    }
    return fileList;
}
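Note that this example uses File.list(FilenameFilter), which returns the matching names as Strings, whereas listFiles(FilenameFilter) returns File objects; both consult the same filter. A minimal sketch of the difference, using a placeholder directory and the same prefix-or-directory test:

import java.io.File;
import java.io.FilenameFilter;

// Minimal sketch (placeholder path): list() yields String names,
// listFiles() yields File objects, but both consult the same FilenameFilter.
final class ListVersusListFiles {
    public static void main(String[] args) {
        File backupDir = new File("/tmp/backups");            // placeholder path
        FilenameFilter filter = (dir, name) ->
                name.contains("AegisWalletBackup") || new File(dir, name).isDirectory();
        String[] names = backupDir.list(filter);              // names only
        File[] files = backupDir.listFiles(filter);           // full File handles
        System.out.println((names == null ? 0 : names.length) + " names, "
                + (files == null ? 0 : files.length) + " files");
    }
}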
From source file:azkaban.execapp.FlowRunnerManager.java
private Map<Pair<Integer, Integer>, ProjectVersion> loadExistingProjects() {
    Map<Pair<Integer, Integer>, ProjectVersion> allProjects = new HashMap<Pair<Integer, Integer>, ProjectVersion>();
    for (File project : projectDirectory.listFiles(new FilenameFilter() {
        String pattern = "[0-9]+\\.[0-9]+";

        @Override
        public boolean accept(File dir, String name) {
            return name.matches(pattern);
        }
    })) {
        if (project.isDirectory()) {
            try {
                String fileName = new File(project.getAbsolutePath()).getName();
                int projectId = Integer.parseInt(fileName.split("\\.")[0]);
                int versionNum = Integer.parseInt(fileName.split("\\.")[1]);
                ProjectVersion version = new ProjectVersion(projectId, versionNum, project);
                allProjects.put(new Pair<Integer, Integer>(projectId, versionNum), version);
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    }
    return allProjects;
}
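The filter above calls String.matches(), which recompiles the regular expression for every directory entry. A possible alternative (a sketch, not Azkaban's actual code) is to precompile the java.util.regex.Pattern once and reuse it in accept():

import java.io.File;
import java.io.FilenameFilter;
import java.util.regex.Pattern;

// Sketch, not Azkaban's code: precompile the "projectId.version" regex once
// instead of letting String.matches recompile it for every directory entry.
final class ProjectDirFilter implements FilenameFilter {
    private static final Pattern NAME = Pattern.compile("[0-9]+\\.[0-9]+");

    @Override
    public boolean accept(File dir, String name) {
        return NAME.matcher(name).matches();
    }
}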
From source file:com.openteach.diamond.container.Container.java
private static void buildBundlesString(String parent, String child, StringBuilder sb, boolean first) {
    File file = new File(parent, child);
    if (file.exists()) {
        String filePath = file.getAbsolutePath();
        String[] pluginFiles = file.list(new FilenameFilter() {
            public boolean accept(File dir, String name) {
                if (name.endsWith(".plugin")) {
                    return true;
                }
                return false;
            }
        });
        StringBuilder osgiBundlesBuilder = new StringBuilder();
        int pluginFilesSize = pluginFiles.length;
        if (pluginFilesSize > 0) {
            osgiBundlesBuilder.append(filePath + File.separator + pluginFiles[0]);
            osgiBundlesBuilder.append("@start");
        }
        for (int i = 1; i < pluginFilesSize; i++) {
            osgiBundlesBuilder.append(",");
            osgiBundlesBuilder.append(filePath + File.separator + pluginFiles[i]);
            osgiBundlesBuilder.append("@start");
        }
        if (first) {
            sb.append(osgiBundlesBuilder.toString());
        } else {
            sb.append("," + osgiBundlesBuilder.toString());
        }
    } else {
        if (first) {
            throw new RuntimeException("diamond startup failed: " + parent + " does not contain " + child + "!");
        }
    }
}
From source file:com.cyberway.issue.crawler.admin.CrawlJobHandler.java
/**
 * Find the state.job file in the job directory.
 * @param jobDir Directory to look in.
 * @return Full path to 'state.job' file or null if none found.
 */
protected File getStateJobFile(final File jobDir) {
    // Need to find job file ('state.job').
    File[] jobFiles = jobDir.listFiles(new FilenameFilter() {
        public boolean accept(File dir, String name) {
            return name.toLowerCase().endsWith(".job") && (new File(dir, name)).canRead();
        }
    });
    return (jobFiles.length == 1) ? jobFiles[0] : null;
}
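This filter combines a case-insensitive suffix test with a readability check on the candidate file. If lowercasing a copy of every name is a concern, String.regionMatches offers an allocation-free case-insensitive comparison; the helper below is only an illustrative sketch, not part of the crawler code above, and its names are made up.

// Sketch of a reusable, allocation-free case-insensitive suffix test
// (the class and method names are made up for illustration).
final class Suffixes {
    static boolean endsWithIgnoreCase(String name, String suffix) {
        int offset = name.length() - suffix.length();
        return offset >= 0 && name.regionMatches(true, offset, suffix, 0, suffix.length());
    }
}

// usage inside accept(): return Suffixes.endsWithIgnoreCase(name, ".job")
//                               && new File(dir, name).canRead();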
From source file:dk.netarkivet.common.distribute.arcrepository.LocalArcRepositoryClient.java
/**
 * Runs a batch job on each file in the ArcRepository.
 *
 * @param job An object that implements the FileBatchJob interface. The
 * initialize() method will be called before processing and the finish()
 * method will be called afterwards. The process() method will be called
 * with each File entry. An optional function postProcess() allows handling
 * the combined results of the batchjob, e.g. summing the results, sorting,
 * etc.
 *
 * @param replicaId The archive to execute the job on.
 * @param args The arguments for the batchjob. This can be null.
 * @return The status of the batch job after it ended.
 * @throws ArgumentNotValid If the job is null or the replicaId is either
 * null or the empty string.
 * @throws IOFailure If a problem occurs during processing the batchjob.
 */
@Override
public BatchStatus batch(final FileBatchJob job, String replicaId, String... args)
        throws ArgumentNotValid, IOFailure {
    ArgumentNotValid.checkNotNull(job, "FileBatchJob job");
    ArgumentNotValid.checkNotNullOrEmpty(replicaId, "String replicaId");
    OutputStream os = null;
    File resultFile;
    try {
        resultFile = File.createTempFile("batch", replicaId, FileUtils.getTempDir());
        os = new FileOutputStream(resultFile);
        List<File> files = new ArrayList<File>();
        final FilenameFilter filenameFilter = new FilenameFilter() {
            public boolean accept(File dir, String name) {
                Pattern filenamePattern = job.getFilenamePattern();
                return new File(dir, name).isFile()
                        && (filenamePattern == null || filenamePattern.matcher(name).matches());
            }
        };
        for (File dir : storageDirs) {
            File[] filesInDir = dir.listFiles(filenameFilter);
            if (filesInDir != null) {
                files.addAll(Arrays.asList(filesInDir));
            }
        }
        BatchLocalFiles batcher = new BatchLocalFiles(files.toArray(new File[files.size()]));
        batcher.run(job, os);
    } catch (IOException e) {
        throw new IOFailure("Cannot perform batch '" + job + "'", e);
    } finally {
        if (os != null) {
            try {
                os.close();
            } catch (IOException e) {
                log.warn("Error closing batch output stream '" + os + "'", e);
            }
        }
    }
    return new BatchStatus(replicaId, job.getFilesFailed(), job.getNoOfFilesProcessed(),
            new FileRemoteFile(resultFile), job.getExceptions());
}
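The batch method reuses a single FilenameFilter across several storage directories and treats a null filename Pattern as "accept every regular file". Distilled into a standalone class with made-up names, that filter looks roughly like this sketch:

import java.io.File;
import java.io.FilenameFilter;
import java.util.regex.Pattern;

// Sketch with made-up names: a filter that accepts regular files whose
// names match an optional Pattern; a null pattern means "accept all files".
final class OptionalPatternFilter implements FilenameFilter {
    private final Pattern pattern; // may be null

    OptionalPatternFilter(Pattern pattern) {
        this.pattern = pattern;
    }

    public boolean accept(File dir, String name) {
        return new File(dir, name).isFile()
                && (pattern == null || pattern.matcher(name).matches());
    }
}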
From source file:gov.nasa.ensemble.common.io.FileUtilities.java
/**
 * @param directory
 * @param extensions
 *            the list of acceptable extensions
 * @return A list of fileNames in the directory with the specified extension.
 */
public static File[] getAllFilesWithExtensions(File directory, final String... extensions) {
    File[] ret = directory.listFiles(new FilenameFilter() {
        @Override
        public boolean accept(File dir, String name) {
            boolean ret = false;
            for (String extension : extensions)
                ret |= name.endsWith(extension);
            return ret;
        }
    });
    return ret == null ? new File[0] : ret;
}
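The varargs filter above keeps OR-ing results even after a match is found. Since FilenameFilter is a functional interface, the same idea can be written as a lambda that stops at the first matching extension; the class and method names below are hypothetical.

import java.io.File;

// Sketch only: the same varargs idea written as a lambda (FilenameFilter is a
// functional interface) that short-circuits on the first matching extension.
final class ExtensionListing {
    static File[] withExtensions(File directory, String... extensions) {
        File[] found = directory.listFiles((dir, name) -> {
            for (String extension : extensions) {
                if (name.endsWith(extension)) {
                    return true;   // stop at the first match
                }
            }
            return false;
        });
        return found == null ? new File[0] : found;
    }
}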
From source file:de.clusteval.tools.ClustQualityEval.java
public ClustQualityEval(final String absRepoPath, final String dataConfigName, final String... qualityMeasures)
        throws RepositoryAlreadyExistsException, InvalidRepositoryException, RepositoryConfigNotFoundException,
        RepositoryConfigurationException, UnknownClusteringQualityMeasureException, InterruptedException,
        UnknownDataSetFormatException, UnknownGoldStandardFormatException, GoldStandardNotFoundException,
        GoldStandardConfigurationException, DataSetConfigurationException, DataSetNotFoundException,
        DataSetConfigNotFoundException, GoldStandardConfigNotFoundException, NoDataSetException,
        DataConfigurationException, DataConfigNotFoundException, NumberFormatException, RunResultParseException,
        ConfigurationException, RegisterException, UnknownContextException, UnknownParameterType, IOException,
        UnknownRunResultFormatException, InvalidRunModeException, UnknownParameterOptimizationMethodException,
        NoOptimizableProgramParameterException, UnknownProgramParameterException, InvalidConfigurationFileException,
        NoRepositoryFoundException, InvalidOptimizationParameterException, RunException, UnknownDataStatisticException,
        UnknownProgramTypeException, UnknownRProgramException, IncompatibleParameterOptimizationMethodException,
        UnknownDistanceMeasureException, UnknownRunStatisticException, UnknownDataSetTypeException,
        UnknownRunDataStatisticException, UnknownDataPreprocessorException,
        IncompatibleDataSetConfigPreprocessorException, IncompatibleContextException,
        InvalidDataSetFormatVersionException, RNotAvailableException, FormatConversionException {
    super();
    ClustevalBackendServer.logLevel(Level.INFO);
    ClustevalBackendServer.getBackendServerConfiguration().setNoDatabase(true);
    ClustevalBackendServer.getBackendServerConfiguration().setCheckForRunResults(false);
    this.log = LoggerFactory.getLogger(this.getClass());

    final Repository parent = new Repository(
            new File(absRepoPath).getParentFile().getParentFile().getAbsolutePath(), null);
    parent.initialize();
    this.repo = new RunResultRepository(absRepoPath, parent);
    this.repo.initialize();

    List<ParameterOptimizationResult> result = new ArrayList<ParameterOptimizationResult>();
    final ParameterOptimizationRun run = (ParameterOptimizationRun) ParameterOptimizationResult
            .parseFromRunResultFolder(parent, new File(absRepoPath), result, false, false, false);

    this.dataConfig = this.repo.getStaticObjectWithName(DataConfig.class, dataConfigName);

    final List<ClusteringQualityMeasure> measures = new ArrayList<ClusteringQualityMeasure>();
    if (qualityMeasures.length == 0) {
        log.error("Please add at least one quality measure to the command line arguments.");
        this.repo.terminateSupervisorThread();
        return;
    }
    for (String measureSimpleName : qualityMeasures) {
        measures.add(ClusteringQualityMeasure.parseFromString(this.repo, measureSimpleName));
    }

    Set<Thread> threads = new HashSet<Thread>();
    System.out.println("Program configurations:");
    System.out.println(run.getProgramConfigs());
    for (final ProgramConfig pc : run.getProgramConfigs()) {
        // get the dataset for this program config
        DataSet dsIn = Parser.parseFromFile(DataSet.class,
                new File(FileUtils.buildPath(absRepoPath, "inputs", pc.toString() + "_" + dataConfig.toString(),
                        dataConfig.getDatasetConfig().getDataSet().getMajorName(),
                        dataConfig.getDatasetConfig().getDataSet().getMinorName())));
        // get dataset in standard format
        final DataSet ds = dsIn.preprocessAndConvertTo(run.getContext(), run.getContext().getStandardInputFormat(),
                dataConfig.getDatasetConfig().getConversionInputToStandardConfiguration(),
                dataConfig.getDatasetConfig().getConversionStandardToInputConfiguration());
        ds.loadIntoMemory();

        Thread t = new Thread() {
            public void run() {
                try {
                    DataConfig dc = dataConfig.clone();
                    dc.getDatasetConfig().setDataSet(ds);
                    File f = new File(FileUtils.buildPath(repo.getBasePath(), "clusters"));
                    File[] childs = f.listFiles(new FilenameFilter() {
                        /*
                         * (non-Javadoc)
                         *
                         * @see java.io.FilenameFilter#accept(java.io.File, java.lang.String)
                         */
                        @Override
                        public boolean accept(File dir, String name) {
                            return name.startsWith(pc.getName() + "_" + dataConfig.getName())
                                    && name.endsWith(".results.conv");
                        }
                    });
                    // printer = new MyProgressPrinter(childs.length, true);
                    ((ch.qos.logback.classic.Logger) LoggerFactory.getLogger(Logger.ROOT_LOGGER_NAME))
                            .info("Assessing qualities of clusterings ...");

                    final Map<Long, ClusteringQualitySet> qualsMap = new HashMap<Long, ClusteringQualitySet>();
                    for (File clusteringFile : childs) {
                        try {
                            Clustering cl = Clustering
                                    .parseFromFile(repo, clusteringFile.getAbsoluteFile(), true).getSecond();
                            // only recalculate for those, for which the measure hasn't been evaluated
                            List<ClusteringQualityMeasure> toEvaluate = new ArrayList<ClusteringQualityMeasure>(
                                    measures);
                            try {
                                if (cl.getQualities() != null)
                                    toEvaluate.removeAll(cl.getQualities().keySet());
                            } catch (NullPointerException e) {
                                System.out.println(clusteringFile);
                                throw e;
                            }
                            ClusteringQualitySet quals = new ClusteringQualitySet();
                            // evaluate the new quality measures
                            if (!toEvaluate.isEmpty()) {
                                quals.putAll(cl.assessQuality(dc, toEvaluate));
                                System.out.println(quals);
                                // write the new qualities into the results.qual file
                                for (ClusteringQualityMeasure m : quals.keySet())
                                    FileUtils.appendStringToFile(
                                            clusteringFile.getAbsolutePath().replaceFirst(".results.conv",
                                                    ".results.qual"),
                                            String.format("%s\t%s", m.toString(), quals.get(m).getValue()) + "\n");
                            }
                            long iterationNumber = Long.parseLong(clusteringFile.getName()
                                    .replaceFirst(String.format("%s_%s.", pc.toString(), dc.toString()), "")
                                    .replaceFirst(".results.conv", ""));
                            // store all qualities of the clustering in one set
                            ClusteringQualitySet allQuals = new ClusteringQualitySet();
                            if (cl.getQualities() != null)
                                allQuals.putAll(cl.getQualities());
                            allQuals.putAll(quals);
                            qualsMap.put(iterationNumber, allQuals);
                        } catch (IOException e) {
                            e.printStackTrace();
                        } catch (UnknownGoldStandardFormatException e) {
                            e.printStackTrace();
                        } catch (UnknownDataSetFormatException e) {
                            e.printStackTrace();
                        } catch (InvalidDataSetFormatVersionException e) {
                            e.printStackTrace();
                        }
                    }

                    // update complete quality file
                    // we want to have the same lines conserving the same NT and skipped iterations
                    // infos (missing lines), therefore we parse the old file first, iterate over all
                    // lines and write the same lines but add the additional infos (if there are any)
                    TextFileParser parser = new TextFileParser(
                            FileUtils.buildPath(repo.getBasePath(), "clusters",
                                    String.format("%s_%s.results.qual.complete", pc.toString(), dc.toString())),
                            new int[0], new int[0],
                            FileUtils.buildPath(repo.getBasePath(), "clusters",
                                    String.format("%s_%s.results.qual.complete.new", pc.toString(), dc.toString())),
                            OUTPUT_MODE.STREAM) {

                        protected List<ClusteringQualityMeasure> measures;

                        /*
                         * (non-Javadoc)
                         *
                         * @see utils.parse.TextFileParser#processLine(java.lang.String[], java.lang.String[])
                         */
                        @Override
                        protected void processLine(String[] key, String[] value) {
                        }

                        /*
                         * (non-Javadoc)
                         *
                         * @see utils.parse.TextFileParser#getLineOutput(java.lang.String[], java.lang.String[])
                         */
                        @Override
                        protected String getLineOutput(String[] key, String[] value) {
                            StringBuffer sb = new StringBuffer();
                            // sb.append(combineColumns(value));
                            sb.append(combineColumns(Arrays.copyOf(value, 2)));
                            if (currentLine == 0) {
                                sb.append(outSplit);
                                sb.append(combineColumns(Arrays.copyOfRange(value, 2, value.length)));
                                measures = new ArrayList<ClusteringQualityMeasure>();
                                for (int i = 2; i < value.length; i++)
                                    try {
                                        measures.add(ClusteringQualityMeasure.parseFromString(parent, value[i]));
                                    } catch (UnknownClusteringQualityMeasureException e) {
                                        e.printStackTrace();
                                        this.terminate();
                                    }
                                // get measures, which are not in the complete file header
                                if (qualsMap.keySet().iterator().hasNext()) {
                                    Set<ClusteringQualityMeasure> requiredMeasures = qualsMap
                                            .get(qualsMap.keySet().iterator().next()).keySet();
                                    requiredMeasures.removeAll(measures);
                                    for (ClusteringQualityMeasure m : requiredMeasures) {
                                        sb.append(outSplit);
                                        sb.append(m.toString());
                                    }
                                    measures.addAll(requiredMeasures);
                                }
                            } else if (value[0].contains("*")) {
                                // do nothing
                            } else {
                                long iterationNumber = Long.parseLong(value[0]);
                                ClusteringQualitySet quals = qualsMap.get(iterationNumber);
                                boolean notTerminated = value[3].equals("NT");
                                // for (int i = value.length - 2; i < measures.size(); i++) {
                                //     sb.append(outSplit);
                                //     if (notTerminated)
                                //         sb.append("NT");
                                //     else
                                //         sb.append(quals.get(measures.get(i)));
                                // }
                                for (int i = 0; i < measures.size(); i++) {
                                    sb.append(outSplit);
                                    if (notTerminated)
                                        sb.append("NT");
                                    else if (quals.containsKey(measures.get(i)))
                                        sb.append(quals.get(measures.get(i)));
                                    else
                                        sb.append(value[i + 2]);
                                }
                            }
                            sb.append(System.getProperty("line.separator"));
                            return sb.toString();
                        }
                    };
                    try {
                        parser.process();
                    } catch (Exception e) {
                        e.printStackTrace();
                    }
                    ds.unloadFromMemory();
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        };
        threads.add(t);
        t.start();
    }

    // add the new clustering quality measures into the run config file
    TextFileParser p = new TextFileParser(run.getAbsolutePath(), null, null, false, "",
            run.getAbsolutePath() + ".new", OUTPUT_MODE.STREAM) {
        /*
         * (non-Javadoc)
         *
         * @see utils.parse.TextFileParser#processLine(java.lang.String[], java.lang.String[])
         */
        @Override
        protected void processLine(String[] key, String[] value) {
        }

        /*
         * (non-Javadoc)
         *
         * @see utils.parse.TextFileParser#getLineOutput(java.lang.String[], java.lang.String[])
         */
        @Override
        protected String getLineOutput(String[] key, String[] value) {
            StringBuilder sb = new StringBuilder();
            sb.append(value[0]);
            if (value[0].contains("qualityMeasures = "))
                for (ClusteringQualityMeasure m : measures)
                    if (!value[0].contains(m.toString())) {
                        sb.append(",");
                        sb.append(m.toString());
                    }
            sb.append(System.getProperty("line.separator"));
            return sb.toString();
        }
    }.process();

    for (Thread t : threads)
        t.join();
    System.exit(0);
}
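For readers maintaining code like the example above, two related notes, given only as a sketch with placeholder class and parameter names: the prefix/suffix filter passed to listFiles() can be written as a lambda, and java.nio.file offers Files.newDirectoryStream with a glob when only a name pattern such as the suffix needs to be matched.

import java.io.File;
import java.io.IOException;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.Path;

// Sketch with placeholder names: the prefix/suffix filter used above written
// as a lambda, plus a java.nio.file glob that constrains only the suffix.
final class ClusterFileListing {
    static File[] resultFiles(File clustersDir, String prefix) {
        return clustersDir.listFiles(
                (dir, name) -> name.startsWith(prefix) && name.endsWith(".results.conv"));
    }

    static void printResultFiles(Path clustersDir) throws IOException {
        // The glob matches against the file name; here only the suffix is constrained.
        try (DirectoryStream<Path> stream = Files.newDirectoryStream(clustersDir, "*.results.conv")) {
            for (Path p : stream) {
                System.out.println(p.getFileName());
            }
        }
    }
}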