List of usage examples for java.io FilenameFilter
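FilenameFilter is a single-method interface, boolean accept(File dir, String name), passed to File.list or File.listFiles to restrict which directory entries are returned. Before the real-world examples below, here is a minimal, self-contained sketch (the directory and extension are arbitrary placeholders, not taken from any of the examples): since Java 8 the interface can be written as a lambda, and listFiles returns null when the path is not a readable directory, so the result should be checked before use.

import java.io.File;
import java.io.FilenameFilter;

public class FilenameFilterSketch {
    public static void main(String[] args) {
        // Placeholder directory; any readable directory works for this demo.
        File dir = new File(System.getProperty("user.home"));

        // FilenameFilter has a single accept(File dir, String name) method,
        // so it can be an anonymous class or, since Java 8, a lambda.
        FilenameFilter txtFilter = (parent, name) -> name.toLowerCase().endsWith(".txt");

        // listFiles returns null if the path is not a directory or cannot be read.
        File[] matches = dir.listFiles(txtFilter);
        if (matches != null) {
            for (File f : matches) {
                System.out.println(f.getName());
            }
        }
    }
}

Several of the examples below (for instance the ones that iterate over the returned array without a null check) assume the directory exists and is readable.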
From source file:edu.jhuapl.openessence.i18n.InspectableResourceBundleMessageSource.java
public Collection<Locale> getLocales() throws IOException {
    String basename = basenames[0];
    Resource resource = resourceLoader.getResource(basename + ".properties");
    if (!resource.exists()) {
        return Collections.emptyList();
    }
    File baseFile = resource.getFile();
    final String bundleName = FilenameUtils.getBaseName(baseFile.getPath());
    File[] files = resource.getFile().getParentFile().listFiles(new FilenameFilter() {
        @Override
        public boolean accept(File dir, String name) {
            return name.startsWith(bundleName + "_") && name.endsWith(".properties");
        }
    });
    List<Locale> locales = new ArrayList<Locale>();
    for (File f : files) {
        String prefix = bundleName + "_";
        String code = f.getName().substring(prefix.length()).replace(".properties", "");
        locales.add(StringUtils.parseLocaleString(code));
    }
    return locales;
}
From source file:course_generator.param.frmEditCurve.java
/**
 * Refresh the curve list
 */
private void RefreshCurveList() {
    File[] files = new File(Utils.GetHomeDir() + "/" + CgConst.CG_DIR + "/").listFiles(new FilenameFilter() {
        @Override
        public boolean accept(File dir, String name) {
            return name.toLowerCase().endsWith(".par");
        }
    });

    model.clear();
    for (int i = 0; i < files.length; i++) {
        if (files[i].isFile()) {
            model.addElement(Utils.getFileNameWithoutExtension(files[i].getName()));
        }
    }
    model.sort();
}
From source file:de.mprengemann.intellij.plugin.androidicons.settings.PluginSettings.java
private void scanForAndroidIconsAssets() {
    int colorCount = 0;
    int assetCount = 0;
    if (this.selectedAndroidIconsFile != null && this.selectedAndroidIconsFile.getCanonicalPath() != null) {
        File assetRoot = new File(this.selectedAndroidIconsFile.getCanonicalPath());
        final FilenameFilter folderFilter = new FilenameFilter() {
            @Override
            public boolean accept(File file, String s) {
                return !s.startsWith(".") && new File(file, s).isDirectory();
            }
        };
        final FilenameFilter drawableFilter = new FilenameFilter() {
            @Override
            public boolean accept(File file, String s) {
                return FilenameUtils.isExtension(s, "png") && !(new File(file, s).isDirectory());
            }
        };
        File[] colorDirs = assetRoot.listFiles(folderFilter);
        if (colorDirs != null) {
            colorCount = colorDirs.length;
            if (colorDirs.length >= 1) {
                File exColorDir = colorDirs[0];
                File[] densities = exColorDir.listFiles(folderFilter);
                if (densities != null && densities.length >= 1) {
                    File exDensity = densities[0];
                    File[] assets = exDensity.listFiles(drawableFilter);
                    if (assets != null) {
                        assetCount = assets.length;
                    }
                }
            }
        }
    }
    androidIconsFoundColorsText.setText(colorCount + " colors");
    androidIconsFoundDrawablesText.setText(assetCount + " drawables per color");
}
From source file:dk.netarkivet.common.arcrepository.TrivialArcRepositoryClient.java
/**
 * Runs a batch job on each file in the ArcRepository.
 *
 * @param job An object that implements the FileBatchJob interface. The initialize() method will be called before
 *        processing and the finish() method will be called afterwards. The process() method will be called with
 *        each File entry. An optional function postProcess() allows handling the combined results of the batchjob,
 *        e.g. summing the results, sorting, etc.
 * @param replicaId The archive to execute the job on (not used in this implementation)
 * @param args The arguments for the batchjob.
 * @return The status of the batch job after it ended.
 */
public BatchStatus batch(final FileBatchJob job, String replicaId, String... args) {
    ArgumentNotValid.checkNotNull(job, "job");
    OutputStream os = null;
    File resultFile;
    try {
        resultFile = File.createTempFile("batch", replicaId, FileUtils.getTempDir());
        os = new FileOutputStream(resultFile);
        File[] files = dir.listFiles(new FilenameFilter() {
            public boolean accept(File dir, String name) {
                Pattern filenamePattern = job.getFilenamePattern();
                return new File(dir, name).isFile()
                        && (filenamePattern == null || filenamePattern.matcher(name).matches());
            }
        });
        BatchLocalFiles batcher = new BatchLocalFiles(files);
        batcher.run(job, os);
    } catch (IOException e) {
        throw new IOFailure("Cannot perform batch job '" + job + "'", e);
    } finally {
        if (os != null) {
            try {
                os.close();
            } catch (IOException e) {
                log.info("Error closing batch output stream '" + os + "'", e);
            }
        }
    }
    return new BatchStatus(replicaId, job.getFilesFailed(), job.getNoOfFilesProcessed(),
            RemoteFileFactory.getMovefileInstance(resultFile), job.getExceptions());
}
From source file:eu.sisob.uma.crawler.ResearchersCrawlerTask.java
public void executeCallBackOfTask() {
    if (this.split) {
        FilenameFilter filter = new FilenameFilter() {
            public boolean accept(File dir, String name) {
                if (name.startsWith(ResearchersCrawlerTask.SPLITTED_PREFIX)) {
                    return true;
                } else {
                    return false;
                }
            }
        };

        // File dir = new File(middle_data_dir + File.separator + SPLITTED_XML_FILES_FOLDERNAME);
        // if(!dir.exists())
        //     dir.mkdirs();
        // else
        // {
        //     dir.delete();
        //     dir.mkdir();
        // }

        File splitted_xml_dir = new File(middledata_dir, SPLITTED_XML_FILES_FOLDERNAME);
        File[] files = splitted_xml_dir.listFiles(filter);

        try {
            File output_file_notfound_csv = new File(this.output_file_csv.getParentFile(),
                    NOTFOUND_RESEARCHERS_PREFIX + this.output_file_csv.getName());

            //boolean success = FileFormatConversor.createResearchersCSVFileFromXML(files, this.results_dir + File.separator + output_csv_filename);
            boolean success = FileFormatConversor.createResearchersCSVFileFromXML(files, this.output_file_csv,
                    output_file_notfound_csv);
            if (success)
                Logger.getLogger("root")
                        .error("Create CSV results file '" + this.output_file_csv.getName() + "'");
        } catch (FileNotFoundException ex) {
            ProjectLogger.LOGGER.error("Error creating csv from xml' " + this.output_file_csv.getName());
        } catch (IOException ex) {
            ProjectLogger.LOGGER.error("Error creating csv from xml' " + this.output_file_csv.getName());
        }

        // Build result matrix
        HashMap<String, HashMap<String, Map.Entry<Integer, Integer>>> resultsMatrix =
                new HashMap<String, HashMap<String, Map.Entry<Integer, Integer>>>();
        HashMap<String, Integer> universities_axis = new HashMap<String, Integer>();
        HashMap<String, Integer> dept_axis = new HashMap<String, Integer>();

        for (File file : files) {
            org.dom4j.io.SAXReader reader = new org.dom4j.io.SAXReader();
            org.dom4j.Document new_doc = null;
            try {
                new_doc = reader.read(file);
            } catch (DocumentException ex) {
                ProjectLogger.LOGGER.error(ex.getMessage(), ex);
                new_doc = null;
            }

            if (new_doc != null) {
                MatrixResultBuilder o = new MatrixResultBuilder(new_doc, middledata_dir, universities_axis,
                        dept_axis, resultsMatrix);
                try {
                    ProjectLogger.LOGGER.info("Begin Count - " + file.getName());
                    o.iterate();
                } catch (Exception ex) {
                    ProjectLogger.LOGGER.error(ex.getMessage(), ex);
                } finally {
                    ProjectLogger.LOGGER.info("End Count" + file.getName());
                }
            } else {
                ProjectLogger.LOGGER.error("Split file cannot be openend '" + file.getPath() + "'");
            }
        }

        //MatrixResultBuilder.writeResultsMatrix(new File(this.results_dir + File.separator + "results." + output_csv_filename), universities_axis, dept_axis, resultsMatrix);
        MatrixResultBuilder.writeResultsList(
                new File(this.resultsdata_dir, MATRIX_RESULTS_PREFIX + this.output_file_csv.getName()),
                universities_axis, dept_axis, resultsMatrix);
    } else {
        try {
            File output_file_notfound_csv = new File(this.output_file_csv.getParentFile(),
                    NOTFOUND_RESEARCHERS_PREFIX);
            boolean success = FileFormatConversor.createResearchersCSVFileFromXML(this.document,
                    this.output_file_csv, output_file_notfound_csv);
        } catch (FileNotFoundException ex) {
            ProjectLogger.LOGGER.error("Error creating csv from xml' " + this.output_file_csv.getName());
        } catch (IOException ex) {
            ProjectLogger.LOGGER.error("Error creating csv from xml' " + this.output_file_csv.getName());
        }
    }
    setFinished(true);
}
From source file:gr.abiss.calipso.config.CalipsoConfigurer.java
private void configureCalipso(ConfigurableListableBeanFactory beanFactory) throws Exception {
    String calipsoHome = null;
    InputStream is = this.getClass().getResourceAsStream("/calipso-init.properties");
    Properties props = loadProps(is);
    logger.info("found 'calipso-init.properties' on classpath, processing...");
    calipsoHome = props.getProperty("calipso.home");
    if (calipsoHome.equals("${calipso.home}")) {
        calipsoHome = null;
    }
    if (StringUtils.isBlank(calipsoHome)) {
        logger.info(
                "valid 'calipso.home' property not available in 'calipso-init.properties', trying system properties.");
        calipsoHome = System.getProperty("calipso.home");
        if (StringUtils.isNotBlank(calipsoHome)) {
            logger.info("'calipso.home' property initialized from system properties as '" + calipsoHome + "'");
        }
    }
    if (StringUtils.isBlank(calipsoHome)) {
        logger.info(
                "valid 'calipso.home' property not available in system properties, trying servlet init paramters.");
        calipsoHome = servletContext.getInitParameter("calipso.home");
        if (StringUtils.isNotBlank(calipsoHome)) {
            logger.info("Servlet init parameter 'calipso.home' exists: '" + calipsoHome + "'");
        }
    }
    if (StringUtils.isBlank(calipsoHome)) {
        calipsoHome = System.getProperty("user.home") + "/.calipso";
        logger.warn("Servlet init paramter 'calipso.home' does not exist. Will use 'user.home' directory '"
                + calipsoHome + "'");
    }
    if (StringUtils.isNotBlank(calipsoHome) && !calipsoHome.equals("${calipso.home}")) {
        logger.info(
                "'calipso.home' property initialized from 'calipso-init.properties' as '" + calipsoHome + "'");
    }
    //======================================================================
    FilenameFilter ff = new FilenameFilter() {
        @Override
        public boolean accept(File dir, String name) {
            return name.startsWith("messages_") && name.endsWith(".properties");
        }
    };
    //File[] messagePropsFiles = jtracInitResource.getFile().getParentFile().listFiles(ff);
    String locales = props.getProperty("calipso.locales", "en,el,ja");
    // for(File f : messagePropsFiles) {
    //     int endIndex = f.getName().indexOf('.');
    //     String localeCode = f.getName().substring(9, endIndex);
    //     locales += "," + localeCode;
    // }
    logger.info("locales available configured are '" + locales + "'");
    props.setProperty("calipso.locales", locales);
    //======================================================================
    File calipsoHomeDir = new File(calipsoHome);
    createIfNotExisting(calipsoHomeDir);
    props.setProperty("calipso.home", calipsoHomeDir.getAbsolutePath());
    //======================================================================
    File attachmentsFile = new File(calipsoHome + "/attachments");
    createIfNotExisting(attachmentsFile);
    File indexesFile = new File(calipsoHome + "/indexes");
    createIfNotExisting(indexesFile);
    //======================================================================
    File propsFile = new File(calipsoHomeDir, "calipso.properties");
    if (!propsFile.exists()) {
        logger.info("properties file does not exist, creating '" + propsFile.getPath() + "'");
        propsFile.createNewFile();
        OutputStream os = new FileOutputStream(propsFile);
        Writer out = new PrintWriter(os);
        try {
            out.write("database.driver=org.hsqldb.jdbcDriver\n");
            out.write("database.url=jdbc:hsqldb:file:${calipso.home}/db/calipso\n");
            out.write("database.username=sa\n");
            out.write("database.password=\n");
            out.write("hibernate.dialect=org.hibernate.dialect.HSQLDialect\n");
            out.write("hibernate.show_sql=false\n");
            // Can be used to set mysql as default, commenting out
            // to preserve HSQLDB as default
            // out.write("database.driver=com.mysql.jdbc.Driver\n");
            // out.write("database.url=jdbc:mysql://localhost/calipso21\n");
            // out.write("database.username=root\n");
            // out.write("database.password=\n");
            // out.write("hibernate.dialect=org.hibernate.dialect.MySQLDialect\n");
            // out.write("hibernate.show_sql=false\n");
        } finally {
            out.close();
            os.close();
        }
        logger.info("HSQLDB will be used. Finished creating '" + propsFile.getPath() + "'");
    } else {
        logger.info("'calipso.properties' file exists: '" + propsFile.getPath() + "'");
    }
    //======================================================================
    String version = getClass().getPackage().getImplementationVersion();
    String timestamp = "0000";
    // ClassPathResource versionResource = new ClassPathResource("calipso-version.properties");
    // if(versionResource.exists()) {
    //     logger.info("found 'calipso-version.properties' on classpath, processing...");
    //     Properties versionProps = loadProps(versionResource.getFile());
    //     version = versionProps.getProperty("calipso.version");
    //     timestamp = versionProps.getProperty("calipso.timestamp");
    // } else {
    //     logger.info("did not find 'calipso-version.properties' on classpath");
    // }
    props.setProperty("calipso.version", version);
    props.setProperty("calipso.timestamp", timestamp);
    /*
     * TODO: A better way (default value) to check the database should be used for Apache DBCP.
     * The current "SELECT...FROM DUAL" only works on Oracle (and MySQL).
     * Other databases also support "SELECT 1+1" as query
     * (e.g. PostgreSQL, Hypersonic 2 (H2), MySQL, etc.).
     */
    props.setProperty("database.validationQuery", "SELECT 1 FROM DUAL");
    props.setProperty("ldap.url", "");
    props.setProperty("ldap.activeDirectoryDomain", "");
    props.setProperty("ldap.searchBase", "");
    props.setProperty("database.datasource.jndiname", "");
    // set default properties that can be overridden by user if required
    setProperties(props);
    // finally set the property that spring is expecting, manually
    FileSystemResource fsr = new FileSystemResource(propsFile);
    setLocation(fsr);
    Log.info("Calipso configured, calling postProcessBeanFactory with:" + beanFactory);
}
From source file:ddf.test.itests.platform.TestSolrCommands.java
private Set<File> getBackupDirectories(String coreName) {
    File solrDir = getSolrDataPath(coreName);
    File[] backupDirs;
    backupDirs = solrDir.listFiles(new FilenameFilter() {
        @Override
        public boolean accept(File dir, String name) {
            // Only match on snapshot.<timestamp> directories, filter out snapshot_metadata
            return name.startsWith("snapshot.");
        }
    });
    return Sets.newHashSet(backupDirs);
}
From source file:com.sshtools.j2ssh.configuration.ConfigurationLoader.java
/**
 * <p>
 * Initializes the J2SSH api with a specified configuration context. This
 * method will attempt to load the Bouncycastle JCE if it detects the java
 * version is 1.3.1.
 * </p>
 *
 * @param force force the configuration to load even if a configuration
 *        already exists
 * @param context the configuration context to load
 *
 * @throws ConfigurationException if the configuration is invalid or if a
 *         security provider is not available
 */
public static void initialize(boolean force, ConfigurationContext context) throws ConfigurationException {
    try {
        String javaversion = System.getProperty("java.version");
        log.info("JAVA version is " + javaversion);

        if (javaversion.startsWith("1.3")) {
            boolean provider = false;

            for (int i = 0; i < Security.getProviders().length; i++) {
                log.info(Security.getProviders()[i].getName() + " security provider found");

                if (Security.getProviders()[i].getClass().getName()
                        .equals("org.bouncycastle.jce.provider.BouncyCastleProvider")) {
                    provider = true;
                }
            }

            if (provider == false) {
                log.info("Attempting to load the bouncycastle jce provider");

                // Attempt to load a JCE Provider - replace or remove these statements
                // depending upon how you want to initialize your JCE provider
                Class cls;
                cls = Class.forName("org.bouncycastle.jce.provider.BouncyCastleProvider");
                java.security.Security.addProvider((java.security.Provider) cls.newInstance());
            }
        }
    } catch (Exception ex) {
        log.info("Failed to load the bouncycastle jce provider", ex);

        if (java.security.Security.getProviders().length <= 0) {
            throw new ConfigurationException(
                    "There are no security providers available; install jce-jdk13-*.jar available from http://www.bouncycastle.org");
        } else {
            log.info("An existing provider has been detected");
        }
    }

    synchronized (initializationLock) {
        if (initialized && !force) {
            return;
        }

        if (ext == null) {
            // We need to setup the dynamic class loading with the extension jars
            ext = new ExtensionClassLoader(ConfigurationLoader.class.getClassLoader());

            try {
                // Jar files to add to the classpath
                File dir = new File(homedir + "lib" + File.separator + "ext");

                // Filter for .jar files
                FilenameFilter filter = new FilenameFilter() {
                    public boolean accept(File dir, String name) {
                        return name.endsWith(".jar");
                    }
                };

                // Get the list
                File[] children = dir.listFiles(filter);
                List classpath = new Vector();

                if (children != null) {
                    for (int i = 0; i < children.length; i++) {
                        // Get filename of file or directory
                        log.info("Extension " + children[i].getAbsolutePath() + " being added to classpath");
                        ext.add(children[i]);
                    }
                }
            } catch (AccessControlException ex) {
                log.info("Cannot access lib/ext directory, extension classes will not be loaded");
            }
        }

        SshCipherFactory.initialize();
        SshPrivateKeyFormatFactory.initialize();
        SshPublicKeyFormatFactory.initialize();
        SshCompressionFactory.initialize();
        SshHmacFactory.initialize();
        SshKeyPairFactory.initialize();
        SshKeyExchangeFactory.initialize();
        context.initialize();
        contexts.add(context);
        initialized = true;
    }
}
From source file:com.coinblesk.server.utilTest.Client.java
public void deleteWallet() {
    File[] walletFiles = tmpDir.listFiles(new FilenameFilter() {
        @Override
        public boolean accept(File dir, String name) {
            return name.startsWith(p2shAddress.toString());
        }
    });
    for (File f : walletFiles) {
        f.delete();
    }
    tmpDir.delete();
}
From source file:fr.gael.dhus.service.SystemService.java
@PreAuthorize("hasRole('ROLE_SYSTEM_MANAGER')")
public List<Date> getDumpDatabaseList() {
    List<Date> timestamps = new ArrayList<Date>();
    File path_file = new File(cfgManager.getDatabaseConfiguration().getDumpPath());
    File[] lst = path_file.listFiles(new FilenameFilter() {
        @Override
        public boolean accept(File dir, String name) {
            if (name.startsWith("dump-"))
                return true;
            return false;
        }
    });
    if (lst == null) {
        return timestamps;
    }
    for (File f : lst) {
        String stimesamp = f.getName().replaceAll("dump-(.*)", "$1");
        long timestamp = Long.parseLong(stimesamp);
        Date date = new Date(timestamp);
        timestamps.add(date);
    }
    Collections.sort(timestamps, Collections.reverseOrder());
    return timestamps;
}