List of usage examples for java.io.FileFilter
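Every example below follows the same basic pattern: implement java.io.FileFilter's single accept(File) method and hand the filter to File.listFiles(FileFilter). As a minimal, self-contained sketch of that pattern (the directory path and the ".xml" extension are placeholders; since FileFilter is a functional interface, the anonymous classes used in the examples can also be written as lambdas on Java 8+):

import java.io.File;
import java.io.FileFilter;

public class FileFilterSketch {
    public static void main(String[] args) {
        File dir = new File("/tmp");  // placeholder directory
        // Accept only regular files whose name ends in ".xml"
        FileFilter xmlOnly = f -> f.isFile() && f.getName().toLowerCase().endsWith(".xml");
        // listFiles returns null if dir is not a readable directory
        File[] matches = dir.listFiles(xmlOnly);
        if (matches != null) {
            for (File f : matches) {
                System.out.println(f.getAbsolutePath());
            }
        }
    }
}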
From source file:com.symbian.driver.remoting.packaging.build.PackageBuilder.java
/** * Builds a Test Pacakge./*from w w w. j ava 2 s . c o m*/ * * @see com.symbian.driver.remoting.packaging.build.Builder#Build(java.lang.String) * @param aTestPackage * String : a test package path name. */ public void Build(File aTestPackage) { // get TestDriver properties String lReposRoot = null; String lXmlRoot = null; String lEpocRoot = null; File xmlZip = new File("Xml.zip"); File reposZip = new File("Repository.zip"); File depZip = new File("Dependencies.zip"); File manifest = new File("Manifest.mf"); File StatZip = new File("Stat.zip"); String lPlatform = null; // winscw String lBuild = null; // urel String lSuiteFragment = null; // full URI boolean lPlatsec = false; boolean lTestexec = false; String lKernel = null; String lBuildNumber = null; URI lSuite = null; List<URI> lUriList = null; try { TDConfig CONFIG = TDConfig.getInstance(); lEpocRoot = CONFIG.getPreferenceFile(TDConfig.EPOC_ROOT).getCanonicalPath(); // m: lReposRoot = CONFIG.getPreferenceFile(TDConfig.REPOSITORY_ROOT).getCanonicalPath(); // d:/td2_testing/repository File lXmlR = CONFIG.getPreferenceFile(TDConfig.XML_ROOT); lXmlRoot = lXmlR.getCanonicalPath(); // m:/epoc32/TestDriver/Plattest lPlatform = CONFIG.getPreference(TDConfig.PLATFORM); lBuild = CONFIG.getPreference(TDConfig.VARIANT); //lPlatsec = CONFIG.isPreference(TDConfig.PLATSEC); lPlatsec = !CONFIG.isPreference(TDConfig.SYS_BIN); lTestexec = CONFIG.isPreference(TDConfig.TEST_EXECUTE); lKernel = CONFIG.getPreference(TDConfig.KERNEL); lBuildNumber = CONFIG.getPreference(TDConfig.BUILD_NUMBER); lSuite = CONFIG.getPreferenceURI(TDConfig.ENTRY_POINT_ADDRESS); lSuiteFragment = lSuite.fragment(); // build the .driver path if (lSuite.fileExtension() == null) { // CASE: no .driver String lRoot = lSuite.toString(); int lLast = (lRoot.indexOf('.') > 0) ? lRoot.indexOf('.') : lRoot.length(); File lRootDriver = new File(lXmlR, lRoot.substring(1, lLast).concat(".driver")); if (lRootDriver.isFile()) { lSuite = URI.createFileURI(lRootDriver.getCanonicalPath()); // the fragment is correct because x.driver exists } else { // x.driver does not exist File[] lDriverFiles = lXmlR.listFiles(new FileFilter() { public boolean accept(File lFile) { if (lFile.getName().endsWith(".driver")) { return true; } return false; } }); if (lDriverFiles.length == 1) { // run the one lSuite = URI.createFileURI(lDriverFiles[0].getCanonicalPath()); lSuiteFragment = lDriverFiles[0].getName().replaceAll("\\.driver", "") + "." + lSuiteFragment; } else { // more complain + return LOGGER.log(Level.SEVERE, "There are either no or more than one .driver files in : " + lXmlR.toString()); return; } } } else { lSuite = URI.createFileURI(lSuite.devicePath()); } } catch (IOException lIOException) { LOGGER.log(Level.SEVERE, "Could not get values from config", lIOException); return; } catch (ParseException e) { LOGGER.log(Level.SEVERE, "Could not get values from config", e); return; } if (lBuildNumber == null) { LOGGER.log(Level.SEVERE, "Could not get the build number."); return; } if (lKernel == null) { LOGGER.log(Level.SEVERE, "Kernel is not available in the configuration."); return; } if (lSuiteFragment == null) { LOGGER.log(Level.SEVERE, "A Suite/test must be provided."); return; } try { // get all refrences. 
Task lTask = ResourceLoader.load(lSuite); lUriList = new ArrayList<URI>(); for (Iterator lIterator = lTask.eAllContents(); lIterator.hasNext();) { Task lReferenceTask = (Task) new DriverSwitch().doSwitch((EObject) lIterator.next()); if (lReferenceTask != null) { lUriList.add(lReferenceTask.eResource().getURI()); } } } catch (IOException lIOException) { LOGGER.log(Level.SEVERE, "Could not get all refrences from " + lSuite.toFileString(), lIOException); return; } // where to pick the repository from File lRepos = new File((lReposRoot + File.separator + lBuildNumber + File.separator + lPlatform + File.separator + lBuild + File.separator + lSuiteFragment.replace('.', File.separatorChar)) .replaceAll("\\\\+", "\\\\")); if (!lRepos.isDirectory()) { LOGGER.log(Level.SEVERE, "Repository root " + lRepos.toString() + " does not exist."); return; } // where to pick the dependencies from String lDep = (lEpocRoot + File.separator + EPOC32 + File.separator + "release" + File.separator + lPlatform + File.separator + lBuild + File.separator).replaceAll("\\\\+", "\\\\"); if (!new File(lDep).isDirectory()) { LOGGER.log(Level.SEVERE, "Epoc tree " + lDep + " does not exist."); return; } File lXml = new File(lSuite.toFileString()); if (!lXml.exists()) { LOGGER.log(Level.SEVERE, "XML file " + lXml.toString() + " does not exist"); return; } Zipper lZip = new Zipper(); // Add The main xml file. lZip.addFile(lXml); // Add files referenced made by the main file. for (URI lItem : lUriList) { // get path part only path like ${variable}\... are allowed String lItemPathString = lItem.path(); // remove \/ from beginning lItemPathString = lItemPathString.replaceFirst("^[\\/]+", ""); if (lItemPathString.startsWith("\\$\\{\\w+\\}")) { LOGGER.log(Level.SEVERE, "Reference : " + lItem.toFileString() + " is not allowed."); return; } // do the substitution of variables String lRealFilePath = ModelUtils.subsituteVariables(lItemPathString); File lRealFile = new File(lRealFilePath); if (!lRealFile.isFile()) { LOGGER.log(Level.WARNING, "XML file " + lItem.toFileString() + " does not exist"); } else { lZip.addFile(lRealFile, lItemPathString); } } // Zip the xml file lZip.zip(xmlZip, lXmlRoot); lZip.clear(); // Zip the repository // pick the files from lRepos downward through the fragment File lStart = new File( (lReposRoot + File.separator + lBuildNumber + File.separator + lPlatform + File.separator + lBuild) .replaceAll("\\\\+", "\\\\")); String[] lFragmentBits = lSuiteFragment.split("\\."); File[] lFiles = null; for (int i = 0; i < lFragmentBits.length; i++) { lStart = new File(lStart, lFragmentBits[i]); if (i == lFragmentBits.length - 1) { lFiles = listFilesAsArray(lStart, null, true); // the whole // tree } else { lFiles = listFilesAsArray(lStart, null, false); // just files } for (int j = 0; j < lFiles.length; j++) { if (lFiles[j].isFile()) { if (lPlatsec) { if (lFiles[j].getName().toLowerCase().endsWith(".pkg") || lFiles[j].getName().toLowerCase().endsWith(".sis")) { lZip.addFile(lFiles[j]); } } else { if (!lFiles[j].getName().toLowerCase().endsWith(".sis") && !lFiles[j].getName().toLowerCase().endsWith(".pkg")) { lZip.addFile(lFiles[j]); } } } } } lZip.zip(reposZip, lReposRoot); lZip.clear(); // Zip dependencies final String lEpoc32DataZSystemData = (lEpocRoot + File.separator + com.symbian.driver.core.environment.ILiterals.EPOC32 + File.separator + com.symbian.driver.core.environment.ILiterals.DATA + File.separator + "z" + File.separator + com.symbian.driver.core.environment.ILiterals.SYSTEM + File.separator + 
com.symbian.driver.core.environment.ILiterals.DATA + File.separator).replaceAll("\\\\+", "\\\\"); // add testexecute dependencies. if (!Epoc.isTargetEmulator(lPlatform)) { String[] tefFiles = null; Map<String, String> tefOptFiles = null; final String lEpoc32Tools = EPOC32 + File.separator + "tools" + File.separator; if (lTestexec) { try { String[] lTefFiles = TDConfig.getInstance().getTEFDependencies(); //add optional files into the array //<file,condition> pair in lTefOptFiles, get set of keys/files tefOptFiles = TDConfig.getInstance().getTEFOptionalDependencies(); Set<String> lTefOptFilesSet = tefOptFiles.keySet(); String[] lTefOptFiles = lTefOptFilesSet.toArray(new String[tefOptFiles.size()]); tefFiles = new String[lTefFiles.length + lTefOptFiles.length]; System.arraycopy(lTefFiles, 0, tefFiles, 0, lTefFiles.length); System.arraycopy(lTefOptFiles, 0, tefFiles, lTefFiles.length, lTefOptFiles.length); } catch (ParseException lParseException) { LOGGER.log(Level.SEVERE, lParseException.getMessage(), lParseException); } for (int i = 0; i < tefFiles.length; i++) { if (tefFiles[i].endsWith(".ini")) { lZip.addFile(new File(lEpoc32DataZSystemData, tefFiles[i])); } else { File lFile = new File(lDep, tefFiles[i]); if (lFile.exists()) { lZip.addFile(lFile); } else { LOGGER.warning("File does not exist : " + lFile); } } } } if (lPlatsec && Epoc.is9x(lBuildNumber)) { lZip.addFile(new File(lEpocRoot, lEpoc32Tools + "makesis.exe")); lZip.addFile(new File(lEpocRoot, lEpoc32Tools + "signsis.exe")); } lZip.addFile(new File(lEpocRoot, EPOC32 + File.separator + "data" + File.separator + "buildinfo.txt")); LOGGER.fine("Generating " + depZip.getName()); lZip.zip(depZip, lEpocRoot); lZip.clear(); // add stat.dll, symbianUsb.dll, buildinfo.txt, stat.ini, // makesis.exe and signsis.exe: // the following files should be installed with TD2 lZip.addFile(new File(lEpoc32DataZSystemData, "stat.ini")); lZip.addFile(new File(lEpocRoot, lEpoc32Tools + "stat" + File.separator + "stat.dll")); if (Epoc.is92plus(lBuildNumber)) { lZip.addFile(new File(lEpocRoot, lEpoc32Tools + "stat" + File.separator + "SymbianUsb.dll")); } LOGGER.fine("Generating " + StatZip.getName()); lZip.zip(StatZip, lEpocRoot); lZip.clear(); } // generate manifest file: Properties pr = new Properties(); String header = "TestDriver Package file v2.0"; pr.setProperty("suite", lSuiteFragment); pr.setProperty("xmldriver", lXml.getName()); pr.setProperty("platform", lPlatform); pr.setProperty("build", lBuild); pr.setProperty("buildNumber", lBuildNumber); pr.setProperty("kernel", lKernel); try { FileOutputStream mf = new FileOutputStream(manifest); pr.store(mf, header); mf.close(); } catch (IOException lE) { LOGGER.log(Level.SEVERE, "Error while generating manifest", lE); } // zip everything together if (xmlZip.exists()) { lZip.addFile(xmlZip); } if (reposZip.exists()) { lZip.addFile(reposZip); } if (depZip.exists()) { lZip.addFile(depZip); } if (manifest.exists()) { lZip.addFile(manifest); } if (StatZip.exists()) { lZip.addFile(StatZip); } File f = null; if (aTestPackage == null) { // No test package name defined before (first run) String lSuiteEnd = lSuiteFragment; if (lSuiteFragment.indexOf(".") != -1) { String[] lPaths = lSuiteFragment.split("\\."); lSuiteEnd = lPaths[lPaths.length - 1]; } f = new File((lReposRoot + File.separator + lSuiteEnd + ".tpkg").replaceAll("\\\\+", "\\\\")); } else { f = aTestPackage; } lZip.zip(f, ""); if (f.exists()) { LOGGER.info("Generated test package file v2.0: " + f.getAbsolutePath()); } else { LOGGER.severe("Error 
generating test package file v2.0: " + f.getName()); } if (!xmlZip.delete() || !reposZip.delete() || !manifest.delete() || !depZip.delete() || !StatZip.delete()) { LOGGER.fine("Could not delete Zip files"); } }
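The FileFilter in the PackageBuilder example above only comes into play when no explicit .driver file is named: it scans the XML root for files ending in .driver and continues only if exactly one candidate is found. Isolated from the surrounding build logic, that check reduces to roughly the following sketch (lXmlR, LOGGER and Level are the same names used in the example):

File[] driverFiles = lXmlR.listFiles(new FileFilter() {
    public boolean accept(File file) {
        // keep only *.driver files
        return file.getName().endsWith(".driver");
    }
});
if (driverFiles == null || driverFiles.length != 1) {
    // zero or several candidates: the build cannot pick one automatically
    LOGGER.log(Level.SEVERE, "There are either no or more than one .driver files in : " + lXmlR);
    return;
}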
From source file:com.fimagena.filepicker.FilePickerFragment.java
@Override
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setHasOptionsMenu(true);
    setRetainInstance(true);

    File mnt = new File("/storage");
    if (!mnt.exists())
        mnt = new File("/mnt");
    mFileSystemRoots = mnt.listFiles(new FileFilter() {
        @Override
        public boolean accept(File f) {
            try {
                File canon = (f.getParent() == null) ? f
                        : new File(f.getParentFile().getCanonicalFile(), f.getName());
                boolean isSymlink = !canon.getCanonicalFile().equals(canon.getAbsoluteFile());
                return f.isDirectory() && f.exists() && f.canWrite() && !f.isHidden() && !isSymlink;
            } catch (Exception e) {
                return false;
            }
        }
    });

    mAdapter = new FileItemAdapter(this, mParams, mStartPath);
}
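The canonical-vs-absolute path comparison above is the classic pre-NIO way to detect symlinks. On runtimes where java.nio.file is available, the same accept logic can be expressed more directly; a hedged sketch of that alternative (same conditions as the example, minus the exists() check made redundant by listFiles):

FileFilter writableRealDirs = f -> {
    // Files.isSymbolicLink inspects the entry itself instead of comparing canonical paths
    boolean isSymlink = java.nio.file.Files.isSymbolicLink(f.toPath());
    return f.isDirectory() && f.canWrite() && !f.isHidden() && !isSymlink;
};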
From source file:ctd.services.getCleanData2.java
public String cleanData() { String message = ""; String timestamp = new java.util.Date().getTime() + ""; try {//from www . j a v a 2 s .com CleanDataResult result = new CleanDataResult(); String error_message = ""; //get parameters. ResourceBundle res = ResourceBundle.getBundle("settings"); ResourceBundle cdf_list = ResourceBundle.getBundle("cdf"); //Base directory ftp folder: Here the temporary subfolders are found for each set of CEL-files, and the final assaytoken-based folder. String ftp_folder = res.getString("ws.upload_folder"); String rscript_cleandata = res.getString("ws.rscript_cleandata"); String rscript = res.getString("ws.rscript"); //db String db_username = res.getString("db.username"); String db_password = res.getString("db.password"); String db_database = res.getString("db.database"); //retrieve the information on the assignment from the database SessionFactory sessionFactory = new Configuration().configure().buildSessionFactory(); Session session = sessionFactory.openSession(); Transaction tr = session.beginTransaction(); Query q = session.createQuery( "from Ticket where password='" + getPassword() + "' AND ctd_REF='" + getCTD_REF() + "'"); Ticket ticket = null; String closed = ""; if (q.list().size() != 0) { ticket = (Ticket) q.list().get(0); closed = ticket.getClosed(); } if (ticket == null) { error_message = "Ticket password and CTD_REF don't match."; } if (closed.equals("yes")) { error_message = "Ticket is already used for normalization of these CEL-files."; ticket = null; } if (ticket != null) { //get the folder String folder = ticket.getFolder(); String zip_folder = ftp_folder + folder; //get contents File dir = new File(zip_folder); //find the zip file. File[] files = dir.listFiles(new FileFilter() { public boolean accept(File pathname) { return pathname.isFile(); } }); String cel_zip_file = ""; String zip_file = ""; String gct_file = ""; for (int i = 0; i < files.length; i++) { String file = files[i].getName(); if (file.contains("zip")) { // Add the timestamp to the zip files[i].renameTo(new File(zip_folder + "/" + timestamp + "_zip.zip")); file = timestamp + "_zip.zip"; cel_zip_file = file; zip_file = zip_folder + "/" + cel_zip_file; gct_file = zip_folder + "/" + timestamp + "_gctfile"; } } Process p3 = Runtime.getRuntime().exec("chmod 777 " + zip_file); ////////////////////////////////////////////////////////////////// //Do a system call to normalize. R. 
(zip_folder zip_file gct_file rscript) String args = rscript + " --verbose --vanilla " + rscript_cleandata + " -i" + zip_file + " -o" + gct_file + " -w" + zip_folder; Logger.getLogger(getTicket.class.getName()).log(Level.INFO, timestamp + ": Running: " + args); Process p = Runtime.getRuntime().exec(args); // Check if CEL files are unzipped allready // This is done by checking every 5 seconds for the existence of a .chip file // This is a bad way of doing this, in future versions of CTD // the output of the R scripts should be parsed boolean do_loop = true; while (do_loop) { File dir2 = new File(zip_folder); String[] files2 = dir2.list(); //Check if CEL files are allready there for (int i = 0; i < files2.length; i++) { String file = files2[i]; if (file.endsWith("chip")) { do_loop = false; try { Thread.sleep(5000); } catch (InterruptedException ex) { Logger.getLogger(getCleanData.class.getName()).log(Level.SEVERE, null, timestamp + ": " + ex); } } } } Logger.getLogger(getTicket.class.getName()).log(Level.INFO, timestamp + ": rscript has finished."); File dir2 = new File(zip_folder); String[] files2 = dir2.list(); String chip_file = ""; String chip_file_db = ""; ArrayList<String> unziped_files = new ArrayList<String>(); for (int i = 0; i < files2.length; i++) { String file = files2[i]; if (file.endsWith("CEL")) { unziped_files.add(file); } if (file.endsWith("chip")) { chip_file = file; chip_file_db = chip_file.split("_CDF_")[1]; File fileFile = new File(chip_file); fileFile.renameTo(new File(zip_folder + "/" + chip_file_db)); //Making the file correspond to the database entry. Duplicates can be safely overwritten, and will be. } } //Check if all CEL files are derived from the same chip. //This is essential for normalization. //initiate check hashmap. This map contains all the unique chip definition file names. There should be only one per analysis. ArrayList<StudySampleAssay> map = new ArrayList<StudySampleAssay>(); for (int i = 0; i < unziped_files.size(); i++) { String cel_file = unziped_files.get(i); StudySampleAssay ssa = new StudySampleAssay(); // Open the file that is the first // command line parameter //String cel_file_path = zip_folder + "/" + cel_file; String name = cel_file; ssa.setNameRawfile(name); ssa.setXREF(getCTD_REF()); map.add(ssa); } ticket.getStudySampleAssaies().addAll(map); session.saveOrUpdate(ticket); session.persist(ticket); tr.commit(); session.close(); //Storage chip definition file (CDF), creation gct file and database storage. SessionFactory sessionFactory1 = new Configuration().configure().buildSessionFactory(); Session session1 = sessionFactory1.openSession(); //check if cdf (chip definition file) is allready stored, if not, store it. List<ChipAnnotation> chip_annotation = null; Query q2 = session1.createQuery("from Chip Where Name='" + chip_file_db + "'"); if (q2.uniqueResult() != null) { Chip chip = (Chip) q2.list().get(0); chip_annotation = chip.getChipAnnotation(); } if (q2.uniqueResult() == null) { //Add this chip and its annotation Chip chip_new = new Chip(); chip_new.setName(chip_file_db); //read chip file String chip_file_path = zip_folder + "/" + chip_file; chip_annotation = readChip(chip_file_path); //Store the whole chip_new.getChipAnnotation().addAll(chip_annotation); Transaction tr1 = session1.beginTransaction(); session1.save(chip_new); session1.persist(chip_new); tr1.commit(); session1.close(); } //create the temp file for storage of the data_insert file. 
String data_file = zip_folder + "/expression.txt"; FileOutputStream out = null; PrintStream pr = null; out = new FileOutputStream(data_file); pr = new PrintStream(out); //create array data input file for the database table, find correct foreign keys. //get the study_sample_assay id and the probeset ids. SessionFactory sessionFactory2 = new Configuration().configure().buildSessionFactory(); Session session2 = sessionFactory2.openSession(); //Get the cip_annotation_id Query q3 = session2.createQuery("from Chip Where Name='" + chip_file_db + "'"); Chip chip = (Chip) q3.list().get(0); chip_annotation = chip.getChipAnnotation(); Iterator it2 = chip_annotation.iterator(); //for speed, put the chip annotation id in a hashmap HashMap<String, String> chip_annotation_ids = new HashMap<String, String>(); while (it2.hasNext()) { ChipAnnotation ca = (ChipAnnotation) it2.next(); String id = ca.getId().toString(); String ps = ca.getProbeset(); chip_annotation_ids.put(ps, id); } //Create the .gct-files try { Query qt = session2.createQuery("from Ticket where password='" + getPassword() + "' AND ctd_REF='" + getCTD_REF() + "'"); ticket = null; if (qt.list().size() != 0) { ticket = (Ticket) qt.list().get(0); } Iterator it3 = ticket.getStudySampleAssaies().iterator(); while (it3.hasNext()) { StudySampleAssay ssa = (StudySampleAssay) it3.next(); String name_raw_file = ssa.getNameRawfile(); String sampleToken = getSampletokens().get(name_raw_file); String ssa_id = ssa.getId().toString(); error_message = error_message + name_raw_file; String gct_file_generated = gct_file + ".gct"; ArrayList<Double> values = writeFile(pr, chip_annotation_ids, ssa_id, gct_file_generated, name_raw_file.replaceAll(".CEL", "")); Statistics stat = new Statistics(); stat.setData(values); Double average = stat.getAverage(); Double std = stat.getSTD(); ssa.setXREF(getCTD_REF()); ssa.setAverage(average); ssa.setStudyToken(getStudytoken()); ssa.setSampleToken(sampleToken); ssa.setStd(std); } } catch (IOException e) { Logger.getLogger(getTicket.class.getName()).log(Level.SEVERE, timestamp + ": ERROR IN getCleanData2: " + e.getMessage() + " " + e.getLocalizedMessage()); } pr.close(); out.close(); //update ticket Transaction tr2 = session2.beginTransaction(); session2.update(ticket); session2.persist(ticket); tr2.commit(); session2.close(); //import the data into the database String u = "--user=" + db_username; String passw = "--password=" + db_password; String[] commands = new String[] { "mysqlimport", u, passw, "--local", db_database, data_file }; Process p4 = Runtime.getRuntime().exec(commands); message = message + " RMA and GRSN on the CEL-files is done, data is stored."; //close the ticket when finished, normalization can only be performed once by the client. 
CloseTicket(); //Remove zip and data file (expression.txt) File fileFolderOld = new File(zip_folder); File fileFolderDest = new File(res.getString("ws.upload_folder") + getCTD_REF()); File[] listOfFiles = fileFolderOld.listFiles(); for (int i = 0; i < listOfFiles.length; i++) { if (listOfFiles[i].getPath().toLowerCase().endsWith(".zip") || listOfFiles[i].getPath().toLowerCase().endsWith("expression.txt")) { try { listOfFiles[i].delete(); } catch (Exception e) { Logger.getLogger(getTicket.class.getName()).log(Level.SEVERE, timestamp + ": ERROR IN getCleanData2 (try to delete): " + e.toString()); } } else { try { FileUtils.copyFileToDirectory(listOfFiles[i], fileFolderDest, false); listOfFiles[i].delete(); } catch (Exception e) { Logger.getLogger(getTicket.class.getName()).log(Level.SEVERE, timestamp + ": ERROR IN getCleanData2 (try to copy): " + e.toString()); } } } // Remove temporary folder try { fileFolderOld.delete(); } catch (Exception e) { Logger.getLogger(getTicket.class.getName()).log(Level.SEVERE, timestamp + ": ERROR IN getCleanData2: " + e.toString()); } // -------------------------------------------- // This piece of code is added in order to cleanup all the files // of aborted upload procedures. It checks for these old folders // (more than a day old and a temporaty name (which is just a number // from 1 upwards. It is assumed that a temporary folder has a // name shorter than 10 chars) and removes these files and folders File folderData = new File(res.getString("ws.upload_folder")); long lngTimestamp = new java.util.Date().getTime(); listOfFiles = folderData.listFiles(); for (int i = 0; i < listOfFiles.length; i++) { if (listOfFiles[i].lastModified() < (lngTimestamp - 10000) && listOfFiles[i].getName().length() < 10) { // This folder is more than a day old // We know it is a temporary folder because the name is less than 10 chars long File[] lstDelete = listOfFiles[i].listFiles(); for (int j = 0; j < lstDelete.length; j++) { // Delete all content of the old folder lstDelete[j].delete(); } // Delete the old folder if (!listOfFiles[i].delete()) { Logger.getLogger(getTicket.class.getName()).log(Level.SEVERE, "delSample(): Folder deletion failed: " + listOfFiles[i].getName()); } } } // -------------------------------------------- } // set the messages of the response result.setErrorMessage(error_message); result.setMessage(message); // Use SKARINGA in order to create the JSON response ObjectTransformer trans = null; try { trans = ObjectTransformerFactory.getInstance().getImplementation(); message = trans.serializeToString(result); } catch (NoImplementationException ex) { Logger.getLogger(getTicket.class.getName()).log(Level.SEVERE, "SKARINGA ERROR IN getCleanData2: " + ex.getLocalizedMessage()); } } catch (Exception e) { Logger.getLogger(getTicket.class.getName()).log(Level.SEVERE, timestamp + ": ERROR IN getCleanData2: " + e.toString()); } return message; }
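The FileFilter in the getCleanData2 example only weeds out subdirectories (accept returns pathname.isFile()); the actual selection of the uploaded archive happens afterwards by checking each name for "zip". If the goal is just to locate that archive, the two steps could be folded into the filter itself; a sketch under that assumption (zip_folder as in the example):

File dir = new File(zip_folder);
File[] zips = dir.listFiles(new FileFilter() {
    public boolean accept(File pathname) {
        // plain files whose name contains "zip", the same test the example's loop applies
        return pathname.isFile() && pathname.getName().contains("zip");
    }
});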
From source file:net.timewalker.ffmq4.storage.data.impl.BlockBasedDataStoreTools.java
/**
 * Find existing journal files for a given base name
 * @param baseName
 * @param dataFolder
 * @return an array of journal files
 */
public static File[] findJournalFiles(String baseName, File dataFolder) {
    final String journalBase = baseName + JournalFile.SUFFIX;
    File[] journalFiles = dataFolder.listFiles(new FileFilter() {
        /*
         * (non-Javadoc)
         * @see java.io.FileFilter#accept(java.io.File)
         */
        @Override
        public boolean accept(File pathname) {
            if (!pathname.isFile())
                return false;
            return pathname.getName().startsWith(journalBase)
                    && !pathname.getName().endsWith(JournalFile.RECYCLED_SUFFIX);
        }
    });
    // Sort them in ascending order
    Arrays.sort(journalFiles, new Comparator<File>() {
        @Override
        public int compare(File f1, File f2) {
            return f1.getName().compareTo(f2.getName());
        }
    });
    return journalFiles;
}
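A small follow-up on the sort step: on Java 8+ the anonymous Comparator above can be replaced by Comparator.comparing, which keeps the same ascending by-name order but reads more compactly. A sketch:

import java.util.Arrays;
import java.util.Comparator;

// equivalent to the anonymous Comparator in the example
Arrays.sort(journalFiles, Comparator.comparing(File::getName));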
From source file:ml.shifu.shifu.core.processor.ManageModelProcessor.java
/**
 * Switch to a different model
 *
 * @param modelName
 * @throws IOException
 */
private void switchModel(String modelName) throws IOException {
    // get current branch
    String currentModelName = null;
    try {
        currentModelName = getCurrentModelName();
    } catch (IOException e) {
        log.info("Could not get the current model name");
        currentModelName = "master";
    }
    // log.info("The current model will backup to {} folder", currentModelName);
    // first, backup to currentModelName
    saveModel(currentModelName);

    // is it new?
    File thisModel = new File(Constants.BACKUPNAME + File.separator + modelName);
    if (!thisModel.exists()) {
        // does not exist yet
    } else {
        // exists: copy config files
        File modelFile = new File(String.format("%s/%s/ModelConfig.json", Constants.BACKUPNAME, modelName));
        File columnFile = new File(String.format("%s/%s/ModelConfig.json", Constants.BACKUPNAME, modelName));
        File workspace = new File("./");
        try {
            FileUtils.copyFileToDirectory(modelFile, workspace);
            if (columnFile.exists()) {
                FileUtils.copyFileToDirectory(columnFile, workspace);
            }
        } catch (IOException e) {
            // TODO
            e.printStackTrace();
        }
        // copy models
        File sourceModelFolder = new File(String.format("./%s/%s/models/", Constants.BACKUPNAME, modelName));
        File workspaceFolder = new File("./models");
        if (sourceModelFolder.isDirectory()) {
            File[] files = sourceModelFolder.listFiles(new FileFilter() {
                @Override
                public boolean accept(File file) {
                    return file.isFile() && file.getName().startsWith("model");
                }
            });
            if (files != null) {
                for (File model : files) {
                    try {
                        FileUtils.copyFileToDirectory(model, workspaceFolder);
                    } catch (IOException e) {
                        log.info("Fail to copy models file");
                    }
                }
            } else {
                throw new IOException(
                        String.format("Failed to list files in %s", sourceModelFolder.getAbsolutePath()));
            }
        } else {
            log.error("{} does not exist or is not a directory!", sourceModelFolder.getAbsoluteFile());
        }
    }

    File file = new File("./.HEAD");
    BufferedWriter writer = null;
    try {
        FileUtils.forceDelete(file);
        writer = new BufferedWriter(
                new OutputStreamWriter(new FileOutputStream(file), Constants.DEFAULT_CHARSET));
        writer.write(modelName);
    } catch (IOException e) {
        log.info("Fail to rewrite HEAD file");
    } finally {
        if (writer != null) {
            writer.close();
        }
    }
    log.info("Switch model: {} successfully", modelName);
}
From source file:org.apache.carbondata.sdk.file.CSVNonTransactionalCarbonWriterTest.java
/**
 * Invoke CarbonWriter API to write carbon files and assert the file is rewritten
 * @param rows number of rows to write
 * @param schema schema of the file
 * @param path local write path
 * @param sortColumns sort columns
 * @param persistSchema true if want to persist schema file
 * @param blockletSize blockletSize in the file, -1 for default size
 * @param blockSize blockSize in the file, -1 for default size
 */
private void writeFilesAndVerify(int rows, Schema schema, String path, String[] sortColumns,
        boolean persistSchema, int blockletSize, int blockSize) {
    try {
        CarbonWriterBuilder builder = CarbonWriter.builder().isTransactionalTable(false)
                .uniqueIdentifier(System.currentTimeMillis()).taskNo(System.nanoTime()).outputPath(path);
        if (sortColumns != null) {
            builder = builder.sortBy(sortColumns);
        }
        if (persistSchema) {
            builder = builder.persistSchemaFile(true);
        }
        if (blockletSize != -1) {
            builder = builder.withBlockletSize(blockletSize);
        }
        if (blockSize != -1) {
            builder = builder.withBlockSize(blockSize);
        }
        CarbonWriter writer = builder.buildWriterForCSVInput(schema);
        for (int i = 0; i < rows; i++) {
            writer.write(new String[] { "robot" + (i % 10), String.valueOf(i), String.valueOf((double) i / 2) });
        }
        writer.close();
    } catch (IOException e) {
        e.printStackTrace();
        Assert.fail(e.getMessage());
    } catch (InvalidLoadOptionException l) {
        l.printStackTrace();
        Assert.fail(l.getMessage());
    }

    File segmentFolder = new File(path);
    Assert.assertTrue(segmentFolder.exists());

    File[] dataFiles = segmentFolder.listFiles(new FileFilter() {
        @Override
        public boolean accept(File pathname) {
            return pathname.getName().endsWith(CarbonCommonConstants.FACT_FILE_EXT);
        }
    });
    Assert.assertNotNull(dataFiles);
    Assert.assertTrue(dataFiles.length > 0);
}
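Since this filter only inspects the file name, a java.io.FilenameFilter would work equally well; File.listFiles is overloaded for both filter types. A sketch of the equivalent check (same segmentFolder and FACT_FILE_EXT constant as above):

File[] dataFiles = segmentFolder.listFiles(new FilenameFilter() {
    @Override
    public boolean accept(File dir, String name) {
        // the name alone is enough to recognise a carbondata fact file
        return name.endsWith(CarbonCommonConstants.FACT_FILE_EXT);
    }
});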
From source file:uk.org.openeyes.diagnostics.AbstractFieldProcessor.java
/**
 *
 */
protected void checkDir() {
    // get file list - all XML files
    File[] files = this.dir.listFiles(new FileFilter() {
        public boolean accept(File pathname) {
            return pathname.getName().toLowerCase().endsWith(".xml");
        }
    });
    for (File file : files) {
        System.out.println("processing " + file.getName());
        this.processFile(file);
    }
}
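One caveat with this example: File.listFiles returns null when the directory does not exist, is not a directory, or cannot be read, so the for-each loop would throw a NullPointerException in that case. A defensive variant might guard the loop; a sketch:

File[] files = this.dir.listFiles(new FileFilter() {
    public boolean accept(File pathname) {
        return pathname.getName().toLowerCase().endsWith(".xml");
    }
});
if (files == null) {
    // directory missing or unreadable; nothing to process
    return;
}
for (File file : files) {
    this.processFile(file);
}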
From source file:com.googlecode.fascinator.portal.services.impl.PortalManagerImpl.java
private void loadPortals() {
    File[] portalDirs = portalsDir.listFiles(new FileFilter() {
        @Override
        public boolean accept(File file) {
            String name = file.getName();
            return file.isDirectory() && !name.equals(".svn");
        }
    });
    for (File dir : portalDirs) {
        loadPortal(dir.getName());
    }
}
From source file:com.snp.site.init.SystemInit.java
public static File[] get_video_filelist(File file) {
    FileFilter fileFilter = new FileFilter() {
        public boolean accept(File file) {
            // lower-cased file name, so the extension check is case-insensitive
            String name = file.getName().toLowerCase();
            // supported video extensions, configured as a "|"-separated list
            String[] format = StringUtils.split(video_support_format, "|");
            for (int i = 0; i < format.length; i++) {
                if (name.endsWith(format[i])) {
                    return true;
                }
            }
            return false;
        }
    };
    return file.listFiles(fileFilter);
}
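Splitting video_support_format on every accept call repeats the same work for each directory entry. If the format list is fixed for the lifetime of the filter, it can be split once up front; a sketch under that assumption (StringUtils and video_support_format as in the example):

final String[] formats = StringUtils.split(video_support_format, "|");
FileFilter videoFilter = new FileFilter() {
    public boolean accept(File f) {
        String name = f.getName().toLowerCase();
        for (String ext : formats) {
            // same suffix test as the example, against the pre-split format list
            if (name.endsWith(ext)) {
                return true;
            }
        }
        return false;
    }
};
return file.listFiles(videoFilter);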