List of usage examples for java.io.File.isDirectory()
public boolean isDirectory()
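Before the full examples below, a minimal, self-contained sketch of the call itself (the path used here is only an illustration): isDirectory() returns true only when the abstract pathname both exists and denotes a directory, so it is usually paired with exists() and isFile() when validating user-supplied paths.

import java.io.File;

public class IsDirectoryDemo {
    public static void main(String[] args) {
        // Hypothetical path purely for illustration.
        File candidate = new File(args.length > 0 ? args[0] : "/tmp");

        if (!candidate.exists()) {
            System.out.println(candidate + " does not exist");
        } else if (candidate.isDirectory()) {
            // Safe to list children only once we know it is a directory.
            String[] children = candidate.list();
            System.out.println(candidate + " is a directory with "
                    + (children == null ? 0 : children.length) + " entries");
        } else {
            System.out.println(candidate + " is a regular file (or other non-directory)");
        }
    }
}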
From source file:org.waarp.common.filemonitor.FileMonitor.java
public static void main(String[] args) {
    if (args.length < 3) {
        System.err.println("Need a statusfile, a stopfile and a directory to test");
        return;
    }
    File file = new File(args[0]);
    if (file.exists() && !file.isFile()) {
        System.err.println("Not a correct status file");
        return;
    }
    File stopfile = new File(args[1]);
    if (stopfile.exists() && !stopfile.isFile()) {
        System.err.println("Not a correct stop file");
        return;
    }
    File dir = new File(args[2]);
    if (!dir.isDirectory()) {
        System.err.println("Not a directory");
        return;
    }
    FileMonitorCommandRunnableFuture filemonitor = new FileMonitorCommandRunnableFuture() {
        public void run(FileItem file) {
            System.out.println("File New: " + file.file.getAbsolutePath());
            finalize(true, 0);
        }
    };
    FileMonitor monitor = new FileMonitor("test", file, stopfile, dir, null, 0,
            new RegexFileFilter(RegexFileFilter.REGEX_XML_EXTENSION), false, filemonitor,
            new FileMonitorCommandRunnableFuture() {
                public void run(FileItem file) {
                    System.err.println("File Del: " + file.file.getAbsolutePath());
                }
            }, new FileMonitorCommandRunnableFuture() {
                public void run(FileItem unused) {
                    System.err.println("Check done");
                }
            });
    filemonitor.setMonitor(monitor);
    monitor.start();
    monitor.waitForStopFile();
}
From source file:com.act.lcms.db.analysis.PathwayProductAnalysis.java
public static void main(String[] args) throws Exception { Options opts = new Options(); for (Option.Builder b : OPTION_BUILDERS) { opts.addOption(b.build());//from w w w.j a v a 2s. c om } CommandLine cl = null; try { CommandLineParser parser = new DefaultParser(); cl = parser.parse(opts, args); } catch (ParseException e) { System.err.format("Argument parsing failed: %s\n", e.getMessage()); HELP_FORMATTER.printHelp(LoadPlateCompositionIntoDB.class.getCanonicalName(), HELP_MESSAGE, opts, null, true); System.exit(1); } if (cl.hasOption("help")) { HELP_FORMATTER.printHelp(LoadPlateCompositionIntoDB.class.getCanonicalName(), HELP_MESSAGE, opts, null, true); return; } File lcmsDir = new File(cl.getOptionValue(OPTION_DIRECTORY)); if (!lcmsDir.isDirectory()) { System.err.format("File at %s is not a directory\n", lcmsDir.getAbsolutePath()); HELP_FORMATTER.printHelp(LoadPlateCompositionIntoDB.class.getCanonicalName(), HELP_MESSAGE, opts, null, true); System.exit(1); } Double fontScale = null; if (cl.hasOption("font-scale")) { try { fontScale = Double.parseDouble(cl.getOptionValue("font-scale")); } catch (IllegalArgumentException e) { System.err.format("Argument for font-scale must be a floating point number.\n"); System.exit(1); } } try (DB db = DB.openDBFromCLI(cl)) { Set<Integer> takeSamplesFromPlateIds = null; if (cl.hasOption(OPTION_FILTER_BY_PLATE_BARCODE)) { String[] plateBarcodes = cl.getOptionValues(OPTION_FILTER_BY_PLATE_BARCODE); System.out.format("Considering only sample wells in plates: %s\n", StringUtils.join(plateBarcodes, ", ")); takeSamplesFromPlateIds = new HashSet<>(plateBarcodes.length); for (String plateBarcode : plateBarcodes) { Plate p = Plate.getPlateByBarcode(db, plateBarcode); if (p == null) { System.err.format("WARNING: unable to find plate in DB with barcode %s\n", plateBarcode); } else { takeSamplesFromPlateIds.add(p.getId()); } } // Allow filtering on barcode even if we couldn't find any in the DB. } System.out.format("Loading/updating LCMS scan files into DB\n"); ScanFile.insertOrUpdateScanFilesInDirectory(db, lcmsDir); System.out.format("Processing LCMS scans\n"); Pair<List<LCMSWell>, Set<Integer>> positiveWellsAndPlateIds = Utils.extractWellsAndPlateIds(db, cl.getOptionValues(OPTION_STRAINS), cl.getOptionValues(OPTION_CONSTRUCT), takeSamplesFromPlateIds, false); List<LCMSWell> positiveWells = positiveWellsAndPlateIds.getLeft(); if (positiveWells.size() == 0) { throw new RuntimeException("Found no LCMS wells for specified strains/constructs"); } // Only take negative samples from the plates where we found the positive samples. 
Pair<List<LCMSWell>, Set<Integer>> negativeWellsAndPlateIds = Utils.extractWellsAndPlateIds(db, cl.getOptionValues(OPTION_NEGATIVE_STRAINS), cl.getOptionValues(OPTION_NEGATIVE_CONSTRUCTS), positiveWellsAndPlateIds.getRight(), true); List<LCMSWell> negativeWells = negativeWellsAndPlateIds.getLeft(); if (negativeWells == null || negativeWells.size() == 0) { System.err.format("WARNING: no valid negative samples found in same plates as positive samples\n"); } // Extract the chemicals in the pathway and their product masses, then look up info on those chemicals List<Pair<ChemicalAssociatedWithPathway, Double>> productMasses = Utils .extractMassesForChemicalsAssociatedWithConstruct(db, cl.getOptionValue(OPTION_CONSTRUCT)); List<Pair<String, Double>> searchMZs = new ArrayList<>(productMasses.size()); List<ChemicalAssociatedWithPathway> pathwayChems = new ArrayList<>(productMasses.size()); for (Pair<ChemicalAssociatedWithPathway, Double> productMass : productMasses) { String chemName = productMass.getLeft().getChemical(); searchMZs.add(Pair.of(chemName, productMass.getRight())); pathwayChems.add(productMass.getLeft()); } System.out.format("Searching for intermediate/side-reaction products:\n"); for (Pair<String, Double> searchMZ : searchMZs) { System.out.format(" %s: %.3f\n", searchMZ.getLeft(), searchMZ.getRight()); } // Look up the standard by name. List<StandardWell> standardWells = new ArrayList<>(); if (cl.hasOption(OPTION_STANDARD_WELLS)) { Plate standardPlate = Plate.getPlateByBarcode(db, cl.getOptionValue(OPTION_STANDARD_PLATE_BARCODE)); Map<Integer, StandardWell> pathwayIdToStandardWell = extractStandardWellsFromOptionsList(db, pathwayChems, cl.getOptionValues(OPTION_STANDARD_WELLS), standardPlate); for (ChemicalAssociatedWithPathway c : pathwayChems) { // TODO: we can avoid this loop. StandardWell well = pathwayIdToStandardWell.get(c.getId()); if (well != null) { standardWells.add(well); } } } else { for (ChemicalAssociatedWithPathway c : pathwayChems) { String standardName = c.getChemical(); System.out.format("Searching for well containing standard %s\n", standardName); List<StandardWell> wells = StandardIonAnalysis.getStandardWellsForChemical(db, c.getChemical()); if (wells != null) { standardWells.addAll(wells); } } } boolean useFineGrainedMZ = cl.hasOption("fine-grained-mz"); boolean useSNR = cl.hasOption(OPTION_USE_SNR); /* Process the standard, positive, and negative wells, producing ScanData containers that will allow them to be * iterated over for graph writing. We do not need to specify granular includeIons and excludeIons since * this would not take advantage of our caching strategy which uses a list of metlin ions as an index. */ HashMap<Integer, Plate> plateCache = new HashMap<>(); Pair<List<ScanData<StandardWell>>, Double> allStandardScans = AnalysisHelper.processScans(db, lcmsDir, searchMZs, ScanData.KIND.STANDARD, plateCache, standardWells, useFineGrainedMZ, EMPTY_SET, EMPTY_SET, useSNR); Pair<List<ScanData<LCMSWell>>, Double> allPositiveScans = AnalysisHelper.processScans(db, lcmsDir, searchMZs, ScanData.KIND.POS_SAMPLE, plateCache, positiveWells, useFineGrainedMZ, EMPTY_SET, EMPTY_SET, useSNR); Pair<List<ScanData<LCMSWell>>, Double> allNegativeScans = AnalysisHelper.processScans(db, lcmsDir, searchMZs, ScanData.KIND.NEG_CONTROL, plateCache, negativeWells, useFineGrainedMZ, EMPTY_SET, EMPTY_SET, useSNR); String fmt = "pdf"; String outImg = cl.getOptionValue(OPTION_OUTPUT_PREFIX) + "." 
+ fmt; String outData = cl.getOptionValue(OPTION_OUTPUT_PREFIX) + ".data"; String outAnalysis = cl.getOptionValue(OPTION_OUTPUT_PREFIX) + ".tsv"; System.err.format("Writing combined scan data to %s and graphs to %s\n", outData, outImg); String plottingDirectory = cl.getOptionValue(OPTION_PLOTTING_DIR); List<ScanData<LCMSWell>> posNegWells = new ArrayList<>(); posNegWells.addAll(allPositiveScans.getLeft()); posNegWells.addAll(allNegativeScans.getLeft()); Map<Integer, String> searchIons; if (cl.hasOption(OPTION_PATHWAY_SEARCH_IONS)) { searchIons = extractPathwayStepIons(pathwayChems, cl.getOptionValues(OPTION_PATHWAY_SEARCH_IONS), cl.getOptionValue(OPTION_SEARCH_ION, "M+H")); /* This is pretty lazy, but works with the existing API. Extract all selected ions for all search masses when * performing the scan, then filter down to the desired ions for the plot at the end. * TODO: specify the masses and scans per sample rather than batching everything together. It might be slower, * but it'll be clearer to read. */ } else { // We need to make sure that the standard metlin ion we choose is consistent with the ion modes of // the given positive, negative and standard scan files. For example, we should not pick a negative // metlin ion if all our available positive control scan files are in the positive ion mode. Map<Integer, Pair<Boolean, Boolean>> ionModes = new HashMap<>(); for (ChemicalAssociatedWithPathway chemical : pathwayChems) { boolean isPositiveScanPresent = false; boolean isNegativeScanPresent = false; for (ScanData<StandardWell> scan : allStandardScans.getLeft()) { if (chemical.getChemical().equals(scan.getWell().getChemical()) && chemical.getChemical().equals(scan.getTargetChemicalName())) { if (MS1.IonMode.valueOf( scan.getScanFile().getMode().toString().toUpperCase()) == MS1.IonMode.POS) { isPositiveScanPresent = true; } if (MS1.IonMode.valueOf( scan.getScanFile().getMode().toString().toUpperCase()) == MS1.IonMode.NEG) { isNegativeScanPresent = true; } } } for (ScanData<LCMSWell> scan : posNegWells) { if (chemical.getChemical().equals(scan.getWell().getChemical()) && chemical.getChemical().equals(scan.getTargetChemicalName())) { if (MS1.IonMode.valueOf( scan.getScanFile().getMode().toString().toUpperCase()) == MS1.IonMode.POS) { isPositiveScanPresent = true; } if (MS1.IonMode.valueOf( scan.getScanFile().getMode().toString().toUpperCase()) == MS1.IonMode.NEG) { isNegativeScanPresent = true; } } } ionModes.put(chemical.getId(), Pair.of(isPositiveScanPresent, isNegativeScanPresent)); } // Sort in descending order of media where MeOH and Water related media are promoted to the top and // anything derived from yeast media are demoted. We do this because we want to first process the water // and meoh media before processing the yeast media since the yeast media depends on the analysis of the former. Collections.sort(standardWells, new Comparator<StandardWell>() { @Override public int compare(StandardWell o1, StandardWell o2) { if (StandardWell.doesMediaContainYeastExtract(o1.getMedia()) && !StandardWell.doesMediaContainYeastExtract(o2.getMedia())) { return 1; } else { return 0; } } }); searchIons = extractPathwayStepIonsFromStandardIonAnalysis(pathwayChems, lcmsDir, db, standardWells, plottingDirectory, ionModes); } produceLCMSPathwayHeatmaps(lcmsDir, outData, outImg, outAnalysis, pathwayChems, allStandardScans, allPositiveScans, allNegativeScans, fontScale, cl.hasOption(OPTION_USE_HEATMAP), searchIons); } }
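The isDirectory() usage buried in the example above reduces to validating a command-line directory option before any scan files are loaded. A minimal sketch of that pattern, assuming a hypothetical -d/--directory option parsed with Commons CLI (the option name and message are illustrative, not the original constants):

import java.io.File;
import org.apache.commons.cli.*;

public class DirectoryOptionCheck {
    public static void main(String[] args) throws ParseException {
        // Hypothetical option, standing in for OPTION_DIRECTORY in the example above.
        Options opts = new Options();
        opts.addOption(Option.builder("d").longOpt("directory").hasArg()
                .desc("directory containing LCMS scan files").required().build());

        CommandLine cl = new DefaultParser().parse(opts, args);

        File scanDir = new File(cl.getOptionValue("d"));
        if (!scanDir.isDirectory()) {
            // Covers both "does not exist" and "exists but is a plain file".
            System.err.format("File at %s is not a directory%n", scanDir.getAbsolutePath());
            System.exit(1);
        }
        System.out.format("Processing scan files in %s%n", scanDir.getAbsolutePath());
    }
}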
From source file:de.prozesskraft.pkraft.Clone.java
public static void main(String[] args) throws org.apache.commons.cli.ParseException, IOException {
    /*----------------------------
      get options from ini-file
    ----------------------------*/
    java.io.File inifile = new java.io.File(
            WhereAmI.getInstallDirectoryAbsolutePath(Clone.class) + "/" + "../etc/pkraft-clone.ini");

    if (inifile.exists()) {
        try {
            ini = new Ini(inifile);
        } catch (InvalidFileFormatException e1) {
            // TODO Auto-generated catch block
            e1.printStackTrace();
        } catch (IOException e1) {
            // TODO Auto-generated catch block
            e1.printStackTrace();
        }
    } else {
        System.err.println("ini file does not exist: " + inifile.getAbsolutePath());
        System.exit(1);
    }

    /*----------------------------
      create boolean options
    ----------------------------*/
    Option ohelp = new Option("help", "print this message");
    Option ov = new Option("v", "prints version and build-date");

    /*----------------------------
      create argument options
    ----------------------------*/
    Option oinstance = OptionBuilder.withArgName("File").hasArg()
            .withDescription("[mandatory] process you want to clone.")
            // .isRequired()
            .create("instance");
    Option obasedir = OptionBuilder.withArgName("DIR").hasArg().withDescription(
            "[optional, default: <basedirOfInstance>] base directory you want to place the root directory of the clone. this directory must exist at call time.")
            // .isRequired()
            .create("basedir");

    /*----------------------------
      create options object
    ----------------------------*/
    Options options = new Options();
    options.addOption(ohelp);
    options.addOption(ov);
    options.addOption(oinstance);
    options.addOption(obasedir);

    /*----------------------------
      create the parser
    ----------------------------*/
    CommandLineParser parser = new GnuParser();

    // parse the command line arguments
    commandline = parser.parse(options, args);

    /*----------------------------
      usage/help
    ----------------------------*/
    if (commandline.hasOption("help")) {
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("clone", options);
        System.exit(0);
    }

    if (commandline.hasOption("v")) {
        System.out.println("author: alexander.vogel@prozesskraft.de");
        System.out.println("version: [% version %]");
        System.out.println("date: [% date %]");
        System.exit(0);
    }

    /*----------------------------
      check whether a bad combination of parameters was given
    ----------------------------*/
    if (!(commandline.hasOption("instance"))) {
        System.err.println("option -instance is mandatory");
        exiter();
    }

    /*----------------------------
      check the license and abort if it is not valid
    ----------------------------*/
    // check for valid license
    ArrayList<String> allPortAtHost = new ArrayList<String>();
    allPortAtHost.add(ini.get("license-server", "license-server-1"));
    allPortAtHost.add(ini.get("license-server", "license-server-2"));
    allPortAtHost.add(ini.get("license-server", "license-server-3"));

    MyLicense lic = new MyLicense(allPortAtHost, "1", "user-edition", "0.1");

    // print the license log
    for (String actLine : (ArrayList<String>) lic.getLog()) {
        System.err.println(actLine);
    }

    // abort if the license is not valid
    if (!lic.isValid()) {
        System.exit(1);
    }

    /*----------------------------
      the actual business logic
    ----------------------------*/
    String pathToInstance = commandline.getOptionValue("instance");
    java.io.File fileInstance = new java.io.File(pathToInstance);
    java.io.File fileBaseDir = null;

    // if it does not exist, abort with an error message
    if (!fileInstance.exists()) {
        System.err.println("instance file does not exist.");
        exiter();
    }

    // check the basedir option, if one was provided
    if (commandline.hasOption("basedir")) {
        fileBaseDir = new java.io.File(commandline.getOptionValue("basedir"));
        if (!fileBaseDir.exists()) {
            System.err.println("error: -basedir: directory does not exist");
            exiter();
        }
        if (!fileBaseDir.isDirectory()) {
            System.err.println("error: -basedir: is not a directory");
            exiter();
        }
    }

    // read the main process again so that subprocesses can be extracted
    Process p1 = new Process();
    p1.setInfilebinary(pathToInstance);
    Process process = p1.readBinary();

    // set directories, if provided
    if (fileBaseDir != null) {
        process.setBaseDir(fileBaseDir.getCanonicalPath());
    }

    // clone the main process via the static function
    Process clonedProcess = cloneProcess(process, null);

    // walk over all steps and, where subprocesses exist, clone them as well
    for (Step actStep : process.getStep()) {
        if (actStep.getSubprocess() != null) {
            Process pDummy = new Process();
            pDummy.setInfilebinary(actStep.getAbsdir() + "/process.pmb");
            Process processInSubprocess = pDummy.readBinary();
            // System.err.println("info: reading process freshly from file: " + actStep.getAbsdir() + "/process.pmb");
            if (processInSubprocess != null) {
                cloneProcess(processInSubprocess, clonedProcess);
            }
        }
    }
}
From source file:DIA_Umpire_Quant.DIA_Umpire_LCMSIDGen.java
/** * @param args the command line arguments *///from ww w . j ava 2 s . c om public static void main(String[] args) throws FileNotFoundException, IOException, Exception { System.out.println( "================================================================================================="); System.out.println("DIA-Umpire LCMSID geneartor (version: " + UmpireInfo.GetInstance().Version + ")"); if (args.length != 1) { System.out.println( "command format error, the correct format should be: java -jar -Xmx10G DIA_Umpire_LCMSIDGen.jar diaumpire_module.params"); return; } try { ConsoleLogger.SetConsoleLogger(Level.INFO); ConsoleLogger.SetFileLogger(Level.DEBUG, FilenameUtils.getFullPath(args[0]) + "diaumpire_lcmsidgen.log"); } catch (Exception e) { } Logger.getRootLogger().info("Version: " + UmpireInfo.GetInstance().Version); Logger.getRootLogger().info("Parameter file:" + args[0]); BufferedReader reader = new BufferedReader(new FileReader(args[0])); String line = ""; String WorkFolder = ""; int NoCPUs = 2; TandemParam tandemPara = new TandemParam(DBSearchParam.SearchInstrumentType.TOF5600); HashMap<String, File> AssignFiles = new HashMap<>(); //<editor-fold defaultstate="collapsed" desc="Reading parameter file"> while ((line = reader.readLine()) != null) { line = line.trim(); Logger.getRootLogger().info(line); if (!"".equals(line) && !line.startsWith("#")) { //System.out.println(line); if (line.equals("==File list begin")) { do { line = reader.readLine(); line = line.trim(); if (line.equals("==File list end")) { continue; } else if (!"".equals(line)) { File newfile = new File(line); if (newfile.exists()) { AssignFiles.put(newfile.getAbsolutePath(), newfile); } else { Logger.getRootLogger().info("File: " + newfile + " does not exist."); } } } while (!line.equals("==File list end")); } if (line.split("=").length < 2) { continue; } String type = line.split("=")[0].trim(); String value = line.split("=")[1].trim(); switch (type) { case "Path": { WorkFolder = value; break; } case "path": { WorkFolder = value; break; } case "Thread": { NoCPUs = Integer.parseInt(value); break; } case "DecoyPrefix": { if (!"".equals(value)) { tandemPara.DecoyPrefix = value; } break; } case "PeptideFDR": { tandemPara.PepFDR = Float.parseFloat(value); break; } } } } //</editor-fold> //Initialize PTM manager using compomics library PTMManager.GetInstance(); //Generate DIA file list ArrayList<DIAPack> FileList = new ArrayList<>(); File folder = new File(WorkFolder); if (!folder.exists()) { Logger.getRootLogger().info("The path : " + WorkFolder + " cannot be found."); System.exit(1); } for (final File fileEntry : folder.listFiles()) { if (fileEntry.isFile() && (fileEntry.getAbsolutePath().toLowerCase().endsWith(".mzxml") | fileEntry.getAbsolutePath().toLowerCase().endsWith(".mzml")) && !fileEntry.getAbsolutePath().toLowerCase().endsWith("q1.mzxml") && !fileEntry.getAbsolutePath().toLowerCase().endsWith("q2.mzxml") && !fileEntry.getAbsolutePath().toLowerCase().endsWith("q3.mzxml")) { AssignFiles.put(fileEntry.getAbsolutePath(), fileEntry); } if (fileEntry.isDirectory()) { for (final File fileEntry2 : fileEntry.listFiles()) { if (fileEntry2.isFile() && (fileEntry2.getAbsolutePath().toLowerCase().endsWith(".mzxml") | fileEntry2.getAbsolutePath().toLowerCase().endsWith(".mzml")) && !fileEntry2.getAbsolutePath().toLowerCase().endsWith("q1.mzxml") && !fileEntry2.getAbsolutePath().toLowerCase().endsWith("q2.mzxml") && !fileEntry2.getAbsolutePath().toLowerCase().endsWith("q3.mzxml")) { AssignFiles.put(fileEntry2.getAbsolutePath(), 
fileEntry2); } } } } Logger.getRootLogger().info("No. of files assigned :" + AssignFiles.size()); for (File fileEntry : AssignFiles.values()) { Logger.getRootLogger().info(fileEntry.getAbsolutePath()); } //process each DIA file to genearate untargeted identifications for (File fileEntry : AssignFiles.values()) { String mzXMLFile = fileEntry.getAbsolutePath(); if (mzXMLFile.toLowerCase().endsWith(".mzxml") | mzXMLFile.toLowerCase().endsWith(".mzml")) { long time = System.currentTimeMillis(); DIAPack DiaFile = new DIAPack(mzXMLFile, NoCPUs); FileList.add(DiaFile); Logger.getRootLogger().info( "================================================================================================="); Logger.getRootLogger().info("Processing " + mzXMLFile); if (!DiaFile.LoadDIASetting()) { Logger.getRootLogger().info("Loading DIA setting failed, job is incomplete"); System.exit(1); } if (!DiaFile.LoadParams()) { Logger.getRootLogger().info("Loading parameters failed, job is incomplete"); System.exit(1); } Logger.getRootLogger().info("Loading identification results " + mzXMLFile + "...."); DiaFile.ParsePepXML(tandemPara, null); DiaFile.BuildStructure(); if (!DiaFile.MS1FeatureMap.ReadPeakCluster()) { Logger.getRootLogger().info("Loading peak and structure failed, job is incomplete"); System.exit(1); } DiaFile.MS1FeatureMap.ClearMonoisotopicPeakOfCluster(); //Generate mapping between index of precursor feature and pseudo MS/MS scan index DiaFile.GenerateClusterScanNomapping(); //Doing quantification DiaFile.AssignQuant(); DiaFile.ClearStructure(); DiaFile.IDsummary.ReduceMemoryUsage(); time = System.currentTimeMillis() - time; Logger.getRootLogger().info(mzXMLFile + " processed time:" + String.format("%d hour, %d min, %d sec", TimeUnit.MILLISECONDS.toHours(time), TimeUnit.MILLISECONDS.toMinutes(time) - TimeUnit.HOURS.toMinutes(TimeUnit.MILLISECONDS.toHours(time)), TimeUnit.MILLISECONDS.toSeconds(time) - TimeUnit.MINUTES.toSeconds(TimeUnit.MILLISECONDS.toMinutes(time)))); } Logger.getRootLogger().info("Job done"); Logger.getRootLogger().info( "================================================================================================="); } }
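The directory walk in the example above descends exactly one level: listFiles() entries that pass isFile() plus an extension check are collected, and entries that pass isDirectory() get the same treatment applied to their own children. A minimal sketch of that pattern, with the .mzxml/.mzml filtering simplified:

import java.io.File;
import java.util.ArrayList;
import java.util.List;

public class OneLevelScan {
    static boolean wanted(File f) {
        String name = f.getName().toLowerCase();
        return f.isFile() && (name.endsWith(".mzxml") || name.endsWith(".mzml"));
    }

    public static void main(String[] args) {
        File folder = new File(args.length > 0 ? args[0] : ".");
        List<File> assigned = new ArrayList<>();

        File[] entries = folder.listFiles();
        if (entries == null) {
            System.err.println(folder + " cannot be read or is not a directory");
            return;
        }
        for (File entry : entries) {
            if (wanted(entry)) {
                assigned.add(entry);
            } else if (entry.isDirectory()) {
                // One level of recursion only, matching the example above.
                File[] children = entry.listFiles();
                if (children != null) {
                    for (File child : children) {
                        if (wanted(child)) {
                            assigned.add(child);
                        }
                    }
                }
            }
        }
        System.out.println("No. of files assigned: " + assigned.size());
    }
}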
From source file:edu.harvard.hul.ois.drs.pdfaconvert.PdfaConvert.java
public static void main(String[] args) throws IOException {
    if (logger == null) {
        System.out.println("About to initialize Log4j");
        logger = LogManager.getLogger();
        System.out.println("Finished initializing Log4j");
    }

    logger.debug("Entering main()");

    // WIP: the following command line code was pulled from FITS
    Options options = new Options();
    Option inputFileOption = new Option(PARAM_I, true, "input file");
    options.addOption(inputFileOption);
    options.addOption(PARAM_V, false, "print version information");
    options.addOption(PARAM_H, false, "help information");
    options.addOption(PARAM_O, true, "output sub-directory");

    CommandLineParser parser = new DefaultParser();
    CommandLine cmd = null;
    try {
        cmd = parser.parse(options, args, true);
    } catch (ParseException e) {
        System.err.println(e.getMessage());
        System.exit(1);
    }

    // print version info
    if (cmd.hasOption(PARAM_V)) {
        if (StringUtils.isEmpty(applicationVersion)) {
            applicationVersion = "<not set>";
            System.exit(1);
        }
        System.out.println("Version: " + applicationVersion);
        System.exit(0);
    }

    // print help info
    if (cmd.hasOption(PARAM_H)) {
        displayHelp();
        System.exit(0);
    }

    // input parameter
    if (cmd.hasOption(PARAM_I)) {
        String input = cmd.getOptionValue(PARAM_I);
        boolean hasValue = cmd.hasOption(PARAM_I);
        logger.debug("Has option {} value: [{}]", PARAM_I, hasValue);
        String paramVal = cmd.getOptionValue(PARAM_I);
        logger.debug("value of option: [{}] ****", paramVal);

        File inputFile = new File(input);
        if (!inputFile.exists()) {
            logger.warn("{} does not exist or is not readable.", input);
            System.exit(1);
        }

        String subDir = cmd.getOptionValue(PARAM_O);
        PdfaConvert convert;
        if (!StringUtils.isEmpty(subDir)) {
            convert = new PdfaConvert(subDir);
        } else {
            convert = new PdfaConvert();
        }

        if (inputFile.isDirectory()) {
            if (inputFile.listFiles() == null || inputFile.listFiles().length < 1) {
                logger.warn("Input directory is empty, nothing to process.");
                System.exit(1);
            } else {
                logger.debug("Have directory: [{}] with file count: {}", inputFile.getAbsolutePath(),
                        inputFile.listFiles().length);
                DirectoryStream<Path> dirStream = null;
                dirStream = Files.newDirectoryStream(inputFile.toPath());
                for (Path filePath : dirStream) {
                    logger.debug("Have file name: {}", filePath.toString());
                    // Note: only handling files, not recursively going into sub-directories
                    if (filePath.toFile().isFile()) {
                        // Catch possible exception for each file so can handle other files in directory.
                        try {
                            convert.examine(filePath.toFile());
                        } catch (Exception e) {
                            logger.error("Problem processing file: {} -- Error message: {}",
                                    filePath.getFileName(), e.getMessage());
                        }
                    } else {
                        logger.warn("Not a file so not processing: {}", filePath.toString());
                        // could be a directory but not recursing
                    }
                }
                dirStream.close();
            }
        } else {
            logger.debug("About to process file: {}", inputFile.getPath());
            try {
                convert.examine(inputFile);
            } catch (Exception e) {
                logger.error("Problem processing file: {} -- Error message: {}", inputFile.getName(),
                        e.getMessage());
                logger.debug("Problem processing file: {} -- Error message: {}", inputFile.getName(),
                        e.getMessage(), e);
            }
        }
    } else {
        System.err.println("Missing required option: " + PARAM_I);
        displayHelp();
        System.exit(-1);
    }
    System.exit(0);
}
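The core branch in the example above is a single isDirectory() test that decides between iterating a DirectoryStream of direct children and converting one file directly. A stripped-down sketch of that dispatch, with the real conversion step replaced by a placeholder:

import java.io.File;
import java.io.IOException;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.Path;

public class FileOrDirectoryDispatch {
    // Placeholder for the real conversion/examination step in the example above.
    static void examine(File f) {
        System.out.println("examining " + f.getName());
    }

    public static void main(String[] args) throws IOException {
        File input = new File(args.length > 0 ? args[0] : ".");
        if (!input.exists()) {
            System.err.println(input + " does not exist");
            return;
        }
        if (input.isDirectory()) {
            // Iterate direct children only; nested directories are skipped.
            try (DirectoryStream<Path> dirStream = Files.newDirectoryStream(input.toPath())) {
                for (Path p : dirStream) {
                    if (p.toFile().isFile()) {
                        examine(p.toFile());
                    }
                }
            }
        } else {
            examine(input);
        }
    }
}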
From source file:edu.wpi.khufnagle.webimagemanager.WebImageManager.java
/** * Defines information for the lighthouses, then runs the * photograph-collection process./*from w ww . j a v a 2s. c om*/ * @param args Command-line arguments for this program (not used in this * implementation) */ // Auto-boxing done "on the fly" to show progress of downloading images @SuppressWarnings("boxing") public static void main(final String[] args) { final long startTime = System.nanoTime(); System.out.println("***BEGIN PHOTO TRANSFER PROCESS***"); // Add data for lighthouses (next 375 lines or so) final List<LighthouseInfo> lighthouseData = new ArrayList<LighthouseInfo>(); /* * lighthouseData.add(new LighthouseInfo("Statue of Liberty", 40.689348, * -74.044726)); */// Statue of Liberty = 2080 photos w/out restrictions lighthouseData.add(new LighthouseInfo("Portland Head Light", 43.623104, -70.207867)); lighthouseData.add(new LighthouseInfo("Pemaquid Point Light", 43.836970, -69.505997)); lighthouseData.add(new LighthouseInfo("Five Mile Point (New Haven Harbor) Light", 41.248958, -72.903766)); lighthouseData.add(new LighthouseInfo("Cape Neddick (Nubble) Light", 43.165211, -70.591102)); lighthouseData.add(new LighthouseInfo("Portland Breakwater Light", 43.655516, -70.234813)); lighthouseData.add(new LighthouseInfo("Beavertail Light", 41.449368, -71.399372)); lighthouseData.add(new LighthouseInfo("Bass Harbor Head Light", 44.221976, -68.337214)); lighthouseData.add(new LighthouseInfo("Nobska Point Light", 41.515792, -70.655116)); lighthouseData.add(new LighthouseInfo("Spring Point Ledge Light", 43.652108, -70.223922)); lighthouseData.add(new LighthouseInfo("Gay Head Light", 41.348450, -70.834956)); lighthouseData.add(new LighthouseInfo("Derby Wharf Light", 42.516566, -70.883536)); lighthouseData.add(new LighthouseInfo("Rockland Breakwater Light", 44.104006, -69.077453)); lighthouseData.add(new LighthouseInfo("Sandy Neck Light", 41.722647, -70.280927)); lighthouseData.add(new LighthouseInfo("Marblehead Light", 42.505411, -70.833708)); lighthouseData.add(new LighthouseInfo("Portsmouth Harbor Light", 43.071061, -70.708634)); lighthouseData.add(new LighthouseInfo("Highland Light", 42.039122, -70.062025)); lighthouseData.add(new LighthouseInfo("Cape Elizabeth Light", 43.566058, -70.200042)); lighthouseData.add(new LighthouseInfo("Marshall Point Light", 43.917406, -69.261222)); lighthouseData.add(new LighthouseInfo("Chatham Light", 41.671407, -69.949884)); lighthouseData.add(new LighthouseInfo("Block Island Southeast Light", 41.153412, -71.552117)); lighthouseData.add(new LighthouseInfo("Edgartown Light", 41.390863, -70.503057)); lighthouseData.add(new LighthouseInfo("Watch Hill Light", 41.303884, -71.858575)); lighthouseData.add(new LighthouseInfo("Nauset Light", 41.858305, -69.951631)); lighthouseData .add(new LighthouseInfo("Fayerweather Island (Black Rock Harbor) Light", 41.142380, -73.217409)); lighthouseData.add(new LighthouseInfo("Owls Head Light", 44.092138, -69.044105)); lighthouseData.add(new LighthouseInfo("Point Judith Light", 41.361035, -71.481402)); lighthouseData.add(new LighthouseInfo("Sankaty Head Light", 41.284379, -69.966244)); lighthouseData.add(new LighthouseInfo("Eastern Point Light", 42.580229, -70.664537)); lighthouseData.add(new LighthouseInfo("Fort Pickering Light", 42.526473, -70.866465)); lighthouseData.add(new LighthouseInfo("Wood Island Light", 43.456788, -70.328976)); lighthouseData.add(new LighthouseInfo("Stonington Harbor Light", 41.328780, -71.905486)); lighthouseData.add(new LighthouseInfo("West Quoddy Head Light", 44.815073, -66.950742)); 
lighthouseData.add(new LighthouseInfo("Fort Point Light", 44.467265, -68.811717)); lighthouseData.add(new LighthouseInfo("Annisquam Light", 42.661874, -70.681488)); lighthouseData.add(new LighthouseInfo("Newport Harbor Light", 41.493299, -71.327038)); lighthouseData.add(new LighthouseInfo("Long Point Light", 42.033117, -70.168651)); lighthouseData.add(new LighthouseInfo("Castle Hill Light", 41.462116, -71.362919)); lighthouseData.add(new LighthouseInfo("Brant Point Light", 41.289918, -70.090287)); lighthouseData.add(new LighthouseInfo("Stratford Point Light", 41.151984, -73.103276)); lighthouseData.add(new LighthouseInfo("Boston Light", 42.327925, -70.890101)); lighthouseData.add(new LighthouseInfo("Lynde Point Light", 41.271452, -72.343142)); lighthouseData.add(new LighthouseInfo("Scituate Light", 42.204748, -70.715814)); lighthouseData.add(new LighthouseInfo("Prospect Harbor Light", 44.403285, -68.012922)); lighthouseData.add(new LighthouseInfo("Wood End Light", 42.021223, -70.193502)); lighthouseData.add(new LighthouseInfo("Rose Island Light", 41.495477, -71.342742)); lighthouseData.add(new LighthouseInfo("Saybrook Breakwater Light", 41.263158, -72.342813)); lighthouseData.add(new LighthouseInfo("Great Point Light", 41.390096, -70.048234)); lighthouseData.add(new LighthouseInfo("Cape Poge Light", 41.418798, -70.451923)); lighthouseData.add(new LighthouseInfo("Monhegan Light", 43.764779, -69.316204)); lighthouseData.add(new LighthouseInfo("Hendricks Head Light", 43.822589, -69.689761)); lighthouseData.add(new LighthouseInfo("Egg Rock Light", 44.354050, -68.138166)); lighthouseData.add(new LighthouseInfo("New London Ledge Light", 41.305826, -72.077448)); lighthouseData.add(new LighthouseInfo("Avery Point Lighthouse", 41.315245, -72.063579)); lighthouseData.add(new LighthouseInfo("Palmers Island Light", 41.626936, -70.909109)); lighthouseData.add(new LighthouseInfo("Cuckolds Light", 43.779663, -69.649982)); lighthouseData.add(new LighthouseInfo("Gull Rocks Light", 41.502451, -71.333140)); lighthouseData.add(new LighthouseInfo("Goat Island Light", 43.357826, -70.425109)); lighthouseData.add(new LighthouseInfo("East Chop Light", 41.470245, -70.567439)); lighthouseData.add(new LighthouseInfo("Neds Point Light", 41.650859, -70.795638)); lighthouseData.add(new LighthouseInfo("Sakonnet Point Light", 41.453090, -71.202382)); lighthouseData.add(new LighthouseInfo("Narrows (Bug) Light", 42.323137, -70.919158)); lighthouseData.add(new LighthouseInfo("Plum Island Light", 42.815119, -70.818981)); lighthouseData.add(new LighthouseInfo("Block Island North Light", 41.227639, -71.575811)); lighthouseData.add(new LighthouseInfo("Mount Desert Rock Light", 43.968582, -68.128306)); lighthouseData.add(new LighthouseInfo("Duxbury Pier Light", 41.987375, -70.648498)); lighthouseData.add(new LighthouseInfo("Long Island Head Light", 42.330197, -70.957712)); lighthouseData.add(new LighthouseInfo("Prudence Island Light", 41.605881, -71.303535)); lighthouseData.add(new LighthouseInfo("Plum Beach Light", 41.530248, -71.405202)); lighthouseData.add(new LighthouseInfo("Doubling Point Light", 43.882503, -69.806792)); lighthouseData.add(new LighthouseInfo("Dice Head Light", 44.382732, -68.819022)); lighthouseData.add(new LighthouseInfo("Ram Island Ledge Light", 43.631457, -70.187366)); lighthouseData.add(new LighthouseInfo("New London Harbor Light", 41.316619, -72.089743)); lighthouseData.add(new LighthouseInfo("Lime Rock Light", 41.477536, -71.325924)); lighthouseData.add(new LighthouseInfo("Ten Pound Island Light", 
42.601865, -70.665556)); lighthouseData.add(new LighthouseInfo("Bristol Ferry Light", 41.642842, -71.260319)); lighthouseData.add(new LighthouseInfo("Musselbed Shoals Light", 41.636261, -71.259958)); lighthouseData.add(new LighthouseInfo("Conimicut Light", 41.716969, -71.345106)); lighthouseData.add(new LighthouseInfo("Tongue Point Light", 41.166590, -73.177497)); lighthouseData.add(new LighthouseInfo("Bass River Light", 41.651746, -70.169473)); lighthouseData.add(new LighthouseInfo("Hospital Point Light", 42.546413, -70.856164)); lighthouseData.add(new LighthouseInfo("Newburyport Range Light", 42.811524, -70.864838)); lighthouseData.add(new LighthouseInfo("Dutch Island Light", 41.496702, -71.404299)); lighthouseData.add(new LighthouseInfo("Heron Neck Light", 44.025216, -68.861966)); lighthouseData.add(new LighthouseInfo("Pumpkin Island Light", 44.309166, -68.742876)); lighthouseData.add(new LighthouseInfo("Whaleback Light", 43.058744, -70.696306)); lighthouseData.add(new LighthouseInfo("Hyannis Harbor Light", 41.636267, -70.288439)); lighthouseData.add(new LighthouseInfo("Stage Harbor Light", 41.658692, -69.983689)); lighthouseData.add(new LighthouseInfo("Lovells Island Range Light", 42.332440, -70.930214)); lighthouseData.add(new LighthouseInfo("Hog Island Shoal Light", 41.632338, -71.273198)); lighthouseData.add(new LighthouseInfo("Ram Island Light", 43.803935, -69.599349)); lighthouseData.add(new LighthouseInfo("Bridgeport Harbor Light", 41.156718, -73.179950)); lighthouseData.add(new LighthouseInfo("Straitsmouth Island Light", 42.662236, -70.588157)); lighthouseData.add(new LighthouseInfo("Squirrel Point Light", 43.816520, -69.802402)); lighthouseData.add(new LighthouseInfo("Mayos Beach Light", 41.930755, -70.032097)); lighthouseData.add(new LighthouseInfo("Race Point Light", 42.062314, -70.243084)); lighthouseData.add(new LighthouseInfo("Point Gammon Light", 41.609647, -70.266196)); lighthouseData.add(new LighthouseInfo("Wings Neck Light", 41.680235, -70.661250)); lighthouseData.add(new LighthouseInfo("West Chop Light", 41.480806, -70.599796)); lighthouseData.add(new LighthouseInfo("Bird Island Light", 41.669295, -70.717341)); lighthouseData.add(new LighthouseInfo("Clarks Point Light", 41.593176, -70.901416)); lighthouseData.add(new LighthouseInfo("Thacher Island Light", 42.639168, -70.574759)); lighthouseData.add(new LighthouseInfo("White Island Light", 42.967228, -70.623249)); lighthouseData.add(new LighthouseInfo("Wickford Harbor Light", 41.572618, -71.436831)); lighthouseData.add(new LighthouseInfo("Whale Rock Light", 41.444597, -71.423584)); lighthouseData.add(new LighthouseInfo("Burnt Island Light", 43.825133, -69.640262)); lighthouseData.add(new LighthouseInfo("Rockland Harbor Southwest Light", 44.082720, -69.096310)); lighthouseData.add(new LighthouseInfo("Saddleback Ledge Light", 44.014232, -68.726461)); lighthouseData.add(new LighthouseInfo("Grindle Point Light", 44.281451, -68.942967)); lighthouseData.add(new LighthouseInfo("Winter Harbor Light", 44.361421, -68.087742)); lighthouseData.add(new LighthouseInfo("Peck's Ledge Light", 41.077298, -73.369811)); lighthouseData.add(new LighthouseInfo("Sheffield Island Light", 41.048251, -73.419931)); lighthouseData.add(new LighthouseInfo("Whitlocks Mill Light", 45.162793, -67.227395)); lighthouseData.add(new LighthouseInfo("Boon Island Light", 43.121183, -70.475845)); lighthouseData.add(new LighthouseInfo("Southwest Ledge Light", 41.234443, -72.912092)); lighthouseData.add(new LighthouseInfo("Broad Sound Channel Inner Range Light", 
42.326933, -70.984649)); lighthouseData.add(new LighthouseInfo("Spectacle Island Light", 42.326898, -70.984772)); lighthouseData.add(new LighthouseInfo("Deer Island Light", 42.339836, -70.954525)); lighthouseData.add(new LighthouseInfo("Nayatt Point Light", 41.725120, -71.338926)); lighthouseData.add(new LighthouseInfo("Doubling Point Range Lights", 43.882860, -69.795652)); lighthouseData.add(new LighthouseInfo("Burkehaven Light", 43.371669, -72.065869)); lighthouseData.add(new LighthouseInfo("Loon Island Light", 43.392123, -72.059977)); lighthouseData.add(new LighthouseInfo("Curtis Island Light", 44.201372, -69.048865)); lighthouseData.add(new LighthouseInfo("Butler Flats Light", 41.603775, -70.894556)); lighthouseData.add(new LighthouseInfo("Graves Light", 42.365098, -70.869191)); lighthouseData.add(new LighthouseInfo("Stamford Harbor Light", 41.013643, -73.542577)); lighthouseData.add(new LighthouseInfo("Billingsgate Light", 41.871624, -70.068982)); lighthouseData.add(new LighthouseInfo("Monomoy Point Light", 41.559310, -69.993650)); lighthouseData.add(new LighthouseInfo("Bishop & Clerks Light", 41.574154, -70.249963)); lighthouseData.add(new LighthouseInfo("Plymouth Light", 42.003737, -70.600565)); lighthouseData.add(new LighthouseInfo("Cleveland Ledge Light", 41.630927, -70.694201)); lighthouseData.add(new LighthouseInfo("Tarpaulin Cove Light", 41.468822, -70.757514)); lighthouseData.add(new LighthouseInfo("Minots Ledge Light", 42.269678, -70.759136)); lighthouseData.add(new LighthouseInfo("Dumpling Rock Light", 41.538167, -70.921427)); lighthouseData.add(new LighthouseInfo("Bakers Island Light", 42.536470, -70.785995)); lighthouseData.add(new LighthouseInfo("Cuttyhunk Light", 41.414391, -70.949558)); lighthouseData.add(new LighthouseInfo("Egg Rock Light", 42.433346, -70.897386)); lighthouseData.add(new LighthouseInfo("Ipswich Range Light", 42.685360, -70.766128)); lighthouseData.add(new LighthouseInfo("Borden Flats Light", 41.704450, -71.174395)); lighthouseData.add(new LighthouseInfo("Bullocks Point Light", 41.737740, -71.364179)); lighthouseData.add(new LighthouseInfo("Pomham Rocks Light", 41.777618, -71.369594)); lighthouseData.add(new LighthouseInfo("Sabin Point Light", 41.762010, -71.374234)); lighthouseData.add(new LighthouseInfo("Fuller Rock Light", 41.794055, -71.379720)); lighthouseData.add(new LighthouseInfo("Gould Island Light", 41.537826, -71.344804)); lighthouseData.add(new LighthouseInfo("Warwick Light", 41.667111, -71.378413)); lighthouseData.add(new LighthouseInfo("Sassafras Point Light", 41.802496, -71.390272)); lighthouseData.add(new LighthouseInfo("Conanicut Light", 41.573484, -71.371767)); lighthouseData.add(new LighthouseInfo("Poplar Point Light", 41.571053, -71.439189)); lighthouseData.add(new LighthouseInfo("Halfway Rock Light", 43.655873, -70.037402)); lighthouseData.add(new LighthouseInfo("Seguin Island Light", 43.707554, -69.758118)); lighthouseData.add(new LighthouseInfo("Pond Island Light", 43.740031, -69.770273)); lighthouseData.add(new LighthouseInfo("Perkins Island Light", 43.786764, -69.785256)); lighthouseData.add(new LighthouseInfo("Latimer Reef Light", 41.304503, -71.933292)); lighthouseData.add(new LighthouseInfo("Morgan Point Light", 41.316669, -71.989327)); lighthouseData.add(new LighthouseInfo("Franklin Island Light", 43.892184, -69.374842)); lighthouseData.add(new LighthouseInfo("Matinicus Rock Light", 43.783605, -68.854898)); lighthouseData.add(new LighthouseInfo("Tenants Harbor Light", 43.961107, -69.184877)); lighthouseData.add(new 
LighthouseInfo("Whitehead Light", 43.978706, -69.124285)); lighthouseData.add(new LighthouseInfo("Two Bush Island Light", 43.964239, -69.073942)); lighthouseData.add(new LighthouseInfo("Indian Island Light", 44.165470, -69.061004)); lighthouseData.add(new LighthouseInfo("Browns Head Light", 44.111774, -68.909482)); lighthouseData.add(new LighthouseInfo("Goose Rocks Light", 44.135394, -68.830526)); lighthouseData.add(new LighthouseInfo("Sperry Light", 41.221221, -72.423110)); lighthouseData.add(new LighthouseInfo("Isle au Haut Light", 44.064733, -68.651339)); lighthouseData.add(new LighthouseInfo("Deer Island Thorofare Light", 44.134338, -68.703202)); lighthouseData.add(new LighthouseInfo("Herrick Cove Light", 43.411136, -72.041706)); lighthouseData.add(new LighthouseInfo("Eagle Island Light", 44.217634, -68.767743)); lighthouseData.add(new LighthouseInfo("Burnt Coat Harbor Light", 44.134176, -68.447258)); lighthouseData.add(new LighthouseInfo("Faulkner's Island Light", 41.211612, -72.655088)); lighthouseData.add(new LighthouseInfo("Blue Hill Bay Light", 44.248746, -68.497880)); lighthouseData.add(new LighthouseInfo("Great Duck Island Light", 44.142193, -68.245836)); lighthouseData.add(new LighthouseInfo("Bear Island Light", 44.283485, -68.269858)); lighthouseData.add(new LighthouseInfo("Baker Island Light", 44.241266, -68.198923)); lighthouseData.add(new LighthouseInfo("Crabtree Ledge Light", 44.475613, -68.199383)); lighthouseData.add(new LighthouseInfo("Statford Shoal Light", 41.059557, -73.101394)); lighthouseData.add(new LighthouseInfo("Petit Manan Light", 44.367574, -67.864129)); lighthouseData.add(new LighthouseInfo("Penfield Reef Light", 41.117101, -73.222070)); lighthouseData.add(new LighthouseInfo("Narraguagus Light", 44.462467, -67.837844)); lighthouseData.add(new LighthouseInfo("Nash Island Light", 44.464305, -67.747299)); lighthouseData.add(new LighthouseInfo("Moose Peak Light", 44.474244, -67.533471)); lighthouseData.add(new LighthouseInfo("Green's Ledge Light", 41.041551, -73.443974)); lighthouseData.add(new LighthouseInfo("Libby Island Light", 44.568236, -67.367339)); lighthouseData.add(new LighthouseInfo("Great Captain Island Light", 40.982478, -73.623706)); lighthouseData.add(new LighthouseInfo("Avery Rock Light", 44.654358, -67.344137)); lighthouseData.add(new LighthouseInfo("Little River Light", 44.650873, -67.192325)); lighthouseData.add(new LighthouseInfo("Lubec Channel Light", 44.841955, -66.976731)); lighthouseData.add(new LighthouseInfo("St. Croix River Light", 45.128762, -67.133594)); /* * "Clean out" photo directories before beginning photo transfer process. 
*/ final File photosDir = new File("photos"); final File[] photoLighthouseDirsToDelete = photosDir.listFiles(); if (photoLighthouseDirsToDelete != null) { for (final File photoLighthouseDir : photoLighthouseDirsToDelete) { // Use Apache Commons IO (again) to recursively delete the directory // and all of the files within it if (photoLighthouseDir.exists() && photoLighthouseDir.isDirectory()) { try { FileUtils.deleteDirectory(photoLighthouseDir); System.out.println("Deleted directory \"" + photoLighthouseDir + "\" successfully."); } catch (final IOException ioe) { System.err.println( "Could not delete directory: \"" + photoLighthouseDir + "\" successfully!"); } } } } // Keep track of elapsed time long estimatedTime = System.nanoTime() - startTime; String elapsedTime = WebImageManager.calculateElapsedTime(estimatedTime); System.out.println("Estimated elapsed time: " + elapsedTime + "."); System.out.println(); /* * Keep track of total number of photographs transferred from Flickr * websites to disks across _all_ lighthouses */ int totalNumPhotosTransferred = 0; /* * Keep track of total number of photographs that _should_ be transferred * from Flickr for _all_ lighthouses */ int totalNumPhotos = 0; for (final LighthouseInfo lighthousePieceOfData : lighthouseData) { System.out.println("Processing photos of " + lighthousePieceOfData.getName() + "..."); /* * URL for accessing Flickr APIs. For a given lighthouse, this URL * provides an XML file in response that lists information about every * geotagged, Creative Commons-enabled photograph for that lighthouse * on Flickr. */ // GET Parameter Explanation: // method - Use the "search photos" method for the Flickr APIs // // api_key - A unique key that I use to get the results // // text - Find all lighthouses whose title, tags, or description // contains the word "lighthouse" // // license - Find all photos with a Creative Commons license _except_ // those that do not allow for modification on my part // // content_type - Find photos only (no videos) // // has_geo - Implicitly set to true; implies that all photos are // geotagged // // lat - The latitude of the center of the "search circle" // // lon - The longitude of the center of the "search circle" // // radius - The radius of the "search circle," in _kilometers_ (NOT // miles) // // extras - Also include a URL to the "raw" photo (small version) final String inputURLText = "http://ycpi.api.flickr.com/services/rest/?" 
+ "method=flickr.photos.search" + "&api_key=3ea8366b020383eb91f170c6f41748f5" + "&text=lighthouse" + "&license=1,2,4,5,7" + "&content_type=1" + "&has_geo" + "&lat=" + lighthousePieceOfData.getLatitude() + "&lon=" + lighthousePieceOfData.getLongitude() + "&radius=1" + "&extras=url_s"; // Output file where XML web response will be stored temporarily final String outputFileName = "output.xml"; /* * Convert the name of the lighthouse to a "computer friendly" version * with all lower-case letters and underscores replacing spaces, * apostrophes, and parenthesis */ String lighthouseName = lighthousePieceOfData.getName(); lighthouseName = lighthouseName.toLowerCase(); lighthouseName = lighthouseName.replace(' ', '_'); lighthouseName = lighthouseName.replace('\'', '_'); lighthouseName = lighthouseName.replace('(', '_'); lighthouseName = lighthouseName.replace(')', '_'); // Will contain the textual links to each "raw" photo website Set<String> rawPhotoURLs = new HashSet<String>(); // Make sure file for XML output does not exist at first // (don't want to use an old, incorrect version accidentally) final File outputXMLFile = new File(outputFileName); if (outputXMLFile.exists()) { outputXMLFile.delete(); } System.out.println("Cleaned output XML file containing photo URLs on disk successfully."); /* * Access the list of photographs for a given lighthouse and copy them * to the XML file on disk */ final WebDataExtractor extractor = new WebDataExtractor(inputURLText, outputFileName); System.out.println("Looking for XML file containing lighthosue photo information..."); extractor.transferURLToFile(); System.out.println("Found XML file containing lighthouse photo URLs."); /* * Object for extracting the "raw" URLs from each piece of photo data * in the XML file */ final XMLParser parser = new FlickrXMLOutputParser(outputFileName); // Complete the extraction process rawPhotoURLs = parser.parseFile("//photo/@url_s"); final int numPhotos = rawPhotoURLs.size(); totalNumPhotos += numPhotos; int i = 0; // Counter for keeping track of progress /* * Keep track of photos transferred successfully (which might be less * than the total number of photos defined int the XML output from * Flickr, especially if connection issues occur */ int numPhotosTransferred = 0; for (final String photoURL : rawPhotoURLs) { System.out.print("Transferring photos..."); i++; /* * Go to a website containing a "raw" JPEG image file and save it * accordingly on disk in the photo folder corresponding to the * lighthouse name */ final WebDataExtractor rawPhotoExtractor = new WebDataExtractor(photoURL, "photos/" + lighthouseName + "/lighthouse_photo_" + Integer.toString(i) + ".jpg"); final boolean transferSuccessful = rawPhotoExtractor.transferURLToFile(); if (transferSuccessful) { numPhotosTransferred++; } // Simple progress tracker System.out.printf("%d of %d (%.1f%%) complete.\n", i, numPhotos, i * 1.0 / numPhotos * 100.0); } // Indicate number of photos successfully transferred to disk if (numPhotosTransferred == numPhotos && numPhotos > 0) { System.out.println("All photos transferred to disk successfully!"); } else if (numPhotos == 0) { System.out.println("It appears there are no photos available for this lighthouse..."); } else if (numPhotosTransferred == 1 && numPhotos > 1) { System.out.println("1 photo transferred to disk successfully."); } else if (numPhotosTransferred == 1 && numPhotos == 1) { System.out.println("The photo transferred to disk successfully!"); } else { System.out.println(numPhotosTransferred + " photos transferred to 
disk successfully."); } // Keep track of elapsed time estimatedTime = System.nanoTime() - startTime; elapsedTime = WebImageManager.calculateElapsedTime(estimatedTime); System.out.println("Estimated elapsed time: " + elapsedTime + "."); // Add extra line in between lighthouses in output stream System.out.println(); /* * Keep track of total number of photos transferred so far across * _all_lighthouses */ totalNumPhotosTransferred += numPhotosTransferred; } // Display "grand" total (which is hopefully greater than 0) System.out.println("***GRAND TOTAL: " + totalNumPhotosTransferred + " OF " + totalNumPhotos + " PHOTOS TRANSFERRED SUCCESSFULLY***"); estimatedTime = System.nanoTime() - startTime; elapsedTime = WebImageManager.calculateElapsedTime(estimatedTime); System.out.println("TOTAL ELAPSED TIME: " + elapsedTime.toUpperCase()); }
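The only isDirectory() call in the long example above is the guard before recursively deleting each per-lighthouse photo folder. A minimal sketch of that clean-out step, using Apache Commons IO's FileUtils.deleteDirectory as in the original (the "photos" path mirrors the example):

import java.io.File;
import java.io.IOException;
import org.apache.commons.io.FileUtils;

public class CleanSubdirectories {
    public static void main(String[] args) {
        File photosDir = new File("photos");
        File[] subdirs = photosDir.listFiles(); // null if photosDir is missing or not a directory

        if (subdirs == null) {
            return;
        }
        for (File subdir : subdirs) {
            // Only recurse-delete entries that really are directories.
            if (subdir.exists() && subdir.isDirectory()) {
                try {
                    FileUtils.deleteDirectory(subdir);
                    System.out.println("Deleted directory \"" + subdir + "\" successfully.");
                } catch (IOException ioe) {
                    System.err.println("Could not delete directory \"" + subdir + "\": " + ioe.getMessage());
                }
            }
        }
    }
}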
From source file:net.iiit.siel.analysis.lang.LanguageIdentifier.java
/** * The main method.// ww w .j a v a 2s .com * * @param args the arguments */ public static void main(String args[]) { String usage = "Usage: LanguageIdentifier " + "[-identifyrows filename maxlines] " + "[-identifyfile charset filename] " + "[-identifyfileset charset files] " + "[-identifytext text] " + "[-identifyurl url]"; int command = 0; final int IDFILE = 1; final int IDTEXT = 2; final int IDURL = 3; final int IDFILESET = 4; final int IDROWS = 5; Vector fileset = new Vector(); String filename = ""; String charset = ""; String url = ""; String text = ""; int max = 0; // TODO niket writing test args here.. /* args = new String[2]; args[0] = "-identifyurl"; args[1] = "file:/home1/niket/TamilSamplePage.html"; //args[2] = "/home1/niket/nutch-clia/input.txt"; */ // TODO niket end here if (args.length == 0) { System.err.println(usage); System.exit(-1); } for (int i = 0; i < args.length; i++) { // parse command line if (args[i].equals("-identifyfile")) { command = IDFILE; charset = args[++i]; filename = args[++i]; } if (args[i].equals("-identifyurl")) { command = IDURL; filename = args[++i]; } if (args[i].equals("-identifyrows")) { command = IDROWS; filename = args[++i]; max = Integer.parseInt(args[++i]); } if (args[i].equals("-identifytext")) { command = IDTEXT; for (i++; i < args.length - 1; i++) text += args[i] + " "; } if (args[i].equals("-identifyfileset")) { command = IDFILESET; charset = args[++i]; for (i++; i < args.length; i++) { File[] files = null; File f = new File(args[i]); if (f.isDirectory()) { files = f.listFiles(); } else { files = new File[] { f }; } for (int j = 0; j < files.length; j++) { fileset.add(files[j].getAbsolutePath()); } } } } Configuration conf = NutchConfiguration.create(); String lang = null; LanguageIdentifier idfr = new LanguageIdentifier(conf); File f; FileInputStream fis; try { switch (command) { case IDTEXT: lang = idfr.identify(text); System.out.println("Lang :" + lang); break; case IDFILE: f = new File(filename); fis = new FileInputStream(f); lang = idfr.identify(fis, charset); fis.close(); break; case IDURL: lang = LangIdentifierUtility.IdentifyLangFromURLDirectly(filename); /* * our url identifier is confused or couldn't identify lang from * URL */ if (lang == null || lang.equalsIgnoreCase("en")) { System.out.println("Ambuguity in identifying language from URL"); } else { System.out.println("Lang was identified(using URL) as: " + lang); } break; case IDROWS: f = new File(filename); BufferedReader br = new BufferedReader(new InputStreamReader(new FileInputStream(f))); String line; while (max > 0 && (line = br.readLine()) != null) { line = line.trim(); if (line.length() > 2) { max--; lang = idfr.identify(line); System.out.println("R=" + lang + ":" + line); } } br.close(); System.exit(0); break; case IDFILESET: /* * used for benchs for (int j=128; j<=524288; j*=2) { long start * = System.currentTimeMillis(); idfr.analyzeLength = j; */ System.out.println("FILESET"); Iterator i = fileset.iterator(); while (i.hasNext()) { try { filename = (String) i.next(); f = new File(filename); fis = new FileInputStream(f); lang = idfr.identify(fis, charset); fis.close(); } catch (Exception e) { System.out.println(e); } System.out.println(filename + " was identified as " + lang); } /* * used for benchs System.out.println(j + "/" + * (System.currentTimeMillis()-start)); } */ System.exit(0); break; } } catch (Exception e) { System.out.println(e); System.out.println("lang could not be identified properly"); e.printStackTrace(); } System.out.println("text was identified 
as " + lang); /* * DONOT delete the next few lines, they should be enabled, when a lang. * mapping map needs to be generated. TODO this is for printing * the hashMapRangeLangIDTable only * * idfr.langMarkerObject.printHashmapTableWithFormatting(); * * System.out * .println("\n\n\n Printing english text contents in this file:\n"); * System.out.println(idfr.langMarkerObject.getLangCharacters( * LanguageIdentifierConstants.LangShortNames.ENGLISH * .langShortName()).toString()); * * System.out * .println("\n\n\n Printing telugu text contents in this file:\n"); * System.out.println(idfr.langMarkerObject.getLangCharacters( * LanguageIdentifierConstants.LangShortNames.TELUGU * .langShortName()).toString()); * * System.out * .println("\n\n\n Printing unknown text contents in this file:\n"); * System.out.println(idfr.langMarkerObject.getLangCharacters( * LanguageIdentifierConstants.LangShortNames.UNKNOWN_LANG * .langShortName()).toString()); */ }
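The -identifyfileset branch above expands each argument into either its directory listing or the file itself; the isDirectory() test is what lets both spellings work. A minimal sketch of that expansion step on its own:

import java.io.File;
import java.util.ArrayList;
import java.util.List;

public class ExpandFileSet {
    public static void main(String[] args) {
        List<String> fileset = new ArrayList<>();
        for (String arg : args) {
            File f = new File(arg);
            // A directory argument contributes all of its direct children;
            // a plain file argument contributes just itself.
            File[] files = f.isDirectory() ? f.listFiles() : new File[] { f };
            if (files == null) {
                continue; // unreadable directory
            }
            for (File file : files) {
                fileset.add(file.getAbsolutePath());
            }
        }
        fileset.forEach(System.out::println);
    }
}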
From source file:com.adobe.aem.demomachine.gui.AemDemo.java
public static void main(String[] args) {

    String demoMachineRootFolder = null;

    // Command line options for this tool
    Options options = new Options();
    options.addOption("f", true, "Path to Demo Machine root folder");
    CommandLineParser parser = new BasicParser();
    try {
        CommandLine cmd = parser.parse(options, args);
        if (cmd.hasOption("f")) {
            demoMachineRootFolder = cmd.getOptionValue("f");
        }
    } catch (ParseException ex) {
        logger.error(ex.getMessage());
    }

    // Let's grab the version number for the core Maven file
    String mavenFilePath = (demoMachineRootFolder != null ? demoMachineRootFolder
            : System.getProperty("user.dir")) + File.separator + "java" + File.separator + "core"
            + File.separator + "pom.xml";
    File mavenFile = new File(mavenFilePath);
    if (mavenFile.exists() && !mavenFile.isDirectory()) {
        try {
            DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
            DocumentBuilder builder = factory.newDocumentBuilder();
            Document document;
            document = builder.parse(mavenFile);
            NodeList list = document.getElementsByTagName("version");
            if (list != null && list.getLength() > 0) {
                aemDemoMachineVersion = list.item(0).getFirstChild().getNodeValue();
            }
        } catch (Exception e) {
            logger.error("Can't parse Maven pom.xml file");
        }
    }

    // Let's check if we have a valid build.xml file to work with...
    String buildFilePath = (demoMachineRootFolder != null ? demoMachineRootFolder
            : System.getProperty("user.dir")) + File.separator + "build.xml";
    logger.debug("Trying to load build file from " + buildFilePath);
    buildFile = new File(buildFilePath);
    if (buildFile.exists() && !buildFile.isDirectory()) {
        // Launching the main window
        EventQueue.invokeLater(new Runnable() {
            public void run() {
                try {
                    UIManager.getLookAndFeelDefaults().put("defaultFont", new Font("Arial", Font.BOLD, 14));
                    AemDemo window = new AemDemo();
                    window.frameMain.setVisible(true);
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        });
    } else {
        logger.error("No valid build.xml file to work with");
        System.exit(-1);
    }
}
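The two checks above use the inverse test: exists() && !isDirectory() confirms the path points at a usable regular file (pom.xml, build.xml) rather than a directory that happens to share the name. A compact sketch of that guard (the candidate paths are illustrative):

import java.io.File;

public class RegularFileGuard {
    // Returns true only for an existing path that is not a directory.
    static boolean isUsableFile(String path) {
        File f = new File(path);
        return f.exists() && !f.isDirectory();
    }

    public static void main(String[] args) {
        String[] candidates = { "build.xml", "java/core/pom.xml" };
        for (String candidate : candidates) {
            System.out.println(candidate + " usable: " + isUsableFile(candidate));
        }
    }
}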
From source file:act.installer.reachablesexplorer.FreemarkerRenderer.java
public static void main(String[] args) throws Exception {
    CLIUtil cliUtil = new CLIUtil(Loader.class, HELP_MESSAGE, OPTION_BUILDERS);
    CommandLine cl = cliUtil.parseCommandLine(args);

    File baseOutputDir = new File(cl.getOptionValue(OPTION_OUTPUT_DEST));
    if (!baseOutputDir.exists()) {
        cliUtil.failWithMessage("Unable to find output directory at %s", baseOutputDir.getAbsolutePath());
        return;
    }

    File reachablesOut = new File(baseOutputDir, "Reachables");
    File pathsOut = new File(baseOutputDir, "Paths");
    File seqsOut = new File(baseOutputDir, "Sequences");

    for (File subdir : Arrays.asList(reachablesOut, pathsOut, seqsOut)) {
        if (!subdir.exists()) {
            LOGGER.info("Creating output directory at %s", subdir.getAbsolutePath());
            subdir.mkdir();
        } else if (!subdir.isDirectory()) {
            cliUtil.failWithMessage("Output directory at %s is not a directory", subdir.getAbsolutePath());
            return;
        }
    }

    FreemarkerRenderer renderer = FreemarkerRendererFactory.build(
            cl.getOptionValue(OPTION_DB_HOST, DEFAULT_HOST),
            Integer.valueOf(cl.getOptionValue(OPTION_DB_PORT, DEFAULT_PORT.toString())),
            cl.getOptionValue(OPTION_DB_NAME, DEFAULT_DB_NAME),
            cl.getOptionValue(OPTION_REACHABLES_COLLECTION, DEFAULT_REACHABLES_COLLECTION),
            cl.getOptionValue(OPTION_SEQUENCES_COLLECTION, DEFAULT_SEQUENCES_COLLECTION),
            cl.getOptionValue(OPTION_DNA_COLLECTION, DEFAULT_DNA_COLLECTION),
            cl.getOptionValue(OPTION_RENDERING_CACHE, DEFAULT_RENDERING_CACHE),
            cl.getOptionValue(OPTION_INSTALLER_SOURCE_DB, DEFAULT_CHEMICALS_DATABASE),
            cl.getOptionValue(OPTION_PATHWAY_COLLECTION, DEFAULT_PATHWAY_COLLECTION),
            cl.hasOption(OPTION_OMIT_PATHWAYS_AND_DESIGNS), reachablesOut, pathsOut, seqsOut);

    LOGGER.info("Page generation starting");

    List<Long> idsToRender = Collections.emptyList();
    if (cl.hasOption(OPTION_RENDER_SOME)) {
        idsToRender = Arrays.stream(cl.getOptionValues(OPTION_RENDER_SOME)).map(renderer::lookupMolecule)
                .collect(Collectors.toList());
    }

    renderer.generatePages(idsToRender);
}
From source file:act.installer.pubchem.PubchemTTLMerger.java
public static void main(String[] args) throws Exception {
    org.apache.commons.cli.Options opts = new org.apache.commons.cli.Options();
    for (Option.Builder b : OPTION_BUILDERS) {
        opts.addOption(b.build());
    }

    CommandLine cl = null;
    try {
        CommandLineParser parser = new DefaultParser();
        cl = parser.parse(opts, args);
    } catch (ParseException e) {
        System.err.format("Argument parsing failed: %s\n", e.getMessage());
        HELP_FORMATTER.printHelp(PubchemTTLMerger.class.getCanonicalName(), HELP_MESSAGE, opts, null, true);
        System.exit(1);
    }

    if (cl.hasOption("help")) {
        HELP_FORMATTER.printHelp(PubchemTTLMerger.class.getCanonicalName(), HELP_MESSAGE, opts, null, true);
        return;
    }

    PubchemTTLMerger merger = new PubchemTTLMerger();

    File rocksDBFile = new File(cl.getOptionValue(OPTION_INDEX_PATH));

    if (cl.hasOption(OPTION_ONLY_MERGE)) {
        if (!(rocksDBFile.exists() && rocksDBFile.isDirectory())) {
            System.err.format("Must specify an existing RocksDB index when using '%s'.\n", OPTION_ONLY_MERGE);
            HELP_FORMATTER.printHelp(PubchemTTLMerger.class.getCanonicalName(), HELP_MESSAGE, opts, null, true);
            System.exit(1);
        }
        merger.finish(merger.merge(rocksDBFile));
        return;
    }

    File rdfDir = new File(cl.getOptionValue(OPTION_RDF_DIRECTORY));
    if (!rdfDir.isDirectory()) {
        System.err.format("Must specify a directory of RDF files to be parsed.\n");
        HELP_FORMATTER.printHelp(PubchemTTLMerger.class.getCanonicalName(), HELP_MESSAGE, opts, null, true);
        System.exit(1);
    }

    File[] filesInDirectoryArray = rdfDir.listFiles(new FilenameFilter() {
        private static final String TTL_GZ_SUFFIX = ".ttl.gz";

        @Override
        public boolean accept(File dir, String name) {
            return name.endsWith(TTL_GZ_SUFFIX);
        }
    });

    if (filesInDirectoryArray == null || filesInDirectoryArray.length == 0) {
        System.err.format("Found zero compressed TTL files in directory at '%s'.\n", rdfDir.getAbsolutePath());
        HELP_FORMATTER.printHelp(PubchemTTLMerger.class.getCanonicalName(), HELP_MESSAGE, opts, null, true);
        System.exit(1);
    }

    // Sort files for stability/sanity.
    List<File> filesInDirectory = Arrays.asList(filesInDirectoryArray);
    Collections.sort(filesInDirectory);

    if (cl.hasOption(OPTION_ONLY_SYNONYMS)) {
        filesInDirectory = filterByFileContents(filesInDirectory, PC_RDF_DATA_FILE_CONFIG.HASH_TO_SYNONYM);
    }

    if (cl.hasOption(OPTION_ONLY_MESH)) {
        filesInDirectory = filterByFileContents(filesInDirectory, PC_RDF_DATA_FILE_CONFIG.HASH_TO_MESH);
    }

    if (cl.hasOption(OPTION_ONLY_PUBCHEM_IDS)) {
        filesInDirectory = filterByFileContents(filesInDirectory, PC_RDF_DATA_FILE_CONFIG.HASH_TO_CID);
    }

    if (filesInDirectory.size() == 0) {
        System.err.format(
                "Arrived at index initialization with no files to process. "
                        + "Maybe too many filters were specified? synonyms: %s, MeSH: %s, Pubchem ids: %s\n",
                cl.hasOption(OPTION_ONLY_SYNONYMS), cl.hasOption(OPTION_ONLY_MESH),
                cl.hasOption(OPTION_ONLY_PUBCHEM_IDS));
        HELP_FORMATTER.printHelp(PubchemTTLMerger.class.getCanonicalName(), HELP_MESSAGE, opts, null, true);
        System.exit(1);
    }

    RocksDB.loadLibrary();
    Pair<RocksDB, Map<COLUMN_FAMILIES, ColumnFamilyHandle>> dbAndHandles = null;
    try {
        if (rocksDBFile.exists()) {
            if (!cl.hasOption(OPTION_OPEN_EXISTING_OKAY)) {
                System.err.format(
                        "Index directory at '%s' already exists, delete before retrying or add '%s' option to reuse.\n",
                        rocksDBFile.getAbsolutePath(), OPTION_OPEN_EXISTING_OKAY);
                HELP_FORMATTER.printHelp(PubchemTTLMerger.class.getCanonicalName(), HELP_MESSAGE, opts, null, true);
                System.exit(1);
            } else {
                LOGGER.info("Reusing existing index at %s", rocksDBFile.getAbsolutePath());
                dbAndHandles = openExistingRocksDB(rocksDBFile);
            }
        } else {
            LOGGER.info("Creating new index at %s", rocksDBFile.getAbsolutePath());
            dbAndHandles = createNewRocksDB(rocksDBFile);
        }

        merger.buildIndex(dbAndHandles, filesInDirectory);

        merger.merge(dbAndHandles);
    } finally {
        if (dbAndHandles != null) {
            merger.finish(dbAndHandles);
        }
    }
}
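The index handling above combines exists() and isDirectory() to decide between reusing an existing on-disk index, rejecting a path that exists but is a plain file, and creating a fresh directory. A minimal sketch of that reuse-or-create decision, with the RocksDB calls replaced by placeholders (the default path name is illustrative):

import java.io.File;

public class IndexDirectorySetup {
    public static void main(String[] args) {
        // Hypothetical index location, standing in for OPTION_INDEX_PATH in the example above.
        File indexDir = new File(args.length > 0 ? args[0] : "pubchem.index");

        if (indexDir.exists() && indexDir.isDirectory()) {
            System.out.println("Reusing existing index at " + indexDir.getAbsolutePath());
            // ... open the existing store here ...
        } else if (indexDir.exists()) {
            // A regular file at the index path is unusable; fail early.
            System.err.println(indexDir.getAbsolutePath() + " exists but is not a directory");
            System.exit(1);
        } else {
            System.out.println("Creating new index at " + indexDir.getAbsolutePath());
            if (!indexDir.mkdirs()) {
                System.err.println("Could not create " + indexDir.getAbsolutePath());
                System.exit(1);
            }
        }
    }
}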