List of usage examples for java.lang.String replace
public String replace(CharSequence target, CharSequence replacement)
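Before the project examples below, a minimal self-contained sketch of what this overload does: it replaces every occurrence of the literal target (no regular expressions involved) and returns a new string, leaving the original untouched. The class name and values here are made up for illustration.

public class ReplaceDemo {
    public static void main(String[] args) {
        String s = "one.two.three";
        // The target is matched literally; "." is not treated as a regex metacharacter.
        System.out.println(s.replace(".", "-"));   // one-two-three
        // Strings are immutable, so the receiver is unchanged.
        System.out.println(s);                     // one.two.three
    }
}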
From source file:de.citec.sc.matoll.process.Matoll_CreateMax.java
public static void main(String[] args) throws IOException, ParserConfigurationException, SAXException, InstantiationException, IllegalAccessException, ClassNotFoundException, Exception { String directory;/*from w w w .ja va2 s. c o m*/ String gold_standard_lexicon; String output_lexicon; String configFile; Language language; String output; Stopwords stopwords = new Stopwords(); HashMap<String, Double> maxima; maxima = new HashMap<String, Double>(); if (args.length < 3) { System.out.print("Usage: Matoll --mode=train/test <DIRECTORY> <CONFIG>\n"); return; } // Classifier classifier; directory = args[1]; configFile = args[2]; final Config config = new Config(); config.loadFromFile(configFile); gold_standard_lexicon = config.getGoldStandardLexicon(); String model_file = config.getModel(); output_lexicon = config.getOutputLexicon(); output = config.getOutput(); language = config.getLanguage(); LexiconLoader loader = new LexiconLoader(); Lexicon gold = loader.loadFromFile(gold_standard_lexicon); Set<String> uris = new HashSet<>(); // Map<Integer,String> sentence_list = new HashMap<>(); Map<Integer, Set<Integer>> mapping_words_sentences = new HashMap<>(); //consider only properties for (LexicalEntry entry : gold.getEntries()) { try { for (Sense sense : entry.getSenseBehaviours().keySet()) { String tmp_uri = sense.getReference().getURI().replace("http://dbpedia.org/ontology/", ""); if (!Character.isUpperCase(tmp_uri.charAt(0))) { uris.add(sense.getReference().getURI()); } } } catch (Exception e) { } ; } ModelPreprocessor preprocessor = new ModelPreprocessor(language); preprocessor.setCoreferenceResolution(false); Set<String> dep = new HashSet<>(); dep.add("prep"); dep.add("appos"); dep.add("nn"); dep.add("dobj"); dep.add("pobj"); dep.add("num"); preprocessor.setDEP(dep); List<File> list_files = new ArrayList<>(); if (config.getFiles().isEmpty()) { File folder = new File(directory); File[] files = folder.listFiles(); for (File file : files) { if (file.toString().contains(".ttl")) list_files.add(file); } } else { list_files.addAll(config.getFiles()); } System.out.println(list_files.size()); int sentence_counter = 0; Map<String, Set<Integer>> bag_words_uri = new HashMap<>(); Map<String, Integer> mapping_word_id = new HashMap<>(); for (File file : list_files) { Model model = RDFDataMgr.loadModel(file.toString()); for (Model sentence : getSentences(model)) { String reference = getReference(sentence); reference = reference.replace("http://dbpedia/", "http://dbpedia.org/"); if (uris.contains(reference)) { sentence_counter += 1; Set<Integer> words_ids = getBagOfWords(sentence, stopwords, mapping_word_id); //TODO: add sentence preprocessing String obj = getObject(sentence); String subj = getSubject(sentence); preprocessor.preprocess(sentence, subj, obj, language); //TODO: also return marker if object or subject of property (in SPARQL this has to be optional of course) String parsed_sentence = getParsedSentence(sentence); try (FileWriter fw = new FileWriter("mapping_sentences_to_ids_goldstandard.tsv", true); BufferedWriter bw = new BufferedWriter(fw); PrintWriter out = new PrintWriter(bw)) { out.println(sentence_counter + "\t" + parsed_sentence); } catch (IOException e) { e.printStackTrace(); } for (Integer word_id : words_ids) { if (mapping_words_sentences.containsKey(word_id)) { Set<Integer> tmp_set = mapping_words_sentences.get(word_id); tmp_set.add(sentence_counter); mapping_words_sentences.put(word_id, tmp_set); } else { Set<Integer> tmp_set = new HashSet<>(); tmp_set.add(sentence_counter); 
mapping_words_sentences.put(word_id, tmp_set); } } if (bag_words_uri.containsKey(reference)) { Set<Integer> tmp = bag_words_uri.get(reference); for (Integer w : words_ids) { tmp.add(w); } bag_words_uri.put(reference, tmp); } else { Set<Integer> tmp = new HashSet<>(); for (Integer w : words_ids) { tmp.add(w); } bag_words_uri.put(reference, tmp); } } } model.close(); } PrintWriter writer = new PrintWriter("bag_of_words_only_goldstandard.tsv"); StringBuilder string_builder = new StringBuilder(); for (String r : bag_words_uri.keySet()) { string_builder.append(r); for (Integer i : bag_words_uri.get(r)) { string_builder.append("\t"); string_builder.append(i); } string_builder.append("\n"); } writer.write(string_builder.toString()); writer.close(); writer = new PrintWriter("mapping_words_to_sentenceids_goldstandard.tsv"); string_builder = new StringBuilder(); for (Integer w : mapping_words_sentences.keySet()) { string_builder.append(w); for (int i : mapping_words_sentences.get(w)) { string_builder.append("\t"); string_builder.append(i); } string_builder.append("\n"); } writer.write(string_builder.toString()); writer.close(); }
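The example above calls replace twice on URIs: once to strip the DBpedia ontology namespace so the local name can be checked for an upper-case initial, and once to repair a truncated base URI read from the RDF model. A reduced sketch of just those two calls, with made-up input values:

public class UriReplaceDemo {
    public static void main(String[] args) {
        // Strip the namespace so only the local name remains.
        String senseUri = "http://dbpedia.org/ontology/birthPlace";
        String localName = senseUri.replace("http://dbpedia.org/ontology/", "");
        System.out.println(localName);                                   // birthPlace
        System.out.println(Character.isUpperCase(localName.charAt(0)));  // false, so kept as a property

        // Repair a malformed base URI before comparing it against the gold-standard set.
        String reference = "http://dbpedia/resource/Berlin";
        reference = reference.replace("http://dbpedia/", "http://dbpedia.org/");
        System.out.println(reference);   // http://dbpedia.org/resource/Berlin
    }
}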
From source file:cool.pandora.modeller.WordScopeTest.java
public static void main(final String[] args) {
    final MetadataTemplate metadataTemplate;
    final String canvasRegionURI = "http://localhost:8080/fcrepo/rest/collection/test/021/canvas/007#xywh=445%2C1431"
            + "%2C154%2C40";
    final String wordContainerURI = "http://test/word";
    final String chars = "blah\"blah\"blah";
    final List<WordScope.Prefix> prefixes = Arrays.asList(new WordScope.Prefix(FedoraPrefixes.RDFS),
            new WordScope.Prefix(FedoraPrefixes.MODE), new WordScope.Prefix(IIIFPrefixes.OA),
            new WordScope.Prefix(IIIFPrefixes.CNT), new WordScope.Prefix(IIIFPrefixes.SC),
            new WordScope.Prefix(IIIFPrefixes.DCTYPES));
    // Escape embedded double quotes before passing the value into the template scope.
    final WordScope scope = new WordScope().fedoraPrefixes(prefixes).canvasURI(canvasRegionURI)
            .resourceContainerURI(wordContainerURI).chars(chars.replace("\"", "\\\""));
    metadataTemplate = MetadataTemplate.template().template("template/sparql-update-word.mustache")
            .scope(scope).throwExceptionOnFailure().build();
    final String metadata = unescapeXml(metadataTemplate.render());
    System.out.println(metadata);
}
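Here replace is used to escape embedded double quotes before the text is rendered into a SPARQL update template. A stand-alone sketch of that escaping step, with a made-up input value:

public class QuoteEscapeDemo {
    public static void main(String[] args) {
        String chars = "blah\"blah\"blah";
        // Each double quote becomes a backslash-escaped quote: blah\"blah\"blah
        String escaped = chars.replace("\"", "\\\"");
        System.out.println(escaped);
    }
}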
From source file:Main.java
public static void main(String[] args) {
    String ch = "this is a test this is a test test is is is a a a a a a a";
    // Repeatedly count the first character, report it if it is a letter,
    // then delete every occurrence of it with replace() until the string is empty.
    while (!ch.isEmpty()) {
        char first = ch.charAt(0);
        int count = 0;
        for (int j = 0; j < ch.length(); j++) {
            if (ch.charAt(j) == first && ((first >= 'A' && first <= 'Z') || (first >= 'a' && first <= 'z'))) {
                count++;
            }
        }
        if (count != 0) {
            System.out.println(first + " " + count + " Times");
        }
        // Replacing the character with the empty string removes all of its occurrences.
        ch = ch.replace("" + first, "");
    }
}
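The loop above relies on the fact that replacing a target with the empty string deletes every occurrence of it. Two isolated examples of that idiom (note that a single char has to be turned into a CharSequence first, for instance by concatenation):

public class DeleteCharDemo {
    public static void main(String[] args) {
        String text = "this is a test";
        // Removing every 't'.
        System.out.println(text.replace("t", ""));    // his is a es
        // A char must be widened to a CharSequence, e.g. via "" + c.
        char c = 'i';
        System.out.println(text.replace("" + c, "")); // ths s a test
    }
}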
From source file:com.vmware.photon.controller.core.Main.java
public static void main(String[] args) throws Throwable { try {/* w w w . j a v a2s .c om*/ LoggingFactory.bootstrap(); logger.info("args: " + Arrays.toString(args)); ArgumentParser parser = ArgumentParsers.newArgumentParser("PhotonControllerCore").defaultHelp(true) .description("Photon Controller Core"); parser.addArgument("config-file").help("photon controller configuration file"); parser.addArgument("--manual").type(Boolean.class).setDefault(false) .help("If true, create default deployment."); Namespace namespace = parser.parseArgsOrFail(args); PhotonControllerConfig photonControllerConfig = getPhotonControllerConfig(namespace); DeployerConfig deployerConfig = photonControllerConfig.getDeployerConfig(); new LoggingFactory(photonControllerConfig.getLogging(), "photon-controller-core").configure(); SSLContext sslContext; if (deployerConfig.getDeployerContext().isAuthEnabled()) { sslContext = SSLContext.getInstance(KeyStoreUtils.THRIFT_PROTOCOL); TrustManagerFactory tmf = null; tmf = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm()); KeyStore keyStore = KeyStore.getInstance("JKS"); InputStream in = FileUtils .openInputStream(new File(deployerConfig.getDeployerContext().getKeyStorePath())); keyStore.load(in, deployerConfig.getDeployerContext().getKeyStorePassword().toCharArray()); tmf.init(keyStore); sslContext.init(null, tmf.getTrustManagers(), null); } else { KeyStoreUtils.generateKeys("/thrift/"); sslContext = KeyStoreUtils.acceptAllCerts(KeyStoreUtils.THRIFT_PROTOCOL); } ThriftModule thriftModule = new ThriftModule(sslContext); PhotonControllerXenonHost xenonHost = startXenonHost(photonControllerConfig, thriftModule, deployerConfig, sslContext); if ((Boolean) namespace.get("manual")) { DefaultDeployment.createDefaultDeployment(photonControllerConfig.getXenonConfig().getPeerNodes(), deployerConfig, xenonHost); } // Creating a temp configuration file for apife with modification to some named sections in photon-controller-config // so that it can match the Configuration class of dropwizard. File apiFeTempConfig = File.createTempFile("apiFeTempConfig", ".tmp"); File source = new File(args[0]); FileInputStream fis = new FileInputStream(source); BufferedReader in = new BufferedReader(new InputStreamReader(fis)); FileWriter fstream = new FileWriter(apiFeTempConfig, true); BufferedWriter out = new BufferedWriter(fstream); String aLine = null; while ((aLine = in.readLine()) != null) { if (aLine.equals("apife:")) { aLine = aLine.replace("apife:", "server:"); } out.write(aLine); out.newLine(); } in.close(); out.close(); // This approach can be simplified once the apife container is gone, but for the time being // it expects the first arg to be the string "server". 
String[] apiFeArgs = new String[2]; apiFeArgs[0] = "server"; apiFeArgs[1] = apiFeTempConfig.getAbsolutePath(); ApiFeService.setupApiFeConfigurationForServerCommand(apiFeArgs); ApiFeService.addServiceHost(xenonHost); ApiFeService.setSSLContext(sslContext); ApiFeService apiFeService = new ApiFeService(); apiFeService.run(apiFeArgs); apiFeTempConfig.deleteOnExit(); LocalApiClient localApiClient = apiFeService.getInjector().getInstance(LocalApiClient.class); xenonHost.setApiClient(localApiClient); // in the non-auth enabled scenario we need to be able to accept any self-signed certificate if (!deployerConfig.getDeployerContext().isAuthEnabled()) { KeyStoreUtils.acceptAllCerts(KeyStoreUtils.THRIFT_PROTOCOL); } Runtime.getRuntime().addShutdownHook(new Thread() { @Override public void run() { logger.info("Shutting down"); xenonHost.stop(); logger.info("Done"); LoggingFactory.detachAndStop(); } }); } catch (Exception e) { logger.error("Failed to start photon controller ", e); throw e; } }
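The configuration rewrite above copies the file line by line and uses replace to rename one YAML section. A reduced sketch of that pattern; the file names here are hypothetical, and a real run would need the input file to exist:

import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;

public class ConfigRewriteDemo {
    public static void main(String[] args) throws IOException {
        try (BufferedReader in = new BufferedReader(new FileReader("photon-controller-config.yml"));
             BufferedWriter out = new BufferedWriter(new FileWriter("apife-config.yml"))) {
            String line;
            while ((line = in.readLine()) != null) {
                // Rename the "apife:" section to the "server:" section dropwizard expects.
                if (line.equals("apife:")) {
                    line = line.replace("apife:", "server:");
                }
                out.write(line);
                out.newLine();
            }
        }
    }
}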
From source file:edu.cuhk.hccl.evaluation.EvaluationApp.java
public static void main(String[] args) throws IOException, TasteException { File realFile = new File(args[0]); File estimateFile = new File(args[1]); // Build real-rating map Map<String, long[]> realMap = buildRatingMap(realFile); // Build estimate-rating map Map<String, long[]> estimateMap = buildRatingMap(estimateFile); // Compare realMap with estimateMap Map<Integer, List<Double>> realList = new HashMap<Integer, List<Double>>(); Map<Integer, List<Double>> estimateList = new HashMap<Integer, List<Double>>(); // Use set to store non-duplicate pairs only Set<String> noRatingList = new HashSet<String>(); for (String pair : realMap.keySet()) { long[] realRatings = realMap.get(pair); long[] estimateRatings = estimateMap.get(pair); if (realRatings == null || estimateRatings == null) continue; for (int i = 0; i < realRatings.length; i++) { long real = realRatings[i]; long estimate = estimateRatings[i]; // continue if the aspect rating can not be estimated due to incomplete reviews if (estimate <= 0) { noRatingList.add(pair.replace("@", "\t")); continue; }//from ww w. j a va 2 s .c om if (real > 0 && estimate > 0) { if (!realList.containsKey(i)) realList.put(i, new ArrayList<Double>()); realList.get(i).add((double) real); if (!estimateList.containsKey(i)) estimateList.put(i, new ArrayList<Double>()); estimateList.get(i).add((double) estimate); } } } System.out.println("[INFO] RMSE, MAE for estimate ratings: "); System.out.println("------------------------------"); System.out.println("Index \t RMSE \t MAE"); for (int i = 1; i < 6; i++) { double rmse = Metric.computeRMSE(realList.get(i), estimateList.get(i)); double mae = Metric.computeMAE(realList.get(i), estimateList.get(i)); System.out.printf("%d \t %.3f \t %.3f \n", i, rmse, mae); } System.out.println("------------------------------"); if (noRatingList.size() > 0) { String noRatingFileName = "evaluation-no-ratings.txt"; FileUtils.writeLines(new File(noRatingFileName), noRatingList, false); System.out.println("[INFO] User-item pairs with no ratings are saved in file: " + noRatingFileName); } else { System.out.println("[INFO] All user-item pairs have ratings."); } }
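In this evaluation code, user-item pairs are stored as a single string joined with "@", and replace turns that key back into a tab-separated line before it is written to the no-rating report. A sketch with a made-up key:

public class PairKeyDemo {
    public static void main(String[] args) {
        String pair = "user42@item17";            // hypothetical composite key
        String line = pair.replace("@", "\t");    // user42<TAB>item17
        System.out.println(line);
    }
}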
From source file:com.vwf5.base.utils.DataUtil.java
public static void main(String[] args) {
    String a = ";;;;;;;";
    System.out.println(a.replace(";;", ";").replace(";;", ""));
}
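A short trace of the chained calls above may help: each replace works on the result of the previous one, scanning left to right, and the replacement text produced within one call is not rescanned by that same call.

public class ChainedReplaceDemo {
    public static void main(String[] args) {
        String a = ";;;;;;;";                    // 7 semicolons
        String step1 = a.replace(";;", ";");     // pairs collapse left to right -> ";;;;"
        String step2 = step1.replace(";;", "");  // the remaining pairs are removed -> ""
        System.out.println("[" + step2 + "]");   // prints []
    }
}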
From source file:DIA_Umpire_SE.DIA_Umpire_SE.java
/** * @param args the command line arguments DIA_Umpire parameterfile *///from w ww . ja va 2s. com public static void main(String[] args) throws InterruptedException, FileNotFoundException, ExecutionException, IOException, ParserConfigurationException, DataFormatException, SAXException, Exception { System.out.println( "================================================================================================="); System.out.println( "DIA-Umpire singal extraction analysis (version: " + UmpireInfo.GetInstance().Version + ")"); if (args.length < 2 || args.length > 3) { System.out.println( "command format error, the correct format is: java -jar -Xmx8G DIA_Umpire_SE.jar mzMXL_file diaumpire_se.params"); System.out.println( "To fix DIA setting, use : java -jar -Xmx8G DIA_Umpire_SE.jar mzMXL_file diaumpire_se.params -f"); return; } try { //Define logger level for console ConsoleLogger.SetConsoleLogger(Level.INFO); //Define logger level and file path for text log file ConsoleLogger.SetFileLogger(Level.DEBUG, FilenameUtils.getFullPath(args[0]) + "diaumpire_se.log"); } catch (Exception e) { } boolean Fix = false; boolean Resume = false; if (args.length == 3 && args[2].equals("-f")) { Fix = true; } String parameterfile = args[1]; String MSFilePath = args[0]; Logger.getRootLogger().info("Version: " + UmpireInfo.GetInstance().Version); Logger.getRootLogger().info("Parameter file:" + parameterfile); Logger.getRootLogger().info("Spectra file:" + MSFilePath); BufferedReader reader = new BufferedReader(new FileReader(parameterfile)); String line = ""; InstrumentParameter param = new InstrumentParameter(InstrumentParameter.InstrumentType.TOF5600); param.DetermineBGByID = false; param.EstimateBG = true; int NoCPUs = 2; SpectralDataType.DataType dataType = SpectralDataType.DataType.DIA_F_Window; String WindowType = ""; int WindowSize = 25; ArrayList<XYData> WindowList = new ArrayList<>(); boolean ExportPrecursorPeak = false; boolean ExportFragmentPeak = false; //<editor-fold defaultstate="collapsed" desc="Read parameter file"> while ((line = reader.readLine()) != null) { Logger.getRootLogger().info(line); if (!"".equals(line) && !line.startsWith("#")) { //System.out.println(line); if (line.equals("==window setting begin")) { while (!(line = reader.readLine()).equals("==window setting end")) { if (!"".equals(line)) { WindowList.add(new XYData(Float.parseFloat(line.split("\t")[0]), Float.parseFloat(line.split("\t")[1]))); } } continue; } if (line.split("=").length < 2) { continue; } String type = line.split("=")[0].trim(); if (type.startsWith("para.")) { type = type.replace("para.", "SE."); } String value = line.split("=")[1].trim(); switch (type) { case "Thread": { NoCPUs = Integer.parseInt(value); break; } case "ExportPrecursorPeak": { ExportPrecursorPeak = Boolean.parseBoolean(value); break; } case "ExportFragmentPeak": { ExportFragmentPeak = Boolean.parseBoolean(value); break; } //<editor-fold defaultstate="collapsed" desc="instrument parameters"> case "RPmax": { param.PrecursorRank = Integer.parseInt(value); break; } case "RFmax": { param.FragmentRank = Integer.parseInt(value); break; } case "CorrThreshold": { param.CorrThreshold = Float.parseFloat(value); break; } case "DeltaApex": { param.ApexDelta = Float.parseFloat(value); break; } case "RTOverlap": { param.RTOverlapThreshold = Float.parseFloat(value); break; } case "BoostComplementaryIon": { param.BoostComplementaryIon = Boolean.parseBoolean(value); break; } case "AdjustFragIntensity": { param.AdjustFragIntensity = Boolean.parseBoolean(value); 
break; } case "SE.MS1PPM": { param.MS1PPM = Float.parseFloat(value); break; } case "SE.MS2PPM": { param.MS2PPM = Float.parseFloat(value); break; } case "SE.SN": { param.SNThreshold = Float.parseFloat(value); break; } case "SE.MS2SN": { param.MS2SNThreshold = Float.parseFloat(value); break; } case "SE.MinMSIntensity": { param.MinMSIntensity = Float.parseFloat(value); break; } case "SE.MinMSMSIntensity": { param.MinMSMSIntensity = Float.parseFloat(value); break; } case "SE.MinRTRange": { param.MinRTRange = Float.parseFloat(value); break; } case "SE.MaxNoPeakCluster": { param.MaxNoPeakCluster = Integer.parseInt(value); param.MaxMS2NoPeakCluster = Integer.parseInt(value); break; } case "SE.MinNoPeakCluster": { param.MinNoPeakCluster = Integer.parseInt(value); param.MinMS2NoPeakCluster = Integer.parseInt(value); break; } case "SE.MinMS2NoPeakCluster": { param.MinMS2NoPeakCluster = Integer.parseInt(value); break; } case "SE.MaxCurveRTRange": { param.MaxCurveRTRange = Float.parseFloat(value); break; } case "SE.Resolution": { param.Resolution = Integer.parseInt(value); break; } case "SE.RTtol": { param.RTtol = Float.parseFloat(value); break; } case "SE.NoPeakPerMin": { param.NoPeakPerMin = Integer.parseInt(value); break; } case "SE.StartCharge": { param.StartCharge = Integer.parseInt(value); break; } case "SE.EndCharge": { param.EndCharge = Integer.parseInt(value); break; } case "SE.MS2StartCharge": { param.MS2StartCharge = Integer.parseInt(value); break; } case "SE.MS2EndCharge": { param.MS2EndCharge = Integer.parseInt(value); break; } case "SE.NoMissedScan": { param.NoMissedScan = Integer.parseInt(value); break; } case "SE.Denoise": { param.Denoise = Boolean.valueOf(value); break; } case "SE.EstimateBG": { param.EstimateBG = Boolean.valueOf(value); break; } case "SE.RemoveGroupedPeaks": { param.RemoveGroupedPeaks = Boolean.valueOf(value); break; } case "SE.MinFrag": { param.MinFrag = Integer.parseInt(value); break; } case "SE.IsoPattern": { param.IsoPattern = Float.valueOf(value); break; } case "SE.StartRT": { param.startRT = Float.valueOf(value); break; } case "SE.EndRT": { param.endRT = Float.valueOf(value); break; } case "SE.RemoveGroupedPeaksRTOverlap": { param.RemoveGroupedPeaksRTOverlap = Float.valueOf(value); break; } case "SE.RemoveGroupedPeaksCorr": { param.RemoveGroupedPeaksCorr = Float.valueOf(value); break; } case "SE.MinMZ": { param.MinMZ = Float.valueOf(value); break; } case "SE.MinPrecursorMass": { param.MinPrecursorMass = Float.valueOf(value); break; } case "SE.MaxPrecursorMass": { param.MaxPrecursorMass = Float.valueOf(value); break; } case "SE.IsoCorrThreshold": { param.IsoCorrThreshold = Float.valueOf(value); break; } case "SE.MassDefectFilter": { param.MassDefectFilter = Boolean.parseBoolean(value); break; } case "SE.MassDefectOffset": { param.MassDefectOffset = Float.valueOf(value); break; } //</editor-fold>//</editor-fold> case "WindowType": { WindowType = value; switch (WindowType) { case "SWATH": { dataType = SpectralDataType.DataType.DIA_F_Window; break; } case "V_SWATH": { dataType = SpectralDataType.DataType.DIA_V_Window; break; } case "MSX": { dataType = SpectralDataType.DataType.MSX; break; } case "MSE": { dataType = SpectralDataType.DataType.MSe; break; } } break; } case "WindowSize": { WindowSize = Integer.parseInt(value); break; } } } } //</editor-fold> try { File MSFile = new File(MSFilePath); if (MSFile.exists()) { long time = System.currentTimeMillis(); Logger.getRootLogger().info( 
"================================================================================================="); Logger.getRootLogger().info("Processing " + MSFilePath + "...."); //Initialize a DIA file data structure DIAPack DiaFile = new DIAPack(MSFile.getAbsolutePath(), NoCPUs); DiaFile.Resume = Resume; DiaFile.SetDataType(dataType); DiaFile.SetParameter(param); //Set DIA isolation window setting if (dataType == SpectralDataType.DataType.DIA_F_Window) { DiaFile.SetWindowSize(WindowSize); } else if (dataType == SpectralDataType.DataType.DIA_V_Window) { for (XYData window : WindowList) { DiaFile.AddVariableWindow(window); } } DiaFile.SaveDIASetting(); DiaFile.SaveParams(); if (Fix) { DiaFile.FixScanidx(); return; } DiaFile.ExportPrecursorPeak = ExportPrecursorPeak; DiaFile.ExportFragmentPeak = ExportFragmentPeak; Logger.getRootLogger().info("Module A: Signal extraction"); //Start DIA signal extraction process to generate pseudo MS/MS files DiaFile.process(); time = System.currentTimeMillis() - time; Logger.getRootLogger().info(MSFilePath + " processed time:" + String.format("%d hour, %d min, %d sec", TimeUnit.MILLISECONDS.toHours(time), TimeUnit.MILLISECONDS.toMinutes(time) - TimeUnit.HOURS.toMinutes(TimeUnit.MILLISECONDS.toHours(time)), TimeUnit.MILLISECONDS.toSeconds(time) - TimeUnit.MINUTES.toSeconds(TimeUnit.MILLISECONDS.toMinutes(time)))); } else { throw new RuntimeException("file: " + MSFile + "? does not exist!"); } Logger.getRootLogger().info("Job complete"); Logger.getRootLogger().info( "================================================================================================="); } catch (Exception e) { Logger.getRootLogger().error(ExceptionUtils.getStackTrace(e)); throw e; } }
From source file:com.bright.json.JSonRequestor.java
public static void main(String[] args) { String fileBasename = null;//from w w w. ja va 2 s .co m String[] zipArgs = null; JFileChooser chooser = new JFileChooser("/Users/panos/STR_GRID"); try { chooser.setCurrentDirectory(new java.io.File(".")); chooser.setDialogTitle("Select the input directory"); chooser.setFileSelectionMode(JFileChooser.DIRECTORIES_ONLY); chooser.setAcceptAllFileFilterUsed(false); if (chooser.showOpenDialog(null) == JFileChooser.APPROVE_OPTION) { System.out.println("getCurrentDirectory(): " + chooser.getCurrentDirectory()); System.out.println("getSelectedFile() : " + chooser.getSelectedFile()); // String fileBasename = // chooser.getSelectedFile().toString().substring(chooser.getSelectedFile().toString().lastIndexOf(File.separator)+1,chooser.getSelectedFile().toString().lastIndexOf(".")); fileBasename = chooser.getSelectedFile().toString() .substring(chooser.getSelectedFile().toString().lastIndexOf(File.separator) + 1); System.out.println("Base name: " + fileBasename); zipArgs = new String[] { chooser.getSelectedFile().toString(), chooser.getCurrentDirectory().toString() + File.separator + fileBasename + ".zip" }; com.bright.utils.ZipFile.main(zipArgs); } else { System.out.println("No Selection "); } } catch (Exception e) { System.out.println(e.toString()); } JTextField uiHost = new JTextField("ucs-head.brightcomputing.com"); // TextPrompt puiHost = new // TextPrompt("hadoop.brightcomputing.com",uiHost); JTextField uiUser = new JTextField("nexus"); // TextPrompt puiUser = new TextPrompt("nexus", uiUser); JTextField uiPass = new JPasswordField("system"); // TextPrompt puiPass = new TextPrompt("", uiPass); JTextField uiWdir = new JTextField("/home/nexus/pp1234"); // TextPrompt puiWdir = new TextPrompt("/home/nexus/nexus_workdir", // uiWdir); JTextField uiOut = new JTextField("foo"); // TextPrompt puiOut = new TextPrompt("foobar123", uiOut); JPanel myPanel = new JPanel(new GridLayout(5, 1)); myPanel.add(new JLabel("Bright HeadNode hostname:")); myPanel.add(uiHost); // myPanel.add(Box.createHorizontalStrut(1)); // a spacer myPanel.add(new JLabel("Username:")); myPanel.add(uiUser); myPanel.add(new JLabel("Password:")); myPanel.add(uiPass); myPanel.add(new JLabel("Working Directory:")); myPanel.add(uiWdir); // myPanel.add(Box.createHorizontalStrut(1)); // a spacer myPanel.add(new JLabel("Output Study Name ( -s ):")); myPanel.add(uiOut); int result = JOptionPane.showConfirmDialog(null, myPanel, "Please fill in all the fields.", JOptionPane.OK_CANCEL_OPTION); if (result == JOptionPane.OK_OPTION) { System.out.println("Input received."); } String rfile = uiWdir.getText(); String rhost = uiHost.getText(); String ruser = uiUser.getText(); String rpass = uiPass.getText(); String nexusOut = uiOut.getText(); String[] myarg = new String[] { zipArgs[1], ruser + "@" + rhost + ":" + rfile, nexusOut, fileBasename }; com.bright.utils.ScpTo.main(myarg); String cmURL = "https://" + rhost + ":8081/json"; List<Cookie> cookies = doLogin(ruser, rpass, cmURL); chkVersion(cmURL, cookies); jobSubmit myjob = new jobSubmit(); jobSubmit.jobObject myjobObj = new jobSubmit.jobObject(); myjob.setService("cmjob"); myjob.setCall("submitJob"); myjobObj.setQueue("defq"); myjobObj.setJobname("myNexusJob"); myjobObj.setAccount(ruser); myjobObj.setRundirectory(rfile); myjobObj.setUsername(ruser); myjobObj.setGroupname("cmsupport"); myjobObj.setPriority("1"); myjobObj.setStdinfile(rfile + "/stdin-mpi"); myjobObj.setStdoutfile(rfile + "/stdout-mpi"); myjobObj.setStderrfile(rfile + "/stderr-mpi"); 
myjobObj.setResourceList(Arrays.asList("")); myjobObj.setDependencies(Arrays.asList("")); myjobObj.setMailNotify(false); myjobObj.setMailOptions("ALL"); myjobObj.setMaxWallClock("00:10:00"); myjobObj.setNumberOfProcesses(1); myjobObj.setNumberOfNodes(1); myjobObj.setNodes(Arrays.asList("")); myjobObj.setCommandLineInterpreter("/bin/bash"); myjobObj.setUserdefined(Arrays.asList("cd " + rfile, "date", "pwd")); myjobObj.setExecutable("mpirun"); myjobObj.setArguments("-env I_MPI_FABRICS shm:tcp " + Constants.NEXUSSIM_EXEC + " -mpi -c " + rfile + "/" + fileBasename + "/" + fileBasename + " -s " + rfile + "/" + fileBasename + "/" + nexusOut); myjobObj.setModules(Arrays.asList("shared", "nexus", "intel-mpi/64")); myjobObj.setDebug(false); myjobObj.setBaseType("Job"); myjobObj.setIsSlurm(true); myjobObj.setUniqueKey(0); myjobObj.setModified(false); myjobObj.setToBeRemoved(false); myjobObj.setChildType("SlurmJob"); myjobObj.setJobID("Nexus test"); // Map<String,jobSubmit.jobObject > mymap= new HashMap<String, // jobSubmit.jobObject>(); // mymap.put("Slurm",myjobObj); ArrayList<Object> mylist = new ArrayList<Object>(); mylist.add("slurm"); mylist.add(myjobObj); myjob.setArgs(mylist); GsonBuilder builder = new GsonBuilder(); builder.enableComplexMapKeySerialization(); // Gson g = new Gson(); Gson g = builder.create(); String json2 = g.toJson(myjob); // To be used from a real console and not Eclipse Delete.main(zipArgs[1]); String message = JSonRequestor.doRequest(json2, cmURL, cookies); @SuppressWarnings("resource") Scanner resInt = new Scanner(message).useDelimiter("[^0-9]+"); int jobID = resInt.nextInt(); System.out.println("Job ID: " + jobID); JOptionPane optionPane = new JOptionPane(message); JDialog myDialog = optionPane.createDialog(null, "CMDaemon response: "); myDialog.setModal(false); myDialog.setVisible(true); ArrayList<Object> mylist2 = new ArrayList<Object>(); mylist2.add("slurm"); String JobID = Integer.toString(jobID); mylist2.add(JobID); myjob.setArgs(mylist2); myjob.setService("cmjob"); myjob.setCall("getJob"); String json3 = g.toJson(myjob); System.out.println("JSON Request No. 
4 " + json3); cmReadFile readfile = new cmReadFile(); readfile.setService("cmmain"); readfile.setCall("readFile"); readfile.setUserName(ruser); int fileByteIdx = 1; readfile.setPath(rfile + "/" + fileBasename + "/" + fileBasename + ".sum@+" + fileByteIdx); String json4 = g.toJson(readfile); String monFile = JSonRequestor.doRequest(json4, cmURL, cookies).replaceAll("^\"|\"$", ""); if (monFile.startsWith("Unable")) { monFile = ""; } else { fileByteIdx += countLines(monFile, "\\\\n"); System.out.println(""); } StringBuffer output = new StringBuffer(); // Get the correct Line Separator for the OS (CRLF or LF) String nl = System.getProperty("line.separator"); String filename = chooser.getCurrentDirectory().toString() + File.separator + fileBasename + ".sum.txt"; System.out.println("Local monitoring file: " + filename); output.append(monFile.replaceAll("\\\\n", System.getProperty("line.separator"))); String getJobJSON = JSonRequestor.doRequest(json3, cmURL, cookies); jobGet getJobObj = new Gson().fromJson(getJobJSON, jobGet.class); System.out.println("Job " + jobID + " status: " + getJobObj.getStatus().toString()); while (getJobObj.getStatus().toString().equals("RUNNING") || getJobObj.getStatus().toString().equals("COMPLETING")) { try { getJobJSON = JSonRequestor.doRequest(json3, cmURL, cookies); getJobObj = new Gson().fromJson(getJobJSON, jobGet.class); System.out.println("Job " + jobID + " status: " + getJobObj.getStatus().toString()); readfile.setPath(rfile + "/" + fileBasename + "/" + fileBasename + ".sum@+" + fileByteIdx); json4 = g.toJson(readfile); monFile = JSonRequestor.doRequest(json4, cmURL, cookies).replaceAll("^\"|\"$", ""); if (monFile.startsWith("Unable")) { monFile = ""; } else { output.append(monFile.replaceAll("\\\\n", System.getProperty("line.separator"))); System.out.println("FILE INDEX:" + fileByteIdx); fileByteIdx += countLines(monFile, "\\\\n"); } Thread.sleep(Constants.STATUS_CHECK_INTERVAL); } catch (InterruptedException ex) { Thread.currentThread().interrupt(); } } Gson gson_nice = new GsonBuilder().setPrettyPrinting().create(); String json_out = gson_nice.toJson(getJobJSON); System.out.println(json_out); System.out.println("JSON Request No. 
5 " + json4); readfile.setPath(rfile + "/" + fileBasename + "/" + fileBasename + ".sum@+" + fileByteIdx); json4 = g.toJson(readfile); monFile = JSonRequestor.doRequest(json4, cmURL, cookies).replaceAll("^\"|\"$", ""); if (monFile.startsWith("Unable")) { monFile = ""; } else { output.append(monFile.replaceAll("\\\\n", System.getProperty("line.separator"))); fileByteIdx += countLines(monFile, "\\\\n"); } System.out.println("FILE INDEX:" + fileByteIdx); /* * System.out.print("Monitoring file: " + monFile.replaceAll("\\n", * System.getProperty("line.separator"))); try { * FileUtils.writeStringToFile( new * File(chooser.getCurrentDirectory().toString() + File.separator + * fileBasename + ".sum.txt"), monFile.replaceAll("\\n", * System.getProperty("line.separator"))); } catch (IOException e) { * * e.printStackTrace(); } */ if (getJobObj.getStatus().toString().equals("COMPLETED")) { String[] zipArgs_from = new String[] { chooser.getSelectedFile().toString(), chooser.getCurrentDirectory().toString() + File.separator + fileBasename + "_out.zip" }; String[] myarg_from = new String[] { ruser + "@" + rhost + ":" + rfile + "/" + fileBasename + "_out.zip", zipArgs_from[1], rfile, fileBasename }; com.bright.utils.ScpFrom.main(myarg_from); JOptionPane optionPaneS = new JOptionPane("Job execution completed without errors!"); JDialog myDialogS = optionPaneS.createDialog(null, "Job status: "); myDialogS.setModal(false); myDialogS.setVisible(true); } else { JOptionPane optionPaneF = new JOptionPane("Job execution FAILED!"); JDialog myDialogF = optionPaneF.createDialog(null, "Job status: "); myDialogF.setModal(false); myDialogF.setVisible(true); } try { System.out.println("Local monitoring file: " + filename); BufferedWriter out = new BufferedWriter(new FileWriter(filename)); String outText = output.toString(); String newString = outText.replace("\\\\n", nl); System.out.println("Text: " + outText); out.write(newString); out.close(); rmDuplicateLines.main(filename); } catch (IOException e) { e.printStackTrace(); } doLogout(cmURL, cookies); System.exit(0); }
From source file:eu.annocultor.converters.geonames.GeonamesDumpToRdf.java
public static void main(String[] args) throws Exception { File root = new File("input_source"); // load country-continent match countryToContinent/*from w w w . j a v a 2 s .c o m*/ .load((new GeonamesDumpToRdf()).getClass().getResourceAsStream("/country-to-continent.properties")); // creating files Map<String, BufferedWriter> files = new HashMap<String, BufferedWriter>(); Map<String, Boolean> started = new HashMap<String, Boolean>(); for (Object string : countryToContinent.keySet()) { String continent = countryToContinent.getProperty(string.toString()); File dir = new File(root, continent); if (!dir.exists()) { dir.mkdir(); } files.put(string.toString(), new BufferedWriter(new OutputStreamWriter( new FileOutputStream(new File(root, continent + "/" + string + ".rdf")), "UTF-8"))); System.out.println(continent + "/" + string + ".rdf"); started.put(string.toString(), false); } System.out.println(started); Pattern countryPattern = Pattern .compile("<inCountry rdf\\:resource\\=\"http\\://www\\.geonames\\.org/countries/\\#(\\w\\w)\"/>"); long counter = 0; LineIterator it = FileUtils.lineIterator(new File(root, "all-geonames-rdf.txt"), "UTF-8"); try { while (it.hasNext()) { String text = it.nextLine(); if (text.startsWith("http://sws.geonames")) continue; // progress counter++; if (counter % 100000 == 0) { System.out.print("*"); } // System.out.println(counter); // get country String country = null; Matcher matcher = countryPattern.matcher(text); if (matcher.find()) { country = matcher.group(1); } // System.out.println(country); if (country == null) country = "null"; text = text.replace("<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?><rdf:RDF", "<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"yes\"?><rdf:RDF"); if (started.get(country) == null) throw new Exception("Unknow country " + country); if (started.get(country).booleanValue()) { // remove RDF opening text = text.substring(text.indexOf("<rdf:RDF ")); text = text.substring(text.indexOf(">") + 1); } // remove RDF ending text = text.substring(0, text.indexOf("</rdf:RDF>")); files.get(country).append(text + "\n"); if (!started.get(country).booleanValue()) { // System.out.println("Started with country " + country); } started.put(country, true); } } finally { LineIterator.closeQuietly(it); } for (Object string : countryToContinent.keySet()) { boolean hasStarted = started.get(string.toString()).booleanValue(); if (hasStarted) { BufferedWriter bf = files.get(string.toString()); bf.append("</rdf:RDF>"); bf.flush(); bf.close(); } } return; }
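Each geonames record begins with its own XML declaration, and replace swaps standalone="no" for standalone="yes" before the record is split across per-country files. The call in isolation, with a shortened made-up record:

public class XmlDeclarationDemo {
    public static void main(String[] args) {
        String text = "<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?><rdf:RDF>...</rdf:RDF>";
        text = text.replace("<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"no\"?><rdf:RDF",
                "<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"yes\"?><rdf:RDF");
        System.out.println(text);
    }
}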
From source file:edu.wpi.khufnagle.webimagemanager.WebImageManager.java
/** * Defines information for the lighthouses, then runs the * photograph-collection process./*from w ww. ja v a 2s. c o m*/ * @param args Command-line arguments for this program (not used in this * implementation) */ // Auto-boxing done "on the fly" to show progress of downloading images @SuppressWarnings("boxing") public static void main(final String[] args) { final long startTime = System.nanoTime(); System.out.println("***BEGIN PHOTO TRANSFER PROCESS***"); // Add data for lighthouses (next 375 lines or so) final List<LighthouseInfo> lighthouseData = new ArrayList<LighthouseInfo>(); /* * lighthouseData.add(new LighthouseInfo("Statue of Liberty", 40.689348, * -74.044726)); */// Statue of Liberty = 2080 photos w/out restrictions lighthouseData.add(new LighthouseInfo("Portland Head Light", 43.623104, -70.207867)); lighthouseData.add(new LighthouseInfo("Pemaquid Point Light", 43.836970, -69.505997)); lighthouseData.add(new LighthouseInfo("Five Mile Point (New Haven Harbor) Light", 41.248958, -72.903766)); lighthouseData.add(new LighthouseInfo("Cape Neddick (Nubble) Light", 43.165211, -70.591102)); lighthouseData.add(new LighthouseInfo("Portland Breakwater Light", 43.655516, -70.234813)); lighthouseData.add(new LighthouseInfo("Beavertail Light", 41.449368, -71.399372)); lighthouseData.add(new LighthouseInfo("Bass Harbor Head Light", 44.221976, -68.337214)); lighthouseData.add(new LighthouseInfo("Nobska Point Light", 41.515792, -70.655116)); lighthouseData.add(new LighthouseInfo("Spring Point Ledge Light", 43.652108, -70.223922)); lighthouseData.add(new LighthouseInfo("Gay Head Light", 41.348450, -70.834956)); lighthouseData.add(new LighthouseInfo("Derby Wharf Light", 42.516566, -70.883536)); lighthouseData.add(new LighthouseInfo("Rockland Breakwater Light", 44.104006, -69.077453)); lighthouseData.add(new LighthouseInfo("Sandy Neck Light", 41.722647, -70.280927)); lighthouseData.add(new LighthouseInfo("Marblehead Light", 42.505411, -70.833708)); lighthouseData.add(new LighthouseInfo("Portsmouth Harbor Light", 43.071061, -70.708634)); lighthouseData.add(new LighthouseInfo("Highland Light", 42.039122, -70.062025)); lighthouseData.add(new LighthouseInfo("Cape Elizabeth Light", 43.566058, -70.200042)); lighthouseData.add(new LighthouseInfo("Marshall Point Light", 43.917406, -69.261222)); lighthouseData.add(new LighthouseInfo("Chatham Light", 41.671407, -69.949884)); lighthouseData.add(new LighthouseInfo("Block Island Southeast Light", 41.153412, -71.552117)); lighthouseData.add(new LighthouseInfo("Edgartown Light", 41.390863, -70.503057)); lighthouseData.add(new LighthouseInfo("Watch Hill Light", 41.303884, -71.858575)); lighthouseData.add(new LighthouseInfo("Nauset Light", 41.858305, -69.951631)); lighthouseData .add(new LighthouseInfo("Fayerweather Island (Black Rock Harbor) Light", 41.142380, -73.217409)); lighthouseData.add(new LighthouseInfo("Owls Head Light", 44.092138, -69.044105)); lighthouseData.add(new LighthouseInfo("Point Judith Light", 41.361035, -71.481402)); lighthouseData.add(new LighthouseInfo("Sankaty Head Light", 41.284379, -69.966244)); lighthouseData.add(new LighthouseInfo("Eastern Point Light", 42.580229, -70.664537)); lighthouseData.add(new LighthouseInfo("Fort Pickering Light", 42.526473, -70.866465)); lighthouseData.add(new LighthouseInfo("Wood Island Light", 43.456788, -70.328976)); lighthouseData.add(new LighthouseInfo("Stonington Harbor Light", 41.328780, -71.905486)); lighthouseData.add(new LighthouseInfo("West Quoddy Head Light", 44.815073, -66.950742)); 
lighthouseData.add(new LighthouseInfo("Fort Point Light", 44.467265, -68.811717)); lighthouseData.add(new LighthouseInfo("Annisquam Light", 42.661874, -70.681488)); lighthouseData.add(new LighthouseInfo("Newport Harbor Light", 41.493299, -71.327038)); lighthouseData.add(new LighthouseInfo("Long Point Light", 42.033117, -70.168651)); lighthouseData.add(new LighthouseInfo("Castle Hill Light", 41.462116, -71.362919)); lighthouseData.add(new LighthouseInfo("Brant Point Light", 41.289918, -70.090287)); lighthouseData.add(new LighthouseInfo("Stratford Point Light", 41.151984, -73.103276)); lighthouseData.add(new LighthouseInfo("Boston Light", 42.327925, -70.890101)); lighthouseData.add(new LighthouseInfo("Lynde Point Light", 41.271452, -72.343142)); lighthouseData.add(new LighthouseInfo("Scituate Light", 42.204748, -70.715814)); lighthouseData.add(new LighthouseInfo("Prospect Harbor Light", 44.403285, -68.012922)); lighthouseData.add(new LighthouseInfo("Wood End Light", 42.021223, -70.193502)); lighthouseData.add(new LighthouseInfo("Rose Island Light", 41.495477, -71.342742)); lighthouseData.add(new LighthouseInfo("Saybrook Breakwater Light", 41.263158, -72.342813)); lighthouseData.add(new LighthouseInfo("Great Point Light", 41.390096, -70.048234)); lighthouseData.add(new LighthouseInfo("Cape Poge Light", 41.418798, -70.451923)); lighthouseData.add(new LighthouseInfo("Monhegan Light", 43.764779, -69.316204)); lighthouseData.add(new LighthouseInfo("Hendricks Head Light", 43.822589, -69.689761)); lighthouseData.add(new LighthouseInfo("Egg Rock Light", 44.354050, -68.138166)); lighthouseData.add(new LighthouseInfo("New London Ledge Light", 41.305826, -72.077448)); lighthouseData.add(new LighthouseInfo("Avery Point Lighthouse", 41.315245, -72.063579)); lighthouseData.add(new LighthouseInfo("Palmers Island Light", 41.626936, -70.909109)); lighthouseData.add(new LighthouseInfo("Cuckolds Light", 43.779663, -69.649982)); lighthouseData.add(new LighthouseInfo("Gull Rocks Light", 41.502451, -71.333140)); lighthouseData.add(new LighthouseInfo("Goat Island Light", 43.357826, -70.425109)); lighthouseData.add(new LighthouseInfo("East Chop Light", 41.470245, -70.567439)); lighthouseData.add(new LighthouseInfo("Neds Point Light", 41.650859, -70.795638)); lighthouseData.add(new LighthouseInfo("Sakonnet Point Light", 41.453090, -71.202382)); lighthouseData.add(new LighthouseInfo("Narrows (Bug) Light", 42.323137, -70.919158)); lighthouseData.add(new LighthouseInfo("Plum Island Light", 42.815119, -70.818981)); lighthouseData.add(new LighthouseInfo("Block Island North Light", 41.227639, -71.575811)); lighthouseData.add(new LighthouseInfo("Mount Desert Rock Light", 43.968582, -68.128306)); lighthouseData.add(new LighthouseInfo("Duxbury Pier Light", 41.987375, -70.648498)); lighthouseData.add(new LighthouseInfo("Long Island Head Light", 42.330197, -70.957712)); lighthouseData.add(new LighthouseInfo("Prudence Island Light", 41.605881, -71.303535)); lighthouseData.add(new LighthouseInfo("Plum Beach Light", 41.530248, -71.405202)); lighthouseData.add(new LighthouseInfo("Doubling Point Light", 43.882503, -69.806792)); lighthouseData.add(new LighthouseInfo("Dice Head Light", 44.382732, -68.819022)); lighthouseData.add(new LighthouseInfo("Ram Island Ledge Light", 43.631457, -70.187366)); lighthouseData.add(new LighthouseInfo("New London Harbor Light", 41.316619, -72.089743)); lighthouseData.add(new LighthouseInfo("Lime Rock Light", 41.477536, -71.325924)); lighthouseData.add(new LighthouseInfo("Ten Pound Island Light", 
42.601865, -70.665556)); lighthouseData.add(new LighthouseInfo("Bristol Ferry Light", 41.642842, -71.260319)); lighthouseData.add(new LighthouseInfo("Musselbed Shoals Light", 41.636261, -71.259958)); lighthouseData.add(new LighthouseInfo("Conimicut Light", 41.716969, -71.345106)); lighthouseData.add(new LighthouseInfo("Tongue Point Light", 41.166590, -73.177497)); lighthouseData.add(new LighthouseInfo("Bass River Light", 41.651746, -70.169473)); lighthouseData.add(new LighthouseInfo("Hospital Point Light", 42.546413, -70.856164)); lighthouseData.add(new LighthouseInfo("Newburyport Range Light", 42.811524, -70.864838)); lighthouseData.add(new LighthouseInfo("Dutch Island Light", 41.496702, -71.404299)); lighthouseData.add(new LighthouseInfo("Heron Neck Light", 44.025216, -68.861966)); lighthouseData.add(new LighthouseInfo("Pumpkin Island Light", 44.309166, -68.742876)); lighthouseData.add(new LighthouseInfo("Whaleback Light", 43.058744, -70.696306)); lighthouseData.add(new LighthouseInfo("Hyannis Harbor Light", 41.636267, -70.288439)); lighthouseData.add(new LighthouseInfo("Stage Harbor Light", 41.658692, -69.983689)); lighthouseData.add(new LighthouseInfo("Lovells Island Range Light", 42.332440, -70.930214)); lighthouseData.add(new LighthouseInfo("Hog Island Shoal Light", 41.632338, -71.273198)); lighthouseData.add(new LighthouseInfo("Ram Island Light", 43.803935, -69.599349)); lighthouseData.add(new LighthouseInfo("Bridgeport Harbor Light", 41.156718, -73.179950)); lighthouseData.add(new LighthouseInfo("Straitsmouth Island Light", 42.662236, -70.588157)); lighthouseData.add(new LighthouseInfo("Squirrel Point Light", 43.816520, -69.802402)); lighthouseData.add(new LighthouseInfo("Mayos Beach Light", 41.930755, -70.032097)); lighthouseData.add(new LighthouseInfo("Race Point Light", 42.062314, -70.243084)); lighthouseData.add(new LighthouseInfo("Point Gammon Light", 41.609647, -70.266196)); lighthouseData.add(new LighthouseInfo("Wings Neck Light", 41.680235, -70.661250)); lighthouseData.add(new LighthouseInfo("West Chop Light", 41.480806, -70.599796)); lighthouseData.add(new LighthouseInfo("Bird Island Light", 41.669295, -70.717341)); lighthouseData.add(new LighthouseInfo("Clarks Point Light", 41.593176, -70.901416)); lighthouseData.add(new LighthouseInfo("Thacher Island Light", 42.639168, -70.574759)); lighthouseData.add(new LighthouseInfo("White Island Light", 42.967228, -70.623249)); lighthouseData.add(new LighthouseInfo("Wickford Harbor Light", 41.572618, -71.436831)); lighthouseData.add(new LighthouseInfo("Whale Rock Light", 41.444597, -71.423584)); lighthouseData.add(new LighthouseInfo("Burnt Island Light", 43.825133, -69.640262)); lighthouseData.add(new LighthouseInfo("Rockland Harbor Southwest Light", 44.082720, -69.096310)); lighthouseData.add(new LighthouseInfo("Saddleback Ledge Light", 44.014232, -68.726461)); lighthouseData.add(new LighthouseInfo("Grindle Point Light", 44.281451, -68.942967)); lighthouseData.add(new LighthouseInfo("Winter Harbor Light", 44.361421, -68.087742)); lighthouseData.add(new LighthouseInfo("Peck's Ledge Light", 41.077298, -73.369811)); lighthouseData.add(new LighthouseInfo("Sheffield Island Light", 41.048251, -73.419931)); lighthouseData.add(new LighthouseInfo("Whitlocks Mill Light", 45.162793, -67.227395)); lighthouseData.add(new LighthouseInfo("Boon Island Light", 43.121183, -70.475845)); lighthouseData.add(new LighthouseInfo("Southwest Ledge Light", 41.234443, -72.912092)); lighthouseData.add(new LighthouseInfo("Broad Sound Channel Inner Range Light", 
42.326933, -70.984649)); lighthouseData.add(new LighthouseInfo("Spectacle Island Light", 42.326898, -70.984772)); lighthouseData.add(new LighthouseInfo("Deer Island Light", 42.339836, -70.954525)); lighthouseData.add(new LighthouseInfo("Nayatt Point Light", 41.725120, -71.338926)); lighthouseData.add(new LighthouseInfo("Doubling Point Range Lights", 43.882860, -69.795652)); lighthouseData.add(new LighthouseInfo("Burkehaven Light", 43.371669, -72.065869)); lighthouseData.add(new LighthouseInfo("Loon Island Light", 43.392123, -72.059977)); lighthouseData.add(new LighthouseInfo("Curtis Island Light", 44.201372, -69.048865)); lighthouseData.add(new LighthouseInfo("Butler Flats Light", 41.603775, -70.894556)); lighthouseData.add(new LighthouseInfo("Graves Light", 42.365098, -70.869191)); lighthouseData.add(new LighthouseInfo("Stamford Harbor Light", 41.013643, -73.542577)); lighthouseData.add(new LighthouseInfo("Billingsgate Light", 41.871624, -70.068982)); lighthouseData.add(new LighthouseInfo("Monomoy Point Light", 41.559310, -69.993650)); lighthouseData.add(new LighthouseInfo("Bishop & Clerks Light", 41.574154, -70.249963)); lighthouseData.add(new LighthouseInfo("Plymouth Light", 42.003737, -70.600565)); lighthouseData.add(new LighthouseInfo("Cleveland Ledge Light", 41.630927, -70.694201)); lighthouseData.add(new LighthouseInfo("Tarpaulin Cove Light", 41.468822, -70.757514)); lighthouseData.add(new LighthouseInfo("Minots Ledge Light", 42.269678, -70.759136)); lighthouseData.add(new LighthouseInfo("Dumpling Rock Light", 41.538167, -70.921427)); lighthouseData.add(new LighthouseInfo("Bakers Island Light", 42.536470, -70.785995)); lighthouseData.add(new LighthouseInfo("Cuttyhunk Light", 41.414391, -70.949558)); lighthouseData.add(new LighthouseInfo("Egg Rock Light", 42.433346, -70.897386)); lighthouseData.add(new LighthouseInfo("Ipswich Range Light", 42.685360, -70.766128)); lighthouseData.add(new LighthouseInfo("Borden Flats Light", 41.704450, -71.174395)); lighthouseData.add(new LighthouseInfo("Bullocks Point Light", 41.737740, -71.364179)); lighthouseData.add(new LighthouseInfo("Pomham Rocks Light", 41.777618, -71.369594)); lighthouseData.add(new LighthouseInfo("Sabin Point Light", 41.762010, -71.374234)); lighthouseData.add(new LighthouseInfo("Fuller Rock Light", 41.794055, -71.379720)); lighthouseData.add(new LighthouseInfo("Gould Island Light", 41.537826, -71.344804)); lighthouseData.add(new LighthouseInfo("Warwick Light", 41.667111, -71.378413)); lighthouseData.add(new LighthouseInfo("Sassafras Point Light", 41.802496, -71.390272)); lighthouseData.add(new LighthouseInfo("Conanicut Light", 41.573484, -71.371767)); lighthouseData.add(new LighthouseInfo("Poplar Point Light", 41.571053, -71.439189)); lighthouseData.add(new LighthouseInfo("Halfway Rock Light", 43.655873, -70.037402)); lighthouseData.add(new LighthouseInfo("Seguin Island Light", 43.707554, -69.758118)); lighthouseData.add(new LighthouseInfo("Pond Island Light", 43.740031, -69.770273)); lighthouseData.add(new LighthouseInfo("Perkins Island Light", 43.786764, -69.785256)); lighthouseData.add(new LighthouseInfo("Latimer Reef Light", 41.304503, -71.933292)); lighthouseData.add(new LighthouseInfo("Morgan Point Light", 41.316669, -71.989327)); lighthouseData.add(new LighthouseInfo("Franklin Island Light", 43.892184, -69.374842)); lighthouseData.add(new LighthouseInfo("Matinicus Rock Light", 43.783605, -68.854898)); lighthouseData.add(new LighthouseInfo("Tenants Harbor Light", 43.961107, -69.184877)); lighthouseData.add(new 
LighthouseInfo("Whitehead Light", 43.978706, -69.124285)); lighthouseData.add(new LighthouseInfo("Two Bush Island Light", 43.964239, -69.073942)); lighthouseData.add(new LighthouseInfo("Indian Island Light", 44.165470, -69.061004)); lighthouseData.add(new LighthouseInfo("Browns Head Light", 44.111774, -68.909482)); lighthouseData.add(new LighthouseInfo("Goose Rocks Light", 44.135394, -68.830526)); lighthouseData.add(new LighthouseInfo("Sperry Light", 41.221221, -72.423110)); lighthouseData.add(new LighthouseInfo("Isle au Haut Light", 44.064733, -68.651339)); lighthouseData.add(new LighthouseInfo("Deer Island Thorofare Light", 44.134338, -68.703202)); lighthouseData.add(new LighthouseInfo("Herrick Cove Light", 43.411136, -72.041706)); lighthouseData.add(new LighthouseInfo("Eagle Island Light", 44.217634, -68.767743)); lighthouseData.add(new LighthouseInfo("Burnt Coat Harbor Light", 44.134176, -68.447258)); lighthouseData.add(new LighthouseInfo("Faulkner's Island Light", 41.211612, -72.655088)); lighthouseData.add(new LighthouseInfo("Blue Hill Bay Light", 44.248746, -68.497880)); lighthouseData.add(new LighthouseInfo("Great Duck Island Light", 44.142193, -68.245836)); lighthouseData.add(new LighthouseInfo("Bear Island Light", 44.283485, -68.269858)); lighthouseData.add(new LighthouseInfo("Baker Island Light", 44.241266, -68.198923)); lighthouseData.add(new LighthouseInfo("Crabtree Ledge Light", 44.475613, -68.199383)); lighthouseData.add(new LighthouseInfo("Statford Shoal Light", 41.059557, -73.101394)); lighthouseData.add(new LighthouseInfo("Petit Manan Light", 44.367574, -67.864129)); lighthouseData.add(new LighthouseInfo("Penfield Reef Light", 41.117101, -73.222070)); lighthouseData.add(new LighthouseInfo("Narraguagus Light", 44.462467, -67.837844)); lighthouseData.add(new LighthouseInfo("Nash Island Light", 44.464305, -67.747299)); lighthouseData.add(new LighthouseInfo("Moose Peak Light", 44.474244, -67.533471)); lighthouseData.add(new LighthouseInfo("Green's Ledge Light", 41.041551, -73.443974)); lighthouseData.add(new LighthouseInfo("Libby Island Light", 44.568236, -67.367339)); lighthouseData.add(new LighthouseInfo("Great Captain Island Light", 40.982478, -73.623706)); lighthouseData.add(new LighthouseInfo("Avery Rock Light", 44.654358, -67.344137)); lighthouseData.add(new LighthouseInfo("Little River Light", 44.650873, -67.192325)); lighthouseData.add(new LighthouseInfo("Lubec Channel Light", 44.841955, -66.976731)); lighthouseData.add(new LighthouseInfo("St. Croix River Light", 45.128762, -67.133594)); /* * "Clean out" photo directories before beginning photo transfer process. 
*/ final File photosDir = new File("photos"); final File[] photoLighthouseDirsToDelete = photosDir.listFiles(); if (photoLighthouseDirsToDelete != null) { for (final File photoLighthouseDir : photoLighthouseDirsToDelete) { // Use Apache Commons IO (again) to recursively delete the directory // and all of the files within it if (photoLighthouseDir.exists() && photoLighthouseDir.isDirectory()) { try { FileUtils.deleteDirectory(photoLighthouseDir); System.out.println("Deleted directory \"" + photoLighthouseDir + "\" successfully."); } catch (final IOException ioe) { System.err.println( "Could not delete directory: \"" + photoLighthouseDir + "\" successfully!"); } } } } // Keep track of elapsed time long estimatedTime = System.nanoTime() - startTime; String elapsedTime = WebImageManager.calculateElapsedTime(estimatedTime); System.out.println("Estimated elapsed time: " + elapsedTime + "."); System.out.println(); /* * Keep track of total number of photographs transferred from Flickr * websites to disks across _all_ lighthouses */ int totalNumPhotosTransferred = 0; /* * Keep track of total number of photographs that _should_ be transferred * from Flickr for _all_ lighthouses */ int totalNumPhotos = 0; for (final LighthouseInfo lighthousePieceOfData : lighthouseData) { System.out.println("Processing photos of " + lighthousePieceOfData.getName() + "..."); /* * URL for accessing Flickr APIs. For a given lighthouse, this URL * provides an XML file in response that lists information about every * geotagged, Creative Commons-enabled photograph for that lighthouse * on Flickr. */ // GET Parameter Explanation: // method - Use the "search photos" method for the Flickr APIs // // api_key - A unique key that I use to get the results // // text - Find all lighthouses whose title, tags, or description // contains the word "lighthouse" // // license - Find all photos with a Creative Commons license _except_ // those that do not allow for modification on my part // // content_type - Find photos only (no videos) // // has_geo - Implicitly set to true; implies that all photos are // geotagged // // lat - The latitude of the center of the "search circle" // // lon - The longitude of the center of the "search circle" // // radius - The radius of the "search circle," in _kilometers_ (NOT // miles) // // extras - Also include a URL to the "raw" photo (small version) final String inputURLText = "http://ycpi.api.flickr.com/services/rest/?" 
+ "method=flickr.photos.search" + "&api_key=3ea8366b020383eb91f170c6f41748f5" + "&text=lighthouse" + "&license=1,2,4,5,7" + "&content_type=1" + "&has_geo" + "&lat=" + lighthousePieceOfData.getLatitude() + "&lon=" + lighthousePieceOfData.getLongitude() + "&radius=1" + "&extras=url_s"; // Output file where XML web response will be stored temporarily final String outputFileName = "output.xml"; /* * Convert the name of the lighthouse to a "computer friendly" version * with all lower-case letters and underscores replacing spaces, * apostrophes, and parenthesis */ String lighthouseName = lighthousePieceOfData.getName(); lighthouseName = lighthouseName.toLowerCase(); lighthouseName = lighthouseName.replace(' ', '_'); lighthouseName = lighthouseName.replace('\'', '_'); lighthouseName = lighthouseName.replace('(', '_'); lighthouseName = lighthouseName.replace(')', '_'); // Will contain the textual links to each "raw" photo website Set<String> rawPhotoURLs = new HashSet<String>(); // Make sure file for XML output does not exist at first // (don't want to use an old, incorrect version accidentally) final File outputXMLFile = new File(outputFileName); if (outputXMLFile.exists()) { outputXMLFile.delete(); } System.out.println("Cleaned output XML file containing photo URLs on disk successfully."); /* * Access the list of photographs for a given lighthouse and copy them * to the XML file on disk */ final WebDataExtractor extractor = new WebDataExtractor(inputURLText, outputFileName); System.out.println("Looking for XML file containing lighthosue photo information..."); extractor.transferURLToFile(); System.out.println("Found XML file containing lighthouse photo URLs."); /* * Object for extracting the "raw" URLs from each piece of photo data * in the XML file */ final XMLParser parser = new FlickrXMLOutputParser(outputFileName); // Complete the extraction process rawPhotoURLs = parser.parseFile("//photo/@url_s"); final int numPhotos = rawPhotoURLs.size(); totalNumPhotos += numPhotos; int i = 0; // Counter for keeping track of progress /* * Keep track of photos transferred successfully (which might be less * than the total number of photos defined int the XML output from * Flickr, especially if connection issues occur */ int numPhotosTransferred = 0; for (final String photoURL : rawPhotoURLs) { System.out.print("Transferring photos..."); i++; /* * Go to a website containing a "raw" JPEG image file and save it * accordingly on disk in the photo folder corresponding to the * lighthouse name */ final WebDataExtractor rawPhotoExtractor = new WebDataExtractor(photoURL, "photos/" + lighthouseName + "/lighthouse_photo_" + Integer.toString(i) + ".jpg"); final boolean transferSuccessful = rawPhotoExtractor.transferURLToFile(); if (transferSuccessful) { numPhotosTransferred++; } // Simple progress tracker System.out.printf("%d of %d (%.1f%%) complete.\n", i, numPhotos, i * 1.0 / numPhotos * 100.0); } // Indicate number of photos successfully transferred to disk if (numPhotosTransferred == numPhotos && numPhotos > 0) { System.out.println("All photos transferred to disk successfully!"); } else if (numPhotos == 0) { System.out.println("It appears there are no photos available for this lighthouse..."); } else if (numPhotosTransferred == 1 && numPhotos > 1) { System.out.println("1 photo transferred to disk successfully."); } else if (numPhotosTransferred == 1 && numPhotos == 1) { System.out.println("The photo transferred to disk successfully!"); } else { System.out.println(numPhotosTransferred + " photos transferred to 
disk successfully."); } // Keep track of elapsed time estimatedTime = System.nanoTime() - startTime; elapsedTime = WebImageManager.calculateElapsedTime(estimatedTime); System.out.println("Estimated elapsed time: " + elapsedTime + "."); // Add extra line in between lighthouses in output stream System.out.println(); /* * Keep track of total number of photos transferred so far across * _all_lighthouses */ totalNumPhotosTransferred += numPhotosTransferred; } // Display "grand" total (which is hopefully greater than 0) System.out.println("***GRAND TOTAL: " + totalNumPhotosTransferred + " OF " + totalNumPhotos + " PHOTOS TRANSFERRED SUCCESSFULLY***"); estimatedTime = System.nanoTime() - startTime; elapsedTime = WebImageManager.calculateElapsedTime(estimatedTime); System.out.println("TOTAL ELAPSED TIME: " + elapsedTime.toUpperCase()); }