List of usage examples for java.io File getAbsolutePath
public String getAbsolutePath()
From source file:com.vmware.photon.controller.core.Main.java
/**
 * Entry point for the Photon Controller core service.
 *
 * Startup sequence visible here: bootstrap logging, parse CLI args
 * (positional config file plus optional --manual flag), build an
 * SSLContext (trust store when auth is enabled, accept-all certs
 * otherwise), start the Xenon host, rewrite the config file into a
 * dropwizard-compatible temp file, then launch the API front end and
 * register a shutdown hook.
 *
 * @param args command line: config-file path, optional --manual
 * @throws Throwable any startup failure is logged and rethrown
 */
public static void main(String[] args) throws Throwable {
    try {
        LoggingFactory.bootstrap();
        logger.info("args: " + Arrays.toString(args));
        ArgumentParser parser = ArgumentParsers.newArgumentParser("PhotonControllerCore").defaultHelp(true)
                .description("Photon Controller Core");
        parser.addArgument("config-file").help("photon controller configuration file");
        parser.addArgument("--manual").type(Boolean.class).setDefault(false)
                .help("If true, create default deployment.");
        Namespace namespace = parser.parseArgsOrFail(args);
        PhotonControllerConfig photonControllerConfig = getPhotonControllerConfig(namespace);
        DeployerConfig deployerConfig = photonControllerConfig.getDeployerConfig();
        new LoggingFactory(photonControllerConfig.getLogging(), "photon-controller-core").configure();

        SSLContext sslContext;
        if (deployerConfig.getDeployerContext().isAuthEnabled()) {
            // Auth enabled: trust only certificates from the configured JKS key store.
            sslContext = SSLContext.getInstance(KeyStoreUtils.THRIFT_PROTOCOL);
            TrustManagerFactory tmf = null;
            tmf = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm());
            KeyStore keyStore = KeyStore.getInstance("JKS");
            // NOTE(review): this stream is never closed — leak; confirm and wrap
            // in try-with-resources in a follow-up.
            InputStream in = FileUtils
                    .openInputStream(new File(deployerConfig.getDeployerContext().getKeyStorePath()));
            keyStore.load(in, deployerConfig.getDeployerContext().getKeyStorePassword().toCharArray());
            tmf.init(keyStore);
            sslContext.init(null, tmf.getTrustManagers(), null);
        } else {
            // No auth: generate keys and accept any certificate.
            KeyStoreUtils.generateKeys("/thrift/");
            sslContext = KeyStoreUtils.acceptAllCerts(KeyStoreUtils.THRIFT_PROTOCOL);
        }

        ThriftModule thriftModule = new ThriftModule(sslContext);
        PhotonControllerXenonHost xenonHost = startXenonHost(photonControllerConfig, thriftModule,
                deployerConfig, sslContext);

        if ((Boolean) namespace.get("manual")) {
            DefaultDeployment.createDefaultDeployment(photonControllerConfig.getXenonConfig().getPeerNodes(),
                    deployerConfig, xenonHost);
        }

        // Creating a temp configuration file for apife with modification to some named
        // sections in photon-controller-config so that it can match the Configuration
        // class of dropwizard: only the "apife:" section header is renamed to "server:".
        File apiFeTempConfig = File.createTempFile("apiFeTempConfig", ".tmp");
        File source = new File(args[0]);
        FileInputStream fis = new FileInputStream(source);
        BufferedReader in = new BufferedReader(new InputStreamReader(fis));
        FileWriter fstream = new FileWriter(apiFeTempConfig, true);
        BufferedWriter out = new BufferedWriter(fstream);
        String aLine = null;
        while ((aLine = in.readLine()) != null) {
            if (aLine.equals("apife:")) {
                aLine = aLine.replace("apife:", "server:");
            }
            out.write(aLine);
            out.newLine();
        }
        // NOTE(review): if an IOException is thrown above, these close() calls are
        // skipped (no finally / try-with-resources) — confirm and harden later.
        in.close();
        out.close();

        // This approach can be simplified once the apife container is gone, but for
        // the time being it expects the first arg to be the string "server".
        String[] apiFeArgs = new String[2];
        apiFeArgs[0] = "server";
        apiFeArgs[1] = apiFeTempConfig.getAbsolutePath();
        ApiFeService.setupApiFeConfigurationForServerCommand(apiFeArgs);
        ApiFeService.addServiceHost(xenonHost);
        ApiFeService.setSSLContext(sslContext);
        ApiFeService apiFeService = new ApiFeService();
        apiFeService.run(apiFeArgs);
        apiFeTempConfig.deleteOnExit();

        LocalApiClient localApiClient = apiFeService.getInjector().getInstance(LocalApiClient.class);
        xenonHost.setApiClient(localApiClient);

        // in the non-auth enabled scenario we need to be able to accept any
        // self-signed certificate
        if (!deployerConfig.getDeployerContext().isAuthEnabled()) {
            KeyStoreUtils.acceptAllCerts(KeyStoreUtils.THRIFT_PROTOCOL);
        }

        // Stop the Xenon host and flush logging on JVM shutdown.
        Runtime.getRuntime().addShutdownHook(new Thread() {
            @Override
            public void run() {
                logger.info("Shutting down");
                xenonHost.stop();
                logger.info("Done");
                LoggingFactory.detachAndStop();
            }
        });
    } catch (Exception e) {
        // Log and rethrow so the JVM exits with the failure visible to the caller.
        logger.error("Failed to start photon controller ", e);
        throw e;
    }
}
From source file:com.zimbra.perf.chart.ChartUtil.java
/**
 * CLI entry point for the chart generator.
 *
 * Validates required options (-s source dirs, -d dest dir, --conf files),
 * resolves them to File objects, then builds and runs a ChartUtil.
 * Note: usage(...) is presumably a no-return helper that prints and exits —
 * the code relies on that, since values are dereferenced right after the
 * usage(...) calls (TODO confirm against its definition).
 *
 * @param args command-line arguments
 * @throws Exception propagated from option parsing helpers
 */
public static void main(String[] args) throws Exception {
    CommandLineParser clParser = new GnuParser();
    Options opts = getOptions();
    try {
        CommandLine cl = clParser.parse(opts, args);
        if (cl.hasOption('h'))
            usage(opts);
        if (!cl.hasOption('s') && !cl.hasOption('d'))
            usage(opts, "-s and -d options are required");
        if (!cl.hasOption('s'))
            usage(opts, "Missing required -s option");
        if (!cl.hasOption('d'))
            usage(opts, "Missing required -d option");

        // Configuration files: every one listed must exist.
        String[] confs = cl.getOptionValues(OPT_CONF);
        if (confs == null || confs.length == 0)
            usage(opts, "Missing --" + OPT_CONF + " option");
        File[] confFiles = new File[confs.length];
        for (int i = 0; i < confs.length; i++) {
            File conf = new File(confs[i]);
            if (!conf.exists()) {
                System.err.printf("Configuration file %s does not exist\n", conf.getAbsolutePath());
                System.exit(1);
            }
            confFiles[i] = conf;
        }

        // Source directories: missing ones are only warned about; at least one
        // must survive.
        String[] srcDirStrs = cl.getOptionValues(OPT_SRCDIR);
        if (srcDirStrs == null || srcDirStrs.length == 0)
            usage(opts, "Missing --" + OPT_SRCDIR + " option");
        List<File> srcDirsList = new ArrayList<File>(srcDirStrs.length);
        for (int i = 0; i < srcDirStrs.length; i++) {
            File srcDir = new File(srcDirStrs[i]);
            if (srcDir.exists())
                srcDirsList.add(srcDir);
            else
                System.err.printf("Source directory %s does not exist\n", srcDir.getAbsolutePath());
        }
        if (srcDirsList.size() < 1)
            usage(opts, "No valid source directory found");
        File[] srcDirs = new File[srcDirsList.size()];
        srcDirsList.toArray(srcDirs);

        // Destination directory: created on demand and must be writable.
        String destDirStr = cl.getOptionValue(OPT_DESTDIR);
        if (destDirStr == null)
            usage(opts, "Missing --" + OPT_DESTDIR + " option");
        File destDir = new File(destDirStr);
        if (!destDir.exists()) {
            boolean created = destDir.mkdirs();
            if (!created) {
                System.err.printf("Unable to create destination directory %s\n", destDir.getAbsolutePath());
                System.exit(1);
            }
        }
        if (!destDir.canWrite()) {
            System.err.printf("Destination directory %s is not writable\n", destDir.getAbsolutePath());
            System.exit(1);
        }

        // Title defaults to the name of the first source directory.
        String title = cl.getOptionValue(OPT_TITLE);
        if (title == null)
            title = srcDirs[0].getAbsoluteFile().getName();

        Date startAt = parseTimestampOption(cl, opts, OPT_START_AT);
        Date endAt = parseTimestampOption(cl, opts, OPT_END_AT);
        Date aggStartAt = parseTimestampOption(cl, opts, OPT_AGGREGATE_START_AT);
        Date aggEndAt = parseTimestampOption(cl, opts, OPT_AGGREGATE_END_AT);
        boolean noSummary = cl.hasOption('n');

        ChartUtil app = new ChartUtil(confFiles, srcDirs, destDir, title, startAt, endAt, aggStartAt,
                aggEndAt, noSummary);
        app.doit();
    } catch (Exception e) {
        // Any failure falls back to printing usage after the stack trace.
        e.printStackTrace();
        System.err.println();
        usage(opts);
    }
}
From source file:com.github.rwhogg.git_vcr.App.java
/**
 * main is the entry point for Git-VCR.
 *
 * Flow: load config, locate the Git repository (handling bare submodules),
 * obtain the patch (downloaded over HTTP or read from a local path), review
 * the tree before and after applying the patch, and open an HTML report.
 * Util.error(...) is presumably a no-return helper (prints and exits) — the
 * code relies on that after failed lookups (TODO confirm).
 *
 * @param args Command-line arguments
 */
public static void main(String[] args) {
    Options options = parseCommandLine(args);
    HierarchicalINIConfiguration configuration = null;
    try {
        configuration = getConfiguration();
    } catch (ConfigurationException e) {
        Util.error("could not parse configuration file!");
    }
    // verify we are in a git folder and then construct the repo
    final File currentFolder = new File(".");
    FileRepositoryBuilder builder = new FileRepositoryBuilder();
    Repository localRepo = null;
    try {
        localRepo = builder.findGitDir().build();
    } catch (IOException e) {
        Util.error("not in a Git folder!");
    }
    // deal with submodules: a bare repo here means we are inside a submodule,
    // so resolve it through the parent repository.
    assert localRepo != null;
    if (localRepo.isBare()) {
        FileRepositoryBuilder parentBuilder = new FileRepositoryBuilder();
        Repository parentRepo;
        try {
            parentRepo = parentBuilder.setGitDir(new File("..")).findGitDir().build();
            localRepo = SubmoduleWalk.getSubmoduleRepository(parentRepo, currentFolder.getName());
        } catch (IOException e) {
            Util.error("could not find parent of submodule!");
        }
    }
    // if we need to retrieve the patch file, get it now
    URL patchUrl = options.getPatchUrl();
    String patchPath = patchUrl.getFile();
    File patchFile = null;
    // HttpUrl.get returns null for non-HTTP URLs; that case falls back to a
    // local file path below.
    HttpUrl httpUrl = HttpUrl.get(patchUrl);
    if (httpUrl != null) {
        try {
            patchFile = com.twitter.common.io.FileUtils.SYSTEM_TMP.createFile(".diff");
            Request request = new Request.Builder().url(httpUrl).build();
            OkHttpClient client = new OkHttpClient();
            Call call = client.newCall(request);
            Response response = call.execute();
            ResponseBody body = response.body();
            if (!response.isSuccessful()) {
                Util.error("could not retrieve diff file from URL " + patchUrl);
            }
            String content = body.string();
            org.apache.commons.io.FileUtils.write(patchFile, content, (Charset) null);
        } catch (IOException ie) {
            Util.error("could not retrieve diff file from URL " + patchUrl);
        }
    } else {
        patchFile = new File(patchPath);
    }
    // find the patch
    //noinspection ConstantConditions
    if (!patchFile.canRead()) {
        Util.error("patch file " + patchFile.getAbsolutePath() + " is not readable!");
    }
    final Git git = new Git(localRepo);
    // handle the branch: remember the current branch so the shutdown hook can
    // restore it.
    String branchName = options.getBranchName();
    String theOldCommit = null;
    try {
        theOldCommit = localRepo.getBranch();
    } catch (IOException e2) {
        Util.error("could not get reference to current branch!");
    }
    final String oldCommit = theOldCommit; // needed to reference from shutdown hook
    if (branchName != null) {
        // switch to the branch
        try {
            git.checkout().setName(branchName).call();
        } catch (RefAlreadyExistsException e) {
            // FIXME Auto-generated catch block
            e.printStackTrace();
        } catch (RefNotFoundException e) {
            Util.error("the branch " + branchName + " was not found!");
        } catch (InvalidRefNameException e) {
            Util.error("the branch name " + branchName + " is invalid!");
        } catch (org.eclipse.jgit.api.errors.CheckoutConflictException e) {
            Util.error("there was a checkout conflict!");
        } catch (GitAPIException e) {
            Util.error("there was an unspecified Git API failure!");
        }
    }
    // ensure there are no changes before we apply the patch
    try {
        if (!git.status().call().isClean()) {
            Util.error("cannot run git-vcr while there are uncommitted changes!");
        }
    } catch (NoWorkTreeException e1) {
        // won't happen
        assert false;
    } catch (GitAPIException e1) {
        Util.error("call to git status failed!");
    }
    // list all the files changed
    String patchName = patchFile.getName();
    Patch patch = new Patch();
    try {
        patch.parse(new FileInputStream(patchFile));
    } catch (FileNotFoundException e) {
        assert false;
    } catch (IOException e) {
        Util.error("could not parse the patch file!");
    }
    // Review the tree as it is BEFORE the patch.
    ReviewResults oldResults = new ReviewResults(patchName, patch, configuration, false);
    try {
        oldResults.review();
    } catch (InstantiationException e1) {
        Util.error("could not instantiate a review tool class!");
    } catch (IllegalAccessException e1) {
        Util.error("illegal access to a class");
    } catch (ClassNotFoundException e1) {
        Util.error("could not find a review tool class");
    } catch (ReviewFailedException e1) {
        e1.printStackTrace();
        Util.error("Review failed!");
    }
    // we're about to change the repo, so register a shutdown hook to clean it up
    Runtime.getRuntime().addShutdownHook(new Thread() {
        public void run() {
            cleanupGit(git, oldCommit);
        }
    });
    // apply the patch
    try {
        git.apply().setPatch(new FileInputStream(patchFile)).call();
    } catch (PatchFormatException e) {
        Util.error("patch file " + patchFile.getAbsolutePath() + " is malformatted!");
    } catch (PatchApplyException e) {
        Util.error("patch file " + patchFile.getAbsolutePath() + " did not apply correctly!");
    } catch (FileNotFoundException e) {
        assert false;
    } catch (GitAPIException e) {
        Util.error(e.getLocalizedMessage());
    }
    // Review the tree AFTER the patch.
    ReviewResults newResults = new ReviewResults(patchName, patch, configuration, true);
    try {
        newResults.review();
    } catch (InstantiationException e1) {
        Util.error("could not instantiate a review tool class!");
    } catch (IllegalAccessException e1) {
        Util.error("illegal access to a class");
    } catch (ClassNotFoundException e1) {
        Util.error("could not find a review tool class");
    } catch (ReviewFailedException e1) {
        e1.printStackTrace();
        Util.error("Review failed!");
    }
    // generate and show the report
    VelocityReport report = new VelocityReport(patch, oldResults, newResults);
    File reportFile = null;
    try {
        reportFile = com.twitter.common.io.FileUtils.SYSTEM_TMP.createFile(".html");
        org.apache.commons.io.FileUtils.write(reportFile, report.toString(), (String) null);
    } catch (IOException e) {
        Util.error("could not generate the results page!");
    }
    try {
        assert reportFile != null;
        Desktop.getDesktop().open(reportFile);
    } catch (IOException e) {
        Util.error("could not open the results page!");
    }
}
From source file:com.twentyn.patentScorer.ScoreMerger.java
/**
 * Merges per-document classification scores into search results and writes a
 * grouped, sorted JSON document.
 *
 * Steps: parse CLI options; read one JSON score object per line from every
 * .json file in --scores into a docId-keyed map; read each .json results
 * file from --results, attach scores to its result documents, and bucket the
 * search results by synonym; merge each synonym bucket into one result,
 * sort its documents (classifier score desc, then score desc), and group by
 * InChI; finally write the InChI-sorted groups to --output.
 *
 * @param args command-line arguments (-r, -s, -o required; -h for help)
 * @throws Exception on I/O or JSON failures
 */
public static void main(String[] args) throws Exception {
    System.out.println("Starting up...");
    System.out.flush();
    Options opts = new Options();
    opts.addOption(Option.builder("h").longOpt("help").desc("Print this help message and exit").build());
    opts.addOption(Option.builder("r").longOpt("results").required().hasArg()
            .desc("A directory of search results to read").build());
    opts.addOption(Option.builder("s").longOpt("scores").required().hasArg()
            .desc("A directory of patent classification scores to read").build());
    opts.addOption(Option.builder("o").longOpt("output").required().hasArg()
            .desc("The output file where results will be written.").build());

    HelpFormatter helpFormatter = new HelpFormatter();
    CommandLineParser cmdLineParser = new DefaultParser();
    CommandLine cmdLine = null;
    try {
        cmdLine = cmdLineParser.parse(opts, args);
    } catch (ParseException e) {
        System.out.println("Caught exception when parsing command line: " + e.getMessage());
        helpFormatter.printHelp("DocumentIndexer", opts);
        System.exit(1);
    }
    if (cmdLine.hasOption("help")) {
        helpFormatter.printHelp("DocumentIndexer", opts);
        System.exit(0);
    }

    // NOTE(review): these checks only log the error and fall through — the run
    // continues with an invalid directory; confirm whether an exit was intended.
    File scoresDirectory = new File(cmdLine.getOptionValue("scores"));
    if (cmdLine.getOptionValue("scores") == null || !scoresDirectory.isDirectory()) {
        LOGGER.error("Not a directory of score files: " + cmdLine.getOptionValue("scores"));
    }
    File resultsDirectory = new File(cmdLine.getOptionValue("results"));
    if (cmdLine.getOptionValue("results") == null || !resultsDirectory.isDirectory()) {
        LOGGER.error("Not a directory of results files: " + cmdLine.getOptionValue("results"));
    }

    FileWriter outputWriter = new FileWriter(cmdLine.getOptionValue("output"));
    ObjectMapper objectMapper = new ObjectMapper();
    objectMapper.enable(SerializationFeature.INDENT_OUTPUT);
    objectMapper.setVisibility(PropertyAccessor.ALL, JsonAutoDetect.Visibility.ANY);

    // Accept only *.json files when listing the input directories.
    FilenameFilter jsonFilter = new FilenameFilter() {
        public final Pattern JSON_PATTERN = Pattern.compile("\\.json$");

        public boolean accept(File dir, String name) {
            return JSON_PATTERN.matcher(name).find();
        }
    };

    // Scores are stored one JSON object per line, keyed here by document id.
    Map<String, PatentScorer.ClassificationResult> scores = new HashMap<>();
    LOGGER.info("Reading scores from directory at " + scoresDirectory.getAbsolutePath());
    for (File scoreFile : scoresDirectory.listFiles(jsonFilter)) {
        // NOTE(review): reader is never closed — leak per file; confirm and use
        // try-with-resources in a follow-up.
        BufferedReader reader = new BufferedReader(new FileReader(scoreFile));
        int count = 0;
        String line;
        while ((line = reader.readLine()) != null) {
            PatentScorer.ClassificationResult res = objectMapper.readValue(line,
                    PatentScorer.ClassificationResult.class);
            scores.put(res.docId, res);
            count++;
        }
        LOGGER.info("Read " + count + " scores from " + scoreFile.getAbsolutePath());
    }

    Map<String, List<DocumentSearch.SearchResult>> synonymsToResults = new HashMap<>();
    Map<String, List<DocumentSearch.SearchResult>> inchisToResults = new HashMap<>();
    LOGGER.info("Reading results from directory at " + resultsDirectory);
    // With help from http://stackoverflow.com/questions/6846244/jackson-and-generic-type-reference.
    JavaType resultsType = objectMapper.getTypeFactory().constructCollectionType(List.class,
            DocumentSearch.SearchResult.class);

    // Process result files in deterministic (name) order.
    List<File> resultsFiles = Arrays.asList(resultsDirectory.listFiles(jsonFilter));
    Collections.sort(resultsFiles, new Comparator<File>() {
        @Override
        public int compare(File o1, File o2) {
            return o1.getName().compareTo(o2.getName());
        }
    });
    for (File resultsFile : resultsFiles) {
        BufferedReader reader = new BufferedReader(new FileReader(resultsFile));
        // Slurp the whole file into a buffer sized from the file length.
        // NOTE(review): assumes file length in bytes == char count (single-byte
        // encoding) and that read() fills the buffer in one call — confirm.
        CharBuffer buffer = CharBuffer.allocate(Long.valueOf(resultsFile.length()).intValue());
        int bytesRead = reader.read(buffer);
        LOGGER.info("Read " + bytesRead + " bytes from " + resultsFile.getName() + " (length is "
                + resultsFile.length() + ")");
        List<DocumentSearch.SearchResult> results = objectMapper.readValue(new CharArrayReader(buffer.array()),
                resultsType);
        LOGGER.info("Read " + results.size() + " results from " + resultsFile.getAbsolutePath());
        int count = 0;
        for (DocumentSearch.SearchResult sres : results) {
            // Attach the classifier score to each result document when available.
            for (DocumentSearch.ResultDocument resDoc : sres.getResults()) {
                String docId = resDoc.getDocId();
                PatentScorer.ClassificationResult classificationResult = scores.get(docId);
                if (classificationResult == null) {
                    LOGGER.warn("No classification result found for " + docId);
                } else {
                    resDoc.setClassifierScore(classificationResult.getScore());
                }
            }
            if (!synonymsToResults.containsKey(sres.getSynonym())) {
                synonymsToResults.put(sres.getSynonym(), new ArrayList<DocumentSearch.SearchResult>());
            }
            synonymsToResults.get(sres.getSynonym()).add(sres);
            count++;
            if (count % 1000 == 0) {
                LOGGER.info("Processed " + count + " search result documents");
            }
        }
    }

    // Order documents by classifier score descending, breaking ties by raw
    // search score descending.
    Comparator<DocumentSearch.ResultDocument> resultDocumentComparator = new Comparator<DocumentSearch.ResultDocument>() {
        @Override
        public int compare(DocumentSearch.ResultDocument o1, DocumentSearch.ResultDocument o2) {
            int cmp = o2.getClassifierScore().compareTo(o1.getClassifierScore());
            if (cmp != 0) {
                return cmp;
            }
            cmp = o2.getScore().compareTo(o1.getScore());
            return cmp;
        }
    };

    for (Map.Entry<String, List<DocumentSearch.SearchResult>> entry : synonymsToResults.entrySet()) {
        DocumentSearch.SearchResult newSearchRes = null;
        // Merge all result documents into a single search result.
        for (DocumentSearch.SearchResult sr : entry.getValue()) {
            if (newSearchRes == null) {
                newSearchRes = sr;
            } else {
                newSearchRes.getResults().addAll(sr.getResults());
            }
        }
        if (newSearchRes == null || newSearchRes.getResults() == null) {
            LOGGER.error("Search results for " + entry.getKey() + " are null.");
            continue;
        }
        Collections.sort(newSearchRes.getResults(), resultDocumentComparator);
        if (!inchisToResults.containsKey(newSearchRes.getInchi())) {
            inchisToResults.put(newSearchRes.getInchi(), new ArrayList<DocumentSearch.SearchResult>());
        }
        inchisToResults.get(newSearchRes.getInchi()).add(newSearchRes);
    }

    // Emit groups in sorted InChI order; within a group, sort by synonym.
    List<String> sortedKeys = new ArrayList<String>(inchisToResults.keySet());
    Collections.sort(sortedKeys);
    List<GroupedInchiResults> orderedResults = new ArrayList<>(sortedKeys.size());
    Comparator<DocumentSearch.SearchResult> synonymSorter = new Comparator<DocumentSearch.SearchResult>() {
        @Override
        public int compare(DocumentSearch.SearchResult o1, DocumentSearch.SearchResult o2) {
            return o1.getSynonym().compareTo(o2.getSynonym());
        }
    };
    for (String inchi : sortedKeys) {
        List<DocumentSearch.SearchResult> res = inchisToResults.get(inchi);
        Collections.sort(res, synonymSorter);
        orderedResults.add(new GroupedInchiResults(inchi, res));
    }

    objectMapper.writerWithView(Object.class).writeValue(outputWriter, orderedResults);
    outputWriter.close();
}
From source file:de.prozesskraft.pkraft.Manager.java
/**
 * Entry point of the pkraft process manager.
 *
 * Reads options from the installation's ini file, parses the command line
 * (--instance is mandatory; --stop/--kill are control actions), validates a
 * license against the configured license servers, then either stops/kills a
 * running instance or occupies the instance (writes its own manager id into
 * the binary instance file) and pushes the process forward.
 *
 * Relies on static state declared elsewhere in this class: ini, line, exit,
 * fileBinary, managerid — assumed fields; TODO confirm.
 *
 * @param args command-line arguments
 * @throws org.apache.commons.cli.ParseException from CLI parsing
 * @throws CloneNotSupportedException propagated from process handling
 */
public static void main(String[] args)
        throws org.apache.commons.cli.ParseException, CloneNotSupportedException {
    /*----------------------------
      get options from ini-file
    ----------------------------*/
    File inifile = new java.io.File(
            WhereAmI.getInstallDirectoryAbsolutePath(Manager.class) + "/" + "../etc/pkraft-manager.ini");

    if (inifile.exists()) {
        try {
            ini = new Ini(inifile);
        } catch (InvalidFileFormatException e1) {
            // TODO Auto-generated catch block
            e1.printStackTrace();
        } catch (IOException e1) {
            // TODO Auto-generated catch block
            e1.printStackTrace();
        }
    } else {
        System.err.println("ini file does not exist: " + inifile.getAbsolutePath());
        System.exit(1);
        exit = true; // unreachable after System.exit; kept as-is
    }

    /*----------------------------
      create boolean options
    ----------------------------*/
    Option help = new Option("help", "print this message");
    Option v = new Option("v", "prints version and build-date");

    /*----------------------------
      create argument options
    ----------------------------*/
    Option instance = OptionBuilder.withArgName("instance").hasArg()
            .withDescription("[mandatory] process instance file")
            //                .isRequired()
            .create("instance");

    Option stop = OptionBuilder.withArgName("stop")
            //                .hasArg()
            .withDescription("[optional] stops a running manager for given instance")
            //                .isRequired()
            .create("stop");

    Option kill = OptionBuilder.withArgName("kill")
            //                .hasArg()
            .withDescription("[optional] kills all applications that have been started by steps")
            //                .isRequired()
            .create("kill");

    /*----------------------------
      create options object
    ----------------------------*/
    Options options = new Options();
    options.addOption(help);
    options.addOption(v);
    options.addOption(instance);
    options.addOption(stop);
    options.addOption(kill);

    /*----------------------------
      create the parser
    ----------------------------*/
    CommandLineParser parser = new GnuParser();
    try {
        // parse the command line arguments
        line = parser.parse(options, args);
    }
    //        catch ( ParseException exp )
    catch (Exception exp) {
        // oops, something went wrong
        System.err.println("Parsing failed. Reason: " + exp.getMessage());
        exiter();
    }

    /*----------------------------
      usage/help
    ----------------------------*/
    if (line.hasOption("help")) {
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("manager", options);
        exit = true;
        System.exit(0);
    } else if (line.hasOption("v")) {
        System.out.println("author: info@prozesskraft.de");
        System.out.println("version: [% version %]");
        System.out.println("date: [% date %]");
        exit = true;
        System.exit(0);
    } else if (!(line.hasOption("instance"))) {
        exiter();
    }

    /*----------------------------
      check the license and abort if it is not valid
    ----------------------------*/
    // check for valid license
    ArrayList<String> allPortAtHost = new ArrayList<String>();
    allPortAtHost.add(ini.get("license-server", "license-server-1"));
    allPortAtHost.add(ini.get("license-server", "license-server-2"));
    allPortAtHost.add(ini.get("license-server", "license-server-3"));

    MyLicense lic = new MyLicense(allPortAtHost, "1", "user-edition", "0.1");

    // print the license log
    for (String actLine : (ArrayList<String>) lic.getLog()) {
        System.err.println(actLine);
    }

    // abort if the license is not valid
    if (!lic.isValid()) {
        exit = true;
        System.exit(1);
    }

    /*----------------------------
      business logic
    ----------------------------*/
    Process actualProcess = null;

    try {
        Process p1 = new Process();

        fileBinary = new java.io.File(line.getOptionValue("instance"));
        String pathBinary = "";
        if (fileBinary.exists()) {
            pathBinary = fileBinary.getAbsolutePath();
            System.err.println("file does exist: " + pathBinary);
        } else {
            System.err.println("file does not exist: " + fileBinary.getAbsolutePath());
            exiter();
        }

        // --stop / --kill: signal the current manager to release the instance
        // by writing manager id 0 into the instance file, optionally killing
        // all step applications, then exit.
        if (line.hasOption("stop") || line.hasOption("kill")) {
            p1.setInfilebinary(pathBinary);
            Process p2 = p1.readBinary();
            p2.log("debug", "setting new manager-Id (0) to signal actual manager (" + p2.getManagerid()
                    + ") that he is no longer in charge ");
            System.err.println("info: stopping instance");
            System.err.println("debug: setting new manager-Id (0) to signal actual manager ("
                    + p2.getManagerid() + ") that he is no longer in charge ");
            p2.setManagerid(0);
            p2.run = false;
            p2.setOutfilebinary(pathBinary);
            p2.writeBinary();
            if (line.hasOption("kill")) {
                System.err.println("info: killing all steps of instance");
                String returnStringOfKills = p2.kill();
                System.err.println("info: killing returns: " + returnStringOfKills);
            }
            // for non-wrapper processes the manager talks to pradar itself
            boolean pradar = (!(p2.isWrapper()));
            // pradar checkout
            if (pradar) {
                pradarAttend(p2.getRootdir() + "/process.pmb");
                //                    pradarCheckout(p2.getId(), p2.getName(), "0");
            }
            exit = true;
            System.exit(0);
        }

        startZyklischerThread(0);

        // read the process instance
        p1.setInfilebinary(pathBinary);
        managerid = p1.genManagerid();
        Process p2;
        p2 = p1.readBinary();

        // on startup the instance is occupied first: writing our manager id
        // signals other running managers that they are no longer needed
        p2.setManagerid(managerid);
        System.err.println("debug: manager " + managerid + ": occupying instance.");
        p2.log("info", "manager " + managerid + ": occupying instance.");
        p2.log("debug", "manager " + managerid
                + ": setting new manager-id to signal other running managers that they are not longer needed.");
        p2.log("debug", "manager " + managerid + ": setting binary file for input to: " + pathBinary);
        p2.log("debug", "manager " + managerid + ": reading binary file: " + pathBinary);
        p2.setInfilebinary(pathBinary);
        p2.setOutfilebinary(pathBinary);
        p2.log("debug", "manager " + managerid + ": setting binary file for output: " + pathBinary);
        p2.log("debug", "manager " + managerid + ": writing process to binary file to occupy instance.");

        // for non-wrapper processes the pradar communication is handled by
        // the manager
        boolean pradar = (!(p2.isWrapper()));

        System.err.println("debug: setting instance to run");
        p2.run = true;

        // pradar checkin
        if (pradar && p2.run && p2.touchInMillis == 0) {
            pradarAttend(p2.getRootdir() + "/process.pmb");
        }

        System.err.println("debug: writing binary");
        p2.writeBinary();

        // push the process forward as far as possible
        pushProcessAsFarAsPossible(pathBinary, false);
    } catch (Exception e) {
        if (actualProcess != null) {
            actualProcess.log("fatal", e.getMessage() + "\n" + Arrays.toString(e.getStackTrace()));
            updateFile(actualProcess);
            e.printStackTrace();
            // write details into the debug log file and exit
            exiterException(actualProcess.getOutfilebinary(), e);
        }
        exit = true;
        System.exit(10);
    }
}
From source file:com.zimbra.common.calendar.ZoneInfo2iCalendar.java
/**
 * Converts tzdata zone definitions to an iCalendar VCALENDAR stream of
 * VTIMEZONE components.
 *
 * Flow: parse CLI args into Params; feed every tzdata file to a
 * ZoneInfoParser; optionally read an extra-data file (primary TZ list and
 * zone match scores); then write the VCALENDAR header, one VTIMEZONE per
 * zone ordered by GMT offset, and the footer to the output file or stdout.
 *
 * @param args command-line arguments (see parseArgs/usage)
 * @throws Exception on I/O failures not handled locally
 */
public static void main(String[] args) throws Exception {
    // command line handling
    CommandLine cl = null;
    Params params = null;
    try {
        cl = parseArgs(args);
        if (cl.hasOption(OPT_HELP)) {
            usage(null);
            System.exit(0);
        }
        params = initParams(cl);
    } catch (Exception e) {
        System.err.println(e.getMessage());
        e.printStackTrace();
        System.exit(1);
    }

    // parse tzdata source; any parse error reports file/line and aborts
    ZoneInfoParser parser = new ZoneInfoParser();
    for (File tzdataFile : params.tzdataFiles) {
        Reader r = null;
        try {
            r = new InputStreamReader(new FileInputStream(tzdataFile), "UTF-8");
            parser.readTzdata(r);
        } catch (ParseException e) {
            System.err.println(e.getMessage());
            System.err.println("Line: " + e.getErrorOffset());
            System.err.println("File: " + tzdataFile.getAbsolutePath());
            e.printStackTrace();
            System.exit(1);
        } finally {
            if (r != null)
                r.close();
        }
    }
    parser.analyze();

    // read extra data file containing primary TZ list and zone match scores
    if (params.extraDataFile != null) {
        Reader r = null;
        try {
            r = new InputStreamReader(new FileInputStream(params.extraDataFile), "UTF-8");
            readExtraData(r);
        } catch (ParseException e) {
            System.err.println(e.getMessage());
            System.err.println("Line: " + e.getErrorOffset());
            System.err.println("File: " + params.extraDataFile.getAbsolutePath());
            e.printStackTrace();
            System.exit(1);
        } finally {
            if (r != null)
                r.close();
        }
    }

    // output goes to the given file, or stdout when none was specified
    Writer out;
    if (params.outputFile != null) {
        out = new PrintWriter(params.outputFile, "UTF-8");
    } else {
        out = new PrintWriter(new OutputStreamWriter(System.out, "UTF-8"));
    }
    try {
        StringBuilder hdr = new StringBuilder("BEGIN:VCALENDAR");
        hdr.append(CRLF);
        hdr.append("PRODID:Zimbra-Calendar-Provider").append(CRLF);
        hdr.append("VERSION:2.0").append(CRLF);
        hdr.append("METHOD:PUBLISH").append(CRLF);
        out.write(hdr.toString());
        Map<String, VTimeZone> oldTimeZones = makeOldTimeZonesMap(params);
        // TreeSet with a GMT-offset comparator fixes the emission order
        Set<Zone> zones = new TreeSet<Zone>(new ZoneComparatorByGmtOffset());
        zones.addAll(parser.getZones());
        Set<String> zoneIDs = new TreeSet<String>();
        for (Zone zone : zones) {
            zoneIDs.add(zone.getName());
        }
        for (Zone zone : zones) {
            out.write(getTimeZoneForZone(zone, params, zoneIDs, oldTimeZones));
        }
        StringBuilder footer = new StringBuilder("END:VCALENDAR");
        footer.append(CRLF);
        out.write(footer.toString());
    } finally {
        out.close();
    }
}
From source file:CommandLineInterpreter.java
/** * Main method, command line input will get parsed here. * * @param args/*www.ja va 2s . c o m*/ */ public static void main(String[] args) { // // test-arguments: // args = new String[] { "1.9", "-gui" }; boolean success = false; if (args.length == 1) { String arg = args[0].trim().replaceAll("[-]+", ""); if (arg.equals("help") || arg.equals("h")) printHelp(null); } if (args.length == 0) { printHelp("ONE ARGUMENT NEEDED"); } else { try { boolean guiAlert = false; Float minVersion = null; File resourcesFile = null; // ------------------------------------------------ // // -- // ------------------------------------------------ // final String minJavaVersionArgument = args[0]; if (!minJavaVersionArgument.trim().isEmpty()) { try { minVersion = Float.parseFloat(minJavaVersionArgument); } catch (Exception e) { // do nothing } } if (minVersion == null || minVersion > 2 || minVersion < 1.6) { printHelp("VERSION STRING IS NOT VALID"); } // ------------------------------------------------ // // -- // ------------------------------------------------ // for (int i = 1; i < (args.length <= 3 ? args.length : 3); i++) { final String argument = args[i].trim(); if (argument.equals("-gui")) { guiAlert = true; } else { String resourcesFilePath = argument; if (!resourcesFilePath.isEmpty()) { resourcesFile = new File(resourcesFilePath); if (!resourcesFile.exists() || !resourcesFile.isFile() || !resourcesFile.canRead()) { printHelp("RESOURCES FILE IS NOT VALID\n[" + resourcesFile.getAbsolutePath() + "]"); } } } } // ------------------------------------------------ // // -- // ------------------------------------------------ // success = checkJREVersion(minVersion, guiAlert); if (success && resourcesFile != null) { success = checkResources(resourcesFile, guiAlert); } } catch (Exception e) { success = false; e.printStackTrace(); } } if (!success) { // set error exit code System.exit(1); } }
From source file:edu.ucsd.crbs.cws.App.java
License:asdf
public static void main(String[] args) { Job.REFS_ENABLED = false;//w w w .jav a 2 s. c o m Workflow.REFS_ENABLED = false; try { OptionParser parser = new OptionParser() { { accepts(UPLOAD_WF_ARG, "Add/Update Workflow").withRequiredArg().ofType(File.class) .describedAs("Kepler .kar file"); //accepts(LOAD_TEST,"creates lots of workflows and jobs"); accepts(SYNC_WITH_CLUSTER_ARG, "Submits & Synchronizes Workflow Jobs on local cluster with CRBS Workflow Webservice. Requires --" + PROJECT_ARG + " --" + PORTALNAME_ARG + " --" + PORTAL_URL_ARG + " --" + HELP_EMAIL_ARG).withRequiredArg().ofType(String.class).describedAs("URL"); accepts(GEN_OLD_KEPLER_XML_ARG, "Generates version 1.x kepler xml for given workflow") .withRequiredArg().ofType(String.class).describedAs("wfid or .kar file"); accepts(UPLOAD_FILE_ARG, "Registers and uploads Workspace file to REST service") .withRequiredArg().ofType(File.class); accepts(REGISTER_FILE_ARG, "Registers Workspace file to REST service (DOES NOT UPLOAD FILE TO REST SERVICE)") .withRequiredArg().ofType(File.class); accepts(GET_WORKSPACE_FILE_INFO_ARG, "Outputs JSON of specified workspace file(s)") .withRequiredArg().ofType(String.class).describedAs("workspace file id"); accepts(GET_WORKFLOW_ARG, "Outputs JSON of specified Workflow").withRequiredArg() .ofType(Long.class).describedAs("Workflow Id"); accepts(DOWNLOAD_FILE_ARG, "Downloads Workspace file").withRequiredArg().ofType(String.class) .describedAs("workspace file id"); accepts(UPDATE_PATH_ARG, "Updates Workspace file path").withRequiredArg().ofType(String.class) .describedAs("workspace file id"); accepts(PATH_ARG, "Sets WorkspaceFile file path. 
Used in coordination with --" + UPDATE_PATH_ARG) .withRequiredArg().ofType(String.class).describedAs("file path"); accepts(URL_ARG, "URL to use with --" + UPLOAD_WF_ARG + ", --" + UPLOAD_FILE_ARG + ", --" + GET_WORKSPACE_FILE_INFO_ARG + " flags").withRequiredArg().ofType(String.class) .describedAs("URL"); accepts(EXAMPLE_JSON_ARG, "Outputs example JSON of Job, User, Workflow, and WorkspaceFile objects"); accepts(WF_EXEC_DIR_ARG, "Workflow Execution Directory").withRequiredArg().ofType(File.class) .describedAs("Directory"); accepts(WF_DIR_ARG, "Workflows Directory").withRequiredArg().ofType(File.class) .describedAs("Directory"); accepts(KEPLER_SCRIPT_ARG, "Kepler").withRequiredArg().ofType(File.class).describedAs("Script"); accepts(QUEUE_ARG, "SGE Queue").withRequiredArg().ofType(String.class).describedAs("Queue"); accepts(CAST_ARG, "Panfishcast binary").withRequiredArg().ofType(File.class) .describedAs("panfishcast"); accepts(STAT_ARG, "Panfishstat binary").withRequiredArg().ofType(File.class) .describedAs("panfishstat"); accepts(LOGIN_ARG, "User Login").withRequiredArg().ofType(String.class).describedAs("username"); accepts(TOKEN_ARG, "User Token").withRequiredArg().ofType(String.class).describedAs("token"); accepts(RUN_AS_ARG, "User to run as (for power accounts that can run as other users)") .withRequiredArg().ofType(String.class).describedAs("runas"); accepts(OWNER_ARG, "Sets owner when creating Workspace file and Workflow").withRequiredArg() .ofType(String.class).describedAs("username"); accepts(JOB_ID_ARG, "Sets source job id for Workspace file when used with --" + UPLOAD_FILE_ARG + " and --" + REGISTER_FILE_ARG).withRequiredArg().ofType(Long.class) .describedAs("Job Id"); accepts(MD5_ARG, "Sets md5 for Workspace file when used with --" + UPLOAD_FILE_ARG + " and --" + REGISTER_FILE_ARG).withRequiredArg().ofType(String.class) .describedAs("MD5 message digest"); accepts(SIZE_ARG, "Sets size in bytes for Workspace file when used with --" + UPLOAD_FILE_ARG + " 
and --" + REGISTER_FILE_ARG).withRequiredArg().ofType(Long.class) .describedAs("Size of file/dir in bytes"); accepts(RESAVE_WORKSPACEFILE_ARG, "Resaves Workspace file").withRequiredArg().ofType(Long.class) .describedAs("WorkspaceFile Id or -1 to resave all"); accepts(RESAVE_JOB_ARG, "Resaves Job").withRequiredArg().ofType(Long.class) .describedAs("Job Id or -1 to resave all"); accepts(RESAVE_WORKFLOW_ARG, "Resaves Workflow").withRequiredArg().ofType(Long.class) .describedAs("Workflow Id or -1 to resave all"); accepts(PREVIEW_WORKFLOW_ARG, "Preview Workflow on Web, requires --" + URL_ARG + " currently it should be: http://imafish.dynamic.ucsd.edu/cws/makepreview") .withRequiredArg().ofType(File.class).describedAs("Kepler .kar file"); accepts(DESCRIPTION_ARG, "Description for WorkspaceFile").withRequiredArg() .ofType(String.class); accepts(TYPE_ARG, "Type of WorkspaceFile").withRequiredArg().ofType(String.class); accepts(NAME_ARG, "Sets name for Workspace file when used with --" + UPLOAD_FILE_ARG + " and --" + REGISTER_FILE_ARG).withRequiredArg().ofType(String.class) .describedAs("WorkspaceFile name"); accepts(REGISTER_JAR_ARG, "Path to Jar to register WorkspaceFiles").withRequiredArg() .ofType(File.class).describedAs("Path to this jar"); accepts(GET_JOB_ARG, "Gets job from service in JSON format, requires --" + URL_ARG) .withRequiredArg().ofType(Long.class).describedAs("Job Id"); accepts(GET_WORKSPACE_FILE_ARG, "Gets WorkspaceFile from service in JSON format, requires --" + URL_ARG) .withRequiredArg().ofType(Long.class) .describedAs("WorkspaceFile Id or -1 for all"); accepts(PROJECT_ARG, "Project name ie CRBS. 
Used with --" + SYNC_WITH_CLUSTER_ARG) .withRequiredArg().ofType(String.class); accepts(PORTALNAME_ARG, "Portal name ie SLASH portal Used with --" + SYNC_WITH_CLUSTER_ARG) .withRequiredArg().ofType(String.class); accepts(PORTAL_URL_ARG, "Portal url ie http://slashsegmentation.com Used with --" + SYNC_WITH_CLUSTER_ARG) .withRequiredArg().ofType(String.class); accepts(HELP_EMAIL_ARG, "Help and reply to email address Used with --" + SYNC_WITH_CLUSTER_ARG) .withRequiredArg().ofType(String.class); accepts(BCC_EMAIL_ARG, "Blind Carbon copy email address Used with --" + SYNC_WITH_CLUSTER_ARG) .withRequiredArg().ofType(String.class); accepts(WORKSPACE_FILE_FAILED_ARG, "Denotes whether workspacefile failed (true) or not (false). Used with --" + UPDATE_PATH_ARG).withRequiredArg().ofType(Boolean.class) .describedAs("false = success and true = failed"); accepts(ERROR_EMAIL_ARG, "Email to receive notifications if errors are encountered. Used with --" + SYNC_WITH_CLUSTER_ARG).withRequiredArg().ofType(String.class); accepts(HELP_ARG).forHelp(); } }; OptionSet optionSet = null; try { optionSet = parser.parse(args); } catch (OptionException oe) { System.err.println("\nThere was an error parsing arguments: " + oe.getMessage() + "\n\n"); parser.printHelpOn(System.err); System.exit(1); } if (optionSet.has(HELP_ARG) || (!optionSet.has(SYNC_WITH_CLUSTER_ARG) && !optionSet.has(UPLOAD_WF_ARG)) && !optionSet.has(EXAMPLE_JSON_ARG) && !optionSet.has(UPLOAD_FILE_ARG) && !optionSet.has(GET_WORKSPACE_FILE_INFO_ARG) && !optionSet.has(UPDATE_PATH_ARG) && !optionSet.has(REGISTER_FILE_ARG) && !optionSet.has(RESAVE_WORKSPACEFILE_ARG) && !optionSet.has(RESAVE_JOB_ARG) && !optionSet.has(RESAVE_WORKFLOW_ARG) && !optionSet.has(PREVIEW_WORKFLOW_ARG) && !optionSet.has(GEN_OLD_KEPLER_XML_ARG) && !optionSet.has(GET_JOB_ARG) && !optionSet.has(GET_WORKSPACE_FILE_ARG) && !optionSet.has(GET_WORKFLOW_ARG)) { System.out.println(PROGRAM_HELP + "\n"); parser.printHelpOn(System.out); System.exit(0); } if 
(optionSet.has(EXAMPLE_JSON_ARG)) { renderExampleWorkflowsAndTasksAsJson(); System.exit(0); } if (optionSet.has(GET_JOB_ARG)) { failIfOptionSetMissingURLOrLoginOrToken(optionSet, "--" + GET_JOB_ARG + " flag"); getJobAsJson(optionSet); System.exit(0); } if (optionSet.has(GET_WORKSPACE_FILE_ARG)) { failIfOptionSetMissingURLOrLoginOrToken(optionSet, "--" + GET_WORKSPACE_FILE_ARG + " flag"); getWorkspaceFileAsJson(optionSet); System.exit(0); } if (optionSet.has(GET_WORKFLOW_ARG)) { failIfOptionSetMissingURLOrLoginOrToken(optionSet, "--" + GET_WORKFLOW_ARG + " flag"); getWorkflowAsJson(optionSet); System.exit(0); } MultivaluedMapFactory multivaluedMapFactory = new MultivaluedMapFactoryImpl(); if (optionSet.has(GEN_OLD_KEPLER_XML_ARG)) { String workflowFileOrId = (String) optionSet.valueOf(GEN_OLD_KEPLER_XML_ARG); File workflowFile = new File(workflowFileOrId); Workflow w = null; //if value is a file attempt to load it as a workflow file if (workflowFile.exists() && workflowFile.isFile()) { w = getWorkflowFromFile(workflowFile); if (w == null) { throw new Exception("Unable to extract workflow from file: " + workflowFile); } } else { //assume the value is a workflow id and get it from the service //but fail if url is missing failIfOptionSetMissingURLOrLoginOrToken(optionSet, "--" + GEN_OLD_KEPLER_XML_ARG + " flag"); User u = getUserFromOptionSet(optionSet); WorkflowRestDAOImpl workflowDAO = new WorkflowRestDAOImpl(); workflowDAO.setRestURL((String) optionSet.valueOf(URL_ARG)); workflowDAO.setUser(u); w = workflowDAO.getWorkflowById(workflowFileOrId, u); if (w == null) { throw new Exception("Unable to extract workflow from id: " + workflowFileOrId); } } VersionOneWorkflowXmlWriter xmlWriter = new VersionOneWorkflowXmlWriter(); StringWriter sw = new StringWriter(); xmlWriter.write(sw, w); System.out.println(sw.toString()); System.exit(0); } if (optionSet.has(PREVIEW_WORKFLOW_ARG)) { failIfOptionSetMissingURL(optionSet, "--" + PREVIEW_WORKFLOW_ARG + " flag"); File 
workflowFile = (File) optionSet.valueOf(PREVIEW_WORKFLOW_ARG); Workflow w = getWorkflowFromFile(workflowFile); if (w == null) { throw new Exception("Unable to extract workflow from file"); } uploadPreviewWorkflowFile((String) optionSet.valueOf(URL_ARG), w); System.exit(0); } if (optionSet.has(REGISTER_FILE_ARG)) { addNewWorkspaceFile(optionSet, false, REGISTER_FILE_ARG); System.exit(0); } if (optionSet.has(RESAVE_WORKSPACEFILE_ARG)) { failIfOptionSetMissingURLOrLoginOrToken(optionSet, "--" + RESAVE_WORKSPACEFILE_ARG + " flag"); WorkspaceFileRestDAOImpl workspaceFileDAO = new WorkspaceFileRestDAOImpl(); User u = getUserFromOptionSet(optionSet); workspaceFileDAO.setUser(u); workspaceFileDAO.setRestURL((String) optionSet.valueOf(URL_ARG)); Long workspaceId = (Long) optionSet.valueOf(RESAVE_WORKSPACEFILE_ARG); if (workspaceId == -1) { System.out.println("Resaving all workspace files"); List<WorkspaceFile> wsfList = workspaceFileDAO.getWorkspaceFiles(null, null, null, null, null); if (wsfList != null) { System.out.println("Found " + wsfList.size() + " workspace files to resave"); for (WorkspaceFile wsf : wsfList) { System.out.println("WorkspaceFile Id: " + wsf.getId()); workspaceFileDAO.resave(wsf.getId()); } } } else { workspaceFileDAO.resave(workspaceId); } System.exit(0); } if (optionSet.has(RESAVE_JOB_ARG)) { failIfOptionSetMissingURLOrLoginOrToken(optionSet, "--" + RESAVE_JOB_ARG + " flag"); JobRestDAOImpl jobDAO = new JobRestDAOImpl(); User u = getUserFromOptionSet(optionSet); jobDAO.setUser(u); jobDAO.setRestURL((String) optionSet.valueOf(URL_ARG)); Long jobId = (Long) optionSet.valueOf(RESAVE_JOB_ARG); if (jobId == -1) { System.out.println("Resaving all jobs"); List<Job> jobList = jobDAO.getJobs(null, null, null, true, true, Boolean.TRUE); if (jobList != null) { System.out.println("Found " + jobList.size() + " jobs to resave"); for (Job j : jobList) { System.out.println("job id: " + j.getId()); jobDAO.resave(j.getId()); } } } else { jobDAO.resave(jobId); } 
System.exit(0); } if (optionSet.has(RESAVE_WORKFLOW_ARG)) { failIfOptionSetMissingURLOrLoginOrToken(optionSet, "--" + RESAVE_WORKFLOW_ARG + " flag"); WorkflowRestDAOImpl workflowDAO = new WorkflowRestDAOImpl(); User u = getUserFromOptionSet(optionSet); workflowDAO.setUser(u); workflowDAO.setRestURL((String) optionSet.valueOf(URL_ARG)); Long workflowId = (Long) optionSet.valueOf(RESAVE_WORKFLOW_ARG); if (workflowId == -1) { System.out.println("Resaving all workflows"); List<Workflow> workflowList = workflowDAO.getAllWorkflows(true, Boolean.TRUE); if (workflowList != null) { System.out.println("Found " + workflowList.size() + " workflow(s) to resave"); for (Workflow w : workflowList) { System.out.println("workflow id: " + w.getId()); workflowDAO.resave(w.getId()); } } } else { workflowDAO.resave(workflowId); } System.exit(0); } if (optionSet.has(UPDATE_PATH_ARG)) { failIfOptionSetMissingURLOrLoginOrToken(optionSet, "--" + UPDATE_PATH_ARG + " flag"); User u = getUserFromOptionSet(optionSet); String workspaceId = (String) optionSet.valueOf(UPDATE_PATH_ARG); String path = null; if (optionSet.has(PATH_ARG)) { path = (String) optionSet.valueOf(PATH_ARG); } String size = null; if (optionSet.has(SIZE_ARG)) { size = ((Long) optionSet.valueOf(SIZE_ARG)).toString(); } if (optionSet.has(MD5_ARG)) { //wsp.setMd5((String)optionSet.valueOf(MD5_ARG)); } Boolean isFailed = null; if (optionSet.has(WORKSPACE_FILE_FAILED_ARG)) { isFailed = (Boolean) optionSet.valueOf(WORKSPACE_FILE_FAILED_ARG); } WorkspaceFileRestDAOImpl workspaceFileDAO = new WorkspaceFileRestDAOImpl(); workspaceFileDAO.setUser(u); workspaceFileDAO.setRestURL((String) optionSet.valueOf(URL_ARG)); workspaceFileDAO.updatePathSizeAndFailStatus(Long.parseLong(workspaceId), path, size, isFailed); System.exit(0); } if (optionSet.has(SYNC_WITH_CLUSTER_ARG)) { // @TODO NEED TO MAKE JOPT DO THIS REQUIRED FLAG CHECKING STUFF if (!optionSet.has(WF_EXEC_DIR_ARG)) { System.err.println( "-" + WF_EXEC_DIR_ARG + " is required with -" 
+ SYNC_WITH_CLUSTER_ARG + " flag"); System.exit(2); } if (!optionSet.has(WF_DIR_ARG)) { System.err.println("-" + WF_DIR_ARG + " is required with -" + SYNC_WITH_CLUSTER_ARG + " flag"); System.exit(3); } if (!optionSet.has(KEPLER_SCRIPT_ARG)) { System.err.println( "-" + KEPLER_SCRIPT_ARG + " is required with -" + SYNC_WITH_CLUSTER_ARG + " flag"); System.exit(4); } if (!optionSet.has(CAST_ARG)) { System.err.println("-" + CAST_ARG + " is required with -" + SYNC_WITH_CLUSTER_ARG + " flag"); System.exit(5); } if (!optionSet.has(STAT_ARG)) { System.err.println("-" + STAT_ARG + " is required with -" + SYNC_WITH_CLUSTER_ARG + " flag"); System.exit(6); } if (!optionSet.has(QUEUE_ARG)) { System.err.println("-" + QUEUE_ARG + " is required with -" + SYNC_WITH_CLUSTER_ARG + " flag"); System.exit(7); } if (!optionSet.has(REGISTER_JAR_ARG)) { System.err.println( "-" + REGISTER_JAR_ARG + " is required with -" + SYNC_WITH_CLUSTER_ARG + " flag"); System.exit(8); } failIfOptionSetMissingLoginOrToken(optionSet, "--" + SYNC_WITH_CLUSTER_ARG + " flag"); File castFile = (File) optionSet.valueOf(CAST_ARG); String castPath = castFile.getAbsolutePath(); File statFile = (File) optionSet.valueOf(STAT_ARG); String statPath = statFile.getAbsolutePath(); String queue = (String) optionSet.valueOf(QUEUE_ARG); File wfExecDir = (File) optionSet.valueOf(WF_EXEC_DIR_ARG); File wfDir = (File) optionSet.valueOf(WF_DIR_ARG); File keplerScript = (File) optionSet.valueOf(KEPLER_SCRIPT_ARG); String registerJar = null; if (optionSet.has(REGISTER_JAR_ARG)) { File registerJarFile = (File) optionSet.valueOf(REGISTER_JAR_ARG); registerJar = registerJarFile.getAbsolutePath(); } JobEmailNotificationData emailNotifyData = getJobEmailNotificationData(optionSet); User u = getUserFromOptionSet(optionSet); ObjectifyService.ofy(); String url = (String) optionSet.valueOf(SYNC_WITH_CLUSTER_ARG); JobRestDAOImpl jobDAO = new JobRestDAOImpl(); jobDAO.setRestURL(url); jobDAO.setUser(u); System.out.println("Running sync with 
cluster"); WorkspaceFileRestDAOImpl workspaceFileDAO = new WorkspaceFileRestDAOImpl(); workspaceFileDAO.setRestURL(url); workspaceFileDAO.setUser(u); JobPath jobPath = new JobPathImpl(wfExecDir.getAbsolutePath()); WorkspaceFilePathSetterImpl pathSetter = new WorkspaceFilePathSetterImpl(workspaceFileDAO); // Submit jobs to scheduler JobSubmissionManager submitter = new JobSubmissionManager(jobDAO, workspaceFileDAO, pathSetter, jobPath, wfDir.getAbsolutePath(), keplerScript.getAbsolutePath(), castPath, queue, u, url, registerJar, emailNotifyData); submitter.submitJobs(); // Update job status for all jobs in system MapOfJobStatusFactoryImpl jobStatusFactory = new MapOfJobStatusFactoryImpl(statPath); WorkflowFailedParser workflowFailedParser = new WorkflowFailedParserImpl(); JobStatusUpdater updater = new JobStatusUpdater(jobDAO, jobStatusFactory, workflowFailedParser, jobPath); updater.updateJobs(); System.exit(0); } if (optionSet.has(App.GET_WORKSPACE_FILE_INFO_ARG)) { failIfOptionSetMissingURLOrLoginOrToken(optionSet, "--" + GET_WORKSPACE_FILE_INFO_ARG + " flag"); WorkspaceFileRestDAOImpl workspaceFileDAO = new WorkspaceFileRestDAOImpl(); workspaceFileDAO.setRestURL((String) optionSet.valueOf(URL_ARG)); List<WorkspaceFile> wsFiles = workspaceFileDAO .getWorkspaceFilesById((String) optionSet.valueOf(GET_WORKSPACE_FILE_INFO_ARG), null); if (wsFiles != null) { ObjectMapper om = new ObjectMapper(); ObjectWriter ow = om.writerWithDefaultPrettyPrinter(); System.out.print("["); boolean first = true; for (WorkspaceFile wsf : wsFiles) { if (first == false) { System.out.println(","); } else { first = false; } System.out.print(ow.writeValueAsString(wsf)); } System.out.println("]"); } else { System.err.println("[]"); } System.exit(0); } if (optionSet.has(UPLOAD_FILE_ARG)) { addNewWorkspaceFile(optionSet, true, UPLOAD_FILE_ARG); System.exit(0); } if (optionSet.has(UPLOAD_WF_ARG)) { Long parentWfId = null; String postURL = null; if (optionSet.has(URL_ARG)) { postURL = (String) 
optionSet.valueOf(URL_ARG); failIfOptionSetMissingLoginOrToken(optionSet, "--" + UPLOAD_WF_ARG + " and --" + URL_ARG + " flag"); } File workflowFile = (File) optionSet.valueOf(UPLOAD_WF_ARG); Workflow w = getWorkflowFromFile(workflowFile); if (w != null) { if (optionSet.has(OWNER_ARG)) { w.setOwner((String) optionSet.valueOf(OWNER_ARG)); } ObjectMapper om = new ObjectMapper(); if (parentWfId != null) { w.setId(parentWfId); } if (postURL == null) { System.out.println("\n--- JSON Representation of Workflow ---"); ObjectWriter ow = om.writerWithDefaultPrettyPrinter(); System.out.println(ow.writeValueAsString(w)); System.out.flush(); System.out.println("---------------------------------------"); } else { postURL = new StringBuilder().append(postURL).append(Constants.SLASH) .append(Constants.REST_PATH).append(Constants.SLASH) .append(Constants.WORKFLOWS_PATH).toString(); ClientConfig cc = new DefaultClientConfig(); cc.getClasses().add(StringProvider.class); cc.getClasses().add(MultiPartWriter.class); Client client = Client.create(cc); client.setFollowRedirects(true); WebResource resource = client.resource(postURL); String workflowAsJson = om.writeValueAsString(w); User u = getUserFromOptionSet(optionSet); client.addFilter(new HTTPBasicAuthFilter(u.getLogin(), u.getToken())); MultivaluedMap queryParams = multivaluedMapFactory.getMultivaluedMap(u); String response = resource.queryParams(queryParams).type(MediaType.APPLICATION_JSON_TYPE) .entity(workflowAsJson).post(String.class); Workflow workflowRes = om.readValue(response, Workflow.class); ObjectWriter ow = om.writerWithDefaultPrettyPrinter(); if (workflowRes.getWorkflowFileUploadURL() == null) { throw new Exception( "No upload url found for workflow!!!" + ow.writeValueAsString(workflowRes)); } uploadWorkflowFile(workflowRes, workflowFile); } } } } catch (Exception ex) { ex.printStackTrace(); System.err.println("Caught Exception: " + ex.getMessage()); System.exit(2); } System.exit(0); }
From source file:act.installer.pubchem.PubchemTTLMerger.java
public static void main(String[] args) throws Exception { org.apache.commons.cli.Options opts = new org.apache.commons.cli.Options(); for (Option.Builder b : OPTION_BUILDERS) { opts.addOption(b.build());/* w w w . j a v a 2 s . c o m*/ } CommandLine cl = null; try { CommandLineParser parser = new DefaultParser(); cl = parser.parse(opts, args); } catch (ParseException e) { System.err.format("Argument parsing failed: %s\n", e.getMessage()); HELP_FORMATTER.printHelp(PubchemTTLMerger.class.getCanonicalName(), HELP_MESSAGE, opts, null, true); System.exit(1); } if (cl.hasOption("help")) { HELP_FORMATTER.printHelp(PubchemTTLMerger.class.getCanonicalName(), HELP_MESSAGE, opts, null, true); return; } PubchemTTLMerger merger = new PubchemTTLMerger(); File rocksDBFile = new File(cl.getOptionValue(OPTION_INDEX_PATH)); if (cl.hasOption(OPTION_ONLY_MERGE)) { if (!(rocksDBFile.exists() && rocksDBFile.isDirectory())) { System.err.format("Must specify an existing RocksDB index when using '%s'.\n", OPTION_ONLY_MERGE); HELP_FORMATTER.printHelp(PubchemTTLMerger.class.getCanonicalName(), HELP_MESSAGE, opts, null, true); System.exit(1); } merger.finish(merger.merge(rocksDBFile)); return; } File rdfDir = new File(cl.getOptionValue(OPTION_RDF_DIRECTORY)); if (!rdfDir.isDirectory()) { System.err.format("Must specify a directory of RDF files to be parsed.\n"); HELP_FORMATTER.printHelp(PubchemTTLMerger.class.getCanonicalName(), HELP_MESSAGE, opts, null, true); System.exit(1); } File[] filesInDirectoryArray = rdfDir.listFiles(new FilenameFilter() { private static final String TTL_GZ_SUFFIX = ".ttl.gz"; @Override public boolean accept(File dir, String name) { return name.endsWith(TTL_GZ_SUFFIX); } }); if (filesInDirectoryArray == null || filesInDirectoryArray.length == 0) { System.err.format("Found zero compressed TTL files in directory at '%s'.\n", rdfDir.getAbsolutePath()); HELP_FORMATTER.printHelp(PubchemTTLMerger.class.getCanonicalName(), HELP_MESSAGE, opts, null, true); System.exit(1); } 
// Sort files for stability/sanity. List<File> filesInDirectory = Arrays.asList(filesInDirectoryArray); Collections.sort(filesInDirectory); if (cl.hasOption(OPTION_ONLY_SYNONYMS)) { filesInDirectory = filterByFileContents(filesInDirectory, PC_RDF_DATA_FILE_CONFIG.HASH_TO_SYNONYM); } if (cl.hasOption(OPTION_ONLY_MESH)) { filesInDirectory = filterByFileContents(filesInDirectory, PC_RDF_DATA_FILE_CONFIG.HASH_TO_MESH); } if (cl.hasOption(OPTION_ONLY_PUBCHEM_IDS)) { filesInDirectory = filterByFileContents(filesInDirectory, PC_RDF_DATA_FILE_CONFIG.HASH_TO_CID); } if (filesInDirectory.size() == 0) { System.err.format( "Arrived at index initialization with no files to process. " + "Maybe too many filters were specified? synonyms: %s, MeSH: %s, Pubchem ids: %s\n", cl.hasOption(OPTION_ONLY_SYNONYMS), cl.hasOption(OPTION_ONLY_MESH), cl.hasOption(OPTION_ONLY_PUBCHEM_IDS)); HELP_FORMATTER.printHelp(PubchemTTLMerger.class.getCanonicalName(), HELP_MESSAGE, opts, null, true); System.exit(1); } RocksDB.loadLibrary(); Pair<RocksDB, Map<COLUMN_FAMILIES, ColumnFamilyHandle>> dbAndHandles = null; try { if (rocksDBFile.exists()) { if (!cl.hasOption(OPTION_OPEN_EXISTING_OKAY)) { System.err.format( "Index directory at '%s' already exists, delete before retrying or add '%s' option to reuse.\n", rocksDBFile.getAbsolutePath(), OPTION_OPEN_EXISTING_OKAY); HELP_FORMATTER.printHelp(PubchemTTLMerger.class.getCanonicalName(), HELP_MESSAGE, opts, null, true); System.exit(1); } else { LOGGER.info("Reusing existing index at %s", rocksDBFile.getAbsolutePath()); dbAndHandles = openExistingRocksDB(rocksDBFile); } } else { LOGGER.info("Creating new index at %s", rocksDBFile.getAbsolutePath()); dbAndHandles = createNewRocksDB(rocksDBFile); } merger.buildIndex(dbAndHandles, filesInDirectory); merger.merge(dbAndHandles); } finally { if (dbAndHandles != null) { merger.finish(dbAndHandles); } } }
From source file:edu.brown.costmodel.SingleSitedCostModel.java
/** * MAIN!//from w w w . j a v a 2 s . c om * * @param vargs * @throws Exception */ public static void main(String[] vargs) throws Exception { ArgumentsParser args = ArgumentsParser.load(vargs); args.require(ArgumentsParser.PARAM_CATALOG, ArgumentsParser.PARAM_WORKLOAD, ArgumentsParser.PARAM_PARTITION_PLAN); assert (args.workload.getTransactionCount() > 0) : "No transactions were loaded from " + args.workload_path; if (args.hasParam(ArgumentsParser.PARAM_CATALOG_HOSTS)) { ClusterConfiguration cc = new ClusterConfiguration(args.getParam(ArgumentsParser.PARAM_CATALOG_HOSTS)); args.updateCatalog(FixCatalog.addHostInfo(args.catalog, cc), null); } // Enable compact output final boolean table_output = (args.getOptParams().contains("table")); // If given a PartitionPlan, then update the catalog File pplan_path = new File(args.getParam(ArgumentsParser.PARAM_PARTITION_PLAN)); PartitionPlan pplan = new PartitionPlan(); pplan.load(pplan_path.getAbsolutePath(), args.catalog_db); if (args.getBooleanParam(ArgumentsParser.PARAM_PARTITION_PLAN_REMOVE_PROCS, false)) { for (Procedure catalog_proc : pplan.proc_entries.keySet()) { pplan.setNullProcParameter(catalog_proc); } // FOR } if (args.getBooleanParam(ArgumentsParser.PARAM_PARTITION_PLAN_RANDOM_PROCS, false)) { for (Procedure catalog_proc : pplan.proc_entries.keySet()) { pplan.setRandomProcParameter(catalog_proc); } // FOR } pplan.apply(args.catalog_db); System.out.println("Applied PartitionPlan '" + pplan_path + "' to catalog\n" + pplan); System.out.print(StringUtil.DOUBLE_LINE); // if (!table_output) { // // } // } else if (!table_output) { // System.err.println("PartitionPlan file '" + pplan_path + // "' does not exist. 
Ignoring..."); // } if (args.hasParam(ArgumentsParser.PARAM_PARTITION_PLAN_OUTPUT)) { String output = args.getParam(ArgumentsParser.PARAM_PARTITION_PLAN_OUTPUT); if (output.equals("-")) output = pplan_path.getAbsolutePath(); pplan.save(output); System.out.println("Saved PartitionPlan to '" + output + "'"); } System.out.flush(); // TODO: REMOVE STORED PROCEDURE ROUTING FOR SCHISM long singlepartition = 0; long multipartition = 0; long total = 0; SingleSitedCostModel costmodel = new SingleSitedCostModel(args.catalog_db); Collection<Integer> all_partitions = CatalogUtil.getAllPartitionIds(args.catalog_db); // costmodel.setEntropyWeight(4.0); // costmodel.setJavaExecutionWeightEnabled(true); // costmodel.setJavaExecutionWeight(100); // XXX: 2011-10-28 costmodel.setCachingEnabled(true); Histogram<String> hist = new Histogram<String>(); for (int i = 0; i < 2; i++) { ProfileMeasurement time = new ProfileMeasurement("costmodel").start(); hist.clear(); for (AbstractTraceElement<? extends CatalogType> element : args.workload) { if (element instanceof TransactionTrace) { total++; TransactionTrace xact = (TransactionTrace) element; boolean is_singlesited = costmodel.processTransaction(args.catalog_db, xact, null).singlesited; if (is_singlesited) { singlepartition++; hist.put(xact.getCatalogItemName()); } else { multipartition++; if (!hist.contains(xact.getCatalogItemName())) hist.put(xact.getCatalogItemName(), 0); } } } // FOR System.err.println("ESTIMATE TIME: " + time.stop().getTotalThinkTimeSeconds()); break; // XXX } // FOR // long total_partitions_touched_txns = // costmodel.getTxnPartitionAccessHistogram().getSampleCount(); // long total_partitions_touched_queries = // costmodel.getQueryPartitionAccessHistogram().getSampleCount(); Histogram<Integer> h = null; if (!table_output) { System.out.println("Workload Procedure Histogram:"); System.out.println(StringUtil.addSpacers(args.workload.getProcedureHistogram().toString())); System.out.print(StringUtil.DOUBLE_LINE); 
System.out.println("SinglePartition Procedure Histogram:"); System.out.println(StringUtil.addSpacers(hist.toString())); System.out.print(StringUtil.DOUBLE_LINE); System.out.println("Java Execution Histogram:"); h = costmodel.getJavaExecutionHistogram(); h.setKeepZeroEntries(true); h.putAll(all_partitions, 0); System.out.println(StringUtil.addSpacers(h.toString())); System.out.print(StringUtil.DOUBLE_LINE); System.out.println("Transaction Partition Histogram:"); h = costmodel.getTxnPartitionAccessHistogram(); h.setKeepZeroEntries(true); h.putAll(all_partitions, 0); System.out.println(StringUtil.addSpacers(h.toString())); System.out.print(StringUtil.DOUBLE_LINE); System.out.println("Query Partition Touch Histogram:"); h = costmodel.getQueryPartitionAccessHistogram(); h.setKeepZeroEntries(true); h.putAll(all_partitions, 0); System.out.println(StringUtil.addSpacers(h.toString())); System.out.print(StringUtil.DOUBLE_LINE); } Map<String, Object> maps[] = new Map[2]; int idx = 0; ListOrderedMap<String, Object> m = null; // Execution Cost m = new ListOrderedMap<String, Object>(); m.put("SINGLE-PARTITION", singlepartition); m.put("MULTI-PARTITION", multipartition); m.put("TOTAL", total + " [" + singlepartition / (double) total + "]"); m.put("PARTITIONS TOUCHED (TXNS)", costmodel.getTxnPartitionAccessHistogram().getSampleCount()); m.put("PARTITIONS TOUCHED (QUERIES)", costmodel.getQueryPartitionAccessHistogram().getSampleCount()); maps[idx++] = m; // Utilization m = new ListOrderedMap<String, Object>(); costmodel.getJavaExecutionHistogram().setKeepZeroEntries(false); int active_partitions = costmodel.getJavaExecutionHistogram().getValueCount(); m.put("ACTIVE PARTITIONS", active_partitions); m.put("IDLE PARTITIONS", (all_partitions.size() - active_partitions)); // System.out.println("Partitions Touched By Queries: " + // total_partitions_touched_queries); Histogram<Integer> entropy_h = costmodel.getJavaExecutionHistogram(); m.put("JAVA SKEW", 
SkewFactorUtil.calculateSkew(all_partitions.size(), entropy_h.getSampleCount(), entropy_h)); entropy_h = costmodel.getTxnPartitionAccessHistogram(); m.put("TRANSACTION SKEW", SkewFactorUtil.calculateSkew(all_partitions.size(), entropy_h.getSampleCount(), entropy_h)); // TimeIntervalCostModel<SingleSitedCostModel> timecostmodel = new // TimeIntervalCostModel<SingleSitedCostModel>(args.catalog_db, // SingleSitedCostModel.class, 1); // timecostmodel.estimateCost(args.catalog_db, args.workload); // double entropy = timecostmodel.getLastEntropyCost() m.put("UTILIZATION", (costmodel.getJavaExecutionHistogram().getValueCount() / (double) all_partitions.size())); maps[idx++] = m; System.out.println(StringUtil.formatMaps(maps)); }