List of usage examples for java.io.File.mkdirs()
public boolean mkdirs()
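mkdirs() creates the directory named by the File's abstract pathname, including any necessary but nonexistent parent directories. It returns true only if the directory was created together with all needed parents; it returns false both when creation fails and when the directory already exists, reporting failure through the return value rather than by throwing. Note that a failed call may still have created some of the parent directories. A minimal sketch of the check-then-create pattern used throughout the examples below (the target path is hypothetical):

import java.io.File;
import java.io.IOException;

public class MkdirsExample {
    public static void main(String[] args) throws IOException {
        File outputDir = new File("build/reports/html"); // hypothetical target path
        // mkdirs() returns false on failure instead of throwing, so check the result
        if (!outputDir.exists() && !outputDir.mkdirs()) {
            throw new IOException("Cannot create directory " + outputDir);
        }
    }
}

On Java 7 and later, java.nio.file.Files.createDirectories(outputDir.toPath()) is a common alternative that throws a descriptive IOException on failure and does not fail when the directory already exists.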
From source file:de.tudarmstadt.ukp.experiments.argumentation.convincingness.sampling.Step4MTurkOutputCollector.java
@SuppressWarnings("unchecked") public static void main(String[] args) throws Exception { String inputDirWithArgumentPairs = args[0]; File[] resultFiles;//from w w w . j a v a2 s .c o m if (args[1].contains("*")) { File path = new File(args[1]); File directory = path.getParentFile(); String regex = path.getName().replaceAll("\\*", ""); List<File> files = new ArrayList<>(FileUtils.listFiles(directory, new String[] { regex }, false)); resultFiles = new File[files.size()]; for (int i = 0; i < files.size(); i++) { resultFiles[i] = files.get(i); } } else { // result file is a comma-separated list of CSV files from MTurk String[] split = args[1].split(","); resultFiles = new File[split.length]; for (int i = 0; i < split.length; i++) { resultFiles[i] = new File(split[i]); } } File outputDir = new File(args[2]); if (!outputDir.exists()) { if (!outputDir.mkdirs()) { throw new IOException("Cannot create directory " + outputDir); } } // error if output folder not empty to prevent any confusion by mixing files if (!FileUtils.listFiles(outputDir, null, false).isEmpty()) { throw new IllegalArgumentException("Output dir " + outputDir + " is not empty"); } // collected assignments with empty reason for rejections Set<String> assignmentsWithEmptyReason = new HashSet<>(); // parse with first line as header MTurkOutputReader mTurkOutputReader = new MTurkOutputReader(resultFiles); Collection<File> files = FileUtils.listFiles(new File(inputDirWithArgumentPairs), new String[] { "xml" }, false); if (files.isEmpty()) { throw new IOException("No xml files found in " + inputDirWithArgumentPairs); } // statistics: how many hits with how many assignments ; hit ID / assignments Map<String, Map<String, Integer>> assignmentsPerHits = new HashMap<>(); // collect accept/reject statistics for (Map<String, String> record : mTurkOutputReader) { boolean wasRejected = "Rejected".equals(record.get("assignmentstatus")); String hitID = record.get("hitid"); String hitTypeId = record.get("hittypeid"); if (!wasRejected) { // update statistics if (!assignmentsPerHits.containsKey(hitTypeId)) { assignmentsPerHits.put(hitTypeId, new HashMap<String, Integer>()); } if (!assignmentsPerHits.get(hitTypeId).containsKey(hitID)) { assignmentsPerHits.get(hitTypeId).put(hitID, 0); } assignmentsPerHits.get(hitTypeId).put(hitID, assignmentsPerHits.get(hitTypeId).get(hitID) + 1); } } // statistics: how many hits with how many assignments ; hit ID / assignments Map<String, Integer> approvedAssignmentsPerHit = new HashMap<>(); Map<String, Integer> rejectedAssignmentsPerHit = new HashMap<>(); // collect accept/reject statistics for (Map<String, String> record : mTurkOutputReader) { boolean approved = "Approved".equals(record.get("assignmentstatus")); boolean rejected = "Rejected".equals(record.get("assignmentstatus")); String hitID = record.get("hitid"); if (approved) { // update statistics if (!approvedAssignmentsPerHit.containsKey(hitID)) { approvedAssignmentsPerHit.put(hitID, 0); } approvedAssignmentsPerHit.put(hitID, approvedAssignmentsPerHit.get(hitID) + 1); } else if (rejected) { // update statistics if (!rejectedAssignmentsPerHit.containsKey(hitID)) { rejectedAssignmentsPerHit.put(hitID, 0); } rejectedAssignmentsPerHit.put(hitID, rejectedAssignmentsPerHit.get(hitID) + 1); } else { throw new IllegalStateException( "Unknown state: " + record.get("assignmentstatus") + " HITID: " + hitID); } } // System.out.println("Approved: " + approvedAssignmentsPerHit); // System.out.println("Rejected: " + rejectedAssignmentsPerHit); System.out.println("Approved 
(values): " + new HashSet<>(approvedAssignmentsPerHit.values())); System.out.println("Rejected (values): " + new HashSet<>(rejectedAssignmentsPerHit.values())); // rejection statistics int totalRejected = 0; for (Map.Entry<String, Integer> rejectionEntry : rejectedAssignmentsPerHit.entrySet()) { totalRejected += rejectionEntry.getValue(); } System.out.println("Total rejections: " + totalRejected); /* // generate .success files for adding more annotations for (File resultFile : resultFiles) { String hitTypeID = mTurkOutputReader.getHitTypeIdForFile().get(resultFile); // assignments for that hittypeid (= file) Map<String, Integer> assignments = assignmentsPerHits.get(hitTypeID); prepareUpdateHITsFiles(assignments, hitTypeID, resultFile); } */ int totalSavedPairs = 0; // load all previously prepared argument pairs for (File file : files) { List<ArgumentPair> argumentPairs = (List<ArgumentPair>) XStreamTools.getXStream().fromXML(file); List<AnnotatedArgumentPair> annotatedArgumentPairs = new ArrayList<>(); for (ArgumentPair argumentPair : argumentPairs) { AnnotatedArgumentPair annotatedArgumentPair = new AnnotatedArgumentPair(argumentPair); // is there such an answer? String key = "Answer." + argumentPair.getId(); // iterate only if there is such column to save time if (mTurkOutputReader.getColumnNames().contains(key)) { // now find the results for (Map<String, String> record : mTurkOutputReader) { if (record.containsKey(key)) { // extract the values AnnotatedArgumentPair.MTurkAssignment assignment = new AnnotatedArgumentPair.MTurkAssignment(); boolean wasRejected = "Rejected".equals(record.get("assignmentstatus")); // only non-rejected (if required) if (!wasRejected) { String hitID = record.get("hitid"); String workerID = record.get("workerid"); String assignmentId = record.get("assignmentid"); try { assignment.setAssignmentAcceptTime( DATE_FORMAT.parse(record.get("assignmentaccepttime"))); assignment.setAssignmentSubmitTime( DATE_FORMAT.parse(record.get("assignmentsubmittime"))); assignment.setHitComment(record.get("Answer.feedback")); assignment.setHitID(hitID); assignment.setTurkID(workerID); assignment.setAssignmentId(assignmentId); // and answer specific fields String valueRaw = record.get(key); // so far the label has had format aXXX_aYYY_a1, aXXX_aYYY_a2, or aXXX_aYYY_equal // strip now only true label String label = valueRaw.split("_")[2]; assignment.setValue(label); String reason = record.get(key + "_reason"); // missing reason if (reason == null) { assignmentsWithEmptyReason.add(assignmentId); } else { assignment.setReason(reason); // get worker's stance String stanceRaw = record.get(key + "_stance"); if (stanceRaw != null) { // parse stance String stance = stanceRaw.split("_stance_")[1]; assignment.setWorkerStance(stance); } // we take maximal 5 assignments Collections.sort(annotatedArgumentPair.mTurkAssignments, new Comparator<AnnotatedArgumentPair.MTurkAssignment>() { @Override public int compare(AnnotatedArgumentPair.MTurkAssignment o1, AnnotatedArgumentPair.MTurkAssignment o2) { return o1.getAssignmentAcceptTime() .compareTo(o2.getAssignmentAcceptTime()); } }); if (annotatedArgumentPair.mTurkAssignments .size() < MAXIMUM_ASSIGNMENTS_PER_HIT) { annotatedArgumentPair.mTurkAssignments.add(assignment); } } } catch (IllegalArgumentException | NullPointerException ex) { System.err.println("Malformed annotations for HIT " + hitID + ", worker " + workerID + ", assignment " + assignmentId + "; " + ex.getMessage() + ", full record: " + record); } } } } } // and if there are some 
annotations, add it to the result set if (!annotatedArgumentPair.mTurkAssignments.isEmpty()) { annotatedArgumentPairs.add(annotatedArgumentPair); } } if (!annotatedArgumentPairs.isEmpty()) { File outputFile = new File(outputDir, file.getName()); XStreamTools.toXML(annotatedArgumentPairs, outputFile); System.out.println("Saved " + annotatedArgumentPairs.size() + " annotated pairs to " + outputFile); totalSavedPairs += annotatedArgumentPairs.size(); } } System.out.println("Total saved " + totalSavedPairs + " pairs"); // print assignments with empty reasons if (!assignmentsWithEmptyReason.isEmpty()) { System.out.println( "== Assignments with empty reason:\nassignmentIdToReject\tassignmentIdToRejectComment"); for (String assignmentId : assignmentsWithEmptyReason) { System.out.println( assignmentId + "\t\"Dear worker, you did not fill the required field with a reason.\""); } } }
From source file:com.meltmedia.rodimus.RodimusCli.java
public static void main(String... args) {
    try {
        final Cli<RodimusInterface> cli = CliFactory.createCli(RodimusInterface.class);
        final RodimusInterface options = cli.parseArguments(args);

        // if help was requested, then display the help message and exit.
        if (options.isHelp()) {
            System.out.println(cli.getHelpMessage());
            return;
        }

        final boolean verbose = options.isVerbose();

        if (options.getFiles() == null || options.getFiles().size() < 1) {
            System.out.println(cli.getHelpMessage());
            return;
        }

        // get the input file.
        File inputFile = options.getFiles().get(0);

        // get the output file.
        File outputDir = null;
        if (options.getFiles().size() > 1) {
            outputDir = options.getFiles().get(1);
        } else {
            outputDir = new File(inputFile.getName().replaceFirst("\\.[^.]+\\Z", ""));
        }

        if (outputDir.exists() && !outputDir.isDirectory()) {
            throw new Exception(outputDir + " is not a directory.");
        }

        outputDir.mkdirs();

        transformDocument(inputFile, outputDir, verbose);
    } catch (Exception e) {
        e.printStackTrace(System.err);
    }
}
From source file:eu.fbk.dkm.sectionextractor.pantheon.WikipediaGoodTextExtractor.java
public static void main(String args[]) throws IOException {
    CommandLineWithLogger commandLineWithLogger = new CommandLineWithLogger();

    commandLineWithLogger.addOption(OptionBuilder.withArgName("file").hasArg()
            .withDescription("wikipedia xml dump file").isRequired().withLongOpt("wikipedia-dump").create("d"));
    commandLineWithLogger.addOption(OptionBuilder.withArgName("dir").hasArg()
            .withDescription("output directory in which to store output files").isRequired()
            .withLongOpt("output-dir").create("o"));
    commandLineWithLogger
            .addOption(OptionBuilder.withDescription("use NAF format").withLongOpt("naf").create("n"));
    commandLineWithLogger.addOption(OptionBuilder.withDescription("tokenize and ssplit with Stanford")
            .withLongOpt("stanford").create("s"));
    commandLineWithLogger.addOption(OptionBuilder.withArgName("file").hasArg().withDescription("Filter file")
            .withLongOpt("filter").create("f"));
    commandLineWithLogger.addOption(OptionBuilder.withArgName("file").hasArg()
            .withDescription("ID and category file").withLongOpt("idcat").create("i"));
    commandLineWithLogger.addOption(OptionBuilder.withArgName("file").hasArg().withDescription("Redirect file")
            .withLongOpt("redirect").create("r"));
    commandLineWithLogger.addOption(OptionBuilder.withArgName("int").hasArg()
            .withDescription(
                    "number of threads (default " + AbstractWikipediaXmlDumpParser.DEFAULT_THREADS_NUMBER + ")")
            .withLongOpt("num-threads").create("t"));
    commandLineWithLogger.addOption(OptionBuilder.withArgName("int").hasArg()
            .withDescription("number of pages to process (default all)").withLongOpt("num-pages").create("p"));
    commandLineWithLogger.addOption(OptionBuilder.withArgName("int").hasArg()
            .withDescription("receive notification every n pages (default "
                    + AbstractWikipediaExtractor.DEFAULT_NOTIFICATION_POINT + ")")
            .withLongOpt("notification-point").create("b"));
    commandLineWithLogger.addOption(new Option("n", "NAF format"));

    CommandLine commandLine = null;
    try {
        commandLine = commandLineWithLogger.getCommandLine(args);
        PropertyConfigurator.configure(commandLineWithLogger.getLoggerProps());
    } catch (Exception e) {
        System.exit(1);
    }

    int numThreads = Integer.parseInt(commandLine.getOptionValue("num-threads",
            Integer.toString(AbstractWikipediaXmlDumpParser.DEFAULT_THREADS_NUMBER)));
    int numPages = Integer.parseInt(commandLine.getOptionValue("num-pages",
            Integer.toString(AbstractWikipediaExtractor.DEFAULT_NUM_PAGES)));
    int notificationPoint = Integer.parseInt(commandLine.getOptionValue("notification-point",
            Integer.toString(AbstractWikipediaExtractor.DEFAULT_NOTIFICATION_POINT)));

    boolean nafFormat = commandLine.hasOption("n");
    boolean useStanford = commandLine.hasOption("s");

    HashMap<Integer, String> idCategory = new HashMap<>();
    String idcatFileName = commandLine.getOptionValue("idcat");
    if (idcatFileName != null) {
        logger.info("Loading categories");
        File idcatFile = new File(idcatFileName);
        if (idcatFile.exists()) {
            List<String> lines = Files.readLines(idcatFile, Charsets.UTF_8);
            for (String line : lines) {
                line = line.trim();
                if (line.length() == 0) {
                    continue;
                }
                String[] parts = line.split("\\s+");
                if (parts.length < 3) {
                    continue;
                }
                idCategory.put(Integer.parseInt(parts[1]), parts[2]);
            }
        }
    }

    HashMap<String, String> redirects = new HashMap<>();
    String redirectFileName = commandLine.getOptionValue("redirect");
    if (redirectFileName != null) {
        logger.info("Loading redirects");
        File redirectFile = new File(redirectFileName);
        if (redirectFile.exists()) {
            List<String> lines = Files.readLines(redirectFile, Charsets.UTF_8);
            for (String line : lines) {
                line = line.trim();
                if (line.length() == 0) {
                    continue;
                }
                String[] parts = line.split("\\t+");
                if (parts.length < 2) {
                    continue;
                }
                redirects.put(parts[0], parts[1]);
            }
        }
    }

    HashSet<String> pagesToConsider = null;
    String filterFileName = commandLine.getOptionValue("filter");
    if (filterFileName != null) {
        logger.info("Loading file list");
        File filterFile = new File(filterFileName);
        if (filterFile.exists()) {
            pagesToConsider = new HashSet<>();
            List<String> lines = Files.readLines(filterFile, Charsets.UTF_8);
            for (String line : lines) {
                line = line.trim();
                if (line.length() == 0) {
                    continue;
                }
                line = line.replaceAll("\\s+", "_");
                pagesToConsider.add(line);
                addRedirects(pagesToConsider, redirects, line, 0);
            }
        }
    }

    ExtractorParameters extractorParameters = new ExtractorParameters(
            commandLine.getOptionValue("wikipedia-dump"), commandLine.getOptionValue("output-dir"));

    File outputFolder = new File(commandLine.getOptionValue("output-dir"));
    if (!outputFolder.exists()) {
        boolean mkdirs = outputFolder.mkdirs();
        if (!mkdirs) {
            throw new IOException("Unable to create folder " + outputFolder.getAbsolutePath());
        }
    }

    WikipediaExtractor wikipediaPageParser = new WikipediaGoodTextExtractor(numThreads, numPages,
            extractorParameters.getLocale(), outputFolder, nafFormat, pagesToConsider, useStanford, idCategory);
    wikipediaPageParser.setNotificationPoint(notificationPoint);
    wikipediaPageParser.start(extractorParameters);

    logger.info("extraction ended " + new Date());
}
From source file:fr.jayasoft.ivy.Main.java
public static void main(String[] args) throws Exception {
    Options options = getOptions();

    CommandLineParser parser = new GnuParser();
    try {
        // parse the command line arguments
        CommandLine line = parser.parse(options, args);

        if (line.hasOption("?")) {
            usage(options);
            return;
        }

        if (line.hasOption("debug")) {
            Message.init(new DefaultMessageImpl(Message.MSG_DEBUG));
        } else if (line.hasOption("verbose")) {
            Message.init(new DefaultMessageImpl(Message.MSG_VERBOSE));
        } else if (line.hasOption("warn")) {
            Message.init(new DefaultMessageImpl(Message.MSG_WARN));
        } else if (line.hasOption("error")) {
            Message.init(new DefaultMessageImpl(Message.MSG_ERR));
        } else {
            Message.init(new DefaultMessageImpl(Message.MSG_INFO));
        }

        boolean validate = line.hasOption("novalidate") ? false : true;

        Ivy ivy = new Ivy();
        ivy.addAllVariables(System.getProperties());
        if (line.hasOption("m2compatible")) {
            ivy.setVariable("ivy.default.configuration.m2compatible", "true");
        }

        configureURLHandler(line.getOptionValue("realm", null), line.getOptionValue("host", null),
                line.getOptionValue("username", null), line.getOptionValue("passwd", null));

        String confPath = line.getOptionValue("conf", "");
        if ("".equals(confPath)) {
            ivy.configureDefault();
        } else {
            File conffile = new File(confPath);
            if (!conffile.exists()) {
                error(options, "ivy configuration file not found: " + conffile);
            } else if (conffile.isDirectory()) {
                error(options, "ivy configuration file is not a file: " + conffile);
            }
            ivy.configure(conffile);
        }

        File cache = new File(
                ivy.substitute(line.getOptionValue("cache", ivy.getDefaultCache().getAbsolutePath())));
        if (!cache.exists()) {
            cache.mkdirs();
        } else if (!cache.isDirectory()) {
            error(options, cache + " is not a directory");
        }

        String[] confs;
        if (line.hasOption("confs")) {
            confs = line.getOptionValues("confs");
        } else {
            confs = new String[] { "*" };
        }

        File ivyfile;
        if (line.hasOption("dependency")) {
            String[] dep = line.getOptionValues("dependency");
            if (dep.length != 3) {
                error(options,
                        "dependency should be expressed with exactly 3 arguments: organisation module revision");
            }
            ivyfile = File.createTempFile("ivy", ".xml");
            ivyfile.deleteOnExit();
            DefaultModuleDescriptor md = DefaultModuleDescriptor
                    .newDefaultInstance(ModuleRevisionId.newInstance(dep[0], dep[1] + "-caller", "working"));
            DefaultDependencyDescriptor dd = new DefaultDependencyDescriptor(md,
                    ModuleRevisionId.newInstance(dep[0], dep[1], dep[2]), false, false, true);
            for (int i = 0; i < confs.length; i++) {
                dd.addDependencyConfiguration("default", confs[i]);
            }
            md.addDependency(dd);
            XmlModuleDescriptorWriter.write(md, ivyfile);
            confs = new String[] { "default" };
        } else {
            ivyfile = new File(ivy.substitute(line.getOptionValue("ivy", "ivy.xml")));
            if (!ivyfile.exists()) {
                error(options, "ivy file not found: " + ivyfile);
            } else if (ivyfile.isDirectory()) {
                error(options, "ivy file is not a file: " + ivyfile);
            }
        }

        ResolveReport report = ivy.resolve(ivyfile.toURL(), null, confs, cache, null, validate, false, true,
                line.hasOption("useOrigin"), null);
        if (report.hasError()) {
            System.exit(1);
        }
        ModuleDescriptor md = report.getModuleDescriptor();

        if (confs.length == 1 && "*".equals(confs[0])) {
            confs = md.getConfigurationsNames();
        }
        if (line.hasOption("retrieve")) {
            String retrievePattern = ivy.substitute(line.getOptionValue("retrieve"));
            if (retrievePattern.indexOf("[") == -1) {
                retrievePattern = retrievePattern + "/lib/[conf]/[artifact].[ext]";
            }
            ivy.retrieve(md.getModuleRevisionId().getModuleId(), confs, cache, retrievePattern, null, null,
                    line.hasOption("sync"), line.hasOption("useOrigin"));
        }
        if (line.hasOption("cachepath")) {
            outputCachePath(ivy, cache, md, confs, line.getOptionValue("cachepath", "ivycachepath.txt"));
        }

        if (line.hasOption("revision")) {
            ivy.deliver(md.getResolvedModuleRevisionId(), ivy.substitute(line.getOptionValue("revision")),
                    cache, ivy.substitute(line.getOptionValue("deliverto", "ivy-[revision].xml")),
                    ivy.substitute(line.getOptionValue("status", "release")), null,
                    new DefaultPublishingDRResolver(), validate);
            if (line.hasOption("publish")) {
                ivy.publish(md.getResolvedModuleRevisionId(), ivy.substitute(line.getOptionValue("revision")),
                        cache,
                        ivy.substitute(line.getOptionValue("publishpattern",
                                "distrib/[type]s/[artifact]-[revision].[ext]")),
                        line.getOptionValue("publish"),
                        ivy.substitute(line.getOptionValue("deliverto", "ivy-[revision].xml")), validate);
            }
        }
        if (line.hasOption("main")) {
            invoke(ivy, cache, md, confs, line.getOptionValue("main"), line.getOptionValues("args"));
        }
    } catch (ParseException exp) {
        // oops, something went wrong
        System.err.println("Parsing failed. Reason: " + exp.getMessage());
        usage(options);
    }
}
From source file:de.unibi.techfak.bibiserv.util.codegen.Main.java
public static void main(String[] args) {
    // check & validate cmdline options
    OptionGroup opt_g = getCMDLineOptionsGroups();
    Options opt = getCMDLineOptions();
    opt.addOptionGroup(opt_g);

    CommandLineParser cli = new DefaultParser();
    try {
        CommandLine cl = cli.parse(opt, args);
        if (cl.hasOption("v")) {
            VerboseOutputFilter.SHOW_VERBOSE = true;
        }
        switch (opt_g.getSelected()) {
        case "V":
            try {
                URL jarUrl = Main.class.getProtectionDomain().getCodeSource().getLocation();
                String jarPath = URLDecoder.decode(jarUrl.getFile(), "UTF-8");
                JarFile jarFile = new JarFile(jarPath);
                Manifest m = jarFile.getManifest();
                StringBuilder versionInfo = new StringBuilder();
                for (Object key : m.getMainAttributes().keySet()) {
                    versionInfo.append(key).append(":").append(m.getMainAttributes().getValue(key.toString()))
                            .append("\n");
                }
                System.out.println(versionInfo.toString());
            } catch (Exception e) {
                log.error("Version info could not be read.");
            }
            break;
        case "h":
            HelpFormatter help = new HelpFormatter();
            String header = ""; //TODO: missing infotext
            StringBuilder footer = new StringBuilder("Supported configuration properties :");
            help.printHelp("CodeGen -h | -V | -g [...]", header, opt, footer.toString());
            break;
        case "g":
            // target dir
            if (cl.hasOption("t")) {
                File target = new File(cl.getOptionValue("t"));
                if (target.isDirectory() && target.canExecute() && target.canWrite()) {
                    config.setProperty("target.dir", cl.getOptionValue("t"));
                } else {
                    log.error("Target dir '{}' is inaccessible!", cl.getOptionValue("t"));
                    break;
                }
            } else {
                config.setProperty("target.dir", System.getProperty("java.io.tmpdir"));
            }
            // project dir
            if (cl.hasOption("p")) {
                File project = new File(cl.getOptionValue("p"));
                if (!project.exists()) {
                    if (!project.mkdirs()) {
                        log.error("Project dir '{}' can't be created!", cl.getOptionValue("p"));
                        break;
                    }
                }
                if (project.isDirectory() && project.canExecute() && project.canWrite()) {
                    config.setProperty("project.dir", cl.getOptionValue("p"));
                } else {
                    log.error("Project dir '{}' is inaccessible!", cl.getOptionValue("p"));
                    break;
                }
            }
            generateAppfromXML(cl.getOptionValue("g"));
            break;
        }
    } catch (ParseException e) {
        log.error("ParseException occurred while parsing cmdline arguments!\n{}", e.getLocalizedMessage());
    }
}
From source file:ee.ria.xroad.asyncdb.AsyncDBIntegrationTest.java
/**
 * @param args - use 'preservedb' to retain directory structure for further
 *             investigation
 * @throws Exception - when running integration test fails.
 */
public static void main(String[] args) throws Exception {
    File provider = new File(AsyncDBTestUtil.getProviderDirPath());
    File logFile = new File(AsyncDBTestUtil.getAsyncLogFilePath());
    if (provider.exists() || logFile.exists()) {
        throw new IntegrationTestFailedException(
                "Directory '" + AsyncDBTestUtil.DB_FILEPATH + "' and file '" + AsyncDBTestUtil.LOG_FILEPATH
                        + "' must not be present in the beginning of integration test, delete it!");
    }

    File logDir = logFile.getParentFile();
    logDir.mkdirs();

    boolean preserveDB = false;
    if (args.length > 0) {
        preserveDB = "preservedb".equalsIgnoreCase(args[0]);
        LOG.warn("Preserving DB file tree after test, be sure to remove them later by yourself!");
    }

    long freeFileDescriptorsAtBeginning = SystemMetrics.getFreeFileDescriptorCount();

    try {
        addRequestToNonExistentProvider();
        addRequestToExistentProvider();
        markSecondRequestAsRemoved();
        restoreSecondRequest();
        getAllMessageQueues();
        sendFirstRequestSuccessfully();
        sendSecondRequestUnsuccessfully();
        resetSendCountOfSecondRequest();
        skipNotSendingRequest();
        revertWritingFailure();

        // Test cases from real life
        removeCorruptRequestAndSendNext();
    } finally {
        validateFileDescriptors(freeFileDescriptorsAtBeginning);

        if (!preserveDB) {
            FileUtils.deleteDirectory(new File(AsyncDBTestUtil.getProviderDirPath()));
            FileUtils.deleteDirectory(logDir);
        }
    }

    LOG.info("Integration test of ASYNC-DB accomplished successfully.");
}
From source file:co.cask.cdap.etl.tool.UpgradeTool.java
public static void main(String[] args) throws Exception {
    Options options = new Options().addOption(new Option("h", "help", false, "Print this usage message."))
            .addOption(new Option("u", "uri", true, "CDAP instance URI to interact with in the format "
                    + "[http[s]://]<hostname>:<port>. Defaults to localhost:10000."))
            .addOption(new Option("a", "accesstoken", true,
                    "File containing the access token to use when interacting "
                            + "with a secure CDAP instance."))
            .addOption(new Option("t", "timeout", true,
                    "Timeout in milliseconds to use when interacting with the "
                            + "CDAP RESTful APIs. Defaults to " + DEFAULT_READ_TIMEOUT_MILLIS + "."))
            .addOption(new Option("n", "namespace", true,
                    "Namespace to perform the upgrade in. If none is given, "
                            + "pipelines in all namespaces will be upgraded."))
            .addOption(new Option("p", "pipeline", true,
                    "Name of the pipeline to upgrade. If specified, a namespace "
                            + "must also be given."))
            .addOption(new Option("f", "configfile", true,
                    "File containing old application details to update. "
                            + "The file contents are expected to be in the same format as the request body for creating an "
                            + "ETL application from one of the etl artifacts. "
                            + "It is expected to be a JSON Object containing 'artifact' and 'config' fields."
                            + "The value for 'artifact' must be a JSON Object that specifies the artifact scope, name, and version. "
                            + "The value for 'config' must be a JSON Object specifies the source, transforms, and sinks of the pipeline, "
                            + "as expected by older versions of the etl artifacts."))
            .addOption(new Option("o", "outputfile", true,
                    "File to write the converted application details provided in "
                            + "the configfile option. If none is given, results will be written to the input file + '.converted'. "
                            + "The contents of this file can be sent directly to CDAP to update or create an application."))
            .addOption(new Option("e", "errorDir", true,
                    "Optional directory to write any upgraded pipeline configs that "
                            + "failed to upgrade. The problematic configs can then be manually edited and upgraded separately. "
                            + "Upgrade errors may happen for pipelines that use plugins that are not backwards compatible. "
                            + "This directory must be writable by the user that is running this tool."));

    CommandLineParser parser = new BasicParser();
    CommandLine commandLine = parser.parse(options, args);
    String[] commandArgs = commandLine.getArgs();

    // if help is an option, or if there isn't a single 'upgrade' command, print usage and exit.
    if (commandLine.hasOption("h") || commandArgs.length != 1 || !"upgrade".equalsIgnoreCase(commandArgs[0])) {
        HelpFormatter helpFormatter = new HelpFormatter();
        helpFormatter.printHelp(UpgradeTool.class.getName() + " upgrade",
                "Upgrades Hydrator pipelines created for 3.2.x versions"
                        + "of the cdap-etl-batch and cdap-etl-realtime artifacts into pipelines compatible with 3.3.x versions of "
                        + "cdap-etl-batch and cdap-etl-realtime. Connects to an instance of CDAP to find any 3.2.x pipelines, then "
                        + "upgrades those pipelines.",
                options, "");
        System.exit(0);
    }

    ClientConfig clientConfig = getClientConfig(commandLine);

    if (commandLine.hasOption("f")) {
        String inputFilePath = commandLine.getOptionValue("f");
        String outputFilePath = commandLine.hasOption("o") ? commandLine.getOptionValue("o")
                : inputFilePath + ".new";
        convertFile(inputFilePath, outputFilePath, new ArtifactClient(clientConfig));
        System.exit(0);
    }

    File errorDir = commandLine.hasOption("e") ? new File(commandLine.getOptionValue("e")) : null;
    if (errorDir != null) {
        if (!errorDir.exists()) {
            if (!errorDir.mkdirs()) {
                LOG.error("Unable to create error directory {}.", errorDir.getAbsolutePath());
                System.exit(1);
            }
        } else if (!errorDir.isDirectory()) {
            LOG.error("{} is not a directory.", errorDir.getAbsolutePath());
            System.exit(1);
        } else if (!errorDir.canWrite()) {
            LOG.error("Unable to write to error directory {}.", errorDir.getAbsolutePath());
            System.exit(1);
        }
    }

    UpgradeTool upgradeTool = new UpgradeTool(clientConfig, errorDir);

    String namespace = commandLine.getOptionValue("n");
    String pipelineName = commandLine.getOptionValue("p");

    if (pipelineName != null) {
        if (namespace == null) {
            throw new IllegalArgumentException("Must specify a namespace when specifying a pipeline.");
        }
        Id.Application appId = Id.Application.from(namespace, pipelineName);
        if (upgradeTool.upgrade(appId)) {
            LOG.info("Successfully upgraded {}.", appId);
        } else {
            LOG.info("{} did not need to be upgraded.", appId);
        }
        System.exit(0);
    }

    if (namespace != null) {
        printUpgraded(upgradeTool.upgrade(Id.Namespace.from(namespace)));
        System.exit(0);
    }

    printUpgraded(upgradeTool.upgrade());
}
From source file:io.apicurio.studio.tools.release.ReleaseTool.java
/**
 * Main method.
 * @param args
 */
public static void main(String[] args) throws Exception {
    Options options = new Options();
    options.addOption("n", "release-name", true, "The name of the new release.");
    options.addOption("p", "prerelease", false, "Indicate that this is a pre-release.");
    options.addOption("t", "release-tag", true, "The tag name of the new release.");
    options.addOption("o", "previous-tag", true, "The tag name of the previous release.");
    options.addOption("g", "github-pat", true, "The GitHub PAT (for authentication/authorization).");
    options.addOption("a", "artifact", true, "The binary release artifact (full path).");
    options.addOption("d", "output-directory", true, "Where to store output file(s).");

    CommandLineParser parser = new DefaultParser();
    CommandLine cmd = parser.parse(options, args);

    if (!cmd.hasOption("n") || !cmd.hasOption("t") || !cmd.hasOption("o") || !cmd.hasOption("g")
            || !cmd.hasOption("a")) {
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("release-studio", options);
        System.exit(1);
    }

    // Arguments (command line)
    String releaseName = cmd.getOptionValue("n");
    boolean isPrerelease = cmd.hasOption("p");
    String releaseTag = cmd.getOptionValue("t");
    String oldReleaseTag = cmd.getOptionValue("o");
    String githubPAT = cmd.getOptionValue("g");
    String artifact = cmd.getOptionValue("a");
    File outputDir = new File("");
    if (cmd.hasOption("d")) {
        outputDir = new File(cmd.getOptionValue("d"));
        if (!outputDir.exists()) {
            outputDir.mkdirs();
        }
    }

    File releaseArtifactFile = new File(artifact);
    File releaseArtifactSigFile = new File(artifact + ".asc");

    String releaseArtifact = releaseArtifactFile.getName();
    String releaseArtifactSig = releaseArtifactSigFile.getName();

    if (!releaseArtifactFile.isFile()) {
        System.err.println("Missing file: " + releaseArtifactFile.getAbsolutePath());
        System.exit(1);
    }
    if (!releaseArtifactSigFile.isFile()) {
        System.err.println("Missing file: " + releaseArtifactSigFile.getAbsolutePath());
        System.exit(1);
    }

    System.out.println("=========================================");
    System.out.println("Creating Release: " + releaseTag);
    System.out.println("Previous Release: " + oldReleaseTag);
    System.out.println("            Name: " + releaseName);
    System.out.println("        Artifact: " + releaseArtifact);
    System.out.println("     Pre-Release: " + isPrerelease);
    System.out.println("=========================================");

    String releaseNotes = "";

    // Step #1 - Generate Release Notes
    //   * Grab info about the previous release (extract publish date)
    //   * Query all Issues for ones closed since that date
    //   * Generate Release Notes from the resulting Issues
    try {
        System.out.println("Getting info about release " + oldReleaseTag);
        HttpResponse<JsonNode> response = Unirest
                .get("https://api.github.com/repos/apicurio/apicurio-studio/releases/tags/v" + oldReleaseTag)
                .header("Accept", "application/json").header("Authorization", "token " + githubPAT).asJson();
        if (response.getStatus() != 200) {
            throw new Exception("Failed to get old release info: " + response.getStatusText());
        }
        JsonNode body = response.getBody();
        String publishedDate = body.getObject().getString("published_at");
        if (publishedDate == null) {
            throw new Exception("Could not find Published Date for previous release " + oldReleaseTag);
        }
        System.out.println("Release " + oldReleaseTag + " was published on " + publishedDate);

        List<JSONObject> issues = getIssuesForRelease(publishedDate, githubPAT);
        System.out.println("Found " + issues.size() + " issues closed in release " + releaseTag);

        System.out.println("Generating Release Notes");
        releaseNotes = generateReleaseNotes(releaseName, releaseTag, issues);
        System.out.println("------------ Release Notes --------------");
        System.out.println(releaseNotes);
        System.out.println("-----------------------------------------");
    } catch (Exception e) {
        e.printStackTrace();
        System.exit(1);
    }

    String assetUploadUrl = null;

    // Step #2 - Create a GitHub Release
    try {
        System.out.println("\nCreating GitHub Release " + releaseTag);
        JSONObject body = new JSONObject();
        body.put("tag_name", "v" + releaseTag);
        body.put("name", releaseName);
        body.put("body", releaseNotes);
        body.put("prerelease", isPrerelease);

        HttpResponse<JsonNode> response = Unirest
                .post("https://api.github.com/repos/apicurio/apicurio-studio/releases")
                .header("Accept", "application/json").header("Content-Type", "application/json")
                .header("Authorization", "token " + githubPAT).body(body).asJson();
        if (response.getStatus() != 201) {
            throw new Exception("Failed to create release in GitHub: " + response.getStatusText());
        }

        assetUploadUrl = response.getBody().getObject().getString("upload_url");
        if (assetUploadUrl == null || assetUploadUrl.trim().isEmpty()) {
            throw new Exception("Failed to get Asset Upload URL for newly created release!");
        }
    } catch (Exception e) {
        e.printStackTrace();
        System.exit(1);
    }

    // Step #3 - Upload Release Artifact (zip file)
    System.out.println("\nUploading Quickstart Artifact: " + releaseArtifact);
    try {
        String artifactUploadUrl = createUploadUrl(assetUploadUrl, releaseArtifact);
        byte[] artifactData = loadArtifactData(releaseArtifactFile);
        System.out.println("Uploading artifact asset: " + artifactUploadUrl);

        HttpResponse<JsonNode> response = Unirest.post(artifactUploadUrl).header("Accept", "application/json")
                .header("Content-Type", "application/zip").header("Authorization", "token " + githubPAT)
                .body(artifactData).asJson();
        if (response.getStatus() != 201) {
            throw new Exception("Failed to upload asset: " + releaseArtifact,
                    new Exception(response.getStatus() + "::" + response.getStatusText()));
        }

        Thread.sleep(1000);

        artifactUploadUrl = createUploadUrl(assetUploadUrl, releaseArtifactSig);
        artifactData = loadArtifactData(releaseArtifactSigFile);
        System.out.println("Uploading artifact asset: " + artifactUploadUrl);

        response = Unirest.post(artifactUploadUrl).header("Accept", "application/json")
                .header("Content-Type", "text/plain").header("Authorization", "token " + githubPAT)
                .body(artifactData).asJson();
        if (response.getStatus() != 201) {
            throw new Exception("Failed to upload asset: " + releaseArtifactSig,
                    new Exception(response.getStatus() + "::" + response.getStatusText()));
        }
    } catch (Exception e) {
        e.printStackTrace();
        System.exit(1);
    }

    Thread.sleep(1000);

    // Step #4 - Download Latest Release JSON for inclusion in the project web site
    try {
        System.out.println("Getting info about the release.");
        HttpResponse<JsonNode> response = Unirest
                .get("https://api.github.com/repos/apicurio/apicurio-studio/releases/latest")
                .header("Accept", "application/json").asJson();
        if (response.getStatus() != 200) {
            throw new Exception("Failed to get release info: " + response.getStatusText());
        }
        JsonNode body = response.getBody();
        String publishedDate = body.getObject().getString("published_at");
        if (publishedDate == null) {
            throw new Exception("Could not find Published Date for release.");
        }
        String fname = publishedDate.replace(':', '-');
        File outFile = new File(outputDir, fname + ".json");

        System.out.println("Writing latest release info to: " + outFile.getAbsolutePath());

        String output = body.getObject().toString(4);
        try (FileOutputStream fos = new FileOutputStream(outFile)) {
            fos.write(output.getBytes("UTF-8"));
            fos.flush();
        }

        System.out.println("Release info successfully written.");
    } catch (Exception e) {
        e.printStackTrace();
        System.exit(1);
    }

    System.out.println("=========================================");
    System.out.println("All Done!");
    System.out.println("=========================================");
}
From source file:fr.iphc.grid.jobmanager.JobManager.java
/**
 * @param args
 */
public static void main(String[] args) throws Exception {
    JobManager command = new JobManager();
    CommandLine line = command.parse(args);
    ArrayList<File> JdlList = new ArrayList<File>();
    Global.getOutputexecutor = Executors.newFixedThreadPool(10);
    Initialize init = new Initialize();
    String SetupFile = "setup_vigrid.xml";
    if (line.hasOption(OPT_SETUP)) {
        SetupFile = line.getOptionValue(OPT_SETUP);
    }
    if ((new File(SetupFile).isFile())) {
        init.GlobalSetup(SetupFile);
    }
    // Init Job
    if (line.hasOption(OPT_JOB)) {
        File file = new File(line.getOptionValue(OPT_JOB));
        if ((file.isFile())) {
            JdlList.add(file);
        } else {
            System.err.println("The file " + file + " doesn't exist");
            System.exit(-1);
        }
    } else {
        File file = new File(line.getOptionValue(OPT_FILEJOB));
        if ((file.isFile())) {
            JdlList = init.InitJdl(file);
        } else {
            System.err.println("The file " + file + " doesn't exist");
            System.exit(-1);
        }
    }
    if (line.hasOption(OPT_WAIT)) {
        Global.TIMEOUTWAIT = Integer.parseInt(line.getOptionValue(OPT_WAIT));
    }
    if (line.hasOption(OPT_RUN)) {
        Global.TIMEOUTRUN = Integer.parseInt(line.getOptionValue(OPT_RUN));
    }
    if (line.hasOption(OPT_END)) {
        Global.TIMEOUTEND = Integer.parseInt(line.getOptionValue(OPT_END));
    }
    if (line.hasOption(OPT_LOGDISPLAY)) {
        Global.SEUILDISPLAYLOG = Float.parseFloat(line.getOptionValue(OPT_LOGDISPLAY));
    }
    init.InitJob(JdlList);
    // Init Url Ce
    if (line.hasOption(OPT_QUEUE)) {
        Global.file = new File(line.getOptionValue(OPT_QUEUE));
    }
    if (line.hasOption(OPT_BAD)) {
        Global.BadCe = new File(line.getOptionValue(OPT_BAD));
    }
    if (line.hasOption(OPT_OPTIMIZETIMEOUTRUN)) {
        Global.OPTTIMEOUTRUN = false;
    }
    if (line.hasOption(OPT_CWD)) {
        File theDir = new File(line.getOptionValue(OPT_CWD));
        if (!theDir.exists()) {
            if (!theDir.mkdirs()) {
                System.err.println("Working directory create failed: " + line.getOptionValue(OPT_CWD));
                System.exit(-1);
            }
        }
        Global.Cwd = line.getOptionValue(OPT_CWD);
    } else {
        Global.Cwd = System.getProperty("user.dir");
    }
    if (!(new File(Global.Cwd)).canWrite()) {
        System.err.println(" Write permission denied : " + Global.Cwd);
        System.exit(-1);
    }
    System.out.println("Current working directory : " + Global.Cwd);
    Date start = new Date();
    init.PrintGlobalSetup();
    init.InitUrl(Global.file);
    init.InitSosCe();
    init.rmLoadFailed(Global.Cwd + "/loadFailed.txt");
    System.out.println("CE: " + Global.ListUrl.size() + " Nb JOB: " + Global.ListJob.size() + " " + new Date());
    if (Global.ListJob.size() < 6) { // pour obtenir rapport de 0.8
        Global.OPTTIMEOUTRUN = false;
    }
    // check if we can connect to the grid
    try {
        SessionFactory.createSession(true);
    } catch (NoSuccessException e) {
        System.err.println("Could not connect to the grid at all (" + e.getMessage() + ")");
        System.err.println("Aborting");
        System.exit(0);
    }
    // Launch Tread Job
    JobThread st = new JobThread(Global.ListJob, Global.ListUrl);
    st.start();
    LoggingThread logst = new LoggingThread(Global.ListJob, Global.ListUrl, Global.SEUILDISPLAYLOG);
    logst.start();
    // create Thread Hook intercept kill +CNTL+C
    Thread hook = new Thread() {
        public void run() {
            try {
                for (Jdl job : Global.ListJob) {
                    if (job.getJobId() != null) {
                        JobThread.jobCancel(job.getJobId());
                    }
                }
            } catch (Exception e) {
                System.err.println("Thread Hook:\n" + e.getMessage());
            }
            // give it a change to display final job state
            try {
                sleep(5000);
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
        }
    };
    Runtime.getRuntime().addShutdownHook(hook);
    // Integer timer = 180 * 60 * 1000;
    Date now = new Date();
    // Boolean Fin = false;
    while ((!Global.END) && ((now.getTime() - start.getTime()) < Global.TIMEOUTEND * 60 * 1000)) {
        // TOEND en minutes
        now = new Date();
        // int mb = 1024*1024;
        // Getting the runtime reference from system
        // Runtime runtime = Runtime.getRuntime();
        // System.out.println("##### Heap utilization statistics [MB] #####");
        // Print used memory
        // System.out.println("Used Memory:"
        //         + (runtime.totalMemory() - runtime.freeMemory()) / mb);
        // Print free memory
        // System.out.println("Free Memory:"
        //         + runtime.freeMemory() / mb);
        // Print total available memory
        // System.out.println("Total Memory:" + runtime.totalMemory() / mb);
        // Print Maximum available memory
        // System.out.println("Max Memory:" + runtime.maxMemory() / mb);
        //
        // System.out.println("NB: "+nb_end);
        // if ((float)(runtime.totalMemory() - runtime.freeMemory())/(float)runtime.maxMemory() > (float)0.3){
        //     System.out.println ("GC: "+(float)(runtime.totalMemory() - runtime.freeMemory())/runtime.maxMemory());
        //     System.gc();
        // };
        sleep(15 * 1000); // in ms
        // System.gc();
        // Fin=true;
        // for (Jdl job : Global.ListJob) {
        //     if (job.getJob() != null) {
        //         System.out.println("JOB: "+job.getId()+"\t"+job.getStatus());
        //         if (job.getStatus().compareTo("END")==0){
        //             ((JobImpl) job.getJob()).postStagingAndCleanup();
        //             System.out.println("END JOB: "+job.getId());
        //             job.setStatus("END");
        //         }
        //         if (job.getStatus().compareTo("END")!=0){
        //             Fin=false;
        //         }
        //         System.out.println("JOB: "+job.getId()+"\t"+job.getStatus() + "\t"+job.getFail()+"\t"+job.getNodeCe());
        //     }
        // }
        // while ((Global.END==0) && ((new Date().getTime()-start.getTime())<timer)){
    }
    // Boolean end_load=false;
    // while (!end_load){
    //     end_load=true;
    //     for(Jdl job:Global.ListJob){
    //         if (job.getStatus().equals("LOAD")){
    //             end_load=false;
    //         }
    //     }
    // }
    System.out.println("END JOB: " + now);
    st.halt();
    logst.halt();
    Iterator<Url> k = Global.ListUrl.iterator();
    while (k.hasNext()) {
        Url url = k.next();
        System.out.println("URL: " + url.getUrl());
    }
    Iterator<Jdl> m = Global.ListJob.iterator();
    while (m.hasNext()) {
        Jdl job = m.next();
        System.out.println(
                "JOB: " + job.getId() + "\t" + job.getFail() + "\t" + job.getStatus() + "\t" + job.getNodeCe());
    }
    System.out.println(start + " " + new Date());
    System.exit(0);
}
From source file:boa.compiler.BoaCompiler.java
public static void main(final String[] args) throws IOException {
    CommandLine cl = processCommandLineOptions(args);
    if (cl == null)
        return;
    final ArrayList<File> inputFiles = BoaCompiler.inputFiles;

    // get the name of the generated class
    final String className = getGeneratedClass(cl);

    // get the filename of the jar we will be writing
    final String jarName;
    if (cl.hasOption('o'))
        jarName = cl.getOptionValue('o');
    else
        jarName = className + ".jar";

    // make the output directory
    File outputRoot = null;
    if (cl.hasOption("cd")) {
        outputRoot = new File(cl.getOptionValue("cd"));
    } else {
        outputRoot = new File(new File(System.getProperty("java.io.tmpdir")), UUID.randomUUID().toString());
    }
    final File outputSrcDir = new File(outputRoot, "boa");
    if (!outputSrcDir.mkdirs())
        throw new IOException("unable to mkdir " + outputSrcDir);

    // find custom libs to load
    final List<URL> libs = new ArrayList<URL>();
    if (cl.hasOption('l'))
        for (final String lib : cl.getOptionValues('l'))
            libs.add(new File(lib).toURI().toURL());

    final File outputFile = new File(outputSrcDir, className + ".java");
    final BufferedOutputStream o = new BufferedOutputStream(new FileOutputStream(outputFile));
    try {
        final List<String> jobnames = new ArrayList<String>();
        final List<String> jobs = new ArrayList<String>();
        boolean isSimple = true;

        final List<Program> visitorPrograms = new ArrayList<Program>();

        SymbolTable.initialize(libs);

        final int maxVisitors;
        if (cl.hasOption('v'))
            maxVisitors = Integer.parseInt(cl.getOptionValue('v'));
        else
            maxVisitors = Integer.MAX_VALUE;

        for (int i = 0; i < inputFiles.size(); i++) {
            final File f = inputFiles.get(i);
            try {
                final BoaLexer lexer = new BoaLexer(new ANTLRFileStream(f.getAbsolutePath()));
                lexer.removeErrorListeners();
                lexer.addErrorListener(new LexerErrorListener());

                final CommonTokenStream tokens = new CommonTokenStream(lexer);
                final BoaParser parser = new BoaParser(tokens);
                parser.removeErrorListeners();
                parser.addErrorListener(new BaseErrorListener() {
                    @Override
                    public void syntaxError(Recognizer<?, ?> recognizer, Object offendingSymbol, int line,
                            int charPositionInLine, String msg, RecognitionException e)
                            throws ParseCancellationException {
                        throw new ParseCancellationException(e);
                    }
                });

                final BoaErrorListener parserErrorListener = new ParserErrorListener();
                final Start p = parse(tokens, parser, parserErrorListener);
                if (cl.hasOption("ast"))
                    new ASTPrintingVisitor().start(p);

                final String jobName = "" + i;

                try {
                    if (!parserErrorListener.hasError) {
                        new TypeCheckingVisitor().start(p, new SymbolTable());

                        final TaskClassifyingVisitor simpleVisitor = new TaskClassifyingVisitor();
                        simpleVisitor.start(p);

                        LOG.info(f.getName() + ": task complexity: "
                                + (!simpleVisitor.isComplex() ? "simple" : "complex"));
                        isSimple &= !simpleVisitor.isComplex();

                        new ShadowTypeEraser().start(p);
                        new InheritedAttributeTransformer().start(p);
                        new LocalAggregationTransformer().start(p);

                        // if a job has no visitor, let it have its own method
                        // also let jobs have own methods if visitor merging is disabled
                        if (!simpleVisitor.isComplex() || maxVisitors < 2 || inputFiles.size() == 1) {
                            new VisitorOptimizingTransformer().start(p);

                            if (cl.hasOption("pp"))
                                new PrettyPrintVisitor().start(p);
                            if (cl.hasOption("ast2"))
                                new ASTPrintingVisitor().start(p);
                            final CodeGeneratingVisitor cg = new CodeGeneratingVisitor(jobName);
                            cg.start(p);
                            jobs.add(cg.getCode());

                            jobnames.add(jobName);
                        }
                        // if a job has visitors, fuse them all together into a single program
                        else {
                            p.getProgram().jobName = jobName;
                            visitorPrograms.add(p.getProgram());
                        }
                    }
                } catch (final TypeCheckException e) {
                    parserErrorListener.error("typecheck", lexer, null, e.n.beginLine, e.n.beginColumn,
                            e.n2.endColumn - e.n.beginColumn + 1, e.getMessage(), e);
                }
            } catch (final Exception e) {
                System.err.print(f.getName() + ": compilation failed: ");
                e.printStackTrace();
            }
        }

        if (!visitorPrograms.isEmpty())
            try {
                for (final Program p : new VisitorMergingTransformer().mergePrograms(visitorPrograms,
                        maxVisitors)) {
                    new VisitorOptimizingTransformer().start(p);

                    if (cl.hasOption("pp"))
                        new PrettyPrintVisitor().start(p);
                    if (cl.hasOption("ast2"))
                        new ASTPrintingVisitor().start(p);
                    final CodeGeneratingVisitor cg = new CodeGeneratingVisitor(p.jobName);
                    cg.start(p);
                    jobs.add(cg.getCode());

                    jobnames.add(p.jobName);
                }
            } catch (final Exception e) {
                System.err.println("error fusing visitors - falling back: " + e);
                e.printStackTrace();

                for (final Program p : visitorPrograms) {
                    new VisitorOptimizingTransformer().start(p);

                    if (cl.hasOption("pp"))
                        new PrettyPrintVisitor().start(p);
                    if (cl.hasOption("ast2"))
                        new ASTPrintingVisitor().start(p);
                    final CodeGeneratingVisitor cg = new CodeGeneratingVisitor(p.jobName);
                    cg.start(p);
                    jobs.add(cg.getCode());

                    jobnames.add(p.jobName);
                }
            }

        if (jobs.size() == 0)
            throw new RuntimeException("no files compiled without error");

        final ST st = AbstractCodeGeneratingVisitor.stg.getInstanceOf("Program");

        st.add("name", className);
        st.add("numreducers", inputFiles.size());
        st.add("jobs", jobs);
        st.add("jobnames", jobnames);
        st.add("combineTables", CodeGeneratingVisitor.combineAggregatorStrings);
        st.add("reduceTables", CodeGeneratingVisitor.reduceAggregatorStrings);
        st.add("splitsize", isSimple ? 64 * 1024 * 1024 : 10 * 1024 * 1024);
        if (DefaultProperties.localDataPath != null) {
            st.add("isLocal", true);
        }

        o.write(st.render().getBytes());
    } finally {
        o.close();
    }

    compileGeneratedSrc(cl, jarName, outputRoot, outputFile);
}