List of usage examples for org.apache.commons.cli.DefaultParser
DefaultParser
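All of the examples below follow the same basic pattern: build an Options set, parse the arguments with DefaultParser, then query the resulting CommandLine. The following is a minimal, self-contained sketch of that pattern (illustrative only; the ParseExample class and option names are invented for this snippet, and it assumes Commons CLI 1.3+, where DefaultParser was introduced):

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;

public class ParseExample {
    public static void main(String[] args) {
        // Declare the options the program accepts (names here are illustrative).
        Options options = new Options();
        options.addOption("p", "port", true, "port to listen on");
        options.addOption("h", "help", false, "print this message");

        CommandLineParser parser = new DefaultParser();
        try {
            // Parse the raw arguments into a CommandLine object.
            CommandLine cmd = parser.parse(options, args);
            if (cmd.hasOption("h")) {
                new HelpFormatter().printHelp("ParseExample", options);
                return;
            }
            // Read an option value, falling back to a default.
            int port = Integer.parseInt(cmd.getOptionValue("p", "8080"));
            System.out.println("port = " + port);
        } catch (ParseException e) {
            // Unknown option, missing argument, etc.
            System.err.println("Parsing failed: " + e.getMessage());
        }
    }
}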
From source file:com.martinlamas.dicomserver.DicomServer.java
public static void main(String[] args) {
    int port = DEFAULT_PORT;
    String aeTitle = DEFAULT_AE_TITLE;
    File storageDirectory = new File(DEFAULT_STORAGE_DIRECTORY);

    try {
        CommandLine line = new DefaultParser().parse(getOptions(), args);

        if (line.hasOption("p"))
            port = Integer.valueOf(line.getOptionValue("p"));
        if (line.hasOption("d"))
            storageDirectory = new File(line.getOptionValue("d"));
        if (line.hasOption("t"))
            aeTitle = line.getOptionValue("t");

        List<DicomServerApplicationEntity> applicationEntities = new ArrayList<DicomServerApplicationEntity>();
        DicomServerApplicationEntity applicationEntity = new DicomServerApplicationEntity(aeTitle,
                storageDirectory);
        applicationEntities.add(applicationEntity);

        showBanner();

        IDicomStoreSCPServer server = new DicomStoreSCPServer(port, applicationEntities);
        server.start();
    } catch (ParseException e) {
        printUsage();
    } catch (Exception e) {
        logger.error("Unable to start DICOM server: " + e.getMessage());
        e.printStackTrace();
    }
}
From source file:com.akana.demo.freemarker.templatetester.App.java
public static void main(String[] args) {
    final Options options = new Options();

    @SuppressWarnings("static-access")
    Option optionContentType = OptionBuilder.withArgName("content-type").hasArg()
            .withDescription("content type of model").create("content");
    @SuppressWarnings("static-access")
    Option optionUrlPath = OptionBuilder.withArgName("httpRequestLine").hasArg()
            .withDescription("url path and parameters in HTTP Request Line format").create("url");
    @SuppressWarnings("static-access")
    Option optionRootMessageName = OptionBuilder.withArgName("messageName").hasArg()
            .withDescription("root data object name, defaults to 'message'").create("root");
    @SuppressWarnings("static-access")
    Option optionAdditionalMessages = OptionBuilder.withArgName("dataModelPaths")
            .hasArgs(Option.UNLIMITED_VALUES).withDescription("additional message object data sources")
            .create("messages");
    @SuppressWarnings("static-access")
    Option optionDebugMessages = OptionBuilder.hasArg(false)
            .withDescription("Shows debug information about template processing").create("debug");
    Option optionHelp = new Option("help", "print this message");

    options.addOption(optionHelp);
    options.addOption(optionContentType);
    options.addOption(optionUrlPath);
    options.addOption(optionRootMessageName);
    options.addOption(optionAdditionalMessages);
    options.addOption(optionDebugMessages);

    CommandLineParser parser = new DefaultParser();
    CommandLine cmd;
    try {
        cmd = parser.parse(options, args);

        // Check for help flag
        if (cmd.hasOption("help")) {
            showHelp(options);
            return;
        }

        String[] remainingArguments = cmd.getArgs();
        if (remainingArguments.length < 2) {
            showHelp(options);
            return;
        }

        String ftlPath, dataPath = "none";
        ftlPath = remainingArguments[0];
        dataPath = remainingArguments[1];

        String contentType = "text/xml";
        // Discover content type from file extension
        String ext = FilenameUtils.getExtension(dataPath);
        if (ext.equals("json")) {
            contentType = "json";
        } else if (ext.equals("txt")) {
            contentType = "txt";
        }
        // Override discovered content type
        if (cmd.hasOption("content")) {
            contentType = cmd.getOptionValue("content");
        }

        // Root data model name
        String rootMessageName = "message";
        if (cmd.hasOption("root")) {
            rootMessageName = cmd.getOptionValue("root");
        }

        // Additional data models
        String[] additionalModels = new String[0];
        if (cmd.hasOption("messages")) {
            additionalModels = cmd.getOptionValues("messages");
        }

        // Debug Info
        if (cmd.hasOption("debug")) {
            System.out.println(" Processing ftl : " + ftlPath);
            System.out.println(" with data model: " + dataPath);
            System.out.println(" with content-type: " + contentType);
            System.out.println(" data model object: " + rootMessageName);
            if (cmd.hasOption("messages")) {
                System.out.println("additional models: " + additionalModels.length);
            }
        }

        Configuration cfg = new Configuration(Configuration.VERSION_2_3_23);
        cfg.setDirectoryForTemplateLoading(new File("."));
        cfg.setDefaultEncoding("UTF-8");
        cfg.setTemplateExceptionHandler(TemplateExceptionHandler.RETHROW_HANDLER);

        /* Create the primary data-model */
        Map<String, Object> message = new HashMap<String, Object>();
        if (contentType.contains("json") || contentType.contains("txt")) {
            message.put("contentAsString",
                    FileUtils.readFileToString(new File(dataPath), StandardCharsets.UTF_8));
        } else {
            message.put("contentAsXml", freemarker.ext.dom.NodeModel.parse(new File(dataPath)));
        }
        if (cmd.hasOption("url")) {
            message.put("getProperty", new AkanaGetProperty(cmd.getOptionValue("url")));
        }

        Map<String, Object> root = new HashMap<String, Object>();
        root.put(rootMessageName, message);
        if (additionalModels.length > 0) {
            for (int i = 0; i < additionalModels.length; i++) {
                Map<String, Object> m = createMessageFromFile(additionalModels[i], contentType);
                root.put("message" + i, m);
            }
        }

        /* Get the template (uses cache internally) */
        Template temp = cfg.getTemplate(ftlPath);

        /* Merge data-model with template */
        Writer out = new OutputStreamWriter(System.out);
        temp.process(root, out);
    } catch (ParseException e) {
        showHelp(options);
        System.exit(1);
    } catch (IOException e) {
        System.out.println("Unable to parse ftl.");
        e.printStackTrace();
    } catch (SAXException e) {
        System.out.println("XML parsing issue.");
        e.printStackTrace();
    } catch (ParserConfigurationException e) {
        System.out.println("Unable to configure parser.");
        e.printStackTrace();
    } catch (TemplateException e) {
        System.out.println("Unable to parse template.");
        e.printStackTrace();
    }
}
From source file:com.esri.geoportal.base.metadata.MetadataCLI.java
/**
 * <h1>run the javascript Evaluators.js scripts</h1>
 * from command line for testing.
 *
 * <div> java com.esri.geoportal.base.metadata.MetadataCLI -md={XMLFile_fullpath} </div>
 *
 * <p><b>Note:</b> This only produces the basic JSON elements seen in the
 * elastic search json document. Other steps, such as itemID are found in
 * {@link com.esri.geoportal.lib.elastic.request.PublishMetadataRequest#prePublish(ElasticContext, AccessUtil, AppResponse, MetadataDocument)}</p>
 *
 * <p><b>Note:</b> mainly tested in JetBrains Intellij</p>
 * <p><b>Note:</b> mvn command line call is in contrib</p>
 *
 * @author David Valentine
 */
public static void main(String[] args) {
    Option help = Option.builder("h").required(false).longOpt("help").desc("HELP").build();

    // Option metadataJsDir =
    //     Option.builder("js")
    //         .required(true)
    //         .hasArg()
    //         .longOpt("jsdir")
    //         .desc("Base metadata javascript directory")
    //         // .type(File.class) // test if this is a directory
    //         .build();
    /* not needed. js read from classpath, metadata/js/Evaluator.js required to be on classpath.
       TODO: test if this works in/on a jar, if not might need to test if running in a jar,
       and set appropriate resource location */

    Option metadataFile = Option.builder("md").required(true).hasArg().longOpt("metdatafile")
            .desc("Metadata File")
            // .type(File.class)
            .build();

    Option verbose = Option.builder("v").required(false).longOpt("verboase").build();

    Options options = new Options();
    options.addOption(help);
    //options.addOption(metadataJsDir);
    options.addOption(metadataFile);
    options.addOption(verbose);

    // create the parser
    CommandLineParser parser = new DefaultParser();
    try {
        // parse the command line arguments
        CommandLine line = parser.parse(options, args);

        Boolean v = line.hasOption("v");

        String mds = line.getOptionValue("md");
        File md = new File(mds);
        if (!md.isFile())
            System.err.println("Md Metadata must be a file");

        testScriptEvaluator(md, v);
    } catch (ParseException exp) {
        // oops, something went wrong
        System.err.println("Parsing failed. Reason: " + exp.getMessage());
    } catch (Exception ex) {
        System.err.println("Metadata Evaluation Failed. Reason: " + ex.getMessage());
    }
}
From source file:com.example.dlp.Metadata.java
/** Retrieve infoTypes. */
public static void main(String[] args) throws Exception {
    Options options = new Options();

    Option languageCodeOption = new Option("language", null, true, "BCP-47 language code");
    languageCodeOption.setRequired(false);
    options.addOption(languageCodeOption);

    Option categoryOption = new Option("category", null, true, "Category of info types to list.");
    categoryOption.setRequired(false);
    options.addOption(categoryOption);

    CommandLineParser parser = new DefaultParser();
    HelpFormatter formatter = new HelpFormatter();
    CommandLine cmd;
    try {
        cmd = parser.parse(options, args);
    } catch (ParseException e) {
        System.out.println(e.getMessage());
        formatter.printHelp(Metadata.class.getName(), options);
        System.exit(1);
        return;
    }

    String languageCode = cmd.getOptionValue(languageCodeOption.getOpt(), "en-US");
    if (cmd.hasOption(categoryOption.getOpt())) {
        String category = cmd.getOptionValue(categoryOption.getOpt());
        listInfoTypes(category, languageCode);
    } else {
        listRootCategories(languageCode);
    }
}
From source file:com.ibm.crail.storage.StorageServer.java
public static void main(String[] args) throws Exception {
    Logger LOG = CrailUtils.getLogger();
    CrailConfiguration conf = new CrailConfiguration();
    CrailConstants.updateConstants(conf);
    CrailConstants.printConf();
    CrailConstants.verify();

    int splitIndex = 0;
    for (String param : args) {
        if (param.equalsIgnoreCase("--")) {
            break;
        }
        splitIndex++;
    }

    //default values
    StringTokenizer tokenizer = new StringTokenizer(CrailConstants.STORAGE_TYPES, ",");
    if (!tokenizer.hasMoreTokens()) {
        throw new Exception("No storage types defined!");
    }
    String storageName = tokenizer.nextToken();
    int storageType = 0;
    HashMap<String, Integer> storageTypes = new HashMap<String, Integer>();
    storageTypes.put(storageName, storageType);
    for (int type = 1; tokenizer.hasMoreElements(); type++) {
        String name = tokenizer.nextToken();
        storageTypes.put(name, type);
    }
    int storageClass = -1;

    //custom values
    if (args != null) {
        Option typeOption = Option.builder("t").desc("storage type to start").hasArg().build();
        Option classOption = Option.builder("c").desc("storage class the server will attach to").hasArg()
                .build();
        Options options = new Options();
        options.addOption(typeOption);
        options.addOption(classOption);
        CommandLineParser parser = new DefaultParser();
        try {
            CommandLine line = parser.parse(options, Arrays.copyOfRange(args, 0, splitIndex));
            if (line.hasOption(typeOption.getOpt())) {
                storageName = line.getOptionValue(typeOption.getOpt());
                storageType = storageTypes.get(storageName).intValue();
            }
            if (line.hasOption(classOption.getOpt())) {
                storageClass = Integer.parseInt(line.getOptionValue(classOption.getOpt()));
            }
        } catch (ParseException e) {
            HelpFormatter formatter = new HelpFormatter();
            formatter.printHelp("Storage tier", options);
            System.exit(-1);
        }
    }
    if (storageClass < 0) {
        storageClass = storageType;
    }

    StorageTier storageTier = StorageTier.createInstance(storageName);
    if (storageTier == null) {
        throw new Exception("Cannot instantiate datanode of type " + storageName);
    }

    String extraParams[] = null;
    splitIndex++;
    if (args.length > splitIndex) {
        extraParams = new String[args.length - splitIndex];
        for (int i = splitIndex; i < args.length; i++) {
            extraParams[i - splitIndex] = args[i];
        }
    }
    storageTier.init(conf, extraParams);
    storageTier.printConf(LOG);

    RpcClient rpcClient = RpcClient.createInstance(CrailConstants.NAMENODE_RPC_TYPE);
    rpcClient.init(conf, args);
    rpcClient.printConf(LOG);

    ConcurrentLinkedQueue<InetSocketAddress> namenodeList = CrailUtils.getNameNodeList();
    ConcurrentLinkedQueue<RpcConnection> connectionList = new ConcurrentLinkedQueue<RpcConnection>();
    while (!namenodeList.isEmpty()) {
        InetSocketAddress address = namenodeList.poll();
        RpcConnection connection = rpcClient.connect(address);
        connectionList.add(connection);
    }
    RpcConnection rpcConnection = connectionList.peek();
    if (connectionList.size() > 1) {
        rpcConnection = new RpcDispatcher(connectionList);
    }
    LOG.info("connected to namenode(s) " + rpcConnection.toString());

    StorageServer server = storageTier.launchServer();
    StorageRpcClient storageRpc = new StorageRpcClient(storageType, CrailStorageClass.get(storageClass),
            server.getAddress(), rpcConnection);

    HashMap<Long, Long> blockCount = new HashMap<Long, Long>();
    long sumCount = 0;
    while (server.isAlive()) {
        StorageResource resource = server.allocateResource();
        if (resource == null) {
            break;
        } else {
            storageRpc.setBlock(resource.getAddress(), resource.getLength(), resource.getKey());
            DataNodeStatistics stats = storageRpc.getDataNode();
            long newCount = stats.getFreeBlockCount();
            long serviceId = stats.getServiceId();
            long oldCount = 0;
            if (blockCount.containsKey(serviceId)) {
                oldCount = blockCount.get(serviceId);
            }
            long diffCount = newCount - oldCount;
            blockCount.put(serviceId, newCount);
            sumCount += diffCount;
            LOG.info("datanode statistics, freeBlocks " + sumCount);
        }
    }

    while (server.isAlive()) {
        DataNodeStatistics stats = storageRpc.getDataNode();
        long newCount = stats.getFreeBlockCount();
        long serviceId = stats.getServiceId();
        long oldCount = 0;
        if (blockCount.containsKey(serviceId)) {
            oldCount = blockCount.get(serviceId);
        }
        long diffCount = newCount - oldCount;
        blockCount.put(serviceId, newCount);
        sumCount += diffCount;
        LOG.info("datanode statistics, freeBlocks " + sumCount);
        Thread.sleep(2000);
    }
}
From source file:com.twitter.heron.eco.Eco.java
public static void main(String[] args) throws Exception {
    Options options = constructOptions();

    CommandLineParser parser = new DefaultParser();
    CommandLine cmd;
    try {
        cmd = parser.parse(options, args);
    } catch (ParseException e) {
        throw new RuntimeException("Error parsing command line options: ", e);
    }

    FileInputStream fin = new FileInputStream(new File(cmd.getOptionValue(ECO_CONFIG_FILE)));
    String propsFile = cmd.getOptionValue(PROPS);
    FileInputStream propsInputStream = null;
    if (propsFile != null) {
        propsInputStream = new FileInputStream(new File(propsFile));
    }
    Boolean filterFromEnv = cmd.hasOption(ENV_PROPS);

    Eco eco = new Eco(new EcoBuilder(new SpoutBuilder(), new BoltBuilder(), new StreamBuilder(),
            new ComponentBuilder(), new ConfigBuilder()), new EcoParser(), new EcoSubmitter());
    eco.submit(fin, propsInputStream, filterFromEnv);
}
From source file:com.yhfudev.SimulatorForSelfStabilizing.java
public static void main(String[] args) {
    // command line lib: apache CLI http://commons.apache.org/proper/commons-cli/
    // command line arguments:
    //   -- input file
    //   -- output line to a csv file
    //   -- algorithm: Ding's linear or randomized
    // single thread parsing ...
    Options options = new Options();
    options.addOption("h", false, "print this message");
    //heuristic
    options.addOption("u", false, "(rand) heuristic on");
    options.addOption("y", true, "show the graph with specified delay (ms)");
    options.addOption("i", true, "the input file name");
    options.addOption("o", true, "the results is save to a attachable output cvs file");
    options.addOption("l", true, "the graph activities trace log file name");
    options.addOption("s", true, "save the graph to a file");
    options.addOption("a", true, "the algorithm name, ding or rand");
    // options specified to generator
    options.addOption("g", true,
            "the graph generator algorithm name: fan1l, fan2l, rand, doro, flower, watt, lobster");
    options.addOption("n", true, "the number of nodes");
    options.addOption("d", true, "(rand) the node degree (max)");
    options.addOption("f", false, "(rand) if the degree value is fix or not");
    options.addOption("p", true, "(watt) the probability of beta");

    CommandLineParser parser = new DefaultParser();
    CommandLine cmd = null;
    try {
        cmd = parser.parse(options, args);
    } catch (ParseException e) {
        e.printStackTrace();
        return;
    }
    if (cmd.hasOption("h")) {
        showHelp(options);
        return;
    }

    int delay_time = 0;
    if (cmd.hasOption("y")) {
        delay_time = Integer.parseInt(cmd.getOptionValue("y"));
    }

    String sFileName = null;
    FileWriter writer = null;
    if (cmd.hasOption("o")) {
        sFileName = cmd.getOptionValue("o");
    }
    if ((null != sFileName) && (!"".equals(sFileName))) {
        try {
            writer = new FileWriter(sFileName, true); // true: append
        } catch (IOException e) {
            e.printStackTrace();
            System.out.println("Error: unable to open the output file " + sFileName);
            return;
        }
    }

    FileWriter wrGraph = null;
    sFileName = null;
    if (cmd.hasOption("s")) {
        sFileName = cmd.getOptionValue("s");
    }
    if ((null != sFileName) && (!"".equals(sFileName))) {
        try {
            wrGraph = new FileWriter(sFileName, true); // true: append
        } catch (IOException e) {
            e.printStackTrace();
            System.out.println("Error: unable to open the saveGraph file " + sFileName);
            return;
        }
    }

    sFileName = null;
    if (cmd.hasOption("i")) {
        sFileName = cmd.getOptionValue("i");
    }
    String genname = null;
    if (cmd.hasOption("g")) {
        genname = cmd.getOptionValue("g");
    }
    if ((null == genname) && (null == sFileName)) {
        System.out.println("Error: not specify the input file or graph generator");
        showHelp(options);
        return;
    }
    if ((null != genname) && (null != sFileName)) {
        System.out.println("Error: do not specify the input file and graph generator at the same time");
        showHelp(options);
        return;
    }

    if (delay_time > 0) {
        // create and display a graph
        System.setProperty("org.graphstream.ui.renderer", "org.graphstream.ui.j2dviewer.J2DGraphRenderer");
    }
    Graph graph = new SingleGraph("test");
    //graph.setNullAttributesAreErrors(true); // to throw an exception instead of returning null (in getAttribute()).
    if (delay_time > 0) {
        graph.addAttribute("ui.quality");
        graph.addAttribute("ui.antialias");
        graph.addAttribute("ui.stylesheet", "url(data/selfstab-mwcds.css);");
        graph.display();
    }

    // save the trace to file
    FileSinkDGS dgs = null;
    if (cmd.hasOption("l")) {
        dgs = new FileSinkDGS();
        graph.addSink(dgs);
        try {
            dgs.begin(cmd.getOptionValue("l"));
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    Generator generator = null;
    if (null != sFileName) {
        System.out.println("DEBUG: the input file=" + sFileName);
        FileSource source = new FileSourceDGS();
        source.addSink(graph);
        int count_edge_error = 0;
        try {
            //source.begin("data/selfstab-mwcds.dgs"); // Ding's paper example
            //source.begin("data/selfstab-ds.dgs"); // DS example
            //source.begin("data/selfstab-doro-1002.dgs"); // DorogovtsevMendes
            //source.begin("data/selfstab-rand-p10-10002.dgs"); // random connected graph with degree = 10% nodes
            //source.begin("data/selfstab-rand-f5-34.dgs"); // random connected graph with degree = 5
            source.begin(sFileName);
            while (true) {
                try {
                    if (false == source.nextEvents()) {
                        break;
                    }
                } catch (EdgeRejectedException e) {
                    // ignore
                    count_edge_error++;
                    System.out.println("DEBUG: adding edge error: " + e.toString());
                }
                if (delay_time > 0) {
                    delay(delay_time);
                }
            }
            source.end();
        //} catch (InterruptedException e) {
        //    e.printStackTrace();
        } catch (IOException e) {
            e.printStackTrace();
        }
        System.out.println("DEBUG: END read from source. # of edges ignored=" + count_edge_error);
    } else {
        // assert (genname != null);
        // graph generator
        //generator = new ChvatalGenerator(); // fix size
        //generator = new FullGenerator(); // full connected, 2 steps, 1 node in dominate set
        //generator = new GridGenerator(); // only one result
        //generator = new HypercubeGenerator(); // one result
        //generator = new IncompleteGridGenerator(); // error
        //generator = new PetersenGraphGenerator(); // fix size
        //generator = new PointsOfInterestGenerator(); // error
        //generator = new RandomEuclideanGenerator(); // linear algo endless loop
        //generator = new RandomFixedDegreeDynamicGraphGenerator();
        //generator = new RandomGenerator();
        //generator = new URLGenerator("http://www.cnbeta.com");
        //generator = new WikipediaGenerator("Antarctica"); // no end
        //generator = new DorogovtsevMendesGenerator(); // ok
        //generator = new FlowerSnarkGenerator(); // ok
        //generator = new WattsStrogatzGenerator(maxSteps, 30, 0.5); // small world, ok
        //generator = new LobsterGenerator(); // tree like, ok
        int i;
        int n = 12; // the number of nodes
        if (cmd.hasOption("n")) {
            n = Integer.parseInt(cmd.getOptionValue("n"));
        }
        int d = 3; // the degree of nodes
        if (cmd.hasOption("d")) {
            d = Integer.parseInt(cmd.getOptionValue("d"));
        }
        boolean isFix = false;
        if (cmd.hasOption("f")) {
            isFix = true;
        }
        if ("".equals(genname)) {
            System.out.println("Error: not set generator name");
            return;
        } else if ("fan1l".equals(genname)) {
            generator = new FanGenerator();
        } else if ("fan2l".equals(genname)) {
            generator = new Fan2lGenerator(graph, d);
        } else if ("doro".equals(genname)) {
            generator = new DorogovtsevMendesGenerator();
        } else if ("flower".equals(genname)) {
            generator = new FlowerSnarkGenerator();
        } else if ("lobster".equals(genname)) {
            generator = new LobsterGenerator();
        } else if ("rand".equals(genname)) {
            generator = new ConnectionGenerator(graph, d, false, isFix);
        } else if ("watt".equals(genname)) {
            // WattsStrogatzGenerator(n,k,beta)
            // a ring of n nodes
            // each node is connected to its k nearest neighbours, k must be even
            // n >> k >> log(n) >> 1
            // beta being a probability it must be between 0 and 1.
            int k;
            double beta = 0.5;
            if (cmd.hasOption("p")) {
                beta = Double.parseDouble(cmd.getOptionValue("p"));
            }
            k = (n / 20) * 2;
            if (k < 2) {
                k = 2;
            }
            if (n < 2 * 6) {
                n = 2 * 6;
            }
            generator = new WattsStrogatzGenerator(n, k, beta);
        }
        /*int listf5[][] = { {12, 5}, {34, 5}, {102, 5}, {318, 5}, {1002, 5}, {3164, 5}, {10002, 5}, };
        int listp3[][] = { {12, 2}, {34, 2}, {102, 3}, {318, 9}, {1002, 30}, {3164, 90}, {10002, 300}, };
        int listp10[][] = { {12, 2}, {34, 3}, {102, 10}, {318, 32}, {1002, 100}, {3164, 316}, {10002, 1000}, };
        i = 6;
        maxSteps = listf5[i][0];
        int degree = listf5[i][1];
        generator = new ConnectionGenerator(graph, degree, false, true);
        */
        generator.addSink(graph);
        generator.begin();
        for (i = 1; i < n; i++) {
            generator.nextEvents();
        }
        generator.end();
        delay(500);
    }

    if (cmd.hasOption("a")) {
        SinkAlgorithm algorithm = null;
        String algo = "rand";
        algo = cmd.getOptionValue("a");
        if ("ding".equals(algo)) {
            algorithm = new SelfStabilizingMWCDSLinear();
        } else if ("ds".equals(algo)) {
            algorithm = new SelfStabilizingDSLinear();
        } else {
            algorithm = new SelfStabilizingMWCDSRandom();
        }
        algorithm.init(graph);
        algorithm.setSource("0");
        if (delay_time > 0) {
            algorithm.setAnimationDelay(delay_time);
        }
        if (cmd.hasOption("u")) {
            algorithm.heuristicOn(true);
        } else {
            algorithm.heuristicOn(false);
        }
        algorithm.compute();

        GraphVerificator verificator = new MWCDSGraphVerificator();
        if (verificator.verify(graph)) {
            System.out.println("DEBUG: PASS MWCDSGraphVerificator verficiation.");
        } else {
            System.out.println("DEBUG: FAILED MWCDSGraphVerificator verficiation!");
        }
        if (null != writer) {
            AlgorithmResult result = algorithm.getResult();
            result.SaveTo(writer);
            try {
                writer.flush();
                writer.close();
            } catch (IOException e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
            }
        }
        algorithm.terminate();
    }

    if (null != generator) {
        generator.removeSink(graph);
    }
    if (dgs != null) {
        graph.removeSink(dgs);
        try {
            dgs.end();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
    if (null != wrGraph) {
        try {
            saveGraph(graph, wrGraph);
            wrGraph.flush();
            wrGraph.close();
        } catch (IOException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
    }
}
From source file:com.nextdoor.bender.S3SnsNotifier.java
public static void main(String[] args) throws ParseException, InterruptedException, IOException {
    formatter = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'").withZoneUTC();

    /*
     * Parse cli arguments
     */
    Options options = new Options();
    options.addOption(Option.builder().longOpt("bucket").hasArg().required()
            .desc("Name of S3 bucket to list s3 objects from").build());
    options.addOption(Option.builder().longOpt("key-file").hasArg().required()
            .desc("Local file of S3 keys to process").build());
    options.addOption(
            Option.builder().longOpt("sns-arn").hasArg().required().desc("SNS arn to publish to").build());
    options.addOption(Option.builder().longOpt("throttle-ms").hasArg()
            .desc("Amount of ms to wait between publishing to SNS").build());
    options.addOption(Option.builder().longOpt("processed-file").hasArg()
            .desc("Local file to use to store procssed S3 object names").build());
    options.addOption(Option.builder().longOpt("skip-processed").hasArg(false)
            .desc("Whether to skip S3 objects that have been processed").build());
    options.addOption(
            Option.builder().longOpt("dry-run").hasArg(false).desc("If set do not publish to SNS").build());

    CommandLineParser parser = new DefaultParser();
    CommandLine cmd = parser.parse(options, args);

    String bucket = cmd.getOptionValue("bucket");
    String keyFile = cmd.getOptionValue("key-file");
    String snsArn = cmd.getOptionValue("sns-arn");
    String processedFile = cmd.getOptionValue("processed-file", null);
    boolean skipProcessed = cmd.hasOption("skip-processed");
    dryRun = cmd.hasOption("dry-run");
    long throttle = Long.parseLong(cmd.getOptionValue("throttle-ms", "-1"));

    if (processedFile != null) {
        File file = new File(processedFile);
        if (!file.exists()) {
            logger.debug("creating local file to store processed s3 object names: " + processedFile);
            file.createNewFile();
        }
    }

    /*
     * Import S3 keys that have been processed
     */
    if (skipProcessed && processedFile != null) {
        try (BufferedReader br = new BufferedReader(new FileReader(processedFile))) {
            String line;
            while ((line = br.readLine()) != null) {
                alreadyPublished.add(line.trim());
            }
        }
    }

    /*
     * Setup writer for file containing processed S3 keys
     */
    FileWriter fw = null;
    BufferedWriter bw = null;
    if (processedFile != null) {
        fw = new FileWriter(processedFile, true);
        bw = new BufferedWriter(fw);
    }

    /*
     * Create clients
     */
    AmazonS3Client s3Client = new AmazonS3Client();
    AmazonSNSClient snsClient = new AmazonSNSClient();

    /*
     * Get S3 object list
     */
    try (BufferedReader br = new BufferedReader(new FileReader(keyFile))) {
        String line;
        while ((line = br.readLine()) != null) {
            String key = line.trim();

            if (alreadyPublished.contains(key)) {
                logger.info("skipping " + key);
            }

            ObjectMetadata om = s3Client.getObjectMetadata(bucket, key);
            S3EventNotification s3Notification = getS3Notification(key, bucket, om.getContentLength());
            String json = s3Notification.toJson();

            /*
             * Publish to SNS
             */
            if (publish(snsArn, json, snsClient, key) && processedFile != null) {
                bw.write(key + "\n");
                bw.flush();
            }

            if (throttle != -1) {
                Thread.sleep(throttle);
            }
        }
    }

    if (processedFile != null) {
        bw.close();
        fw.close();
    }
}
From source file:SDRecord.java
public static void main(String[] args) {
    boolean recordToInf = false;
    long recordTo = 0, txsize = 0, wr = 0, max = 0;
    int sourcePort = 0, destPort = 0;
    String val;
    OutputStream writer = null;
    InetAddress rhost = null, lhost = null;
    DatagramSocket socket = null;

    //Default values
    int buffSize = 1500;
    try {
        lhost = InetAddress.getByName("0.0.0.0");
    } catch (UnknownHostException e1) {
        System.err.println("ERROR!: Host not reconized");
        System.exit(3);
    }
    recordToInf = true;
    sourcePort = 7355;

    Options options = new Options();
    options.addOption("m", true, "Minutes to record, default is no limit");
    options.addOption("l", true, "Bind to a specific local address, default is 0.0.0.0");
    options.addOption("p", true, "Local port to use, default is 7355");
    options.addOption("r", true, "Remote address where to send data");
    options.addOption("d", true, "Remote port, to use with -r option");
    options.addOption("f", true, "Output file where to save the recording");
    options.addOption("s", true, "Stop recording when reaching specified MBs");
    options.addOption("h", false, "Help");

    CommandLineParser parser = new DefaultParser();
    CommandLine cmd = null;
    try {
        cmd = parser.parse(options, args);
    } catch (ParseException e1) {
        System.err.println("ERROR!: Error while parsing the command line");
        System.exit(1);
    }

    if (cmd.hasOption("m")) {
        val = cmd.getOptionValue("m");
        try {
            if (Long.parseLong(val) < 0) {
                System.err.println("ERROR!: -m argument value cannot be negative");
                System.exit(3);
            }
            recordTo = System.currentTimeMillis() + (Long.parseLong(val) * 60000);
            recordToInf = false;
        } catch (NumberFormatException e) {
            System.err.println("ERROR!: -m argument not an integer");
            System.exit(3);
        }
    }
    if (cmd.hasOption("l")) {
        val = cmd.getOptionValue("l");
        try {
            lhost = InetAddress.getByName(val);
        } catch (UnknownHostException e) {
            System.err.println("ERROR!: Host not reconized");
            System.exit(3);
        }
    }
    if (cmd.hasOption("p")) {
        val = cmd.getOptionValue("p");
        try {
            sourcePort = Integer.parseInt(val);
        } catch (NumberFormatException e) {
            System.err.println("ERROR!: -p argument not an integer");
            System.exit(3);
        }
    }
    if (cmd.hasOption("r")) {
        val = cmd.getOptionValue("r");
        try {
            rhost = InetAddress.getByName(val);
        } catch (UnknownHostException e) {
            System.err.println("ERROR!: Host not reconized");
            System.exit(3);
        }
    }
    if (cmd.hasOption("d")) {
        val = cmd.getOptionValue("d");
        try {
            destPort = Integer.parseInt(val);
        } catch (NumberFormatException e) {
            System.err.println("-ERROR!: -d argument not an integer");
            System.exit(3);
        }
    }
    if (cmd.hasOption("f")) {
        val = cmd.getOptionValue("f");
        try {
            writer = new FileOutputStream(val);
        } catch (FileNotFoundException e) {
            System.err.println("ERROR!: File not found");
            System.exit(3);
        }
    }
    if (cmd.hasOption("s")) {
        val = cmd.getOptionValue("s");
        try {
            max = (long) (Double.parseDouble(val) * 1000000);
        } catch (NumberFormatException e) {
            System.err.println("ERROR!: -s argument not valid");
            System.exit(3);
        }
        if (Double.parseDouble(val) < 0) {
            System.err.println("ERROR!: -s argument value cannot be negative");
            System.exit(3);
        }
    }
    if (cmd.hasOption("h")) {
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("SDRecord", options);
        System.exit(0);
    }

    try {
        socket = new DatagramSocket(sourcePort, lhost);
        //socket options
        socket.setReuseAddress(true);
    } catch (SocketException e) {
        e.printStackTrace();
        System.exit(3);
    }

    byte[] buffer = new byte[buffSize];
    DatagramPacket packet = new DatagramPacket(buffer, buffer.length);
    System.err.println("Listening " + lhost.toString() + " on port " + sourcePort);

    while (recordToInf == true || System.currentTimeMillis() <= recordTo) {
        //Stop recording when reaching max bytes
        if (max != 0 && txsize >= max)
            break;
        packet.setData(buffer);
        try {
            socket.receive(packet);
        } catch (IOException e) {
            e.printStackTrace();
            System.exit(4);
        }
        //Ignoring packets with no data
        if (basicFilter(packet) == null)
            continue;
        if (writer == null && rhost == null)
            wr = recordToStdout(packet);
        if (writer != null)
            wr = recordToFile(packet, writer);
        if (rhost != null)
            wr = recordToSocket(packet, socket, rhost, destPort);
        txsize += wr;
        System.err
                .print("\r" + formatSize(txsize) + " transferred" + "\033[K" + "\t Press Ctrl+c to terminate");
    }

    //closing socket and exit
    System.err.print("\r" + formatSize(txsize) + " transferred" + "\033[K");
    socket.close();
    System.out.println();
    System.exit(0);
}
From source file:fr.tpt.s3.mcdag.bench.MainBench.java
public static void main(String[] args) throws IOException, InterruptedException {
    // Command line options
    Options options = new Options();

    Option input = new Option("i", "input", true, "MC-DAG XML models");
    input.setRequired(true);
    input.setArgs(Option.UNLIMITED_VALUES);
    options.addOption(input);

    Option output = new Option("o", "output", true, "Folder where results have to be written.");
    output.setRequired(true);
    options.addOption(output);

    Option uUti = new Option("u", "utilization", true, "Utilization.");
    uUti.setRequired(true);
    options.addOption(uUti);

    Option output2 = new Option("ot", "output-total", true, "File where total results are being written");
    output2.setRequired(true);
    options.addOption(output2);

    Option oCores = new Option("c", "cores", true, "Cores given to the test");
    oCores.setRequired(true);
    options.addOption(oCores);

    Option oLvls = new Option("l", "levels", true, "Levels tested for the system");
    oLvls.setRequired(true);
    options.addOption(oLvls);

    Option jobs = new Option("j", "jobs", true, "Number of threads to be launched.");
    jobs.setRequired(false);
    options.addOption(jobs);

    Option debug = new Option("d", "debug", false, "Debug logs.");
    debug.setRequired(false);
    options.addOption(debug);

    /*
     * Parsing of the command line
     */
    CommandLineParser parser = new DefaultParser();
    HelpFormatter formatter = new HelpFormatter();
    CommandLine cmd;
    try {
        cmd = parser.parse(options, args);
    } catch (ParseException e) {
        System.err.println(e.getMessage());
        formatter.printHelp("Benchmarks MultiDAG", options);
        System.exit(1);
        return;
    }

    String inputFilePath[] = cmd.getOptionValues("input");
    String outputFilePath = cmd.getOptionValue("output");
    String outputFilePathTotal = cmd.getOptionValue("output-total");
    double utilization = Double.parseDouble(cmd.getOptionValue("utilization"));
    boolean boolDebug = cmd.hasOption("debug");
    int nbLvls = Integer.parseInt(cmd.getOptionValue("levels"));
    int nbJobs = 1;
    int nbFiles = inputFilePath.length;

    if (cmd.hasOption("jobs"))
        nbJobs = Integer.parseInt(cmd.getOptionValue("jobs"));

    int nbCores = Integer.parseInt(cmd.getOptionValue("cores"));

    /*
     * While files need to be allocated
     * run the tests in the pool of threads
     */

    // For dual-criticality systems we call a specific thread
    if (nbLvls == 2) {
        System.out.println(">>>>>>>>>>>>>>>>>>>>> NB levels " + nbLvls);
        int i_files2 = 0;
        String outFile = outputFilePath.substring(0, outputFilePath.lastIndexOf('.'))
                .concat("-schedulability.csv");
        PrintWriter writer = new PrintWriter(outFile, "UTF-8");
        writer.println(
                "Thread; File; FSched (%); FPreempts; FAct; LSched (%); LPreempts; LAct; ESched (%); EPreempts; EAct; HSched(%); HPreempts; HAct; Utilization");
        writer.close();

        ExecutorService executor2 = Executors.newFixedThreadPool(nbJobs);
        while (i_files2 != nbFiles) {
            BenchThreadDualCriticality bt2 = new BenchThreadDualCriticality(inputFilePath[i_files2], outFile,
                    nbCores, boolDebug);
            executor2.execute(bt2);
            i_files2++;
        }

        executor2.shutdown();
        executor2.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS);

        int fedTotal = 0;
        int laxTotal = 0;
        int edfTotal = 0;
        int hybridTotal = 0;
        int fedPreempts = 0;
        int laxPreempts = 0;
        int edfPreempts = 0;
        int hybridPreempts = 0;
        int fedActiv = 0;
        int laxActiv = 0;
        int edfActiv = 0;
        int hybridActiv = 0;

        // Read lines in file and do average
        int i = 0;
        File f = new File(outFile);
        @SuppressWarnings("resource")
        Scanner line = new Scanner(f);
        while (line.hasNextLine()) {
            String s = line.nextLine();
            if (i > 0) { // To skip the first line
                try (Scanner inLine = new Scanner(s).useDelimiter("; ")) {
                    int j = 0;
                    while (inLine.hasNext()) {
                        String val = inLine.next();
                        if (j == 2) {
                            fedTotal += Integer.parseInt(val);
                        } else if (j == 3) {
                            fedPreempts += Integer.parseInt(val);
                        } else if (j == 4) {
                            fedActiv += Integer.parseInt(val);
                        } else if (j == 5) {
                            laxTotal += Integer.parseInt(val);
                        } else if (j == 6) {
                            laxPreempts += Integer.parseInt(val);
                        } else if (j == 7) {
                            laxActiv += Integer.parseInt(val);
                        } else if (j == 8) {
                            edfTotal += Integer.parseInt(val);
                        } else if (j == 9) {
                            edfPreempts += Integer.parseInt(val);
                        } else if (j == 10) {
                            edfActiv += Integer.parseInt(val);
                        } else if (j == 11) {
                            hybridTotal += Integer.parseInt(val);
                        } else if (j == 12) {
                            hybridPreempts += Integer.parseInt(val);
                        } else if (j == 13) {
                            hybridActiv += Integer.parseInt(val);
                        }
                        j++;
                    }
                }
            }
            i++;
        }

        // Write percentage
        double fedPerc = (double) fedTotal / nbFiles;
        double laxPerc = (double) laxTotal / nbFiles;
        double edfPerc = (double) edfTotal / nbFiles;
        double hybridPerc = (double) hybridTotal / nbFiles;

        double fedPercPreempts = (double) fedPreempts / fedActiv;
        double laxPercPreempts = (double) laxPreempts / laxActiv;
        double edfPercPreempts = (double) edfPreempts / edfActiv;
        double hybridPercPreempts = (double) hybridPreempts / hybridActiv;

        Writer wOutput = new BufferedWriter(new FileWriter(outputFilePathTotal, true));
        wOutput.write(Thread.currentThread().getName() + "; " + utilization + "; " + fedPerc + "; "
                + fedPreempts + "; " + fedActiv + "; " + fedPercPreempts + "; " + laxPerc + "; " + laxPreempts
                + "; " + laxActiv + "; " + laxPercPreempts + "; " + edfPerc + "; " + edfPreempts + "; "
                + edfActiv + "; " + edfPercPreempts + "; " + hybridPerc + "; " + hybridPreempts + "; "
                + hybridActiv + "; " + hybridPercPreempts + "\n");
        wOutput.close();

    } else if (nbLvls > 2) {
        int i_files2 = 0;
        String outFile = outputFilePath.substring(0, outputFilePath.lastIndexOf('.'))
                .concat("-schedulability.csv");
        PrintWriter writer = new PrintWriter(outFile, "UTF-8");
        writer.println(
                "Thread; File; LSched (%); LPreempts; LAct; ESched (%); EPreempts; EAct; HSched(%); HPreempts; HAct; Utilization");
        writer.close();

        ExecutorService executor2 = Executors.newFixedThreadPool(nbJobs);
        while (i_files2 != nbFiles) {
            BenchThreadNLevels bt2 = new BenchThreadNLevels(inputFilePath[i_files2], outFile, nbCores,
                    boolDebug);
            executor2.execute(bt2);
            i_files2++;
        }

        executor2.shutdown();
        executor2.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS);

        int laxTotal = 0;
        int edfTotal = 0;
        int hybridTotal = 0;
        int laxPreempts = 0;
        int edfPreempts = 0;
        int hybridPreempts = 0;
        int laxActiv = 0;
        int edfActiv = 0;
        int hybridActiv = 0;

        // Read lines in file and do average
        int i = 0;
        File f = new File(outFile);
        @SuppressWarnings("resource")
        Scanner line = new Scanner(f);
        while (line.hasNextLine()) {
            String s = line.nextLine();
            if (i > 0) { // To skip the first line
                try (Scanner inLine = new Scanner(s).useDelimiter("; ")) {
                    int j = 0;
                    while (inLine.hasNext()) {
                        String val = inLine.next();
                        if (j == 2) {
                            laxTotal += Integer.parseInt(val);
                        } else if (j == 3) {
                            laxPreempts += Integer.parseInt(val);
                        } else if (j == 4) {
                            laxActiv += Integer.parseInt(val);
                        } else if (j == 5) {
                            edfTotal += Integer.parseInt(val);
                        } else if (j == 6) {
                            edfPreempts += Integer.parseInt(val);
                        } else if (j == 7) {
                            edfActiv += Integer.parseInt(val);
                        } else if (j == 8) {
                            hybridTotal += Integer.parseInt(val);
                        } else if (j == 9) {
                            hybridPreempts += Integer.parseInt(val);
                        } else if (j == 10) {
                            hybridActiv += Integer.parseInt(val);
                        }
                        j++;
                    }
                }
            }
            i++;
        }

        // Write percentage
        double laxPerc = (double) laxTotal / nbFiles;
        double edfPerc = (double) edfTotal / nbFiles;
        double hybridPerc = (double) hybridTotal / nbFiles;

        double laxPercPreempts = (double) laxPreempts / laxActiv;
        double edfPercPreempts = (double) edfPreempts / edfActiv;
        double hybridPercPreempts = (double) hybridPreempts / hybridActiv;

        Writer wOutput = new BufferedWriter(new FileWriter(outputFilePathTotal, true));
        wOutput.write(Thread.currentThread().getName() + "; " + utilization + "; " + laxPerc + "; "
                + laxPreempts + "; " + laxActiv + "; " + laxPercPreempts + "; " + edfPerc + "; " + edfPreempts
                + "; " + edfActiv + "; " + edfPercPreempts + "; " + hybridPerc + "; " + hybridPreempts + "; "
                + hybridActiv + "; " + hybridPercPreempts + "\n");
        wOutput.close();
    } else {
        System.err.println("Wrong number of levels");
        System.exit(-1);
    }

    System.out.println("[BENCH Main] Done benchmarking U = " + utilization + " Levels " + nbLvls);
}