List of usage examples for the java.lang.IllegalArgumentException constructor
public IllegalArgumentException(Throwable cause)
From source file:org.elasticsearch.discovery.ec2.AmazonEC2Fixture.java
/**
 * Command-line entry point for the EC2 fixture.
 *
 * Expects exactly two arguments: the fixture's working directory and the
 * file listing the nodes' transport URIs; then starts listening.
 */
public static void main(String[] args) throws Exception {
    if (args != null && args.length == 2) {
        final AmazonEC2Fixture fixture = new AmazonEC2Fixture(args[0], args[1]);
        fixture.listen();
    } else {
        throw new IllegalArgumentException("AmazonEC2Fixture <working directory> <nodes transport uri file>");
    }
}
From source file:org.envirocar.harvest.DummyTrackCreator.java
public static void main(String[] args) throws IOException { String consumerUrl = null;//w w w. j a va 2s . co m if (args != null && args.length > 0) { consumerUrl = args[0].trim(); } else { throw new IllegalArgumentException("consumerUrl needs to be provided"); } new DummyTrackCreator(consumerUrl).publishTracks(); }
From source file:edu.vt.middleware.cas.ldap.LoadDriver.java
public static void main(final String[] args) { if (args.length < 4) { System.out.println("USAGE: LoadDriver sample_count thread_count " + "path/to/credentials.csv path/to/spring-context.xml"); return;// w w w .ja v a2s . com } final int samples = Integer.parseInt(args[0]); final int threads = Integer.parseInt(args[1]); final File credentials = new File(args[2]); if (!credentials.exists()) { throw new IllegalArgumentException(credentials + " does not exist."); } ApplicationContext context; try { context = new ClassPathXmlApplicationContext(args[3]); } catch (BeanDefinitionStoreException e) { if (e.getCause() instanceof FileNotFoundException) { // Try treating path as filesystem path context = new FileSystemXmlApplicationContext(args[3]); } else { throw e; } } final LoadDriver driver = new LoadDriver(samples, threads, credentials, context); System.err.println("Load test configuration:"); System.err.println("\tthreads: " + threads); System.err.println("\tsamples: " + samples); System.err.println("\tcredentials: " + credentials); driver.start(); while (driver.getState().hasWorkRemaining()) { try { Thread.sleep(1000); } catch (InterruptedException e) { } } driver.stop(); }
From source file:com.github.zerkseez.codegen.wrappergenerator.Main.java
/**
 * Generates wrapper classes for the classes named on the command line.
 *
 * Required options: --outputDirectory (single value) and --classMappings
 * (one or more "wrappee:fully.qualified.WrapperName" pairs). Missing
 * required options cause the usage help to be printed instead.
 */
public static void main(final String[] args) throws Exception {
    final Options cliOptions = new Options();
    cliOptions.addOption(Option.builder().longOpt("outputDirectory").hasArg().required().build());
    cliOptions.addOption(Option.builder().longOpt("classMappings").hasArgs().required().build());
    final CommandLineParser cliParser = new DefaultParser();
    try {
        final CommandLine commandLine = cliParser.parse(cliOptions, args);
        final String outputDirectory = commandLine.getOptionValue("outputDirectory");
        for (final String mapping : commandLine.getOptionValues("classMappings")) {
            // Each mapping is "wrappeeClassName:wrapperClassName".
            final String[] parts = mapping.split(":");
            if (parts.length != 2) {
                throw new IllegalArgumentException(
                        String.format("Invalid class mapping format \"%s\"", mapping));
            }
            final Class<?> wrappeeClass = Class.forName(parts[0]);
            final String fullWrapperClassName = parts[1];
            // Split the wrapper name into package and simple name at the last dot.
            final int dotIndex = fullWrapperClassName.lastIndexOf('.');
            final String wrapperPackageName;
            final String simpleWrapperClassName;
            if (dotIndex == -1) {
                wrapperPackageName = "";
                simpleWrapperClassName = fullWrapperClassName;
            } else {
                wrapperPackageName = fullWrapperClassName.substring(0, dotIndex);
                simpleWrapperClassName = fullWrapperClassName.substring(dotIndex + 1);
            }
            System.out.println(String.format("Generating wrapper class for %s...", wrappeeClass));
            final WrapperGenerator generator = new WrapperGenerator(wrappeeClass, wrapperPackageName,
                    simpleWrapperClassName);
            generator.writeTo(outputDirectory, true);
        }
        System.out.println("Done");
    } catch (MissingOptionException e) {
        // A required option is absent — show usage instead of a stack trace.
        new HelpFormatter().printHelp(String.format("java -cp CLASSPATH %s", Main.class.getName()), cliOptions);
    }
}
From source file:hdfs.MiniHDFS.java
public static void main(String[] args) throws Exception { if (args.length != 1 && args.length != 3) { throw new IllegalArgumentException( "Expected: MiniHDFS <baseDirectory> [<kerberosPrincipal> <kerberosKeytab>], " + "got: " + Arrays.toString(args)); }/* w w w.jav a2 s . co m*/ boolean secure = args.length == 3; // configure Paths Path baseDir = Paths.get(args[0]); // hadoop-home/, so logs will not complain if (System.getenv("HADOOP_HOME") == null) { Path hadoopHome = baseDir.resolve("hadoop-home"); Files.createDirectories(hadoopHome); System.setProperty("hadoop.home.dir", hadoopHome.toAbsolutePath().toString()); } // hdfs-data/, where any data is going Path hdfsHome = baseDir.resolve("hdfs-data"); // configure cluster Configuration cfg = new Configuration(); cfg.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, hdfsHome.toAbsolutePath().toString()); // lower default permission: TODO: needed? cfg.set(DFSConfigKeys.DFS_DATANODE_DATA_DIR_PERMISSION_KEY, "766"); // optionally configure security if (secure) { String kerberosPrincipal = args[1]; String keytabFile = args[2]; cfg.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "kerberos"); cfg.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHORIZATION, "true"); cfg.set(DFSConfigKeys.DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY, kerberosPrincipal); cfg.set(DFSConfigKeys.DFS_DATANODE_KERBEROS_PRINCIPAL_KEY, kerberosPrincipal); cfg.set(DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY, kerberosPrincipal); cfg.set(DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY, keytabFile); cfg.set(DFSConfigKeys.DFS_DATANODE_KEYTAB_FILE_KEY, keytabFile); cfg.set(DFSConfigKeys.DFS_NAMENODE_ACLS_ENABLED_KEY, "true"); cfg.set(DFSConfigKeys.DFS_BLOCK_ACCESS_TOKEN_ENABLE_KEY, "true"); cfg.set(DFSConfigKeys.IGNORE_SECURE_PORTS_FOR_TESTING_KEY, "true"); } UserGroupInformation.setConfiguration(cfg); // TODO: remove hardcoded port! 
MiniDFSCluster.Builder builder = new MiniDFSCluster.Builder(cfg); if (secure) { builder.nameNodePort(9998); } else { builder.nameNodePort(9999); } MiniDFSCluster dfs = builder.build(); // Configure contents of the filesystem org.apache.hadoop.fs.Path esUserPath = new org.apache.hadoop.fs.Path("/user/elasticsearch"); try (FileSystem fs = dfs.getFileSystem()) { // Set the elasticsearch user directory up fs.mkdirs(esUserPath); if (UserGroupInformation.isSecurityEnabled()) { List<AclEntry> acls = new ArrayList<>(); acls.add(new AclEntry.Builder().setType(AclEntryType.USER).setName("elasticsearch") .setPermission(FsAction.ALL).build()); fs.modifyAclEntries(esUserPath, acls); } // Install a pre-existing repository into HDFS String directoryName = "readonly-repository"; String archiveName = directoryName + ".tar.gz"; URL readOnlyRepositoryArchiveURL = MiniHDFS.class.getClassLoader().getResource(archiveName); if (readOnlyRepositoryArchiveURL != null) { Path tempDirectory = Files.createTempDirectory(MiniHDFS.class.getName()); File readOnlyRepositoryArchive = tempDirectory.resolve(archiveName).toFile(); FileUtils.copyURLToFile(readOnlyRepositoryArchiveURL, readOnlyRepositoryArchive); FileUtil.unTar(readOnlyRepositoryArchive, tempDirectory.toFile()); fs.copyFromLocalFile(true, true, new org.apache.hadoop.fs.Path( tempDirectory.resolve(directoryName).toAbsolutePath().toUri()), esUserPath.suffix("/existing/" + directoryName)); FileUtils.deleteDirectory(tempDirectory.toFile()); } } // write our PID file Path tmp = Files.createTempFile(baseDir, null, null); String pid = ManagementFactory.getRuntimeMXBean().getName().split("@")[0]; Files.write(tmp, pid.getBytes(StandardCharsets.UTF_8)); Files.move(tmp, baseDir.resolve(PID_FILE_NAME), StandardCopyOption.ATOMIC_MOVE); // write our port file tmp = Files.createTempFile(baseDir, null, null); Files.write(tmp, Integer.toString(dfs.getNameNodePort()).getBytes(StandardCharsets.UTF_8)); Files.move(tmp, baseDir.resolve(PORT_FILE_NAME), 
StandardCopyOption.ATOMIC_MOVE); }
From source file:com.schnobosoft.semeval.cortical.PrintCorrelations.java
public static void main(String[] args) throws IOException { File inputFile;/*from w w w . j ava 2 s.c o m*/ Retina retinaName; if (args.length > 0) { inputFile = new File(args[0]); retinaName = (args.length > 1 && args[1].toLowerCase().startsWith("syn")) ? Retina.EN_SYNONYMOUS : DEFAULT_RETINA_NAME; } else { throw new IllegalArgumentException( "Call: " + PrintCorrelations.class.getCanonicalName() + " <input file> [<syn>]"); } LOG.info("Using Retina " + retinaName.name().toLowerCase()); // printCorrelations(inputFile); saveCorrelations(inputFile); }
From source file:com.kolich.aws.SQSTest.java
/**
 * Smoke-tests the SQS client end-to-end: creates a scratch queue, lists
 * queues, sends a batch of messages, drains and deletes them, performs a
 * long-poll receive on the (now empty) queue, and finally deletes the queue.
 *
 * AWS credentials must be supplied via the -Daws.key and -Daws.secret VM
 * properties.
 */
public static void main(String[] args) throws Exception {
    final String key = System.getProperty(AWS_ACCESS_KEY_PROPERTY);
    final String secret = System.getProperty(AWS_SECRET_PROPERTY);
    if (key == null || secret == null) {
        throw new IllegalArgumentException("You are missing the " + "-Daws.key and -Daws.secret required VM "
                + "properties on your command line.");
    }
    final HttpClient client = KolichHttpClientFactory.getNewInstanceNoProxySelector();
    final SQSClient sqs = new KolichSQSClient(client, key, secret);

    // Create a scratch queue to exercise the API against.
    URI queueURI = null;
    final Either<HttpFailure, CreateQueueResult> created = sqs.createQueue("----__________");
    if (created.success()) {
        System.out.println("Created queue successfully: " + created.right().getQueueUrl());
        queueURI = URI.create(created.right().getQueueUrl());
    } else {
        System.err.println("Failed to create queue: " + created.left().getStatusCode());
    }

    // Enumerate all queues visible to this account.
    final Either<HttpFailure, ListQueuesResult> listing = sqs.listQueues();
    if (listing.success()) {
        for (final String queueUrl : listing.right().getQueueUrls()) {
            System.out.println("Queue: " + queueUrl);
        }
    } else {
        System.err.println("Listing queues failed.");
    }

    // Send five timestamped test messages.
    for (int i = 0; i < 5; i++) {
        final Either<HttpFailure, SendMessageResult> sent = sqs.sendMessage(queueURI,
                "test message: " + ISO8601DateFormat.format(new Date()));
        if (sent.success()) {
            System.out.println("Sent message [" + i + "]: " + sent.right().getMessageId());
        } else {
            System.err.println("Failed to send message.");
        }
    }

    // Drain the five messages, deleting each one as it is received.
    int fetched = 0;
    boolean receiveFailed = false;
    while (fetched < 5 && !receiveFailed) {
        final Either<HttpFailure, ReceiveMessageResult> received = sqs.receiveMessage(queueURI, 10, 5);
        if (received.success()) {
            fetched += received.right().getMessages().size();
            System.out.println("Loaded " + received.right().getMessages().size() + " messages.");
            for (final Message m : received.right().getMessages()) {
                System.out.println("Message [" + m.getMessageId() + "]: " + m.getBody());
                final Option<HttpFailure> deletion = sqs.deleteMessage(queueURI, m.getReceiptHandle());
                if (deletion.isNone()) {
                    System.out.println("Deleted message [" + m.getMessageId() + "]");
                } else {
                    System.err.println("Failed to delete message: " + m.getReceiptHandle());
                }
            }
        } else {
            receiveFailed = true;
            System.err.println("Loading messages failed.");
        }
    }

    // The queue should be empty now; a long poll should wait out its timeout.
    System.out.println("No messages should be on queue... long poll waiting!");
    final Either<HttpFailure, ReceiveMessageResult> longPoll = sqs.receiveMessage(queueURI, 20, 5);
    if (longPoll.success()) {
        System.out.println("Long poll finished waiting successfully.");
    } else {
        System.err.println("Failed to long poll wait.");
    }

    // Clean up the scratch queue.
    final Option<HttpFailure> deleted = sqs.deleteQueue(queueURI);
    if (deleted.isNone()) {
        System.out.println("Deleted queue successfully: " + queueURI);
    } else {
        System.err.println("Deletion of queue failed: " + queueURI);
    }
}
From source file:net.sf.jodreports.cli.CreateDocument.java
public static void main(String[] args) throws Exception { if (args.length < 3) { System.err.println("USAGE: " + CreateDocument.class.getName() + " <template-document> <data-file> <output-document>"); System.exit(0);//from w ww .j ava2s . c o m } File templateFile = new File(args[0]); File dataFile = new File(args[1]); File outputFile = new File(args[2]); DocumentTemplateFactory documentTemplateFactory = new DocumentTemplateFactory(); DocumentTemplate template = documentTemplateFactory.getTemplate(templateFile); Object model = null; String dataFileExtension = FilenameUtils.getExtension(dataFile.getName()); if (dataFileExtension.equals("xml")) { model = NodeModel.parse(dataFile); } else if (dataFileExtension.equals("properties")) { Properties properties = new Properties(); properties.load(new FileInputStream(dataFile)); model = properties; } else { throw new IllegalArgumentException( "data file must be 'xml' or 'properties'; unsupported type: " + dataFileExtension); } template.createDocument(model, new FileOutputStream(outputFile)); }
From source file:edu.unc.irss.arc.de.dvpublisher.DataverseClient.java
public static void main(String[] args) throws MalformedURLException { if (args.length != 5) { logger.log(Level.SEVERE,// ww w.j av a 2 s . c om "Three arguments: dataverse_URL, Api_key, dataverse_Alias are expected dataset_Id file_location"); throw new IllegalArgumentException("The number of arguments must be 5."); } for (String arg : args) { logger.log(Level.INFO, "arg={0}", arg); } if (StringUtils.isBlank(args[0])) { logger.log(Level.SEVERE, "dataverse URL should not be blank"); throw new IllegalArgumentException("dataverse URL should not be blank"); } if (StringUtils.isBlank(args[1])) { logger.log(Level.SEVERE, "API key should not be blank"); throw new IllegalArgumentException("API Key should not be blank"); } if (StringUtils.isBlank(args[2])) { logger.log(Level.SEVERE, "dataverse alias should not be blank"); throw new IllegalArgumentException("dataverse alias should not be blank"); } if (StringUtils.isBlank(args[3])) { logger.log(Level.SEVERE, "dataset Id should not be blank"); throw new IllegalArgumentException("dataset Id should not be blank"); } if (StringUtils.isBlank(args[4])) { logger.log(Level.SEVERE, "file location should not be blank"); throw new IllegalArgumentException("file location should not be blank"); } logger.log(Level.INFO, "running main method"); logger.log(Level.INFO, "dataverseUrl:{0}", args[0]); logger.log(Level.INFO, "apiKey:{0}", args[1]); logger.log(Level.INFO, "dataverseAlias:{0}", args[2]); logger.log(Level.INFO, "datasetId:{0}", args[3]); logger.log(Level.INFO, "fileLocation:{0}", args[4]); DataverseClient dvClient = new DataverseClient(args[0], args[1], args[2]); logger.log(Level.INFO, "uploading a file to a target dataverse"); dvClient.publishDatafile(args[3], args[4]); logger.log(Level.INFO, "uploading has been finished"); }
From source file:com.github.fritaly.svngraph.SvnGraph.java
public static void main(String[] args) throws Exception { if (args.length != 2) { System.out.println(String.format("%s <input-file> <output-file>", SvnGraph.class.getSimpleName())); System.exit(1);/*from w ww .j ava2 s .c om*/ } final File input = new File(args[0]); if (!input.exists()) { throw new IllegalArgumentException( String.format("The given file '%s' doesn't exist", input.getAbsolutePath())); } final File output = new File(args[1]); final Document document = DocumentBuilderFactory.newInstance().newDocumentBuilder().parse(input); final History history = new History(document); final Set<String> rootPaths = history.getRootPaths(); System.out.println(rootPaths); for (String path : rootPaths) { System.out.println(path); System.out.println(history.getHistory(path).getRevisions()); System.out.println(); } int count = 0; FileWriter fileWriter = null; GraphMLWriter graphWriter = null; try { fileWriter = new FileWriter(output); graphWriter = new GraphMLWriter(fileWriter); final NodeStyle tagStyle = graphWriter.getNodeStyle(); tagStyle.setFillColor(Color.WHITE); graphWriter.graph(); // map associating node labels to their corresponding node id in the graph final Map<String, String> nodeIdsPerLabel = new TreeMap<>(); // the node style associated to each branch final Map<String, NodeStyle> nodeStyles = new TreeMap<>(); for (Revision revision : history.getSignificantRevisions()) { System.out.println(revision.getNumber() + " - " + revision.getMessage()); // TODO Render also the deletion of branches // there should be only 1 significant update per revision (the one with action ADD) for (Update update : revision.getSignificantUpdates()) { if (update.isCopy()) { // a merge is also considered a copy final RevisionPath source = update.getCopySource(); System.out.println(String.format(" > %s %s from %s@%d", update.getAction(), update.getPath(), source.getPath(), source.getRevision())); final String sourceRoot = Utils.getRootName(source.getPath()); if (sourceRoot == null) { // 
skip the revisions whose associated root is // null (happens whether a branch was created // outside the 'branches' directory for // instance) System.err.println(String.format("Skipped revision %d because of a null root", source.getRevision())); continue; } final String sourceLabel = computeNodeLabel(sourceRoot, source.getRevision()); // create a node for the source (path, revision) final String sourceId; if (nodeIdsPerLabel.containsKey(sourceLabel)) { // retrieve the id of the existing node sourceId = nodeIdsPerLabel.get(sourceLabel); } else { // create the new node if (Utils.isTagPath(source.getPath())) { graphWriter.setNodeStyle(tagStyle); } else { if (!nodeStyles.containsKey(sourceRoot)) { final NodeStyle style = new NodeStyle(); style.setFillColor(randomColor()); nodeStyles.put(sourceRoot, style); } graphWriter.setNodeStyle(nodeStyles.get(sourceRoot)); } sourceId = graphWriter.node(sourceLabel); nodeIdsPerLabel.put(sourceLabel, sourceId); } // and another for the newly created directory final String targetRoot = Utils.getRootName(update.getPath()); if (targetRoot == null) { System.err.println(String.format("Skipped revision %d because of a null root", revision.getNumber())); continue; } final String targetLabel = computeNodeLabel(targetRoot, revision.getNumber()); if (Utils.isTagPath(update.getPath())) { graphWriter.setNodeStyle(tagStyle); } else { if (!nodeStyles.containsKey(targetRoot)) { final NodeStyle style = new NodeStyle(); style.setFillColor(randomColor()); nodeStyles.put(targetRoot, style); } graphWriter.setNodeStyle(nodeStyles.get(targetRoot)); } final String targetId; if (nodeIdsPerLabel.containsKey(targetLabel)) { // retrieve the id of the existing node targetId = nodeIdsPerLabel.get(targetLabel); } else { // create the new node if (Utils.isTagPath(update.getPath())) { graphWriter.setNodeStyle(tagStyle); } else { if (!nodeStyles.containsKey(targetRoot)) { final NodeStyle style = new NodeStyle(); style.setFillColor(randomColor()); 
nodeStyles.put(targetRoot, style); } graphWriter.setNodeStyle(nodeStyles.get(targetRoot)); } targetId = graphWriter.node(targetLabel); nodeIdsPerLabel.put(targetLabel, targetId); } // create an edge between the 2 nodes graphWriter.edge(sourceId, targetId); } else { System.out.println(String.format(" > %s %s", update.getAction(), update.getPath())); } } System.out.println(); count++; } // Dispatch the revisions per corresponding branch final Map<String, Set<Long>> revisionsPerBranch = new TreeMap<>(); for (String nodeLabel : nodeIdsPerLabel.keySet()) { if (nodeLabel.contains("@")) { final String branchName = StringUtils.substringBefore(nodeLabel, "@"); final long revision = Long.parseLong(StringUtils.substringAfter(nodeLabel, "@")); if (!revisionsPerBranch.containsKey(branchName)) { revisionsPerBranch.put(branchName, new TreeSet<Long>()); } revisionsPerBranch.get(branchName).add(revision); } else { throw new IllegalStateException(nodeLabel); } } // Recreate the missing edges between revisions from a same branch for (String branchName : revisionsPerBranch.keySet()) { final List<Long> branchRevisions = new ArrayList<>(revisionsPerBranch.get(branchName)); for (int i = 0; i < branchRevisions.size() - 1; i++) { final String nodeLabel1 = String.format("%s@%d", branchName, branchRevisions.get(i)); final String nodeLabel2 = String.format("%s@%d", branchName, branchRevisions.get(i + 1)); graphWriter.edge(nodeIdsPerLabel.get(nodeLabel1), nodeIdsPerLabel.get(nodeLabel2)); } } graphWriter.closeGraph(); System.out.println(String.format("Found %d significant revisions", count)); } finally { if (graphWriter != null) { graphWriter.close(); } if (fileWriter != null) { fileWriter.close(); } } System.out.println("Done"); }