Example usage for org.apache.commons.cli Options Options

List of usage examples for org.apache.commons.cli Options Options

Introduction

On this page you can find example usages of the org.apache.commons.cli Options constructor.

Prototype

public Options()

Usage
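
Before the project examples below, here is a minimal, self-contained sketch of the usual pattern: create an Options container, register each Option, parse the arguments, and fall back to HelpFormatter when parsing fails. It assumes Commons CLI 1.3 or newer (for Option.builder and DefaultParser); the class name, option names, and descriptions are illustrative only and are not taken from the projects listed on this page.

import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;

public class OptionsSketch {
    public static void main(String[] args) {
        Options options = new Options();
        // Short option, long option, no argument, description.
        options.addOption("h", "help", false, "print this help message");
        // Builder style for an option that takes a value.
        options.addOption(Option.builder("i").longOpt("input").hasArg().argName("FILE")
                .desc("input file to process").build());

        CommandLineParser parser = new DefaultParser();
        try {
            CommandLine cmd = parser.parse(options, args);
            if (cmd.hasOption("h")) {
                new HelpFormatter().printHelp("options-sketch", options, true);
                return;
            }
            // Fall back to "stdin" when -i/--input is not given.
            String input = cmd.getOptionValue("i", "stdin");
            System.out.println("input = " + input);
        } catch (ParseException e) {
            System.err.println(e.getMessage());
            new HelpFormatter().printHelp("options-sketch", options, true);
        }
    }
}

Running it with "-i data.txt" prints "input = data.txt"; an unrecognized flag triggers the generated usage text instead.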

From source file:fr.inria.atlanmod.kyanos.benchmarks.KyanosGraphQueryThrownExceptionsPerPackage.java

public static void main(String[] args) {
    Options options = new Options();

    Option inputOpt = OptionBuilder.create(IN);
    inputOpt.setArgName("INPUT");
    inputOpt.setDescription("Input Kyanos resource directory");
    inputOpt.setArgs(1);
    inputOpt.setRequired(true);

    Option inClassOpt = OptionBuilder.create(EPACKAGE_CLASS);
    inClassOpt.setArgName("CLASS");
    inClassOpt.setDescription("FQN of EPackage implementation class");
    inClassOpt.setArgs(1);
    inClassOpt.setRequired(true);

    Option optFileOpt = OptionBuilder.create(OPTIONS_FILE);
    optFileOpt.setArgName("FILE");
    optFileOpt.setDescription("Properties file holding the options to be used in the Kyanos Resource");
    optFileOpt.setArgs(1);

    options.addOption(inputOpt);
    options.addOption(inClassOpt);
    options.addOption(optFileOpt);

    CommandLineParser parser = new PosixParser();

    try {
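        // Register the Blueprints (Neo4j) backend for the Kyanos graph URI scheme, parse the command
        // line, and load the persistent resource before running the query.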
        PersistenceBackendFactoryRegistry.getFactories().put(NeoBlueprintsURI.NEO_GRAPH_SCHEME,
                new BlueprintsPersistenceBackendFactory());

        CommandLine commandLine = parser.parse(options, args);

        URI uri = NeoBlueprintsURI.createNeoGraphURI(new File(commandLine.getOptionValue(IN)));

        Class<?> inClazz = KyanosGraphQueryThrownExceptionsPerPackage.class.getClassLoader()
                .loadClass(commandLine.getOptionValue(EPACKAGE_CLASS));
        inClazz.getMethod("init").invoke(null);

        ResourceSet resourceSet = new ResourceSetImpl();
        resourceSet.getResourceFactoryRegistry().getProtocolToFactoryMap()
                .put(NeoBlueprintsURI.NEO_GRAPH_SCHEME, PersistentResourceFactory.eINSTANCE);

        Resource resource = resourceSet.createResource(uri);

        Map<String, Object> loadOpts = new HashMap<String, Object>();

        if (commandLine.hasOption(OPTIONS_FILE)) {
            Properties properties = new Properties();
            properties.load(new FileInputStream(new File(commandLine.getOptionValue(OPTIONS_FILE))));
            for (final Entry<Object, Object> entry : properties.entrySet()) {
                loadOpts.put((String) entry.getKey(), (String) entry.getValue());
            }
        }
        resource.load(loadOpts);
        {
            LOG.log(Level.INFO, "Start query");
            long begin = System.currentTimeMillis();
            HashMap<String, EList<TypeAccess>> list = JavaQueries.getThrownExceptionsPerPackage(resource);
            long end = System.currentTimeMillis();
            LOG.log(Level.INFO, "End query");
            LOG.log(Level.INFO, MessageFormat.format("Query result contains {0} elements", list.size()));
            LOG.log(Level.INFO, MessageFormat.format("Time spent: {0}", MessageUtil.formatMillis(end - begin)));
        }

        if (resource instanceof PersistentResourceImpl) {
            PersistentResourceImpl.shutdownWithoutUnload((PersistentResourceImpl) resource);
        } else {
            resource.unload();
        }

    } catch (ParseException e) {
        MessageUtil.showError(e.toString());
        MessageUtil.showError("Current arguments: " + Arrays.toString(args));
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("java -jar <this-file.jar>", options, true);
    } catch (Throwable e) {
        MessageUtil.showError(e.toString());
    }
}

From source file:it.anyplace.sync.client.Main.java

public static void main(String[] args) throws Exception {
    Options options = new Options();
    options.addOption("C", "set-config", true, "set config file for s-client");
    options.addOption("c", "config", false, "dump config");
    options.addOption("sp", "set-peers", true, "set peer, or comma-separated list of peers");
    options.addOption("q", "query", true, "query directory server for device id");
    options.addOption("d", "discovery", true, "discovery local network for device id");
    options.addOption("p", "pull", true, "pull file from network");
    options.addOption("P", "push", true, "push file to network");
    options.addOption("o", "output", true, "set output file/directory");
    options.addOption("i", "input", true, "set input file/directory");
    options.addOption("lp", "list-peers", false, "list peer addresses");
    options.addOption("a", "address", true, "use this peer addresses");
    options.addOption("L", "list-remote", false, "list folder (root) content from network");
    options.addOption("I", "list-info", false, "dump folder info from network");
    options.addOption("li", "list-info", false, "list folder info from local db");
    //        options.addOption("l", "list-local", false, "list folder content from local (saved) index");
    options.addOption("s", "search", true, "search local index for <term>");
    options.addOption("D", "delete", true, "push delete to network");
    options.addOption("M", "mkdir", true, "push directory create to network");
    options.addOption("h", "help", false, "print help");
    CommandLineParser parser = new DefaultParser();
    CommandLine cmd = parser.parse(options, args);

    if (cmd.hasOption("h")) {
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("s-client", options);
        return;
    }

    File configFile = cmd.hasOption("C") ? new File(cmd.getOptionValue("C"))
            : new File(System.getProperty("user.home"), ".s-client.properties");
    logger.info("using config file = {}", configFile);
    ConfigurationService configuration = ConfigurationService.newLoader().loadFrom(configFile);
    FileUtils.cleanDirectory(configuration.getTemp());
    KeystoreHandler.newLoader().loadAndStore(configuration);
    if (cmd.hasOption("c")) {
        logger.info("configuration =\n{}", configuration.newWriter().dumpToString());
    } else {
        logger.trace("configuration =\n{}", configuration.newWriter().dumpToString());
    }
    logger.debug("{}", configuration.getStorageInfo().dumpAvailableSpace());

    if (cmd.hasOption("sp")) {
        List<String> peers = Lists.newArrayList(Lists.transform(
                Arrays.<String>asList(cmd.getOptionValue("sp").split(",")), new Function<String, String>() {
                    @Override
                    public String apply(String input) {
                        return input.trim();
                    }
                }));
        logger.info("set peers = {}", peers);
        configuration.edit().setPeers(Collections.<DeviceInfo>emptyList());
        for (String peer : peers) {
            KeystoreHandler.validateDeviceId(peer);
            configuration.edit().addPeers(new DeviceInfo(peer, null));
        }
        configuration.edit().persistNow();
    }

    if (cmd.hasOption("q")) {
        String deviceId = cmd.getOptionValue("q");
        logger.info("query device id = {}", deviceId);
        List<DeviceAddress> deviceAddresses = new GlobalDiscoveryHandler(configuration).query(deviceId);
        logger.info("server response = {}", deviceAddresses);
    }
    if (cmd.hasOption("d")) {
        String deviceId = cmd.getOptionValue("d");
        logger.info("discovery device id = {}", deviceId);
        List<DeviceAddress> deviceAddresses = new LocalDiscorveryHandler(configuration).queryAndClose(deviceId);
        logger.info("local response = {}", deviceAddresses);
    }

    if (cmd.hasOption("p")) {
        String path = cmd.getOptionValue("p");
        logger.info("file path = {}", path);
        String folder = path.split(":")[0];
        path = path.split(":")[1];
        try (SyncthingClient client = new SyncthingClient(configuration);
                BlockExchangeConnectionHandler connectionHandler = client.connectToBestPeer()) {
            InputStream inputStream = client.pullFile(connectionHandler, folder, path).waitForComplete()
                    .getInputStream();
            String fileName = client.getIndexHandler().getFileInfoByPath(folder, path).getFileName();
            File file;
            if (cmd.hasOption("o")) {
                File param = new File(cmd.getOptionValue("o"));
                file = param.isDirectory() ? new File(param, fileName) : param;
            } else {
                file = new File(fileName);
            }
            FileUtils.copyInputStreamToFile(inputStream, file);
            logger.info("saved file to = {}", file.getAbsolutePath());
        }
    }
    if (cmd.hasOption("P")) {
        String path = cmd.getOptionValue("P");
        File file = new File(cmd.getOptionValue("i"));
        checkArgument(!path.startsWith("/")); //TODO check path syntax
        logger.info("file path = {}", path);
        String folder = path.split(":")[0];
        path = path.split(":")[1];
        try (SyncthingClient client = new SyncthingClient(configuration);
                BlockPusher.FileUploadObserver fileUploadObserver = client.pushFile(new FileInputStream(file),
                        folder, path)) {
            while (!fileUploadObserver.isCompleted()) {
                fileUploadObserver.waitForProgressUpdate();
                logger.debug("upload progress {}", fileUploadObserver.getProgressMessage());
            }
            logger.info("uploaded file to network");
        }
    }
    if (cmd.hasOption("D")) {
        String path = cmd.getOptionValue("D");
        String folder = path.split(":")[0];
        path = path.split(":")[1];
        logger.info("delete path = {}", path);
        try (SyncthingClient client = new SyncthingClient(configuration);
                IndexEditObserver observer = client.pushDelete(folder, path)) {
            observer.waitForComplete();
            logger.info("deleted path");
        }
    }
    if (cmd.hasOption("M")) {
        String path = cmd.getOptionValue("M");
        String folder = path.split(":")[0];
        path = path.split(":")[1];
        logger.info("dir path = {}", path);
        try (SyncthingClient client = new SyncthingClient(configuration);
                IndexEditObserver observer = client.pushDir(folder, path)) {
            observer.waitForComplete();
            logger.info("uploaded dir to network");
        }
    }
    if (cmd.hasOption("L")) {
        try (SyncthingClient client = new SyncthingClient(configuration)) {
            client.waitForRemoteIndexAquired();
            for (String folder : client.getIndexHandler().getFolderList()) {
                try (IndexBrowser indexBrowser = client.getIndexHandler().newIndexBrowserBuilder()
                        .setFolder(folder).build()) {
                    logger.info("list folder = {}", indexBrowser.getFolder());
                    for (FileInfo fileInfo : indexBrowser.listFiles()) {
                        logger.info("\t\t{} {} {}", fileInfo.getType().name().substring(0, 1),
                                fileInfo.getPath(), fileInfo.describeSize());
                    }
                }
            }
        }
    }
    if (cmd.hasOption("I")) {
        try (SyncthingClient client = new SyncthingClient(configuration)) {
            if (cmd.hasOption("a")) {
                String deviceId = cmd.getOptionValue("a").substring(0, 63),
                        address = cmd.getOptionValue("a").substring(64);
                try (BlockExchangeConnectionHandler connection = client.getConnection(
                        DeviceAddress.newBuilder().setDeviceId(deviceId).setAddress(address).build())) {
                    client.getIndexHandler().waitForRemoteIndexAquired(connection);
                }
            } else {
                client.waitForRemoteIndexAquired();
            }
            String folderInfo = "";
            for (String folder : client.getIndexHandler().getFolderList()) {
                folderInfo += "\n\t\tfolder info : " + client.getIndexHandler().getFolderInfo(folder);
                folderInfo += "\n\t\tfolder stats : "
                        + client.getIndexHandler().newFolderBrowser().getFolderStats(folder).dumpInfo() + "\n";
            }
            logger.info("folders:\n{}\n", folderInfo);
        }
    }
    if (cmd.hasOption("li")) {
        try (SyncthingClient client = new SyncthingClient(configuration)) {
            String folderInfo = "";
            for (String folder : client.getIndexHandler().getFolderList()) {
                folderInfo += "\n\t\tfolder info : " + client.getIndexHandler().getFolderInfo(folder);
                folderInfo += "\n\t\tfolder stats : "
                        + client.getIndexHandler().newFolderBrowser().getFolderStats(folder).dumpInfo() + "\n";
            }
            logger.info("folders:\n{}\n", folderInfo);
        }
    }
    if (cmd.hasOption("lp")) {
        try (SyncthingClient client = new SyncthingClient(configuration);
                DeviceAddressSupplier deviceAddressSupplier = client.getDiscoveryHandler()
                        .newDeviceAddressSupplier()) {
            String deviceAddressesStr = "";
            for (DeviceAddress deviceAddress : Lists.newArrayList(deviceAddressSupplier)) {
                deviceAddressesStr += "\n\t\t" + deviceAddress.getDeviceId() + " : "
                        + deviceAddress.getAddress();
            }
            logger.info("device addresses:\n{}\n", deviceAddressesStr);
        }
    }
    if (cmd.hasOption("s")) {
        String term = cmd.getOptionValue("s");
        try (SyncthingClient client = new SyncthingClient(configuration);
                IndexFinder indexFinder = client.getIndexHandler().newIndexFinderBuilder().build()) {
            client.waitForRemoteIndexAquired();
            logger.info("search term = '{}'", term);
            IndexFinder.SearchCompletedEvent event = indexFinder.doSearch(term);
            if (event.hasGoodResults()) {
                logger.info("search results for term = '{}' :", term);
                for (FileInfo fileInfo : event.getResultList()) {
                    logger.info("\t\t{} {} {}", fileInfo.getType().name().substring(0, 1), fileInfo.getPath(),
                            fileInfo.describeSize());
                }
            } else if (event.hasTooManyResults()) {
                logger.info("too many results found for term = '{}'", term);
            } else {
                logger.info("no result found for term = '{}'", term);
            }
        }
    }
    //        if (cmd.hasOption("l")) {
    //            String indexDump = new IndexHandler(configuration).dumpIndex();
    //            logger.info("index dump = \n\n{}\n", indexDump);
    //        }
    IOUtils.closeQuietly(configuration);
}

From source file:fr.inria.atlanmod.kyanos.benchmarks.KyanosGraphQueryClassDeclarationAttributes.java

public static void main(String[] args) {
    Options options = new Options();

    Option inputOpt = OptionBuilder.create(IN);
    inputOpt.setArgName("INPUT");
    inputOpt.setDescription("Input Kyanos resource directory");
    inputOpt.setArgs(1);
    inputOpt.setRequired(true);

    Option inClassOpt = OptionBuilder.create(EPACKAGE_CLASS);
    inClassOpt.setArgName("CLASS");
    inClassOpt.setDescription("FQN of EPackage implementation class");
    inClassOpt.setArgs(1);
    inClassOpt.setRequired(true);

    Option optFileOpt = OptionBuilder.create(OPTIONS_FILE);
    optFileOpt.setArgName("FILE");
    optFileOpt.setDescription("Properties file holding the options to be used in the Kyanos Resource");
    optFileOpt.setArgs(1);

    options.addOption(inputOpt);
    options.addOption(inClassOpt);
    options.addOption(optFileOpt);

    CommandLineParser parser = new PosixParser();

    try {
        PersistenceBackendFactoryRegistry.getFactories().put(NeoBlueprintsURI.NEO_GRAPH_SCHEME,
                new BlueprintsPersistenceBackendFactory());

        CommandLine commandLine = parser.parse(options, args);

        URI uri = NeoBlueprintsURI.createNeoGraphURI(new File(commandLine.getOptionValue(IN)));

        Class<?> inClazz = KyanosGraphQueryClassDeclarationAttributes.class.getClassLoader()
                .loadClass(commandLine.getOptionValue(EPACKAGE_CLASS));
        inClazz.getMethod("init").invoke(null);

        ResourceSet resourceSet = new ResourceSetImpl();
        resourceSet.getResourceFactoryRegistry().getProtocolToFactoryMap()
                .put(NeoBlueprintsURI.NEO_GRAPH_SCHEME, PersistentResourceFactory.eINSTANCE);

        Resource resource = resourceSet.createResource(uri);

        Map<String, Object> loadOpts = new HashMap<String, Object>();

        if (commandLine.hasOption(OPTIONS_FILE)) {
            Properties properties = new Properties();
            properties.load(new FileInputStream(new File(commandLine.getOptionValue(OPTIONS_FILE))));
            for (final Entry<Object, Object> entry : properties.entrySet()) {
                loadOpts.put((String) entry.getKey(), (String) entry.getValue());
            }
        }
        resource.load(loadOpts);
        {
            LOG.log(Level.INFO, "Start query");
            long begin = System.currentTimeMillis();
            HashMap<String, EList<NamedElement>> list = JavaQueries.getClassDeclarationAttributes(resource);
            long end = System.currentTimeMillis();
            LOG.log(Level.INFO, "End query");
            LOG.log(Level.INFO,
                    MessageFormat.format("Query result contains {0} elements", list.entrySet().size()));
            LOG.log(Level.INFO, MessageFormat.format("Time spent: {0}", MessageUtil.formatMillis(end - begin)));
        }

        if (resource instanceof PersistentResourceImpl) {
            PersistentResourceImpl.shutdownWithoutUnload((PersistentResourceImpl) resource);
        } else {
            resource.unload();
        }

    } catch (ParseException e) {
        MessageUtil.showError(e.toString());
        MessageUtil.showError("Current arguments: " + Arrays.toString(args));
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("java -jar <this-file.jar>", options, true);
    } catch (Throwable e) {
        MessageUtil.showError(e.toString());
    }
}

From source file:fr.inria.atlanmod.kyanos.benchmarks.KyanosGraphQueryInvisibleMethodDeclarations.java

public static void main(String[] args) {
    Options options = new Options();

    Option inputOpt = OptionBuilder.create(IN);
    inputOpt.setArgName("INPUT");
    inputOpt.setDescription("Input Kyanos resource directory");
    inputOpt.setArgs(1);
    inputOpt.setRequired(true);

    Option inClassOpt = OptionBuilder.create(EPACKAGE_CLASS);
    inClassOpt.setArgName("CLASS");
    inClassOpt.setDescription("FQN of EPackage implementation class");
    inClassOpt.setArgs(1);
    inClassOpt.setRequired(true);

    Option optFileOpt = OptionBuilder.create(OPTIONS_FILE);
    optFileOpt.setArgName("FILE");
    optFileOpt.setDescription("Properties file holding the options to be used in the Kyanos Resource");
    optFileOpt.setArgs(1);

    options.addOption(inputOpt);
    options.addOption(inClassOpt);
    options.addOption(optFileOpt);

    CommandLineParser parser = new PosixParser();

    try {
        PersistenceBackendFactoryRegistry.getFactories().put(NeoBlueprintsURI.NEO_GRAPH_SCHEME,
                new BlueprintsPersistenceBackendFactory());

        CommandLine commandLine = parser.parse(options, args);

        URI uri = NeoBlueprintsURI.createNeoGraphURI(new File(commandLine.getOptionValue(IN)));

        Class<?> inClazz = KyanosGraphQueryInvisibleMethodDeclarations.class.getClassLoader()
                .loadClass(commandLine.getOptionValue(EPACKAGE_CLASS));
        inClazz.getMethod("init").invoke(null);

        ResourceSet resourceSet = new ResourceSetImpl();
        resourceSet.getResourceFactoryRegistry().getProtocolToFactoryMap()
                .put(NeoBlueprintsURI.NEO_GRAPH_SCHEME, PersistentResourceFactory.eINSTANCE);

        Resource resource = resourceSet.createResource(uri);

        Map<String, Object> loadOpts = new HashMap<String, Object>();

        if (commandLine.hasOption(OPTIONS_FILE)) {
            Properties properties = new Properties();
            properties.load(new FileInputStream(new File(commandLine.getOptionValue(OPTIONS_FILE))));
            for (final Entry<Object, Object> entry : properties.entrySet()) {
                loadOpts.put((String) entry.getKey(), (String) entry.getValue());
            }
        }
        resource.load(loadOpts);
        {
            LOG.log(Level.INFO, "Start query");
            long begin = System.currentTimeMillis();
            EList<MethodDeclaration> list = JavaQueries.getInvisibleMethodDeclarations(resource);
            long end = System.currentTimeMillis();
            LOG.log(Level.INFO, "End query");
            LOG.log(Level.INFO, MessageFormat.format("Query result contains {0} elements", list.size()));
            LOG.log(Level.INFO, MessageFormat.format("Time spent: {0}", MessageUtil.formatMillis(end - begin)));
        }

        if (resource instanceof PersistentResourceImpl) {
            PersistentResourceImpl.shutdownWithoutUnload((PersistentResourceImpl) resource);
        } else {
            resource.unload();
        }

    } catch (ParseException e) {
        MessageUtil.showError(e.toString());
        MessageUtil.showError("Current arguments: " + Arrays.toString(args));
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("java -jar <this-file.jar>", options, true);
    } catch (Throwable e) {
        MessageUtil.showError(e.toString());
    }
}

From source file:com.datatorrent.stram.StreamingAppMaster.java

/**
 * @param args
 *          Command line args
 * @throws Throwable
 */
public static void main(final String[] args) throws Throwable {
    StdOutErrLog.tieSystemOutAndErrToLog();
    LOG.info("Master starting with classpath: {}", System.getProperty("java.class.path"));

    LOG.info("version: {}", VersionInfo.APEX_VERSION.getBuildVersion());
    StringWriter sw = new StringWriter();
    for (Map.Entry<String, String> e : System.getenv().entrySet()) {
        sw.append("\n").append(e.getKey()).append("=").append(e.getValue());
    }
    LOG.info("appmaster env:" + sw.toString());

    Options opts = new Options();
    opts.addOption("app_attempt_id", true, "App Attempt ID. Not to be used unless for testing purposes");

    opts.addOption("help", false, "Print usage");
    CommandLine cliParser = new GnuParser().parse(opts, args);

    // option "help" overrides and cancels any run
    if (cliParser.hasOption("help")) {
        new HelpFormatter().printHelp("ApplicationMaster", opts);
        return;
    }
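    // Resolve the application attempt id: prefer the container id from the YARN environment,
    // otherwise fall back to the app_attempt_id option (intended for testing only).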

    Map<String, String> envs = System.getenv();
    ApplicationAttemptId appAttemptID = Records.newRecord(ApplicationAttemptId.class);
    if (!envs.containsKey(Environment.CONTAINER_ID.name())) {
        if (cliParser.hasOption("app_attempt_id")) {
            String appIdStr = cliParser.getOptionValue("app_attempt_id", "");
            appAttemptID = ConverterUtils.toApplicationAttemptId(appIdStr);
        } else {
            throw new IllegalArgumentException("Application Attempt Id not set in the environment");
        }
    } else {
        ContainerId containerId = ConverterUtils.toContainerId(envs.get(Environment.CONTAINER_ID.name()));
        appAttemptID = containerId.getApplicationAttemptId();
    }

    boolean result = false;
    StreamingAppMasterService appMaster = null;
    try {
        appMaster = new StreamingAppMasterService(appAttemptID);
        LOG.info("Initializing Application Master.");

        Configuration conf = new YarnConfiguration();
        appMaster.init(conf);
        appMaster.start();
        result = appMaster.run();
    } catch (Throwable t) {
        LOG.error("Exiting Application Master", t);
        System.exit(1);
    } finally {
        if (appMaster != null) {
            appMaster.stop();
        }
    }

    if (result) {
        LOG.info("Application Master completed.");
        System.exit(0);
    } else {
        LOG.info("Application Master failed.");
        System.exit(2);
    }
}

From source file:SequentialPageRank.java

@SuppressWarnings({ "static-access" })
public static void main(String[] args) throws IOException {
    Options options = new Options();

    options.addOption(OptionBuilder.withArgName("path").hasArg().withDescription("input path").create(INPUT));
    options.addOption(
            OptionBuilder.withArgName("val").hasArg().withDescription("random jump factor").create(JUMP));

    CommandLine cmdline = null;
    CommandLineParser parser = new GnuParser();

    try {
        cmdline = parser.parse(options, args);
    } catch (ParseException exp) {
        System.err.println("Error parsing command line: " + exp.getMessage());
        System.exit(-1);
    }

    if (!cmdline.hasOption(INPUT)) {
        System.out.println("args: " + Arrays.toString(args));
        HelpFormatter formatter = new HelpFormatter();
        formatter.setWidth(120);
        formatter.printHelp(SequentialPageRank.class.getName(), options);
        ToolRunner.printGenericCommandUsage(System.out);
        System.exit(-1);
    }

    String infile = cmdline.getOptionValue(INPUT);
    float alpha = cmdline.hasOption(JUMP) ? Float.parseFloat(cmdline.getOptionValue(JUMP)) : 0.15f;

    int edgeCnt = 0;
    DirectedSparseGraph<String, Integer> graph = new DirectedSparseGraph<String, Integer>();
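    // Each input line is a tab-separated adjacency list: a source vertex followed by its target vertices.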

    BufferedReader data = new BufferedReader(new InputStreamReader(new FileInputStream(infile)));

    String line;
    while ((line = data.readLine()) != null) {
        line = line.trim();
        String[] arr = line.split("\\t");

        for (int i = 1; i < arr.length; i++) {
            graph.addEdge(new Integer(edgeCnt++), arr[0], arr[i]);
        }
    }

    data.close();

    WeakComponentClusterer<String, Integer> clusterer = new WeakComponentClusterer<String, Integer>();

    Set<Set<String>> components = clusterer.transform(graph);
    int numComponents = components.size();
    System.out.println("Number of components: " + numComponents);
    System.out.println("Number of edges: " + graph.getEdgeCount());
    System.out.println("Number of nodes: " + graph.getVertexCount());
    System.out.println("Random jump factor: " + alpha);

    // Compute PageRank.
    PageRank<String, Integer> ranker = new PageRank<String, Integer>(graph, alpha);
    ranker.evaluate();

    // Use priority queue to sort vertices by PageRank values.
    PriorityQueue<Ranking<String>> q = new PriorityQueue<Ranking<String>>();
    int i = 0;
    for (String pmid : graph.getVertices()) {
        q.add(new Ranking<String>(i++, ranker.getVertexScore(pmid), pmid));
    }

    // Print PageRank values.
    System.out.println("\nPageRank of nodes, in descending order:");
    Ranking<String> r = null;
    while ((r = q.poll()) != null) {
        System.out.println(r.rankScore + "\t" + r.getRanked());
    }
}

From source file:com.redhat.poc.jdg.bankofchina.function.TestCase411RemoteMultiThreadsCustomMarshal.java

public static void main(String[] args) throws Exception {
    CommandLine commandLine;
    Options options = new Options();
    options.addOption("s", true, "The start csv file number option");
    options.addOption("e", true, "The end csv file number option");
    BasicParser parser = new BasicParser();
    commandLine = parser.parse(options, args);
    if (commandLine.getOptions().length > 0) {
        if (commandLine.hasOption("s")) {
            String start = commandLine.getOptionValue("s");
            if (start != null && start.length() > 0) {
                csvFileStart = Integer.parseInt(start);
            }
        }
        if (commandLine.hasOption("e")) {
            String end = commandLine.getOptionValue("e");
            if (end != null && end.length() > 0) {
                csvFileEnd = Integer.parseInt(end);
            }
        }
    }
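    // Print a start timestamp, then launch one loader thread per csv file index in the [csvFileStart, csvFileEnd] range.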

    System.out.println(
            "%%%%%%%%%  csv ?, ?, ?(ms),"
                    + new Date().getTime());

    for (int i = csvFileStart; i <= csvFileEnd; i++) {
        new TestCase411RemoteMultiThreadsCustomMarshal(i).start();
    }
}

From source file:edu.nyu.vida.data_polygamy.scalar_function_computation.Aggregation.java

/**
 * @param args
 */
@SuppressWarnings({ "deprecation" })
public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException {

    Options options = new Options();

    Option forceOption = new Option("f", "force", false,
            "force the computation of the aggregate functions " + "even if files already exist");
    forceOption.setRequired(false);
    options.addOption(forceOption);

    Option gOption = new Option("g", "group", true, "set group of datasets for which the aggregate functions"
            + " will be computed, followed by their temporal and spatial attribute indices");
    gOption.setRequired(true);
    gOption.setArgName("GROUP");
    gOption.setArgs(Option.UNLIMITED_VALUES);
    options.addOption(gOption);

    Option machineOption = new Option("m", "machine", true, "machine identifier");
    machineOption.setRequired(true);
    machineOption.setArgName("MACHINE");
    machineOption.setArgs(1);
    options.addOption(machineOption);

    Option nodesOption = new Option("n", "nodes", true, "number of nodes");
    nodesOption.setRequired(true);
    nodesOption.setArgName("NODES");
    nodesOption.setArgs(1);
    options.addOption(nodesOption);

    Option s3Option = new Option("s3", "s3", false, "data on Amazon S3");
    s3Option.setRequired(false);
    options.addOption(s3Option);

    Option awsAccessKeyIdOption = new Option("aws_id", "aws-id", true,
            "aws access key id; " + "this is required if the execution is on aws");
    awsAccessKeyIdOption.setRequired(false);
    awsAccessKeyIdOption.setArgName("AWS-ACCESS-KEY-ID");
    awsAccessKeyIdOption.setArgs(1);
    options.addOption(awsAccessKeyIdOption);

    Option awsSecretAccessKeyOption = new Option("aws_key", "aws-key", true,
            "aws secret access key; " + "this is required if the execution is on aws");
    awsSecretAccessKeyOption.setRequired(false);
    awsSecretAccessKeyOption.setArgName("AWS-SECRET-ACCESS-KEY");
    awsSecretAccessKeyOption.setArgs(1);
    options.addOption(awsSecretAccessKeyOption);

    Option bucketOption = new Option("b", "s3-bucket", true,
            "bucket on s3; " + "this is required if the execution is on aws");
    bucketOption.setRequired(false);
    bucketOption.setArgName("S3-BUCKET");
    bucketOption.setArgs(1);
    options.addOption(bucketOption);

    Option helpOption = new Option("h", "help", false, "display this message");
    helpOption.setRequired(false);
    options.addOption(helpOption);

    HelpFormatter formatter = new HelpFormatter();
    CommandLineParser parser = new PosixParser();
    CommandLine cmd = null;

    try {
        cmd = parser.parse(options, args);
    } catch (ParseException e) {
        formatter.printHelp("hadoop jar data-polygamy.jar "
                + "edu.nyu.vida.data_polygamy.scalar_function_computation.Aggregation", options, true);
        System.exit(0);
    }

    if (cmd.hasOption("h")) {
        formatter.printHelp("hadoop jar data-polygamy.jar "
                + "edu.nyu.vida.data_polygamy.scalar_function_computation.Aggregation", options, true);
        System.exit(0);
    }

    boolean s3 = cmd.hasOption("s3");
    String s3bucket = "";
    String awsAccessKeyId = "";
    String awsSecretAccessKey = "";

    if (s3) {
        if ((!cmd.hasOption("aws_id")) || (!cmd.hasOption("aws_key")) || (!cmd.hasOption("b"))) {
            System.out.println(
                    "Arguments 'aws_id', 'aws_key', and 'b'" + " are mandatory if execution is on AWS.");
            formatter.printHelp(
                    "hadoop jar data-polygamy.jar "
                            + "edu.nyu.vida.data_polygamy.scalar_function_computation.Aggregation",
                    options, true);
            System.exit(0);
        }
        s3bucket = cmd.getOptionValue("b");
        awsAccessKeyId = cmd.getOptionValue("aws_id");
        awsSecretAccessKey = cmd.getOptionValue("aws_key");
    }

    boolean snappyCompression = false;
    boolean bzip2Compression = false;
    String machine = cmd.getOptionValue("m");
    int nbNodes = Integer.parseInt(cmd.getOptionValue("n"));

    Configuration s3conf = new Configuration();
    if (s3) {
        s3conf.set("fs.s3.awsAccessKeyId", awsAccessKeyId);
        s3conf.set("fs.s3.awsSecretAccessKey", awsSecretAccessKey);
        s3conf.set("bucket", s3bucket);
    }

    String datasetNames = "";
    String datasetIds = "";
    String preProcessingDatasets = "";

    ArrayList<String> shortDataset = new ArrayList<String>();
    ArrayList<String> shortDatasetAggregation = new ArrayList<String>();
    HashMap<String, String> datasetTempAtt = new HashMap<String, String>();
    HashMap<String, String> datasetSpatialAtt = new HashMap<String, String>();
    HashMap<String, String> preProcessingDataset = new HashMap<String, String>();
    HashMap<String, String> datasetId = new HashMap<String, String>();

    boolean removeExistingFiles = cmd.hasOption("f");
    String[] datasetArgs = cmd.getOptionValues("g");
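    // The -g values come in triplets: dataset name, temporal attribute index, spatial attribute index.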

    for (int i = 0; i < datasetArgs.length; i += 3) {
        String dataset = datasetArgs[i];

        // getting pre-processing
        String tempPreProcessing = FrameworkUtils.searchPreProcessing(dataset, s3conf, s3);
        if (tempPreProcessing == null) {
            System.out.println("No pre-processing available for " + dataset);
            continue;
        }
        preProcessingDataset.put(dataset, tempPreProcessing);

        shortDataset.add(dataset);
        datasetTempAtt.put(dataset, (("null".equals(datasetArgs[i + 1])) ? null : datasetArgs[i + 1]));
        datasetSpatialAtt.put(dataset, (("null".equals(datasetArgs[i + 2])) ? null : datasetArgs[i + 2]));

        datasetId.put(dataset, null);
    }

    if (shortDataset.size() == 0) {
        System.out.println("No datasets to process.");
        System.exit(0);
    }

    // getting dataset id

    Path path = null;
    FileSystem fs = null;

    if (s3) {
        path = new Path(s3bucket + FrameworkUtils.datasetsIndexDir);
        fs = FileSystem.get(path.toUri(), s3conf);
    } else {
        fs = FileSystem.get(new Configuration());
        path = new Path(fs.getHomeDirectory() + "/" + FrameworkUtils.datasetsIndexDir);
    }
    BufferedReader br = new BufferedReader(new InputStreamReader(fs.open(path)));
    String line = br.readLine();
    while (line != null) {
        String[] dt = line.split("\t");
        if (datasetId.containsKey(dt[0])) {
            datasetId.put(dt[0], dt[1]);
            datasetNames += dt[0] + ",";
            datasetIds += dt[1] + ",";
        }
        line = br.readLine();
    }
    br.close();
    if (s3)
        fs.close();

    datasetNames = datasetNames.substring(0, datasetNames.length() - 1);
    datasetIds = datasetIds.substring(0, datasetIds.length() - 1);
    Iterator<String> it = shortDataset.iterator();
    while (it.hasNext()) {
        String dataset = it.next();
        if (datasetId.get(dataset) == null) {
            System.out.println("No dataset id for " + dataset);
            System.exit(0);
        }
    }

    FrameworkUtils.createDir(s3bucket + FrameworkUtils.aggregatesDir, s3conf, s3);

    // getting smallest resolution

    HashMap<String, String> tempResMap = new HashMap<String, String>();
    HashMap<String, String> spatialResMap = new HashMap<String, String>();

    HashMap<String, String> datasetTemporalStrMap = new HashMap<String, String>();
    HashMap<String, String> datasetSpatialStrMap = new HashMap<String, String>();

    HashSet<String> input = new HashSet<String>();

    for (String dataset : shortDataset) {

        String[] datasetArray = preProcessingDataset.get(dataset).split("-");

        String datasetTemporalStr = datasetArray[datasetArray.length - 2];
        int datasetTemporal = utils.temporalResolution(datasetTemporalStr);

        String datasetSpatialStr = datasetArray[datasetArray.length - 1];
        int datasetSpatial = utils.spatialResolution(datasetSpatialStr);

        // finding all possible resolutions

        String[] temporalResolutions = FrameworkUtils.getAggTempResolutions(datasetTemporal);
        String[] spatialResolutions = FrameworkUtils.getAggSpatialResolutions(datasetSpatial);

        String temporalResolution = "";
        String spatialResolution = "";

        String tempRes = "";
        String spatialRes = "";

        boolean dataAdded = false;

        for (int i = 0; i < temporalResolutions.length; i++) {
            for (int j = 0; j < spatialResolutions.length; j++) {

                temporalResolution = temporalResolutions[i];
                spatialResolution = spatialResolutions[j];

                String aggregatesOutputFileName = s3bucket + FrameworkUtils.aggregatesDir + "/" + dataset + "/";

                if (removeExistingFiles) {
                    FrameworkUtils.removeFile(aggregatesOutputFileName, s3conf, s3);
                }

                if (!FrameworkUtils.fileExists(aggregatesOutputFileName, s3conf, s3)) {

                    dataAdded = true;

                    tempRes += temporalResolution + "-";
                    spatialRes += spatialResolution + "-";
                }
            }
        }

        if (dataAdded) {
            input.add(s3bucket + FrameworkUtils.preProcessingDir + "/" + preProcessingDataset.get(dataset));
            shortDatasetAggregation.add(dataset);

            tempResMap.put(dataset, tempRes.substring(0, tempRes.length() - 1));
            spatialResMap.put(dataset, spatialRes.substring(0, spatialRes.length() - 1));

            datasetTemporalStrMap.put(dataset, datasetTemporalStr);
            datasetSpatialStrMap.put(dataset, datasetSpatialStr);
        }
    }

    if (input.isEmpty()) {
        System.out.println("All the input datasets have aggregates.");
        System.out.println("Use -f in the beginning of the command line to force the computation.");
        System.exit(0);
    }

    it = input.iterator();
    while (it.hasNext()) {
        preProcessingDatasets += it.next() + ",";
    }
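    // Configure and submit a single Hadoop job that computes the aggregate functions for every dataset
    // that does not have them yet (or whose files were removed via -f).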

    Job aggJob = null;
    String aggregatesOutputDir = s3bucket + FrameworkUtils.aggregatesDir + "/tmp/";
    String jobName = "aggregates";

    FrameworkUtils.removeFile(aggregatesOutputDir, s3conf, s3);

    Configuration aggConf = new Configuration();
    Machine machineConf = new Machine(machine, nbNodes);

    aggConf.set("dataset-name", datasetNames);
    aggConf.set("dataset-id", datasetIds);

    for (int i = 0; i < shortDatasetAggregation.size(); i++) {
        String dataset = shortDatasetAggregation.get(i);
        String id = datasetId.get(dataset);
        aggConf.set("dataset-" + id + "-temporal-resolutions", tempResMap.get(dataset));
        aggConf.set("dataset-" + id + "-spatial-resolutions", spatialResMap.get(dataset));
        aggConf.set("dataset-" + id + "-temporal-att", datasetTempAtt.get(dataset));
        aggConf.set("dataset-" + id + "-spatial-att", datasetSpatialAtt.get(dataset));
        aggConf.set("dataset-" + id + "-temporal", datasetTemporalStrMap.get(dataset));
        aggConf.set("dataset-" + id + "-spatial", datasetSpatialStrMap.get(dataset));

        if (s3)
            aggConf.set("dataset-" + id,
                    s3bucket + FrameworkUtils.preProcessingDir + "/" + preProcessingDataset.get(dataset));
        else
            aggConf.set("dataset-" + id, FileSystem.get(new Configuration()).getHomeDirectory() + "/"
                    + FrameworkUtils.preProcessingDir + "/" + preProcessingDataset.get(dataset));
    }

    aggConf.set("mapreduce.tasktracker.map.tasks.maximum", String.valueOf(machineConf.getMaximumTasks()));
    aggConf.set("mapreduce.tasktracker.reduce.tasks.maximum", String.valueOf(machineConf.getMaximumTasks()));
    aggConf.set("mapreduce.jobtracker.maxtasks.perjob", "-1");
    aggConf.set("mapreduce.reduce.shuffle.parallelcopies", "20");
    aggConf.set("mapreduce.input.fileinputformat.split.minsize", "0");
    aggConf.set("mapreduce.task.io.sort.mb", "200");
    aggConf.set("mapreduce.task.io.sort.factor", "100");
    machineConf.setMachineConfiguration(aggConf);

    if (s3) {
        machineConf.setMachineConfiguration(aggConf);
        aggConf.set("fs.s3.awsAccessKeyId", awsAccessKeyId);
        aggConf.set("fs.s3.awsSecretAccessKey", awsSecretAccessKey);
    }

    if (snappyCompression) {
        aggConf.set("mapreduce.map.output.compress", "true");
        aggConf.set("mapreduce.map.output.compress.codec", "org.apache.hadoop.io.compress.SnappyCodec");
        //aggConf.set("mapreduce.output.fileoutputformat.compress.codec", "org.apache.hadoop.io.compress.SnappyCodec");
    }
    if (bzip2Compression) {
        aggConf.set("mapreduce.map.output.compress", "true");
        aggConf.set("mapreduce.map.output.compress.codec", "org.apache.hadoop.io.compress.BZip2Codec");
        //aggConf.set("mapreduce.output.fileoutputformat.compress.codec", "org.apache.hadoop.io.compress.BZip2Codec");
    }

    aggJob = new Job(aggConf);
    aggJob.setJobName(jobName);

    aggJob.setMapOutputKeyClass(SpatioTemporalWritable.class);
    aggJob.setMapOutputValueClass(AggregationArrayWritable.class);
    aggJob.setOutputKeyClass(SpatioTemporalWritable.class);
    aggJob.setOutputValueClass(FloatArrayWritable.class);
    //aggJob.setOutputKeyClass(Text.class);
    //aggJob.setOutputValueClass(Text.class);

    aggJob.setMapperClass(AggregationMapper.class);
    aggJob.setCombinerClass(AggregationCombiner.class);
    aggJob.setReducerClass(AggregationReducer.class);
    aggJob.setNumReduceTasks(machineConf.getNumberReduces());

    aggJob.setInputFormatClass(SequenceFileInputFormat.class);
    //aggJob.setOutputFormatClass(SequenceFileOutputFormat.class);
    LazyOutputFormat.setOutputFormatClass(aggJob, SequenceFileOutputFormat.class);
    //LazyOutputFormat.setOutputFormatClass(aggJob, TextOutputFormat.class);
    SequenceFileOutputFormat.setCompressOutput(aggJob, true);
    SequenceFileOutputFormat.setOutputCompressionType(aggJob, CompressionType.BLOCK);

    FileInputFormat.setInputDirRecursive(aggJob, true);
    FileInputFormat.setInputPaths(aggJob,
            preProcessingDatasets.substring(0, preProcessingDatasets.length() - 1));
    FileOutputFormat.setOutputPath(aggJob, new Path(aggregatesOutputDir));

    aggJob.setJarByClass(Aggregation.class);

    long start = System.currentTimeMillis();
    aggJob.submit();
    aggJob.waitForCompletion(true);
    System.out.println(jobName + "\t" + (System.currentTimeMillis() - start));

    // moving files to right place
    for (String dataset : shortDatasetAggregation) {
        String from = s3bucket + FrameworkUtils.aggregatesDir + "/tmp/" + dataset + "/";
        String to = s3bucket + FrameworkUtils.aggregatesDir + "/" + dataset + "/";
        FrameworkUtils.renameFile(from, to, s3conf, s3);
    }

}

From source file:de.unirostock.sems.caro.CaRo.java

/**
 * The main method to be called by the command line.
 *
 * @param args
 *          the arguments
 */
public static void main(String[] args) {
    Options options = new Options();

    options.addOption(new Option("h", "help", false, "print the help message"));
    options.addOption(
            Option.builder().longOpt("roca").desc("convert a research object into a combine archive").build());
    options.addOption(
            Option.builder().longOpt("caro").desc("convert a combine archive into a research object").build());
    options.addOption(Option.builder("i").longOpt("in").required().argName("FILE").hasArg()
            .desc("source container to be converted").build());
    options.addOption(Option.builder("o").longOpt("out").required().argName("FILE").hasArg()
            .desc("target container to be created").build());

    CommandLineParser parser = new DefaultParser();
    CommandLine line = null;
    try {
        line = parser.parse(options, args);
        if (line.hasOption("help")) {
            help(options, null);
            return;
        }
    } catch (ParseException e) {
        help(options, "Parsing of command line options failed.  Reason: " + e.getMessage());
        return;
    }

    File in = new File(line.getOptionValue("in"));
    File out = new File(line.getOptionValue("out"));

    if (!in.exists()) {
        help(options, "file " + in + " does not exist");
        return;
    }

    if (out.exists()) {
        help(options, "file " + out + " already exist");
        return;
    }

    if (line.hasOption("caro") && line.hasOption("roca")) {
        help(options, "only one of --roca and --caro is allowed");
        return;
    }

    CaRoConverter conv = null;

    if (line.hasOption("caro"))
        conv = new CaToRo(in);
    else if (line.hasOption("roca"))
        conv = new RoToCa(in);
    else {
        help(options, "you need to either supply --roca or --caro");
        return;
    }
    conv.convertTo(out);

    if (conv.hasErrors())
        System.err.println("There were errors!");

    if (conv.hasWarnings())
        System.err.println("There were warnings!");

    List<CaRoNotification> notifications = conv.getNotifications();
    for (CaRoNotification note : notifications)
        System.out.println(note);

}

From source file:br.com.riselabs.cotonet.Main.java

/**
 * @param args
 * @throws EmptyContentException
 * @throws IOException
 * @throws NullPointerException
 * @throws InvalidNumberOfTagsException
 */
public static void main(String[] args) {

    CommandLineParser parser = new DefaultParser();
    Options options = new Options();

    options.addOption(Option.builder("c").longOpt("chunkBased").desc(
            "c - build a conflict chunk-based network with the developers that in fact conflits with each other."
                    + " Additionally to the c argument the user should provide a path. This path should have"
                    + " a file containig the repository's URL of the target systems.")
            .hasArg().build());

    options.addOption(Option.builder("cf").longOpt("chunkBasedFullGraph")
            .desc("cf - like c, build a conflict chunk-based network adding all developers involved in "
                    + "identified chunk conflicts. Additionally to the cf argument the user should provide a path. "
                    + "This path should have a file containig the repository's URL of the target systems.")
            .hasArg().build());

    options.addOption(Option.builder("f").longOpt("fileBase").desc(
            " f - build a conflict file-based network. In other others all developers that contribute to some"
                    + " conflict at file level should be part of this network. This network is based on network provides "
                    + "by cf, adding edges between developers of different chunks. Additionally to the f argument the"
                    + " user should provide a path. This path should have afile containig the repository's URL of "
                    + "the target systems.")
            .hasArg().build());
    /*
     * options.addOption( Option.builder("rw").longOpt("rewrite-aux").
     * desc("Rewrite auxilary files (e.g., *.conf, *.sh) " + "_WITHOUT_ " +
     * "the recreation of the merge scenarios based tags.").hasArg(false).
     * build());
     * 
     * options.addOption( Option.builder("rwt").longOpt("rewrite-tagfile").
     * desc("Rewrite auxilary files (e.g., *.conf, *.sh) " + "_INCLUDING_ "
     * + "the recreation of the merge scenarios based tags.").hasArg(false).
     * build());
     */
    options.addOption("h", "help", false, "Print this help page");

    File reposListFile = null;
    Boolean skipCloneAndNetworks = false;
    try {
        CommandLine cmd = parser.parse(options, args);
        // user is looking for help
        if (cmd.hasOption("h")) {
            new HelpFormatter().printHelp("java ", options);
            System.exit(0);
        }

        /* "c", "cf", and "f" are the three available options
        * "c" builds the chunk-based network with developers that contribute to the conflict
        * "cf" builds the chunk-based network with developers that contribute to the conflict and developers
        * that are part of the chunk, but don't contribute to the conflict
        * "f" builds the file-based network with developers that contribute to the chunk into a target file
        */
        else if (cmd.hasOption("c") || cmd.hasOption("cf") || cmd.hasOption("f")) {

            String urlsFilePath = null;
            NetworkType type;
            if (cmd.hasOption("c")) {
                urlsFilePath = cmd.getOptionValue("c");
                type = NetworkType.CHUNK_BASED;
            } else if (cmd.hasOption("cf")) {
                urlsFilePath = cmd.getOptionValue("cf");
                type = NetworkType.CHUNK_BASED_FULL;
            } else {
                urlsFilePath = cmd.getOptionValue("f");
                type = NetworkType.FILE_BASED;
            }

            System.out.println(urlsFilePath);

            reposListFile = new File(urlsFilePath);

            // Ends execution if file not found.
            if (!reposListFile.exists()) {
                System.out.println("COTONET ended without retrive any repository.\n\n"
                        + "The file containig the repository's URL of the target systems was not found. "
                        + "Check wether the file \"" + urlsFilePath + "\" exists.");
                System.exit(1);
            }

            skipCloneAndNetworks = cmd.hasOption("rw") || cmd.hasOption("rwt");

            MainThread m = new MainThread(type, reposListFile, skipCloneAndNetworks);
            m.start();
            m.join();
            Logger.log("COTONET finished. Files rewritten.");

        } else {
            System.out.println("COTONET ended without retrive any repository.\n\n"
                    + "You should use 'h' if you are looking for help. Otherwise,"
                    + " the 'l' or 'fc' option is mandatory.");
            System.exit(1);

        }

    } catch (ParseException e) {
        new HelpFormatter().printHelp("java ", options);
    } catch (Exception e) {
        Logger.log(e.getMessage());
    }
}