Example usage for java.lang System currentTimeMillis

List of usage examples for java.lang System currentTimeMillis

Introduction

On this page you can find usage examples for java.lang.System.currentTimeMillis.

Prototype

@HotSpotIntrinsicCandidate
public static native long currentTimeMillis();

Document

Returns the current time in milliseconds.
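
A minimal, self-contained sketch of the elapsed-time pattern that recurs in the examples below: read the clock before and after a piece of work and report the difference. The class name and the Thread.sleep call are illustrative placeholders rather than part of any project quoted on this page. Note that currentTimeMillis returns wall-clock time, which can jump if the system clock is adjusted; for pure duration measurement System.nanoTime() is generally preferred.

public class ElapsedTimeDemo {
    public static void main(String[] args) throws InterruptedException {
        // Wall-clock time in milliseconds since the Unix epoch (1970-01-01T00:00:00Z)
        long start = System.currentTimeMillis();

        Thread.sleep(250); // placeholder for the work being timed

        long elapsed = System.currentTimeMillis() - start;
        System.out.println("Work took " + elapsed + " ms");
    }
}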

Usage

From source file:cn.edu.buaa.act.petuumOnYarn.ApplicationMaster.java

/**
 * @param args
 *            Command line args
 */
public static void main(String[] args) {
    startTime = System.currentTimeMillis();
    boolean result = false;
    try {
        ApplicationMaster appMaster = new ApplicationMaster();
        LOG.info("Initializing ApplicationMaster");
        boolean doRun = appMaster.init(args);
        if (!doRun) {
            System.exit(0);
        }
        appMaster.run();
        result = appMaster.finish();
    } catch (Throwable t) {
        LOG.fatal("Error running ApplicationMaster", t);
        LogManager.shutdown();
        ExitUtil.terminate(1, t);
    }
    if (result) {
        LOG.info("Application Master completed successfully. exiting");
        System.exit(0);
    } else {
        LOG.info("Application Master failed. exiting");
        System.exit(2);
    }
}

From source file:com.tamingtext.qa.WikipediaWexIndexer.java

public static void main(String[] args) throws Exception {
    DefaultOptionBuilder obuilder = new DefaultOptionBuilder();
    ArgumentBuilder abuilder = new ArgumentBuilder();
    GroupBuilder gbuilder = new GroupBuilder();

    Option wikipediaFileOpt = obuilder.withLongName("wikiFile").withRequired(true)
            .withArgument(abuilder.withName("wikiFile").withMinimum(1).withMaximum(1).create())
            .withDescription("The path to the wikipedia dump file. "
                    + "May be a directory containing wikipedia dump files. "
                    + "If a directory is specified, files starting with the prefix "
                    + "freebase-segment- are used.")
            .withShortName("w").create();

    Option numDocsOpt = obuilder.withLongName("numDocs").withRequired(false)
            .withArgument(abuilder.withName("numDocs").withMinimum(1).withMaximum(1).create())
            .withDescription("The number of docs to index").withShortName("n").create();

    Option solrURLOpt = obuilder.withLongName("solrURL").withRequired(false)
            .withArgument(abuilder.withName("solrURL").withMinimum(1).withMaximum(1).create())
            .withDescription("The URL where Solr lives").withShortName("s").create();

    Option solrBatchOpt = obuilder.withLongName("batch").withRequired(false)
            .withArgument(abuilder.withName("batch").withMinimum(1).withMaximum(1).create())
            .withDescription("The number of docs to include in each indexing batch").withShortName("b")
            .create();

    Option helpOpt = obuilder.withLongName("help").withDescription("Print out help").withShortName("h")
            .create();

    Group group = gbuilder.withName("Options").withOption(wikipediaFileOpt).withOption(numDocsOpt)
            .withOption(solrURLOpt).withOption(solrBatchOpt).withOption(helpOpt).create();

    Parser parser = new Parser();
    parser.setGroup(group);

    try {
        CommandLine cmdLine = parser.parse(args);

        if (cmdLine.hasOption(helpOpt)) {
            CommandLineUtil.printHelp(group);
            return;
        }

        File file;
        file = new File(cmdLine.getValue(wikipediaFileOpt).toString());
        File[] dumpFiles;
        if (file.isDirectory()) {
            dumpFiles = file.listFiles(new FilenameFilter() {
                public boolean accept(File file, String s) {
                    return s.startsWith("freebase-segment-");
                }
            });
        } else {
            dumpFiles = new File[] { file };
        }

        int numDocs = Integer.MAX_VALUE;
        if (cmdLine.hasOption(numDocsOpt)) {
            numDocs = Integer.parseInt(cmdLine.getValue(numDocsOpt).toString());
        }
        String url = DEFAULT_SOLR_URL;
        if (cmdLine.hasOption(solrURLOpt)) {
            url = cmdLine.getValue(solrURLOpt).toString();
        }
        int batch = 100;
        if (cmdLine.hasOption(solrBatchOpt)) {
            batch = Integer.parseInt(cmdLine.getValue(solrBatchOpt).toString());
        }
        WikipediaWexIndexer indexer = new WikipediaWexIndexer(new CommonsHttpSolrServer(url));
        int total = 0;
        for (int i = 0; i < dumpFiles.length && total < numDocs; i++) {
            File dumpFile = dumpFiles[i];
            log.info("Indexing: " + file + " Num files to index: " + (numDocs - total));
            long start = System.currentTimeMillis();
            int totalFile = indexer.index(dumpFile, numDocs - total, batch);
            long finish = System.currentTimeMillis();
            if (log.isInfoEnabled()) {
                log.info("Indexing " + dumpFile + " took " + (finish - start) + " ms");
            }
            total += totalFile;
            log.info("Done Indexing: " + file + ". Indexed " + totalFile + " docs for that file and " + total
                    + " overall.");

        }
        log.info("Indexed " + total + " docs overall.");
    } catch (OptionException e) {
        log.error("Exception", e);
        CommandLineUtil.printHelp(group);
        return;
    }
}

From source file:lu.tudor.santec.dicom.gui.header.Dcm2Xml.java

public static void main(String[] args) {
    CommandLine cl = parse(args);
    Dcm2Xml dcm2xml = new Dcm2Xml();
    File ifile = new File((String) cl.getArgList().get(0));
    File ofile = null;
    if (cl.hasOption("o")) {
        ofile = new File(cl.getOptionValue("o"));
        dcm2xml.setBaseDir(ofile.getAbsoluteFile().getParentFile());
    }
    if (cl.hasOption("d")) {
        dcm2xml.setBaseDir(new File(cl.getOptionValue("d")));
    }
    boolean x = cl.hasOption("X");
    if (cl.hasOption("x")) {
        String[] tagStr = cl.getOptionValues("x");
        int[] excludes = new int[x ? tagStr.length + 1 : tagStr.length];
        for (int i = 0; i < tagStr.length; i++) {
            try {
                excludes[i] = (int) Long.parseLong(tagStr[i], 16);
            } catch (NumberFormatException e) {
                excludes[i] = Tag.forName(tagStr[i]);
            }
        }
        if (x) {
            excludes[tagStr.length] = Tag.PixelData;
        }
        dcm2xml.setExclude(excludes);
    } else if (x) {
        dcm2xml.setExclude(new int[] { Tag.PixelData });
    }
    if (cl.hasOption("T")) {
        final String xslurl = cl.getOptionValue("T");
        try {
            dcm2xml.setXslt(new URL(xslurl));
        } catch (MalformedURLException e) {
            System.err.println("dcm2xml: invalid xsl URL: " + xslurl);
            System.exit(1);
        }
        dcm2xml.setXsltInc(cl.hasOption("I"));
        dcm2xml.setXsltParams(cl.getOptionValues("P"));
    }
    dcm2xml.setComments(cl.hasOption("C"));
    dcm2xml.setIndent(!cl.hasOption("c"));
    long t1 = System.currentTimeMillis();
    try {
        dcm2xml.convert(ifile, ofile);
    } catch (TransformerConfigurationException e) {
        System.err.println("dcm2xml: Configuration Error: " + e.getMessage());
        System.exit(1);
    } catch (IOException e) {
        System.err.println("dcm2xml: Failed to convert " + ifile + ": " + e.getMessage());
        e.printStackTrace(System.err);
        System.exit(1);
    }
    long t2 = System.currentTimeMillis();
    if (ofile != null)
        System.out.println("Finished conversion of " + ifile + "to " + ofile + " in " + (t2 - t1) + "ms");
}

From source file:deck36.storm.plan9.php.RecordBreakerBadgeTopology.java

public static void main(String[] args) throws Exception {

    String env = null;

    if (args != null && args.length > 0) {
        env = args[0];
    }

    if (!"dev".equals(env))
        if (!"prod".equals(env)) {
            System.out.println("Usage: $0 (dev|prod)\n");
            System.exit(1);
        }

    // Topology config
    Config conf = new Config();

    // Load parameters and add them to the Config
    Map configMap = YamlLoader.loadYamlFromResource("config_" + env + ".yml");

    conf.putAll(configMap);

    log.info(JSONValue.toJSONString((conf)));

    // Set topology loglevel to DEBUG
    conf.put(Config.TOPOLOGY_DEBUG, JsonPath.read(conf, "$.deck36_storm.debug"));

    // Create Topology builder
    TopologyBuilder builder = new TopologyBuilder();

    // if there are not special reasons, start with parallelism hint of 1
    // and multiple tasks. By that, you can scale dynamically later on.
    int parallelism_hint = JsonPath.read(conf, "$.deck36_storm.default_parallelism_hint");
    int num_tasks = JsonPath.read(conf, "$.deck36_storm.default_num_tasks");

    // Create Stream from RabbitMQ messages
    // bind new queue with name of the topology
    // to the main plan9 exchange (from properties config)
    // consuming only CBT-related events by using the rounting key 'cbt.#'

    String badgeName = RecordBreakerBadgeTopology.class.getSimpleName();

    String rabbitQueueName = badgeName; // use topology class name as name for the queue
    String rabbitExchangeName = JsonPath.read(conf, "$.deck36_storm.RecordBreakerBolt.rabbitmq.exchange");
    String rabbitRoutingKey = JsonPath.read(conf, "$.deck36_storm.RecordBreakerBolt.rabbitmq.routing_key");

    // Get JSON deserialization scheme
    Scheme rabbitScheme = new SimpleJSONScheme();

    // Setup a Declarator to configure exchange/queue/routing key
    RabbitMQDeclarator rabbitDeclarator = new RabbitMQDeclarator(rabbitExchangeName, rabbitQueueName,
            rabbitRoutingKey);

    // Create Configuration for the Spout
    ConnectionConfig connectionConfig = new ConnectionConfig(
            (String) JsonPath.read(conf, "$.deck36_storm.rabbitmq.host"),
            (Integer) JsonPath.read(conf, "$.deck36_storm.rabbitmq.port"),
            (String) JsonPath.read(conf, "$.deck36_storm.rabbitmq.user"),
            (String) JsonPath.read(conf, "$.deck36_storm.rabbitmq.pass"),
            (String) JsonPath.read(conf, "$.deck36_storm.rabbitmq.vhost"),
            (Integer) JsonPath.read(conf, "$.deck36_storm.rabbitmq.heartbeat"));

    ConsumerConfig spoutConfig = new ConsumerConfigBuilder().connection(connectionConfig).queue(rabbitQueueName)
            .prefetch((Integer) JsonPath.read(conf, "$.deck36_storm.rabbitmq.prefetch")).requeueOnFail()
            .build();

    // add global parameters to topology config - the RabbitMQSpout will read them from there
    conf.putAll(spoutConfig.asMap());

    // For production, set the spout pending value to the same value as the RabbitMQ pre-fetch
    // see: https://github.com/ppat/storm-rabbitmq/blob/master/README.md
    if ("prod".equals(env)) {
        conf.put(Config.TOPOLOGY_MAX_SPOUT_PENDING,
                (Integer) JsonPath.read(conf, "$.deck36_storm.rabbitmq.prefetch"));
    }

    // Add RabbitMQ spout to topology
    builder.setSpout("incoming", new RabbitMQSpout(rabbitScheme, rabbitDeclarator), parallelism_hint)
            .setNumTasks((Integer) JsonPath.read(conf, "$.deck36_storm.rabbitmq.spout_tasks"));

    // construct command to invoke the external bolt implementation
    ArrayList<String> command = new ArrayList<String>(15);

    // Add main execution program (php, hhvm, zend, ..) and parameters
    command.add((String) JsonPath.read(conf, "$.deck36_storm.php.executor"));
    command.addAll((List<String>) JsonPath.read(conf, "$.deck36_storm.php.executor_params"));

    // Add main command to be executed (app/console, the phar file, etc.) and global context parameters (environment etc.)
    command.add((String) JsonPath.read(conf, "$.deck36_storm.php.main"));
    command.addAll((List<String>) JsonPath.read(conf, "$.deck36_storm.php.main_params"));

    // create command to execute the RecordBreakerBolt
    ArrayList<String> recordBreakerBoltCommand = new ArrayList<String>(command);

    // Add main route to be invoked and its parameters
    recordBreakerBoltCommand.add((String) JsonPath.read(conf, "$.deck36_storm.RecordBreakerBolt.main"));
    List boltParams = (List<String>) JsonPath.read(conf, "$.deck36_storm.RecordBreakerBolt.params");
    if (boltParams != null)
        recordBreakerBoltCommand.addAll(boltParams);

    // create command to execute the RecordMasterBolt
    ArrayList<String> recordMasterBoltCommand = new ArrayList<String>(command);

    // Add main route to be invoked and its parameters
    recordMasterBoltCommand.add((String) JsonPath.read(conf, "$.deck36_storm.RecordMasterBolt.main"));
    boltParams = (List<String>) JsonPath.read(conf, "$.deck36_storm.RecordMasterBolt.params");
    if (boltParams != null)
        recordMasterBoltCommand.addAll(boltParams);

    // Log the final commands
    log.info("Command to start bolt for RecordBreaker badge: "
            + Arrays.toString(recordBreakerBoltCommand.toArray()));
    log.info("Command to start bolt for RecordMaster badge: "
            + Arrays.toString(recordMasterBoltCommand.toArray()));

    // Add constructed external bolt command to topology using MultilangAdapterBolt
    // The RecordBreaker reads the incoming messages from the game application, i.e. the "incoming" spout
    builder.setBolt("record_breaker", new MultilangAdapterBolt(recordBreakerBoltCommand, "badge"),
            parallelism_hint).setNumTasks(num_tasks).shuffleGrouping("incoming");

    // The RecordMaster reads the badge messages generated by the RecordBreakerBolt
    builder.setBolt("record_master", new MultilangAdapterBolt(recordMasterBoltCommand, "badge"),
            parallelism_hint).setNumTasks(num_tasks).shuffleGrouping("record_breaker");

    // the RabbitMQ router bolt can read messages from both, RecordBreakerBolt and RecordMasterBolt,
    // and forward those messages to the broker
    builder.setBolt("rabbitmq_router",
            new Plan9RabbitMQRouterBolt(
                    (String) JsonPath.read(conf, "$.deck36_storm.RecordBreakerBolt.rabbitmq.target_exchange"),
                    "RecordBreakerMaster" // RabbitMQ routing key
            ), parallelism_hint).setNumTasks(num_tasks).shuffleGrouping("record_breaker")
            .shuffleGrouping("record_master");

    builder.setBolt("rabbitmq_producer", new Plan9RabbitMQPushBolt(), parallelism_hint).setNumTasks(num_tasks)
            .shuffleGrouping("rabbitmq_router");

    if ("dev".equals(env)) {
        LocalCluster cluster = new LocalCluster();
        cluster.submitTopology(badgeName + System.currentTimeMillis(), conf, builder.createTopology());
        Thread.sleep(2000000);
    }

    if ("prod".equals(env)) {
        StormSubmitter.submitTopology(badgeName + "-" + System.currentTimeMillis(), conf,
                builder.createTopology());
    }

}

From source file:at.illecker.hama.rootbeer.examples.piestimator.cpu.PiEstimatorCpuBSP.java

public static void main(String[] args) throws InterruptedException, IOException, ClassNotFoundException {

    BSPJob job = createPiEstimatorCpuBSPConf(TMP_OUTPUT);

    BSPJobClient jobClient = new BSPJobClient(job.getConfiguration());
    ClusterStatus cluster = jobClient.getClusterStatus(true);

    if (args.length > 0) {
        if (args.length == 2) {
            job.setNumBspTask(Integer.parseInt(args[0]));
            job.set(CONF_ITERATIONS, args[1]);
        } else {
            System.out.println("Wrong argument size!");
            System.out.println("    Argument1=numBspTask");
            System.out.println("    Argument2=totalIterations");
            return;
        }
    } else {
        job.setNumBspTask(cluster.getMaxTasks());
        job.set(CONF_ITERATIONS, "" + PiEstimatorCpuBSP.totalIterations);
    }

    LOG.info("NumBspTask: " + job.getNumBspTask());
    long totalIterations = Long.parseLong(job.get(CONF_ITERATIONS));
    LOG.info("TotalIterations: " + totalIterations);
    LOG.info("IterationsPerBspTask: " + totalIterations / job.getNumBspTask());
    job.setBoolean(CONF_DEBUG, true);

    long startTime = System.currentTimeMillis();
    if (job.waitForCompletion(true)) {
        printOutput(job);
        System.out.println("Job Finished in " + (System.currentTimeMillis() - startTime) / 1000.0 + " seconds");
    }
}

From source file:com.betfair.application.performance.BaselinePerformanceTester.java

public static void main(String[] args) {
    setup();
    // Create an instance of HttpClient.
    Long time = System.currentTimeMillis();
    final Random rnd = new Random(1);
    for (int i = 0; i < NUM_CALLS; i++) {
        callsRemaining.incrementAndGet();
        executor.execute(new Runnable() {
            public void run() {
                makeRequest(getRequest(rnd), getContentType(rnd), rnd);
            }
        });
    }

    long lastTime = callsRemaining.longValue();
    while (callsRemaining.longValue() > 0) {
        try {
            Thread.sleep(1000);
        } catch (Exception ignored) {
        }
        if (lastTime - 1000 > callsRemaining.longValue()) {
            lastTime = callsRemaining.get();
            System.out.print(".");
        }
    }
    time = System.currentTimeMillis() - time;
    System.out.println("Done.");

    executor.shutdown();

    analyseCalls(time);
}

From source file:discovery.compression.kdd2011.ratio.RatioCompressionReport.java

public static void main(String[] args) throws GraphReadingException, IOException, java.text.ParseException {
    opts.addOption("r", true, "Goal compression ratio");

    //      opts.addOption( "a",
    //       true,
    //       "Algorithm used for compression. The default and only currently available option is \"greedy\"");
    //opts.addOption("cost-output",true,"Output file for costs, default is costs.txt");
    //opts.addOption("cost-format",true,"Output format for ");

    opts.addOption("ctype", true, "Connectivity type: global or local, default is global.");
    opts.addOption("connectivity", false,
            "enables output for connectivity. Connectivity info will be written to connectivity.txt");
    opts.addOption("output_bmg", true, "Write bmg file with groups to given file.");
    opts.addOption("algorithm", true, "Algorithm to use, one of: greedy random1 random2 bruteforce slowgreedy");
    opts.addOption("hop2", false, "Only try to merge nodes that have common neighbors");
    opts.addOption("kmedoids", false, "Enables output for kmedoids clustering");
    opts.addOption("kmedoids_k", true, "Number of clusters to be used in kmedoids. Default is 3");
    opts.addOption("kmedoids_output", true,
            "Output file for kmedoid clusters. Default is clusters.txt. This file will be overwritten.");
    opts.addOption("norefresh", false,
            "Use old style merging: all connectivities are not refreshed when merging");
    opts.addOption("edge_attribute", true, "Attribute from bmgraph used as edge weight");
    opts.addOption("only_times", false, "Only write times.txt");
    //opts.addOption("no_metrics",false,"Exit after compression, don't calculate any metrics or produce output bmg for the compression.");
    CommandLineParser parser = new PosixParser();
    CommandLine cmd = null;

    try {
        cmd = parser.parse(opts, args);
    } catch (ParseException e) {
        e.printStackTrace();
        System.exit(0);
    }

    boolean connectivity = false;
    double ratio = 0;

    boolean hop2 = cmd.hasOption("hop2");

    RatioCompression compression = new GreedyRatioCompression(hop2);

    if (cmd.hasOption("connectivity"))
        connectivity = true;

    ConnectivityType ctype = ConnectivityType.GLOBAL;
    CompressionMergeModel mergeModel = new PathAverageMergeModel();
    if (cmd.hasOption("ctype")) {
        String ctypeStr = cmd.getOptionValue("ctype");
        if (ctypeStr.equals("local")) {
            ctype = ConnectivityType.LOCAL;
            mergeModel = new EdgeAverageMergeModel();
        } else if (ctypeStr.equals("global")) {
            ctype = ConnectivityType.GLOBAL;
            mergeModel = new PathAverageMergeModel();
        } else {
            System.out.println(PROGRAM_NAME + ": unknown connectivity type " + ctypeStr);
            printHelp();
        }
    }

    if (cmd.hasOption("norefresh"))
        mergeModel = new PathAverageMergeModelNorefresh();
    if (cmd.hasOption("algorithm")) {
        String alg = cmd.getOptionValue("algorithm");
        if (alg.equals("greedy")) {
            compression = new GreedyRatioCompression(hop2);
        } else if (alg.equals("random1")) {
            compression = new RandomRatioCompression(hop2);
        } else if (alg.equals("random2")) {
            compression = new SmartRandomRatioCompression(hop2);
        } else if (alg.equals("bruteforce")) {
            compression = new BruteForceCompression(hop2, ctype == ConnectivityType.LOCAL);
        } else if (alg.equals("slowgreedy")) {
            compression = new SlowGreedyRatioCompression(hop2);
        } else {
            System.out.println("algorithm must be one of: greedy random1 random2 bruteforce slowgreedy");
            printHelp();
        }
    }

    compression.setMergeModel(mergeModel);

    if (cmd.hasOption("r")) {
        ratio = Double.parseDouble(cmd.getOptionValue("r"));
    } else {
        System.out.println(PROGRAM_NAME + ": compression ratio not defined");
        printHelp();
    }

    if (cmd.hasOption("help")) {
        printHelp();
    }

    String infile = null;
    if (cmd.getArgs().length != 0) {
        infile = cmd.getArgs()[0];
    } else {
        printHelp();
    }

    boolean kmedoids = false;
    int kmedoidsK = 3;
    String kmedoidsOutput = "clusters.txt";
    if (cmd.hasOption("kmedoids"))
        kmedoids = true;
    if (cmd.hasOption("kmedoids_k"))
        kmedoidsK = Integer.parseInt(cmd.getOptionValue("kmedoids_k"));
    if (cmd.hasOption("kmedoids_output"))
        kmedoidsOutput = cmd.getOptionValue("kmedoids_output");

    String edgeAttrib = "goodness";
    if (cmd.hasOption("edge_attribute"))
        edgeAttrib = cmd.getOptionValue("edge_attribute");

    // This program should directly use bmgraph-java to read and
    // DefaultGraph should have a constructor that takes a BMGraph as an
    // argument.

    //VisualGraph vg = new VisualGraph(infile, edgeAttrib, false);
    //System.out.println("vg read");
    //SimpleVisualGraph origSG = new SimpleVisualGraph(vg);
    BMGraph bmg = BMGraphUtils.readBMGraph(infile);

    int origN = bmg.getNodes().size();

    //for(int i=0;i<origN;i++)
    //System.out.println(i+"="+origSG.getVisualNode(i));
    System.out.println("bmgraph read");

    BMNode[] i2n = new BMNode[origN];
    HashMap<BMNode, Integer> n2i = new HashMap<BMNode, Integer>();
    {
        int pi = 0;
        for (BMNode nod : bmg.getNodes()) {
            n2i.put(nod, pi);
            i2n[pi++] = nod;
        }
    }

    DefaultGraph dg = new DefaultGraph();
    for (BMEdge e : bmg.getEdges()) {
        dg.addEdge(n2i.get(e.getSource()), n2i.get(e.getTarget()), Double.parseDouble(e.get(edgeAttrib)));
    }

    DefaultGraph origDG = dg.copy();

    System.out.println("inputs read");
    RatioCompression nopCompressor = new RatioCompression.DefaultRatioCompression();
    ResultGraph nopResult = nopCompressor.compressGraph(dg, 1);

    long start = System.currentTimeMillis();
    ResultGraph result = compression.compressGraph(dg, ratio);
    long timeSpent = System.currentTimeMillis() - start;
    double seconds = timeSpent * 0.001;

    BufferedWriter timesWriter = new BufferedWriter(new FileWriter("times.txt", true));
    timesWriter.append("" + seconds + "\n");
    timesWriter.close();

    if (cmd.hasOption("only_times")) {
        System.out.println("Compression done, exiting.");
        System.exit(0);
    }

    BufferedWriter costsWriter = new BufferedWriter(new FileWriter("costs.txt", true));
    costsWriter.append("" + nopResult.getCompressorCosts() + " " + result.getCompressorCosts() + "\n");
    costsWriter.close();

    double[][] origProb;
    double[][] compProb;
    int[] group = new int[origN];

    for (int i = 0; i < result.partition.size(); i++)
        for (int x : result.partition.get(i))
            group[x] = i;

    if (ctype == ConnectivityType.LOCAL) {
        origProb = new double[origN][origN];
        compProb = new double[origN][origN];
        DefaultGraph g = result.uncompressedGraph();
        for (int i = 0; i < origN; i++) {
            for (int j = 0; j < origN; j++) {
                origProb[i][j] = dg.getEdgeWeight(i, j);
                compProb[i][j] = g.getEdgeWeight(i, j);
            }
        }
        System.out.println("Writing edge-dissimilarity");
    } else {

        origProb = ProbDijkstra.getProbMatrix(origDG);

        compProb = new double[origN][origN];

        System.out.println("nodeCount = " + result.graph.getNodeCount());
        double[][] ccProb = ProbDijkstra.getProbMatrix(result.graph);
        System.out.println("ccProb.length = " + ccProb.length);

        System.out.println("ccProb[0].length = " + ccProb[0].length);

        for (int i = 0; i < origN; i++) {
            for (int j = 0; j < origN; j++) {
                if (group[i] == group[j])
                    compProb[i][j] = result.graph.getEdgeWeight(group[i], group[j]);
                else {
                    compProb[i][j] = ccProb[group[i]][group[j]];
                }
            }
        }

        System.out.println("Writing best-path-dissimilarity");
        //compProb = ProbDijkstra.getProbMatrix(result.uncompressedGraph());

    }

    {
        BufferedWriter connWr = null;

        if (connectivity) {
            connWr = new BufferedWriter(new FileWriter("connectivity.txt", true));
        }
        double totalDiff = 0;

        for (int i = 0; i < origN; i++) {
            for (int j = i + 1; j < origN; j++) {

                double diff = Math.abs(origProb[i][j] - compProb[i][j]);
                //VisualNode ni = origSG.getVisualNode(i);
                //VisualNode nj = origSG.getVisualNode(j);
                BMNode ni = i2n[i];
                BMNode nj = i2n[j];
                if (connectivity)
                    connWr.append(ni + "\t" + nj + "\t" + origProb[i][j] + "\t" + compProb[i][j] + "\t" + diff
                            + "\n");
                totalDiff += diff * diff;
            }
        }

        if (connectivity) {
            connWr.append("\n");
            connWr.close();
        }

        totalDiff = Math.sqrt(totalDiff);
        BufferedWriter dissWr = new BufferedWriter(new FileWriter("dissimilarity.txt", true));
        dissWr.append("" + totalDiff + "\n");
        dissWr.close();
    }

    if (cmd.hasOption("output_bmg")) {
        BMGraph outgraph = new BMGraph();

        String outputfile = cmd.getOptionValue("output_bmg");
        HashMap<Integer, BMNode> nodes = new HashMap<Integer, BMNode>();

        for (int i = 0; i < result.partition.size(); i++) {
            ArrayList<Integer> g = result.partition.get(i);
            if (g.size() == 0)
                continue;
            BMNode node = new BMNode("Supernode_" + i);
            HashMap<String, String> attributes = new HashMap<String, String>();
            StringBuffer contents = new StringBuffer();
            for (int x : g)
                contents.append(i2n[x] + ",");
            contents.delete(contents.length() - 1, contents.length());

            attributes.put("nodes", contents.toString());
            attributes.put("self-edge", "" + result.graph.getEdgeWeight(i, i));
            node.setAttributes(attributes);
            nodes.put(i, node);
            outgraph.ensureHasNode(node);
        }

        for (int i = 0; i < result.partition.size(); i++) {
            if (result.partition.get(i).size() == 0)
                continue;
            for (int x : result.graph.getNeighbors(i)) {
                if (x < i)
                    continue;
                BMNode from = nodes.get(i);
                BMNode to = nodes.get(x);
                if (from == null || to == null) {
                    System.out.println(from + "->" + to);
                    System.out.println(i + "->" + x);
                    System.out.println("");
                }
                BMEdge e = new BMEdge(nodes.get(i), nodes.get(x), "notype");

                e.setAttributes(new HashMap<String, String>());
                e.put("goodness", "" + result.graph.getEdgeWeight(i, x));
                outgraph.ensureHasEdge(e);
            }
        }
        BMGraphUtils.writeBMGraph(outgraph, outputfile);
    }

    // k medoids!
    if (kmedoids) {
        //KMedoidsResult clustersOrig=KMedoids.runKMedoids(origProb,kmedoidsK);

        if (ctype == ConnectivityType.LOCAL) {
            compProb = ProbDijkstra.getProbMatrix(result.uncompressedGraph());
        }

        //KMedoidsResult compClusters = KMedoids.runKMedoids(ProbDijkstra.getProbMatrix(result.graph),kmedoidsK);
        KMedoidsResult clustersComp = KMedoids.runKMedoids(compProb, kmedoidsK);

        BufferedWriter bw = new BufferedWriter(new FileWriter(kmedoidsOutput));

        for (int i = 0; i < origN; i++) {
            int g = group[i];
            //bw.append(origSG.getVisualNode(i).getBMNode()+" "+compClusters.clusters[g]+"\n");
            bw.append(i2n[i] + " " + clustersComp.clusters[i] + "\n");
        }
        bw.close();
    }

    System.exit(0);
}

From source file:eu.scape_project.tb.lsdr.hocrparser.HocrParser.java

/**
 * The main entry point.
 */
public static void main(String[] args) throws ParseException {
    Configuration conf = new Configuration();

    //conf.setBoolean("mapreduce.client.genericoptionsparser.used", true);
    GenericOptionsParser gop = new GenericOptionsParser(conf, args);
    HocrParserCliConfig pc = new HocrParserCliConfig();
    CommandLineParser cmdParser = new PosixParser();
    CommandLine cmd = cmdParser.parse(HocrParserOptions.OPTIONS, gop.getRemainingArgs());
    if ((args.length == 0) || (cmd.hasOption(HocrParserOptions.HELP_OPT))) {
        HocrParserOptions.exit("Usage", 0);
    } else {
        HocrParserOptions.initOptions(cmd, pc);
    }
    String dir = pc.getDirStr();

    String name = pc.getHadoopJobName();
    if (name == null || name.equals("")) {
        name = "hocr_parser";
    }

    try {
        Job job = new Job(conf, name);
        job.setJarByClass(HocrParser.class);

        job.setMapperClass(HocrParserMapper.class);
        //job.setCombinerClass(HocrParserReducer.class);
        job.setReducerClass(HocrParserReducer.class);

        job.setInputFormatClass(SequenceFileInputFormat.class);

        job.setOutputFormatClass(TextOutputFormat.class);
        //SequenceFileOutputFormat.setOutputCompressionType(job, SequenceFile.CompressionType.NONE);

        //conf.setMapOutputKeyClass(Text.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(Text.class);

        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(LongWritable.class);

        SequenceFileInputFormat.addInputPath(job, new Path(dir));
        String outpath = "output/" + System.currentTimeMillis() + "hop";
        FileOutputFormat.setOutputPath(job, new Path(outpath));
        job.waitForCompletion(true);
        System.out.print(outpath);
        System.exit(0);
    } catch (Exception e) {
        logger.error("IOException occurred", e);
    }
}

From source file:it.nibbles.javacoin.BlockTool.java

public static void main(String[] args) throws Exception {
    OptionParser parser = new OptionParser();
    parser.accepts("help");
    parser.accepts("import");
    parser.accepts("export");
    parser.accepts("testnet2");
    parser.accepts("testnet3");
    parser.accepts("prodnet");
    parser.accepts("first").withRequiredArg().ofType(Integer.class);
    parser.accepts("last").withRequiredArg().ofType(Integer.class);
    parser.accepts("hash").withRequiredArg();
    parser.accepts("port").withRequiredArg().ofType(Integer.class);
    optBdbPath = parser.accepts("bdbPath").withRequiredArg().defaultsTo("data");
    //optJdbcDriver = parser.accepts("driver").withRequiredArg().defaultsTo("com.mysql.jdbc.Driver");
    optJdbcUrl = parser.accepts("url").withRequiredArg().defaultsTo("jdbc:mysql://localhost/javacoin_testnet3");
    optJdbcUser = parser.accepts("dbuser").withRequiredArg().defaultsTo("javacoin");
    optJdbcPassword = parser.accepts("dbpass").withRequiredArg().defaultsTo("pw");
    inputfile = parser.accepts("inputfile").withRequiredArg();
    outputfile = parser.accepts("outputfile").withRequiredArg();
    //    String[] args = {
    //      "--inputfile", "blocks-0-100000.txt", "--prodnet", "--load", "--url", "jdbc:mysql://localhost/javacoin_test"
    //    };
    OptionSet options = parser.parse(args);
    if (args.length == 0 || options.hasArgument("help") || options.nonOptionArguments().size() > 0
            || (options.has("export") && options.has("import"))
            || (options.has("export") && !options.has("outputfile"))
            || (options.has("import") && !options.has("inputfile"))
            || (options.has("testnet2") && options.has("testnet3"))
            || (options.has("testnet2") && options.has("prodnet"))
            || (options.has("testnet3") && options.has("prodnet"))) {
        println(HELP_TEXT);
        return;
    }
    if (options.hasArgument("port")) {
        //listenPort = ((Integer) options.valueOf("port")).intValue();
    }
    cmdExportBlockchain = options.has("export");
    cmdImportBlockchain = options.has("import");
    isProdnet = options.has("prodnet");
    isTestNet2 = options.has("testnet2");
    isTestNet3 = options.has("testnet3");
    if (!isProdnet && !isTestNet2 && !isTestNet3)
        isTestNet3 = true;
    if (options.hasArgument("first")) {
        firstBlock = ((Integer) options.valueOf("first")).intValue();
        if (!options.hasArgument("last"))
            lastBlock = firstBlock;
    }
    if (options.hasArgument("last")) {
        lastBlock = ((Integer) options.valueOf("last")).intValue();
        if (!options.hasArgument("first"))
            firstBlock = lastBlock;
    }
    if (options.hasArgument("hash"))
        blockHash = (String) options.valueOf("hash");
    if (cmdExportBlockchain && blockHash == null && firstBlock == 0 && lastBlock == 0) {
        println("To save blocks you have to specify a range or an hash");
        return;
    }

    //println("save: " + cmdSaveBlockchain + " load: " + cmdLoadBlockchain + " prodnet: " + isProdnet + " testnet2: " + isTestNet2 + " testnet3: " + isTestNet3);
    //println("FirstBlock: " + firstBlock + " lastBlock: " + lastBlock + " inputfile: " + inputfile.value(options) + " outputfile: " + outputfile.value(options));
    BlockTool app = new BlockTool();
    app.init(options);
    if (cmdImportBlockchain) {
        //System.out.println("Press return to start import blocks to blockchain");
        //System.in.read();
        BufferedReader reader;
        if ("-".equals(inputfile.value(options)))
            reader = new BufferedReader(new InputStreamReader(System.in));
        else
            reader = new BufferedReader(new FileReader(inputfile.value(options)));
        int numBlocks = 0;
        Block block = app.readBlock(reader, false);
        while (block != null) {
            numBlocks++;
            long startTime = System.currentTimeMillis();
            blockChain.addBlock(block);
            long insertTime = System.currentTimeMillis() - startTime;
            System.out.printf(
                    "%6d Block " + BtcUtil.hexOut(block.getHash()) + " #txs: %4d insertTime(ms): %d%n",
                    numBlocks, block.getTransactions().size(), insertTime);
            block = app.readBlock(reader, false);
        }
        System.out.println("Numero blocchi letti: " + numBlocks);
    } else if (cmdExportBlockchain) {
        BlockChainLink blockLink;
        try (PrintWriter writer = new PrintWriter(new File(outputfile.value(options)))) {
            if (blockHash != null) {
                blockLink = storage.getLink(BtcUtil.hexIn(blockHash));
                app.writeBlock(writer, blockLink.getBlock());
            } else {
                for (int i = firstBlock; i <= lastBlock; i++) {
                    blockLink = storage.getLinkAtHeight(i);
                    app.writeBlock(writer, blockLink.getBlock());
                }
            }
        }
    }
    app.close();
}

From source file:net.sf.xmm.moviemanager.MovieManager.java

public static void main(String args[]) {

    boolean sandbox = SysUtil.isRestrictedSandbox();

    // Uses this to check if the app is running in a sandbox with limited privileges
    try {
        /* Disable HTTPClient logging output */
        System.setProperty("org.apache.commons.logging.Log", "org.apache.commons.logging.impl.SimpleLog"); //$NON-NLS-1$ //$NON-NLS-2$

    } catch (java.security.AccessControlException s) {
        s.printStackTrace();
        sandbox = true;
    }

    if (!sandbox) {
        // Disables logging for cobra html renderer
        java.util.logging.Logger.getLogger("").setLevel(java.util.logging.Level.OFF);

        File log4jConfigFile = FileUtil.getFile("config/log4j.properties"); //$NON-NLS-1$

        if (log4jConfigFile.isFile()) {
            PropertyConfigurator.configure(log4jConfigFile.getAbsolutePath());
        } else {
            BasicConfigurator.configure();
        }
    } else
        BasicConfigurator.configure();

    log = Logger.getRootLogger();

    // Places the Log file in the user directory (the program location)
    RollingFileAppender appndr = (RollingFileAppender) log.getAppender("FileAppender");

    String logFile = null;

    try {
        if (SysUtil.isMac() || SysUtil.isWindowsVista() || SysUtil.isWindows7())
            logFile = new File(SysUtil.getConfigDir(), "Log.txt").getAbsolutePath();
    } catch (Exception e1) {
        e1.printStackTrace();
    } finally {

        if (logFile == null)
            logFile = new File(SysUtil.getUserDir(), "Log.txt").getAbsolutePath();
    }

    if (appndr != null && appndr.getFile() == null) {
        appndr.setFile(logFile);
        appndr.activateOptions();
    }

    /* Writes the date. */
    log.debug("================================================================================"); //$NON-NLS-1$
    log.debug("Log Start: " + new Date(System.currentTimeMillis())); //$NON-NLS-1$
    log.debug("MeD's Movie Manager v" + config.sysSettings.getVersion()); //$NON-NLS-1$
    log.debug("MovieManager release:" + MovieManager.getConfig().sysSettings.getRelease() + " - "
            + "IMDb Lib release:" + IMDbLib.getRelease() + " (" + IMDbLib.getVersion() + ")");
    log.debug(SysUtil.getSystemInfo(SysUtil.getLineSeparator())); //$NON-NLS-1$

    /* Loads the config */
    if (!sandbox)
        config.loadConfig();

    // Calls the plugin startup method 
    MovieManagerStartupHandler startupHandler = MovieManager.getConfig().getStartupHandler();

    if (startupHandler != null) {
        startupHandler.startUp();
    }

    if (!sandbox) {

        if (SysUtil.isAtLeastJRE6()) {
            SysUtil.includeJarFilesInClasspath("lib/LookAndFeels/1.6");
        }

        SysUtil.includeJarFilesInClasspath("lib/LookAndFeels");
        SysUtil.includeJarFilesInClasspath("lib/drivers");

        /* Must be called before the GUI is created */
        if (SysUtil.isMac()) {
            SysUtil.includeJarFilesInClasspath("lib/mac");
            lookAndFeelManager.setupOSXLaF();
        }
    }

    movieManager = new MovieManager();
    movieManager.sandbox = sandbox;

    //       Loads the HTML templates
    templateHandler.loadHTMLTemplates();

    EventQueue.invokeLater(new Runnable() {
        public final void run() {

            try {

                /* Installs the Look&Feels */
                lookAndFeelManager.instalLAFs();

                if (!MovieManager.isApplet())
                    lookAndFeelManager.setLookAndFeel();

                log.debug("Look & Feels installed.");

                log.debug("Creating MovieManager Dialog");
                movieManager.createDialog();

                /* Starts the MovieManager. */
                MovieManager.getDialog().setUp();
                log.debug("MovieManager Dialog - setup.");

                MovieManager.getDialog().showDialog();

                /* SetUp the Application Menu for OSX */
                if (SysUtil.isMac()) {
                    LookAndFeelManager.macOSXRegistration(MovieManager.getDialog());
                }

                // Calls the plugin startup method 
                MovieManagerLoginHandler loginHandler = MovieManager.getConfig().getLoginHandler();

                if (loginHandler != null) {
                    loginHandler.loginStartUp();
                }

                log.debug("Loading Database....");

                /* Loads the database. */
                databaseHandler.loadDatabase(true);

                log.debug("Database loaded.");

                AppUpdater.handleVersionUpdate();

            } catch (Exception e) {
                log.error("Exception occured while intializing MeD's Movie Manager", e);
            }
        }
    });
}