Example usage for java.util ArrayList addAll

List of usage examples for java.util ArrayList addAll

Introduction

On this page you can find example usages of java.util.ArrayList.addAll.

Prototype

public boolean addAll(Collection<? extends E> c) 

Document

Appends all of the elements in the specified collection to the end of this list, in the order that they are returned by the specified collection's Iterator.
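
Before the project-level examples below, here is a minimal, self-contained sketch of the call itself (class and variable names are illustrative, not taken from the examples):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class AddAllExample {
    public static void main(String[] args) {
        // Start from a mutable list (Arrays.asList alone is fixed-size).
        List<String> names = new ArrayList<>(Arrays.asList("alice", "bob"));

        // addAll appends the collection's elements in iteration order and
        // returns true if this list changed as a result of the call.
        boolean changed = names.addAll(Arrays.asList("carol", "dave"));

        System.out.println(changed); // true
        System.out.println(names);   // [alice, bob, carol, dave]

        // Appending an empty collection leaves the list unchanged, so addAll returns false.
        System.out.println(names.addAll(new ArrayList<String>())); // false
    }
}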

Usage

From source file:deck36.storm.plan9.nodejs.ExtendedKittenRobbersTopology.java

public static void main(String[] args) throws Exception {

    String env = null;

    if (args != null && args.length > 0) {
        env = args[0];
    }

    if (!"dev".equals(env))
        if (!"prod".equals(env)) {
            System.out.println("Usage: $0 (dev|prod)\n");
            System.exit(1);
        }

    // Topology config
    Config conf = new Config();

    // Load parameters and add them to the Config
    Map configMap = YamlLoader.loadYamlFromResource("storm_" + env + ".yml");

    conf.putAll(configMap);

    log.info(JSONValue.toJSONString(conf));

    // Set topology loglevel to DEBUG
    conf.put(Config.TOPOLOGY_DEBUG, JsonPath.read(conf, "$.deck36_storm.debug"));

    // Create Topology builder
    TopologyBuilder builder = new TopologyBuilder();

    // Unless there are special reasons, start with a parallelism hint of 1
    // and multiple tasks; that way you can scale dynamically later on.
    int parallelism_hint = JsonPath.read(conf, "$.deck36_storm.default_parallelism_hint");
    int num_tasks = JsonPath.read(conf, "$.deck36_storm.default_num_tasks");

    String badgeName = ExtendedKittenRobbersTopology.class.getSimpleName();

    // Create Stream from RabbitMQ messages
    // bind new queue with name of the topology
    // to the main plan9 exchange (from properties config)
    // consuming only CBT-related events by using the routing key 'cbt.#'

    String rabbitQueueName = badgeName; // use topology class name as name for the queue
    String rabbitExchangeName = JsonPath.read(conf,
            "$.deck36_storm.ExtendedKittenRobbersBolt.rabbitmq.exchange");
    String rabbitRoutingKey = JsonPath.read(conf,
            "$.deck36_storm.ExtendedKittenRobbersBolt.rabbitmq.routing_key");

    // Get JSON deserialization scheme
    Scheme rabbitScheme = new SimpleJSONScheme();

    // Setup a Declarator to configure exchange/queue/routing key
    RabbitMQDeclarator rabbitDeclarator = new RabbitMQDeclarator(rabbitExchangeName, rabbitQueueName,
            rabbitRoutingKey);

    // Create Configuration for the Spout
    ConnectionConfig connectionConfig = new ConnectionConfig(
            (String) JsonPath.read(conf, "$.deck36_storm.rabbitmq.host"),
            (Integer) JsonPath.read(conf, "$.deck36_storm.rabbitmq.port"),
            (String) JsonPath.read(conf, "$.deck36_storm.rabbitmq.user"),
            (String) JsonPath.read(conf, "$.deck36_storm.rabbitmq.pass"),
            (String) JsonPath.read(conf, "$.deck36_storm.rabbitmq.vhost"),
            (Integer) JsonPath.read(conf, "$.deck36_storm.rabbitmq.heartbeat"));

    ConsumerConfig spoutConfig = new ConsumerConfigBuilder().connection(connectionConfig).queue(rabbitQueueName)
            .prefetch((Integer) JsonPath.read(conf, "$.deck36_storm.rabbitmq.prefetch")).requeueOnFail()
            .build();

    // add global parameters to topology config - the RabbitMQSpout will read them from there
    conf.putAll(spoutConfig.asMap());

    // For production, set the spout pending value to the same value as the RabbitMQ pre-fetch
    // see: https://github.com/ppat/storm-rabbitmq/blob/master/README.md
    if ("prod".equals(env)) {
        conf.put(Config.TOPOLOGY_MAX_SPOUT_PENDING,
                (Integer) JsonPath.read(conf, "$.deck36_storm.rabbitmq.prefetch"));
    }

    // Add RabbitMQ spout to topology
    builder.setSpout("incoming", new RabbitMQSpout(rabbitScheme, rabbitDeclarator), parallelism_hint)
            .setNumTasks((Integer) JsonPath.read(conf, "$.deck36_storm.rabbitmq.spout_tasks"));

    // construct command to invoke the external bolt implementation
    ArrayList<String> command = new ArrayList<>(15);

    // Add main execution program (php, hhvm, zend, ..) and parameters
    command.add((String) JsonPath.read(conf, "$.deck36_storm.nodejs.executor"));

    // Add main route to be invoked and its parameters
    command.add((String) JsonPath.read(conf, "$.deck36_storm.ExtendedKittenRobbersBolt.main"));
    List<String> boltParams = JsonPath.read(conf, "$.deck36_storm.ExtendedKittenRobbersBolt.params");
    if (boltParams != null)
        command.addAll(boltParams);

    // Log the final command
    log.info("Command to start bolt for Extended Kitten Robbers From Outer Space: "
            + Arrays.toString(command.toArray()));

    // Add constructed external bolt command to topology using MultilangAdapterTickTupleBolt.
    // We need to use the tick tuple adapter instead of the general adapter:
    builder.setBolt("badge",
            new MultilangAdapterTickTupleBolt(command,
                    (Integer) JsonPath.read(conf, "$.deck36_storm.ExtendedKittenRobbersBolt.robber_frequency"),
                    "badge"),
            parallelism_hint)
            .setNumTasks(num_tasks)
            .shuffleGrouping("incoming");

    builder.setBolt("rabbitmq_router", new Plan9RabbitMQRouterBolt(
            (String) JsonPath.read(conf, "$.deck36_storm.ExtendedKittenRobbersBolt.rabbitmq.target_exchange"),
            "KittenRobbers" // RabbitMQ routing key
    ), parallelism_hint).setNumTasks(num_tasks).shuffleGrouping("badge");

    builder.setBolt("rabbitmq_producer", new Plan9RabbitMQPushBolt(), parallelism_hint).setNumTasks(num_tasks)
            .shuffleGrouping("rabbitmq_router");

    if ("dev".equals(env)) {
        LocalCluster cluster = new LocalCluster();
        cluster.submitTopology(badgeName + System.currentTimeMillis(), conf, builder.createTopology());
        Thread.sleep(2000000);
    }

    if ("prod".equals(env)) {
        StormSubmitter.submitTopology(badgeName + "-" + System.currentTimeMillis(), conf,
                builder.createTopology());
    }

}

From source file:org.n52.oss.testdata.sml.GeneratorClient.java

/**
 * @param args
 */
public static void main(String[] args) {
    if (sending)
        log.info("Starting insertion of test sensors into " + sirURL);
    else
        log.warn("Starting generation of test sensors, NOT sending!");

    TestSensorFactory factory = new TestSensorFactory();

    // create sensors
    List<TestSensor> sensors = new ArrayList<TestSensor>();

    for (int i = 0; i < nSensorsToGenerate; i++) {
        TestSensor s = factory.createRandomTestSensor();
        sensors.add(s);
        log.info("Added new random sensor: " + s);
    }

    ArrayList<String> insertedSirIds = new ArrayList<String>();

    // insert sensors to service
    int startOfSubList = 0;
    int endOfSubList = nSensorsInOneInsertRequest;
    while (endOfSubList <= sensors.size() + nSensorsInOneInsertRequest) {
        List<TestSensor> currentSensors = sensors.subList(startOfSubList,
                Math.min(endOfSubList, sensors.size()));

        if (currentSensors.isEmpty())
            break;

        try {
            String[] insertedSirIDs = insertSensorsInSIR(currentSensors);
            if (insertedSirIDs == null) {
                log.error("Did not insert dummy sensors.");
            } else {
                insertedSirIds.addAll(Arrays.asList(insertedSirIDs));
            }
        } catch (HttpException e) {
            log.error("Error inserting sensors.", e);
        } catch (IOException e) {
            log.error("Error inserting sensors.", e);
        }

        startOfSubList = Math.min(endOfSubList, sensors.size());
        endOfSubList = endOfSubList + nSensorsInOneInsertRequest;

        if (sending) {
            try {
                if (log.isDebugEnabled())
                    log.debug("Sleeping for " + SLEEP_BETWEEN_REQUESTS + " msecs.");
                Thread.sleep(SLEEP_BETWEEN_REQUESTS);
            } catch (InterruptedException e) {
                log.error("Interrupted!", e);
            }
        }
    }

    log.info("Added sensors (ids in sir): " + Arrays.toString(insertedSirIds.toArray()));
}

From source file:edu.nyu.vida.data_polygamy.standard_techniques.CorrelationTechniques.java

/**
 * @param args
 * @throws ParseException 
 */
@SuppressWarnings({ "deprecation" })
public static void main(String[] args) throws IOException, InterruptedException, ClassNotFoundException {

    Options options = new Options();

    Option forceOption = new Option("f", "force", false,
            "force the computation of the relationship " + "even if files already exist");
    forceOption.setRequired(false);
    options.addOption(forceOption);

    Option g1Option = new Option("g1", "first-group", true, "set first group of datasets");
    g1Option.setRequired(true);
    g1Option.setArgName("FIRST GROUP");
    g1Option.setArgs(Option.UNLIMITED_VALUES);
    options.addOption(g1Option);

    Option g2Option = new Option("g2", "second-group", true, "set second group of datasets");
    g2Option.setRequired(false);
    g2Option.setArgName("SECOND GROUP");
    g2Option.setArgs(Option.UNLIMITED_VALUES);
    options.addOption(g2Option);

    Option machineOption = new Option("m", "machine", true, "machine identifier");
    machineOption.setRequired(true);
    machineOption.setArgName("MACHINE");
    machineOption.setArgs(1);
    options.addOption(machineOption);

    Option nodesOption = new Option("n", "nodes", true, "number of nodes");
    nodesOption.setRequired(true);
    nodesOption.setArgName("NODES");
    nodesOption.setArgs(1);
    options.addOption(nodesOption);

    Option s3Option = new Option("s3", "s3", false, "data on Amazon S3");
    s3Option.setRequired(false);
    options.addOption(s3Option);

    Option awsAccessKeyIdOption = new Option("aws_id", "aws-id", true,
            "aws access key id; " + "this is required if the execution is on aws");
    awsAccessKeyIdOption.setRequired(false);
    awsAccessKeyIdOption.setArgName("AWS-ACCESS-KEY-ID");
    awsAccessKeyIdOption.setArgs(1);
    options.addOption(awsAccessKeyIdOption);

    Option awsSecretAccessKeyOption = new Option("aws_key", "aws-key", true,
            "aws secret access key; " + "this is required if the execution is on aws");
    awsSecretAccessKeyOption.setRequired(false);
    awsSecretAccessKeyOption.setArgName("AWS-SECRET-ACCESS-KEY");
    awsSecretAccessKeyOption.setArgs(1);
    options.addOption(awsSecretAccessKeyOption);

    Option bucketOption = new Option("b", "s3-bucket", true,
            "bucket on s3; " + "this is required if the execution is on aws");
    bucketOption.setRequired(false);
    bucketOption.setArgName("S3-BUCKET");
    bucketOption.setArgs(1);
    options.addOption(bucketOption);

    Option helpOption = new Option("h", "help", false, "display this message");
    helpOption.setRequired(false);
    options.addOption(helpOption);

    HelpFormatter formatter = new HelpFormatter();
    CommandLineParser parser = new PosixParser();
    CommandLine cmd = null;

    try {
        cmd = parser.parse(options, args);
    } catch (ParseException e) {
        formatter.printHelp(
                "hadoop jar data-polygamy.jar "
                        + "edu.nyu.vida.data_polygamy.standard_techniques.CorrelationTechniques",
                options, true);
        System.exit(0);
    }

    if (cmd.hasOption("h")) {
        formatter.printHelp(
                "hadoop jar data-polygamy.jar "
                        + "edu.nyu.vida.data_polygamy.standard_techniques.CorrelationTechniques",
                options, true);
        System.exit(0);
    }

    boolean s3 = cmd.hasOption("s3");
    String s3bucket = "";
    String awsAccessKeyId = "";
    String awsSecretAccessKey = "";

    if (s3) {
        if ((!cmd.hasOption("aws_id")) || (!cmd.hasOption("aws_key")) || (!cmd.hasOption("b"))) {
            System.out.println(
                    "Arguments 'aws_id', 'aws_key', and 'b'" + " are mandatory if execution is on AWS.");
            formatter.printHelp(
                    "hadoop jar data-polygamy.jar "
                            + "edu.nyu.vida.data_polygamy.standard_techniques.CorrelationTechniques",
                    options, true);
            System.exit(0);
        }
        s3bucket = cmd.getOptionValue("b");
        awsAccessKeyId = cmd.getOptionValue("aws_id");
        awsSecretAccessKey = cmd.getOptionValue("aws_key");
    }

    boolean snappyCompression = false;
    boolean bzip2Compression = false;
    String machine = cmd.getOptionValue("m");
    int nbNodes = Integer.parseInt(cmd.getOptionValue("n"));

    Configuration s3conf = new Configuration();
    if (s3) {
        s3conf.set("fs.s3.awsAccessKeyId", awsAccessKeyId);
        s3conf.set("fs.s3.awsSecretAccessKey", awsSecretAccessKey);
        s3conf.set("bucket", s3bucket);
    }

    Path path = null;
    FileSystem fs = FileSystem.get(new Configuration());

    ArrayList<String> shortDataset = new ArrayList<String>();
    ArrayList<String> firstGroup = new ArrayList<String>();
    ArrayList<String> secondGroup = new ArrayList<String>();
    HashMap<String, String> datasetAgg = new HashMap<String, String>();

    boolean removeExistingFiles = cmd.hasOption("f");

    String[] firstGroupCmd = cmd.getOptionValues("g1");
    String[] secondGroupCmd = cmd.hasOption("g2") ? cmd.getOptionValues("g2") : new String[0];
    addDatasets(firstGroupCmd, firstGroup, shortDataset, datasetAgg, path, fs, s3conf, s3, s3bucket);
    addDatasets(secondGroupCmd, secondGroup, shortDataset, datasetAgg, path, fs, s3conf, s3, s3bucket);

    if (shortDataset.size() == 0) {
        System.out.println("No datasets to process.");
        System.exit(0);
    }

    if (firstGroup.isEmpty()) {
        System.out.println("First group of datasets (G1) is empty. " + "Doing G1 = G2.");
        firstGroup.addAll(secondGroup);
    }

    if (secondGroup.isEmpty()) {
        System.out.println("Second group of datasets (G2) is empty. " + "Doing G2 = G1.");
        secondGroup.addAll(firstGroup);
    }

    // getting dataset ids

    String datasetNames = "";
    String datasetIds = "";
    HashMap<String, String> datasetId = new HashMap<String, String>();
    Iterator<String> it = shortDataset.iterator();
    while (it.hasNext()) {
        datasetId.put(it.next(), null);
    }

    if (s3) {
        path = new Path(s3bucket + FrameworkUtils.datasetsIndexDir);
        fs = FileSystem.get(path.toUri(), s3conf);
    } else {
        path = new Path(fs.getHomeDirectory() + "/" + FrameworkUtils.datasetsIndexDir);
    }
    BufferedReader br = new BufferedReader(new InputStreamReader(fs.open(path)));
    String line = br.readLine();
    while (line != null) {
        String[] dt = line.split("\t");
        if (datasetId.containsKey(dt[0])) {
            datasetId.put(dt[0], dt[1]);
            datasetNames += dt[0] + ",";
            datasetIds += dt[1] + ",";
        }
        line = br.readLine();
    }
    br.close();
    if (s3)
        fs.close();

    datasetNames = datasetNames.substring(0, datasetNames.length() - 1);
    datasetIds = datasetIds.substring(0, datasetIds.length() - 1);
    it = shortDataset.iterator();
    while (it.hasNext()) {
        String dataset = it.next();
        if (datasetId.get(dataset) == null) {
            System.out.println("No dataset id for " + dataset);
            System.exit(0);
        }
    }

    String firstGroupStr = "";
    String secondGroupStr = "";
    for (String dataset : firstGroup) {
        firstGroupStr += datasetId.get(dataset) + ",";
    }
    for (String dataset : secondGroup) {
        secondGroupStr += datasetId.get(dataset) + ",";
    }
    firstGroupStr = firstGroupStr.substring(0, firstGroupStr.length() - 1);
    secondGroupStr = secondGroupStr.substring(0, secondGroupStr.length() - 1);

    FrameworkUtils.createDir(s3bucket + FrameworkUtils.correlationTechniquesDir, s3conf, s3);

    String dataAttributesInputDirs = "";
    String noRelationship = "";

    HashSet<String> dirs = new HashSet<String>();

    String dataset1;
    String dataset2;
    String datasetId1;
    String datasetId2;
    for (int i = 0; i < firstGroup.size(); i++) {
        for (int j = 0; j < secondGroup.size(); j++) {

            if (Integer.parseInt(datasetId.get(firstGroup.get(i))) < Integer
                    .parseInt(datasetId.get(secondGroup.get(j)))) {
                dataset1 = firstGroup.get(i);
                dataset2 = secondGroup.get(j);
            } else {
                dataset1 = secondGroup.get(j);
                dataset2 = firstGroup.get(i);
            }

            datasetId1 = datasetId.get(dataset1);
            datasetId2 = datasetId.get(dataset2);

            if (dataset1.equals(dataset2))
                continue;
            String correlationOutputFileName = s3bucket + FrameworkUtils.correlationTechniquesDir + "/"
                    + dataset1 + "-" + dataset2 + "/";

            if (removeExistingFiles) {
                FrameworkUtils.removeFile(correlationOutputFileName, s3conf, s3);
            }
            if (!FrameworkUtils.fileExists(correlationOutputFileName, s3conf, s3)) {
                dirs.add(s3bucket + FrameworkUtils.aggregatesDir + "/" + dataset1);
                dirs.add(s3bucket + FrameworkUtils.aggregatesDir + "/" + dataset2);
            } else {
                noRelationship += datasetId1 + "-" + datasetId2 + ",";
            }
        }
    }

    if (dirs.isEmpty()) {
        System.out.println("All the relationships were already computed.");
        System.out.println("Use -f in the beginning of the command line to force the computation.");
        System.exit(0);
    }

    for (String dir : dirs) {
        dataAttributesInputDirs += dir + ",";
    }

    Configuration conf = new Configuration();
    Machine machineConf = new Machine(machine, nbNodes);

    String jobName = "correlation";
    String correlationOutputDir = s3bucket + FrameworkUtils.correlationTechniquesDir + "/tmp/";

    FrameworkUtils.removeFile(correlationOutputDir, s3conf, s3);

    for (int i = 0; i < shortDataset.size(); i++) {
        conf.set("dataset-" + datasetId.get(shortDataset.get(i)) + "-agg", datasetAgg.get(shortDataset.get(i)));
    }
    for (int i = 0; i < shortDataset.size(); i++) {
        conf.set("dataset-" + datasetId.get(shortDataset.get(i)) + "-agg-size",
                Integer.toString(datasetAgg.get(shortDataset.get(i)).split(",").length));
    }
    conf.set("dataset-keys", datasetIds);
    conf.set("dataset-names", datasetNames);
    conf.set("first-group", firstGroupStr);
    conf.set("second-group", secondGroupStr);
    conf.set("main-dataset-id", datasetId.get(shortDataset.get(0)));
    if (noRelationship.length() > 0) {
        conf.set("no-relationship", noRelationship.substring(0, noRelationship.length() - 1));
    }

    conf.set("mapreduce.tasktracker.map.tasks.maximum", String.valueOf(machineConf.getMaximumTasks()));
    conf.set("mapreduce.tasktracker.reduce.tasks.maximum", String.valueOf(machineConf.getMaximumTasks()));
    conf.set("mapreduce.jobtracker.maxtasks.perjob", "-1");
    conf.set("mapreduce.reduce.shuffle.parallelcopies", "20");
    conf.set("mapreduce.input.fileinputformat.split.minsize", "0");
    conf.set("mapreduce.task.io.sort.mb", "200");
    conf.set("mapreduce.task.io.sort.factor", "100");
    conf.set("mapreduce.task.timeout", "2400000");

    if (s3) {
        machineConf.setMachineConfiguration(conf);
        conf.set("fs.s3.awsAccessKeyId", awsAccessKeyId);
        conf.set("fs.s3.awsSecretAccessKey", awsSecretAccessKey);
        conf.set("bucket", s3bucket);
    }

    if (snappyCompression) {
        conf.set("mapreduce.map.output.compress", "true");
        conf.set("mapreduce.map.output.compress.codec", "org.apache.hadoop.io.compress.SnappyCodec");
        //conf.set("mapreduce.output.fileoutputformat.compress.codec", "org.apache.hadoop.io.compress.SnappyCodec");
    }
    if (bzip2Compression) {
        conf.set("mapreduce.map.output.compress", "true");
        conf.set("mapreduce.map.output.compress.codec", "org.apache.hadoop.io.compress.BZip2Codec");
        //conf.set("mapreduce.output.fileoutputformat.compress.codec", "org.apache.hadoop.io.compress.BZip2Codec");
    }

    Job job = new Job(conf);
    job.setJobName(jobName);

    job.setMapOutputKeyClass(PairAttributeWritable.class);
    job.setMapOutputValueClass(SpatioTemporalValueWritable.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(Text.class);

    job.setMapperClass(CorrelationTechniquesMapper.class);
    job.setReducerClass(CorrelationTechniquesReducer.class);
    job.setNumReduceTasks(machineConf.getNumberReduces());

    job.setInputFormatClass(SequenceFileInputFormat.class);
    LazyOutputFormat.setOutputFormatClass(job, TextOutputFormat.class);

    FileInputFormat.setInputDirRecursive(job, true);
    FileInputFormat.setInputPaths(job,
            dataAttributesInputDirs.substring(0, dataAttributesInputDirs.length() - 1));
    FileOutputFormat.setOutputPath(job, new Path(correlationOutputDir));

    job.setJarByClass(CorrelationTechniques.class);

    long start = System.currentTimeMillis();
    job.submit();
    job.waitForCompletion(true);
    System.out.println(jobName + "\t" + (System.currentTimeMillis() - start));

    // moving files to right place
    for (int i = 0; i < firstGroup.size(); i++) {
        for (int j = 0; j < secondGroup.size(); j++) {

            if (Integer.parseInt(datasetId.get(firstGroup.get(i))) < Integer
                    .parseInt(datasetId.get(secondGroup.get(j)))) {
                dataset1 = firstGroup.get(i);
                dataset2 = secondGroup.get(j);
            } else {
                dataset1 = secondGroup.get(j);
                dataset2 = firstGroup.get(i);
            }

            if (dataset1.equals(dataset2))
                continue;

            String from = s3bucket + FrameworkUtils.correlationTechniquesDir + "/tmp/" + dataset1 + "-"
                    + dataset2 + "/";
            String to = s3bucket + FrameworkUtils.correlationTechniquesDir + "/" + dataset1 + "-" + dataset2
                    + "/";
            FrameworkUtils.renameFile(from, to, s3conf, s3);
        }
    }
}

From source file:de.prozesskraft.ptest.Fingerprint.java

public static void main(String[] args) throws org.apache.commons.cli.ParseException, IOException {

    //      try
    //      {
    //         if (args.length != 3)
    //         {
    //            System.out.println("Please specify processdefinition file (xml) and an outputfilename");
    //         }
    //         
    //      }
    //      catch (ArrayIndexOutOfBoundsException e)
    //      {
    //         System.out.println("***ArrayIndexOutOfBoundsException: Please specify processdefinition.xml, openoffice_template.od*, newfile_for_processdefinitions.odt\n" + e.toString());
    //      }

    /*----------------------------
      get options from ini-file
    ----------------------------*/
    File inifile = new java.io.File(
            WhereAmI.getInstallDirectoryAbsolutePath(Fingerprint.class) + "/" + "../etc/ptest-fingerprint.ini");

    if (inifile.exists()) {
        try {
            ini = new Ini(inifile);
        } catch (InvalidFileFormatException e1) {
            // TODO Auto-generated catch block
            e1.printStackTrace();
        } catch (IOException e1) {
            // TODO Auto-generated catch block
            e1.printStackTrace();
        }
    } else {
        System.err.println("ini file does not exist: " + inifile.getAbsolutePath());
        System.exit(1);
    }

    /*----------------------------
      create boolean options
    ----------------------------*/
    Option ohelp = new Option("help", "print this message");
    Option ov = new Option("v", "prints version and build-date");

    /*----------------------------
      create argument options
    ----------------------------*/
    Option opath = OptionBuilder.withArgName("PATH").hasArg()
            .withDescription(
                    "[mandatory; default: .] the root path for the tree you want to make a fingerprint from.")
            //            .isRequired()
            .create("path");

    Option osizetol = OptionBuilder.withArgName("FLOAT").hasArg().withDescription(
            "[optional; default: 0.02] the sizeTolerance (as factor in percent) of all file entries will be set to this value. [0.0 < sizetol < 1.0]")
            //            .isRequired()
            .create("sizetol");

    Option omd5 = OptionBuilder.withArgName("no|yes").hasArg()
            .withDescription("[optional; default: yes] should be the md5sum of files determined? no|yes")
            //            .isRequired()
            .create("md5");

    Option oignore = OptionBuilder.withArgName("STRING").hasArgs()
            .withDescription("[optional] path-pattern that should be ignored when creating the fingerprint")
            //            .isRequired()
            .create("ignore");

    Option oignorefile = OptionBuilder.withArgName("FILE").hasArg().withDescription(
            "[optional] file with path-patterns (one per line) that should be ignored when creating the fingerprint")
            //            .isRequired()
            .create("ignorefile");

    Option ooutput = OptionBuilder.withArgName("FILE").hasArg()
            .withDescription("[mandatory; default: <path>/fingerprint.xml] fingerprint file")
            //            .isRequired()
            .create("output");

    Option of = new Option("f", "[optional] force overwrite fingerprint file if it already exists");

    /*----------------------------
      create options object
    ----------------------------*/
    Options options = new Options();

    options.addOption(ohelp);
    options.addOption(ov);
    options.addOption(opath);
    options.addOption(osizetol);
    options.addOption(omd5);
    options.addOption(oignore);
    options.addOption(oignorefile);
    options.addOption(ooutput);
    options.addOption(of);

    /*----------------------------
      create the parser
    ----------------------------*/
    CommandLineParser parser = new GnuParser();
    try {
        // parse the command line arguments
        commandline = parser.parse(options, args);

    } catch (Exception exp) {
        // oops, something went wrong
        System.err.println("Parsing failed. Reason: " + exp.getMessage());
        exiter();
    }

    /*----------------------------
      usage/help
    ----------------------------*/
    if (commandline.hasOption("help")) {
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("fingerprint", options);
        System.exit(0);
    }

    else if (commandline.hasOption("v")) {
        System.out.println("web:     " + web);
        System.out.println("author: " + author);
        System.out.println("version:" + version);
        System.out.println("date:     " + date);
        System.exit(0);
    }

    /*----------------------------
      check whether a bad combination of parameters was given
    ----------------------------*/
    String path = "";
    String sizetol = "";
    boolean md5 = false;
    Float sizetolFloat = null;
    String output = "";
    java.io.File ignorefile = null;
    ArrayList<String> ignore = new ArrayList<String>();

    if (!(commandline.hasOption("path"))) {
        System.err.println("setting default for -path=.");
        path = ".";
    } else {
        path = commandline.getOptionValue("path");
    }

    if (!(commandline.hasOption("sizetol"))) {
        System.err.println("setting default for -sizetol=0.02");
        sizetol = "0.02";
        sizetolFloat = 0.02F;
    } else {
        sizetol = commandline.getOptionValue("sizetol");
        sizetolFloat = Float.parseFloat(sizetol);

        if ((sizetolFloat >= 1.0F) || (sizetolFloat < 0.0F)) {
            System.err.println("use only values >=0.0 and <1.0 for -sizetol");
            System.exit(1);
        }
    }

    if (!(commandline.hasOption("md5"))) {
        System.err.println("setting default for -md5=yes");
        md5 = true;
    } else if (commandline.getOptionValue("md5").equals("no")) {
        md5 = false;
    } else if (commandline.getOptionValue("md5").equals("yes")) {
        md5 = true;
    } else {
        System.err.println("use only values no|yes for -md5");
        System.exit(1);
    }

    if (commandline.hasOption("ignore")) {
        ignore.addAll(Arrays.asList(commandline.getOptionValues("ignore")));
    }

    if (commandline.hasOption("ignorefile")) {
        ignorefile = new java.io.File(commandline.getOptionValue("ignorefile"));
        if (!ignorefile.exists()) {
            System.err.println("warn: ignore file does not exist: " + ignorefile.getCanonicalPath());
        }
    }

    if (!(commandline.hasOption("output"))) {
        System.err.println("setting default for -output=" + path + "/fingerprint.xml");
        output = path + "/fingerprint.xml";
    } else {
        output = commandline.getOptionValue("output");
    }

    // if the output file already exists -> abort
    java.io.File outputFile = new File(output);
    if (outputFile.exists()) {
        if (commandline.hasOption("f")) {
            outputFile.delete();
        } else {
            System.err
                    .println("error: output file (" + output + ") already exists. use -f to force overwrite.");
            System.exit(1);
        }
    }

    //      if ( !( commandline.hasOption("output")) )
    //      {
    //         System.err.println("option -output is mandatory.");
    //         exiter();
    //      }

    /*----------------------------
      check the license and abort if necessary
    ----------------------------*/

    // check for valid license
    ArrayList<String> allPortAtHost = new ArrayList<String>();
    allPortAtHost.add(ini.get("license-server", "license-server-1"));
    allPortAtHost.add(ini.get("license-server", "license-server-2"));
    allPortAtHost.add(ini.get("license-server", "license-server-3"));

    MyLicense lic = new MyLicense(allPortAtHost, "1", "user-edition", "0.1");

    // print the license log
    for (String actLine : (ArrayList<String>) lic.getLog()) {
        System.err.println(actLine);
    }

    // abort if the license is not valid
    if (!lic.isValid()) {
        System.exit(1);
    }

    /*----------------------------
      the actual business logic
    ----------------------------*/
    Dir dir = new Dir();
    dir.setBasepath(path);
    dir.setOutfilexml(output);

    // read the ignore file into a list
    if ((ignorefile != null) && (ignorefile.exists())) {
        Scanner sc = new Scanner(ignorefile);
        while (sc.hasNextLine()) {
            ignore.add(sc.nextLine());
        }
        sc.close();
    }

    //      // add autoignore patterns
    //      String autoIgnoreString = ini.get("autoignore", "autoignore");
    //      ignoreLines.addAll(Arrays.asList(autoIgnoreString.split(",")));

    //      // debug
    //      System.out.println("ignorefile content:");
    //      for(String actLine : ignore)
    //      {
    //         System.out.println("line: "+actLine);
    //      }

    try {
        dir.genFingerprint(sizetolFloat, md5, ignore);
    } catch (NullPointerException e) {
        System.err.println("file/dir does not exist " + path);
        e.printStackTrace();
        exiter();
    } catch (IOException e) {
        e.printStackTrace();
        exiter();
    }

    System.out.println("writing to file: " + dir.getOutfilexml());
    dir.writeXml();

}

From source file:com.zimbra.cs.util.ProxyPurgeUtil.java

public static void main(String[] args) throws ServiceException {
    CommandLine commandLine;
    ArrayList<String> servers;
    ArrayList<String> accounts;
    String outputformat;
    boolean purge = false;
    Provisioning prov;
    List<Server> memcachedServers;
    String logLevel = "ERROR";

    /* Parse the command-line arguments, and display usage if necessary */
    try {
        commandLine = parseCommandLine(args);
    } catch (ParseException pe) {
        commandLine = null;
    }

    if ((commandLine == null) || commandLine.hasOption("h") || commandLine.hasOption("u")) {
        usage();
        System.exit(1);
    }

    if (commandLine.hasOption("v")) {
        logLevel = "DEBUG";
    }
    ZimbraLog.toolSetupLog4j(logLevel, null, false);

    /* Initialize the logging system and the zimbra environment */
    prov = Provisioning.getInstance();

    /* Get the list of servers running the memcached service
       this is equivalent to the $(zmprov gamcs) command
     */
    memcachedServers = prov.getAllServers(Provisioning.SERVICE_MEMCACHED);
    servers = new ArrayList<String>();

    for (Iterator<Server> it = memcachedServers.iterator(); it.hasNext();) {
        Server s = it.next();
        String serverName = s.getAttr(Provisioning.A_zimbraServiceHostname, "localhost");
        String servicePort = s.getAttr(Provisioning.A_zimbraMemcachedBindPort, memcachedPort);
        servers.add(serverName + ":" + servicePort);
    }

    accounts = getAccounts(commandLine);
    servers.addAll(getCacheServers(commandLine));

    if (servers.size() == 0) {
        System.err.println("No memcached servers found, and none specified (--help for help)");
        System.exit(1);
    }

    if (accounts.size() == 0) {
        System.err.println("No accounts specified (--help for help)");
        System.exit(1);
    }

    /* Assume purge unless '-i' is specified:
       -i (info)  indicates that account route info should be printed
       -p (purge) indicates that account route info should be purged
    */
    purge = !commandLine.hasOption("i");

    /* parse the format string */
    if (commandLine.hasOption("o")) {
        outputformat = commandLine.getOptionValue("o");
    } else {
        outputformat = "[%1$s] %2$s -- %3$s";
    }

    purgeAccounts(servers, accounts, purge, outputformat);
}

From source file:Main.java

public static <T> List<T> union(Collection<T> a, Collection<T> b) {
    ArrayList<T> result = new ArrayList<>(a);
    result.addAll(b);
    return result;
}

From source file:Main.java

public static <T> ArrayList<T> constructArrayList(T... elements) {
    ArrayList<T> list = new ArrayList<T>();
    list.addAll(Arrays.asList(elements));
    return list;
}

From source file:Main.java

@Deprecated
// use com.google.common.collect.Lists.newArrayList
public static <T> ArrayList<T> newArrayList(Collection<T> xs) {
    ArrayList<T> r = newArrayList();
    r.addAll(xs);
    return r;
}

From source file:Main.java

public static <T> ArrayList<T> concat(Collection<T> arr1, Collection<T> arr2) {
    ArrayList<T> res = new ArrayList<>(arr1);
    res.addAll(arr2);
    return res;
}

From source file:Main.java

public static <T> ArrayList<T> subtract(Collection<T> l1, Collection<T> l2) {
    ArrayList<T> al = new ArrayList<T>();
    al.addAll(l1);
    for (T item : l2) {
        al.remove(item);
    }
    return al;
}