Example usage for java.util.ArrayList.add

List of usage examples for java.util.ArrayList.add

Introduction

On this page you can find example usage for java.util.ArrayList.add.

Prototype

public boolean add(E e) 

Document

Appends the specified element to the end of this list.
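
A minimal, self-contained sketch of this behavior (not taken from the projects indexed below; the class name AddExample is purely illustrative) shows that add appends the element to the end of the list and returns true:

import java.util.ArrayList;
import java.util.List;

// Illustrative example class, not part of the indexed source files below
public class AddExample {
    public static void main(String[] args) {
        List<String> names = new ArrayList<String>();

        // add(E e) appends to the end of the list and returns true
        boolean changed = names.add("alice");
        names.add("bob");

        System.out.println(changed); // true
        System.out.println(names);   // [alice, bob] - insertion order is preserved
    }
}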

Usage

From source file:com.aestel.chemistry.openEye.fp.DistMatrix.java

public static void main(String... args) throws IOException {
    long start = System.currentTimeMillis();

    // create command line Options object
    Options options = new Options();
    Option opt = new Option("i", true, "input file [.tsv from FingerPrinter]");
    opt.setRequired(true);
    options.addOption(opt);

    opt = new Option("o", true, "outpur file [.tsv ");
    opt.setRequired(true);
    options.addOption(opt);

    CommandLineParser parser = new PosixParser();
    CommandLine cmd = null;
    try {
        cmd = parser.parse(options, args);
    } catch (Exception e) {
        System.err.println(e.getMessage());
        exitWithHelp(options);
    }
    args = cmd.getArgs();

    if (args.length != 0)
        exitWithHelp(options);

    String file = cmd.getOptionValue("i");
    BufferedReader in = new BufferedReader(new FileReader(file));

    file = cmd.getOptionValue("o");
    PrintWriter out = new PrintWriter(new BufferedWriter(new FileWriter(file)));

    ArrayList<Fingerprint> fps = new ArrayList<Fingerprint>();
    ArrayList<String> ids = new ArrayList<String>();
    String line;
    while ((line = in.readLine()) != null) {
        String[] parts = line.split("\t");
        if (parts.length == 3) {
            ids.add(parts[0]);
            fps.add(new ByteFingerprint(parts[2]));
        }
    }
    in.close();

    out.print("ID");
    for (int i = 0; i < ids.size(); i++) {
        out.print('\t');
        out.print(ids.get(i));
    }
    out.println();

    for (int i = 0; i < ids.size(); i++) {
        out.print(ids.get(i));
        Fingerprint fp1 = fps.get(i);

        for (int j = 0; j <= i; j++) {
            out.printf("\t%.4g", fp1.tanimoto(fps.get(j)));
        }
        out.println();
    }
    out.close();

    System.err.printf("Done %d fingerprints in %.2gsec\n", fps.size(),
            (System.currentTimeMillis() - start) / 1000D);
}

From source file:clustering.KMeans.java

public static void main(String[] args) throws UnknownHostException {
    if (args.length != 1) {
        System.out.println("Usage : KMeans <nrClusters>");
        System.exit(-1);
    }

    int kClusters = Integer.parseInt(args[0]);

    ArrayList<Artist> artists = new ArrayList<Artist>();
    DBHelper dbHelper = DBHelper.getInstance();
    DBCursor result = dbHelper.findArtistsWithFBandTW();

    while (result.hasNext()) {
        DBObject currentArtist = result.next();
        artists.add(Artist.fromDBObject(currentArtist));
    }

    //System.out.println(artists.size());
    KMeansPlusPlusClusterer<Artist> clusterer = new KMeansPlusPlusClusterer<Artist>(kClusters);
    List<CentroidCluster<Artist>> clusters = clusterer.cluster(artists);
    //System.out.println(clusters.size());
    dbHelper.emptyClusterCenters();

    for (CentroidCluster<Artist> cluster : clusters) {
        double[] center = cluster.getCenter().getPoint();
        ObjectId centerId = dbHelper.insertClusterCenter(center[0], center[1], center[2]);

        List<Artist> artC = cluster.getPoints();
        for (Artist artist : artC) {
            dbHelper.updateMatrixRowCluster(artist.getDBObject(), centerId);
            //System.out.print("("+artist.fb_likes+","+artist.twitter_followers+","+artist.album_count+") ");
        }
    }
}

From source file:deck36.storm.plan9.nodejs.HighFiveStreamJoinTopology.java

public static void main(String[] args) throws Exception {

    String env = null;

    if (args != null && args.length > 0) {
        env = args[0];
    }

    if (!"dev".equals(env))
        if (!"local".equals(env))
            if (!"prod".equals(env)) {
                System.out.println("Usage: $0 (local|dev|prod)\n");
                System.exit(1);
            }

    // Topology config
    Config conf = new Config();

    // Load parameters and add them to the Config
    Map configMap = YamlLoader.loadYamlFromResource("storm_" + env + ".yml");

    conf.putAll(configMap);

    log.info(JSONValue.toJSONString((conf)));

    // Set topology loglevel to DEBUG
    conf.put(Config.TOPOLOGY_DEBUG, JsonPath.read(conf, "$.deck36_storm.debug"));

    // Create Topology builder
    TopologyBuilder builder = new TopologyBuilder();

    // unless there is a special reason, start with a parallelism hint of 1
    // and multiple tasks; that way, you can scale dynamically later on.
    int parallelism_hint = JsonPath.read(conf, "$.deck36_storm.default_parallelism_hint");
    int num_tasks = JsonPath.read(conf, "$.deck36_storm.default_num_tasks");

    // Create Stream from RabbitMQ messages
    // bind new queue with name of the topology
    // to the main plan9 exchange (from properties config)
    // consuming only CBT-related events by using the routing key 'cbt.#'

    String badgeName = HighFiveStreamJoinTopology.class.getSimpleName();

    String rabbitQueueName = badgeName; // use topology class name as name for the queue
    String rabbitExchangeName = JsonPath.read(conf, "$.deck36_storm.HighFiveStreamJoinBolt.rabbitmq.exchange");
    String rabbitRoutingKey = JsonPath.read(conf, "$.deck36_storm.HighFiveStreamJoinBolt.rabbitmq.routing_key");

    // Get JSON deserialization scheme
    Scheme rabbitScheme = new SimpleJSONScheme();

    // Setup a Declarator to configure exchange/queue/routing key
    RabbitMQDeclarator rabbitDeclarator = new RabbitMQDeclarator(rabbitExchangeName, rabbitQueueName,
            rabbitRoutingKey);

    // Create Configuration for the Spout
    ConnectionConfig connectionConfig = new ConnectionConfig(
            (String) JsonPath.read(conf, "$.deck36_storm.rabbitmq.host"),
            (Integer) JsonPath.read(conf, "$.deck36_storm.rabbitmq.port"),
            (String) JsonPath.read(conf, "$.deck36_storm.rabbitmq.user"),
            (String) JsonPath.read(conf, "$.deck36_storm.rabbitmq.pass"),
            (String) JsonPath.read(conf, "$.deck36_storm.rabbitmq.vhost"),
            (Integer) JsonPath.read(conf, "$.deck36_storm.rabbitmq.heartbeat"));

    ConsumerConfig spoutConfig = new ConsumerConfigBuilder().connection(connectionConfig).queue(rabbitQueueName)
            .prefetch((Integer) JsonPath.read(conf, "$.deck36_storm.rabbitmq.prefetch")).requeueOnFail()
            .build();

    // add global parameters to topology config - the RabbitMQSpout will read them from there
    conf.putAll(spoutConfig.asMap());

    // For production, set the spout pending value to the same value as the RabbitMQ pre-fetch
    // see: https://github.com/ppat/storm-rabbitmq/blob/master/README.md
    if ("prod".equals(env)) {
        conf.put(Config.TOPOLOGY_MAX_SPOUT_PENDING,
                (Integer) JsonPath.read(conf, "$.deck36_storm.rabbitmq.prefetch"));
    }

    // Add RabbitMQ spout to topology
    builder.setSpout("incoming", new RabbitMQSpout(rabbitScheme, rabbitDeclarator), parallelism_hint)
            .setNumTasks((Integer) JsonPath.read(conf, "$.deck36_storm.rabbitmq.spout_tasks"));

    // construct command to invoke the external bolt implementation
    ArrayList<String> command = new ArrayList<String>(15);

    // Add main execution program (node, ..) and parameters
    command.add((String) JsonPath.read(conf, "$.deck36_storm.nodejs.executor"));

    // Add main route to be invoked and its parameters
    command.add((String) JsonPath.read(conf, "$.deck36_storm.HighFiveStreamJoinBolt.main"));
    List boltParams = (List<String>) JsonPath.read(conf, "$.deck36_storm.HighFiveStreamJoinBolt.params");
    if (boltParams != null)
        command.addAll(boltParams);

    // Log the final command
    log.info("Command to start bolt for HighFive badge: " + Arrays.toString(command.toArray()));

    // Add constructed external bolt command to topology using MultilangAdapterBolt
    builder.setBolt("badge", new MultilangAdapterBolt(command, "badge"), parallelism_hint)
            .setNumTasks(num_tasks).shuffleGrouping("incoming");

    builder.setBolt("rabbitmq_router", new Plan9RabbitMQRouterBolt(
            (String) JsonPath.read(conf, "$.deck36_storm.HighFiveStreamJoinBolt.rabbitmq.target_exchange"),
            "HighFive" // RabbitMQ routing key
    ), parallelism_hint).setNumTasks(num_tasks).shuffleGrouping("badge");

    builder.setBolt("rabbitmq_producer", new Plan9RabbitMQPushBolt(), parallelism_hint).setNumTasks(num_tasks)
            .shuffleGrouping("rabbitmq_router");

    if ("dev".equals(env) || "local".equals(env)) {
        LocalCluster cluster = new LocalCluster();
        cluster.submitTopology(badgeName + System.currentTimeMillis(), conf, builder.createTopology());
        Thread.sleep(2000000);
    }

    if ("prod".equals(env)) {
        StormSubmitter.submitTopology(badgeName + "-" + System.currentTimeMillis(), conf,
                builder.createTopology());
    }

}

From source file:createSod.java

/**
 * @param args
 * @throws CMSException 
 */
public static void main(String[] args) throws Exception {

    try {
        CommandLine options = verifyArgs(args);
        String privateKeyLocation = options.getOptionValue("privatekey");
        String keyPassword = options.getOptionValue("keypass");
        String certificate = options.getOptionValue("certificate");
        String sodContent = options.getOptionValue("content");
        String sod = "";
        if (options.hasOption("out")) {
            sod = options.getOptionValue("out");
        }

        // LOAD THE PKCS#12 FILE

        KeyStore ks = null;
        char[] password = null;

        Security.addProvider(new BouncyCastleProvider());
        try {
            ks = KeyStore.getInstance("PKCS12");
            // Password for the personnal_nyal.p12 file
            password = keyPassword.toCharArray();
            ks.load(new FileInputStream(privateKeyLocation), password);
        } catch (Exception e) {
            System.out.println("Erreur: fichier " + privateKeyLocation
                    + " n'est pas un fichier pkcs#12 valide ou passphrase incorrect");
            return;
        }

        // RETRIEVE THE PRIVATE/PUBLIC KEY PAIR AND THE PUBLIC CERTIFICATE

        X509Certificate cert = null;
        PrivateKey privatekey = null;
        PublicKey publickey = null;

        try {
            Enumeration en = ks.aliases();
            String ALIAS = "";
            Vector vectaliases = new Vector();

            while (en.hasMoreElements())
                vectaliases.add(en.nextElement());
            String[] aliases = (String[]) (vectaliases.toArray(new String[0]));
            for (int i = 0; i < aliases.length; i++)
                if (ks.isKeyEntry(aliases[i])) {
                    ALIAS = aliases[i];
                    break;
                }
            privatekey = (PrivateKey) ks.getKey(ALIAS, password);
            cert = (X509Certificate) ks.getCertificate(ALIAS);
            publickey = ks.getCertificate(ALIAS).getPublicKey();
        } catch (Exception e) {
            e.printStackTrace();
            return;
        }

        // Load the certificate from the file

        InputStream inStream = new FileInputStream(certificate);
        CertificateFactory cf = CertificateFactory.getInstance("X.509");
        cert = (X509Certificate) cf.generateCertificate(inStream);
        inStream.close();

        // Load the file that will be signed

        File file_to_sign = new File(sodContent);
        byte[] buffer = new byte[(int) file_to_sign.length()];
        DataInputStream in = new DataInputStream(new FileInputStream(file_to_sign));
        in.readFully(buffer);
        in.close();

        // Load the certificates that will be stored in the .p7 file.
        // Here, only the personnal_nyal.cer certificate is included;
        // the rest of the certificate chain is not.

        ArrayList certList = new ArrayList();
        certList.add(cert);
        CertStore certs = CertStore.getInstance("Collection", new CollectionCertStoreParameters(certList),
                "BC");

        CMSSignedDataGenerator signGen = new CMSSignedDataGenerator();

        // privatekey is our private key retrieved from the PKCS#12 file
        // cert is the public certificate personnal_nyal.cer
        // The last argument is the hash algorithm that will be used

        signGen.addSigner(privatekey, cert, CMSSignedDataGenerator.DIGEST_SHA1);
        signGen.addCertificatesAndCRLs(certs);
        CMSProcessable content = new CMSProcessableByteArray(buffer);

        // Generate the CMS/PKCS#7 file.
        // The second argument indicates whether the document is attached to the signature:
        //     true:  the file is attached (as is the case here)
        //     false: the file is detached

        CMSSignedData signedData = signGen.generate(content, true, "BC");
        byte[] signeddata = signedData.getEncoded();

        // Write the buffer to a file.

        if (sod.equals("")) {
            // signeddata is a byte[]; toString() would print only the array reference,
            // so write the raw bytes to standard output instead
            System.out.write(signeddata);
            System.out.flush();
        } else {
            FileOutputStream envfos = new FileOutputStream(sod);
            envfos.write(signeddata);
            envfos.close();
        }

    } catch (OptionException oe) {
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp(NAME, getOptions());
        System.exit(-1);
    } catch (Exception e) {
        e.printStackTrace();
        return;
    }

}

From source file:ch.epfl.lsir.xin.test.UserAverageTest.java

/**
 * @param args
 */
public static void main(String[] args) throws Exception {
    // TODO Auto-generated method stub

    PrintWriter logger = new PrintWriter(".//results//UserAverage");
    PropertiesConfiguration config = new PropertiesConfiguration();
    config.setFile(new File(".//conf//UserAverage.properties"));
    try {
        config.load();
    } catch (ConfigurationException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }

    logger.println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + " Read rating data...");
    DataLoaderFile loader = new DataLoaderFile(".//data//MoveLens100k.txt");
    loader.readSimple();
    DataSetNumeric dataset = loader.getDataset();
    System.out.println("Number of ratings: " + dataset.getRatings().size() + " Number of users: "
            + dataset.getUserIDs().size() + " Number of items: " + dataset.getItemIDs().size());
    logger.println("Number of ratings: " + dataset.getRatings().size() + " Number of users: "
            + dataset.getUserIDs().size() + " Number of items: " + dataset.getItemIDs().size());
    logger.flush();

    double totalMAE = 0;
    double totalRMSE = 0;
    int F = 5;
    logger.println(F + "- folder cross validation.");
    ArrayList<ArrayList<NumericRating>> folders = new ArrayList<ArrayList<NumericRating>>();
    for (int i = 0; i < F; i++) {
        folders.add(new ArrayList<NumericRating>());
    }
    while (dataset.getRatings().size() > 0) {
        int index = new Random().nextInt(dataset.getRatings().size());
        int r = new Random().nextInt(F);
        folders.get(r).add(dataset.getRatings().get(index));
        dataset.getRatings().remove(index);
    }
    for (int folder = 1; folder <= F; folder++) {
        logger.println("Folder: " + folder);
        System.out.println("Folder: " + folder);
        ArrayList<NumericRating> trainRatings = new ArrayList<NumericRating>();
        ArrayList<NumericRating> testRatings = new ArrayList<NumericRating>();
        for (int i = 0; i < folders.size(); i++) {
            if (i == folder - 1)//test data
            {
                testRatings.addAll(folders.get(i));
            } else {//training data
                trainRatings.addAll(folders.get(i));
            }
        }

        //create rating matrix
        HashMap<String, Integer> userIDIndexMapping = new HashMap<String, Integer>();
        HashMap<String, Integer> itemIDIndexMapping = new HashMap<String, Integer>();
        for (int i = 0; i < dataset.getUserIDs().size(); i++) {
            userIDIndexMapping.put(dataset.getUserIDs().get(i), i);
        }
        for (int i = 0; i < dataset.getItemIDs().size(); i++) {
            itemIDIndexMapping.put(dataset.getItemIDs().get(i), i);
        }
        RatingMatrix trainRatingMatrix = new RatingMatrix(dataset.getUserIDs().size(),
                dataset.getItemIDs().size());
        for (int i = 0; i < trainRatings.size(); i++) {
            trainRatingMatrix.set(userIDIndexMapping.get(trainRatings.get(i).getUserID()),
                    itemIDIndexMapping.get(trainRatings.get(i).getItemID()), trainRatings.get(i).getValue());
        }
        trainRatingMatrix.calculateGlobalAverage();
        RatingMatrix testRatingMatrix = new RatingMatrix(dataset.getUserIDs().size(),
                dataset.getItemIDs().size());
        for (int i = 0; i < testRatings.size(); i++) {
            testRatingMatrix.set(userIDIndexMapping.get(testRatings.get(i).getUserID()),
                    itemIDIndexMapping.get(testRatings.get(i).getItemID()), testRatings.get(i).getValue());
        }
        System.out.println("Training: " + trainRatingMatrix.getTotalRatingNumber() + " vs Test: "
                + testRatingMatrix.getTotalRatingNumber());

        logger.println("Initialize a recommendation model based on user average method.");
        UserAverage algo = new UserAverage(trainRatingMatrix);
        algo.setLogger(logger);
        algo.build();
        algo.saveModel(".//localModels//" + config.getString("NAME"));
        logger.println("Save the model.");
        System.out.println(trainRatings.size() + " vs. " + testRatings.size());

        double RMSE = 0;
        double MAE = 0;
        int count = 0;
        for (int i = 0; i < testRatings.size(); i++) {
            NumericRating rating = testRatings.get(i);
            double prediction = algo.predict(userIDIndexMapping.get(rating.getUserID()),
                    itemIDIndexMapping.get(rating.getItemID()));
            if (Double.isNaN(prediction)) {
                System.out.println("no prediction");
                continue;
            }
            MAE = MAE + Math.abs(rating.getValue() - prediction);
            RMSE = RMSE + Math.pow((rating.getValue() - prediction), 2);
            count++;
        }
        MAE = MAE / count;
        RMSE = Math.sqrt(RMSE / count);

        logger.println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + " MAE: " + MAE
                + " RMSE: " + RMSE);
        logger.flush();
        totalMAE = totalMAE + MAE;
        totalRMSE = totalRMSE + RMSE;
    }

    System.out.println("MAE: " + totalMAE / F + " RMSE: " + totalRMSE / F);
    logger.println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + " Final results: MAE: "
            + totalMAE / F + " RMSE: " + totalRMSE / F);
    logger.flush();
    logger.close();
    //MAE: 0.8353035962363073 RMSE: 1.0422971886952053 (MovieLens 100k)
}

From source file:fitmon.DietAPI.java

public static void main(String[] args) throws InvalidKeyException, NoSuchAlgorithmException,
        ParserConfigurationException, SAXException, IOException {
    DietAPI obj = new DietAPI();
    ArrayList<Food> foodList = new ArrayList<Food>();
    ArrayList<ArrayList<Food>> listOfFoodList = obj.searchFood("apple");
    for (int i = 0; i < listOfFoodList.size(); i++) {
        for (int j = 0; j < listOfFoodList.get(i).size(); j++) {
            foodList.add(listOfFoodList.get(i).get(j));
        }
    }

}

From source file:de.prozesskraft.pkraft.Createmap.java

public static void main(String[] args) throws org.apache.commons.cli.ParseException, IOException {

    //      try
    //      {
    //         if (args.length != 3)
    //         {
    //            System.out.println("Please specify processdefinition file (xml) and an outputfilename");
    //         }
    //         
    //      }
    //      catch (ArrayIndexOutOfBoundsException e)
    //      {
    //         System.out.println("***ArrayIndexOutOfBoundsException: Please specify processdefinition.xml, openoffice_template.od*, newfile_for_processdefinitions.odt\n" + e.toString());
    //      }

    /*----------------------------
      get options from ini-file
    ----------------------------*/
    File inifile = new java.io.File(
            WhereAmI.getInstallDirectoryAbsolutePath(Createmap.class) + "/" + "../etc/pkraft-createmap.ini");

    if (inifile.exists()) {
        try {
            ini = new Ini(inifile);
        } catch (InvalidFileFormatException e1) {
            // TODO Auto-generated catch block
            e1.printStackTrace();
        } catch (IOException e1) {
            // TODO Auto-generated catch block
            e1.printStackTrace();
        }
    } else {
        System.err.println("ini file does not exist: " + inifile.getAbsolutePath());
        System.exit(1);
    }

    /*----------------------------
      create boolean options
    ----------------------------*/
    Option ohelp = new Option("help", "print this message");

    /*----------------------------
      create argument options
    ----------------------------*/
    Option ooutput = OptionBuilder.withArgName("FILE").hasArg()
            .withDescription("[mandatory; default: process.dot] file for generated map.")
            //            .isRequired()
            .create("output");

    Option odefinition = OptionBuilder.withArgName("FILE").hasArg()
            .withDescription("[mandatory] process definition file.")
            //            .isRequired()
            .create("definition");

    /*----------------------------
      create options object
    ----------------------------*/
    Options options = new Options();

    options.addOption(ohelp);
    options.addOption(ooutput);
    options.addOption(odefinition);

    /*----------------------------
      create the parser
    ----------------------------*/
    CommandLineParser parser = new GnuParser();
    try {
        // parse the command line arguments
        commandline = parser.parse(options, args);

    } catch (Exception exp) {
        // oops, something went wrong
        System.err.println("Parsing failed. Reason: " + exp.getMessage());
        exiter();
    }

    /*----------------------------
      usage/help
    ----------------------------*/
    if (commandline.hasOption("help")) {
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp("createmap", options);
        System.out.println("");
        System.out.println("author: " + author + " | version: " + version + " | date: " + date);
        System.exit(0);
    }

    /*----------------------------
      check whether an invalid combination of parameters was given
    ----------------------------*/
    if (!(commandline.hasOption("definition"))) {
        System.err.println("option -definition is mandatory.");
        exiter();
    }

    if (!(commandline.hasOption("output"))) {
        System.err.println("option -output is mandatory.");
        exiter();
    }

    /*----------------------------
      check the license and abort if it is not valid
    ----------------------------*/

    // check for valid license
    ArrayList<String> allPortAtHost = new ArrayList<String>();
    allPortAtHost.add(ini.get("license-server", "license-server-1"));
    allPortAtHost.add(ini.get("license-server", "license-server-2"));
    allPortAtHost.add(ini.get("license-server", "license-server-3"));

    MyLicense lic = new MyLicense(allPortAtHost, "1", "user-edition", "0.1");

    // print the license log
    for (String actLine : (ArrayList<String>) lic.getLog()) {
        System.err.println(actLine);
    }

    // abort if the license is not valid
    if (!lic.isValid()) {
        System.exit(1);
    }

    /*----------------------------
      the actual business logic
    ----------------------------*/
    Process p1 = new Process();
    java.io.File output = new java.io.File(commandline.getOptionValue("output"));

    if (output.exists()) {
        System.err.println("warn: already exists: " + output.getCanonicalPath());
        exiter();
    }

    p1.setInfilexml(commandline.getOptionValue("definition"));
    System.err.println("info: reading process definition " + commandline.getOptionValue("definition"));

    // dummy process
    Process p2 = null;

    try {
        p2 = p1.readXml();
    } catch (JAXBException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
        System.err.println("error");
        exiter();
    }

    // generate the wrapper process
    ArrayList<String> dot = p2.getProcessAsDotGraph();

    // write out the dot file
    writeFile.writeFile(output, dot);
}

From source file:deck36.storm.plan9.php.HighFiveBadgeTopology.java

public static void main(String[] args) throws Exception {

    String env = null;

    if (args != null && args.length > 0) {
        env = args[0];
    }

    if (!"dev".equals(env))
        if (!"prod".equals(env)) {
            System.out.println("Usage: $0 (dev|prod)\n");
            System.exit(1);
        }

    // Topology config
    Config conf = new Config();

    // Load parameters and add them to the Config
    Map configMap = YamlLoader.loadYamlFromResource("config_" + env + ".yml");

    conf.putAll(configMap);

    log.info(JSONValue.toJSONString((conf)));

    // Set topology loglevel to DEBUG
    conf.put(Config.TOPOLOGY_DEBUG, JsonPath.read(conf, "$.deck36_storm.debug"));

    // Create Topology builder
    TopologyBuilder builder = new TopologyBuilder();

    // unless there is a special reason, start with a parallelism hint of 1
    // and multiple tasks; that way, you can scale dynamically later on.
    int parallelism_hint = JsonPath.read(conf, "$.deck36_storm.default_parallelism_hint");
    int num_tasks = JsonPath.read(conf, "$.deck36_storm.default_num_tasks");

    // Create Stream from RabbitMQ messages
    // bind new queue with name of the topology
    // to the main plan9 exchange (from properties config)
    // consuming only CBT-related events by using the routing key 'cbt.#'

    String badgeName = HighFiveBadgeTopology.class.getSimpleName();

    String rabbitQueueName = badgeName; // use topology class name as name for the queue
    String rabbitExchangeName = JsonPath.read(conf, "$.deck36_storm.HighFiveBolt.rabbitmq.exchange");
    String rabbitRoutingKey = JsonPath.read(conf, "$.deck36_storm.HighFiveBolt.rabbitmq.routing_key");

    // Get JSON deserialization scheme
    Scheme rabbitScheme = new SimpleJSONScheme();

    // Setup a Declarator to configure exchange/queue/routing key
    RabbitMQDeclarator rabbitDeclarator = new RabbitMQDeclarator(rabbitExchangeName, rabbitQueueName,
            rabbitRoutingKey);

    // Create Configuration for the Spout
    ConnectionConfig connectionConfig = new ConnectionConfig(
            (String) JsonPath.read(conf, "$.deck36_storm.rabbitmq.host"),
            (Integer) JsonPath.read(conf, "$.deck36_storm.rabbitmq.port"),
            (String) JsonPath.read(conf, "$.deck36_storm.rabbitmq.user"),
            (String) JsonPath.read(conf, "$.deck36_storm.rabbitmq.pass"),
            (String) JsonPath.read(conf, "$.deck36_storm.rabbitmq.vhost"),
            (Integer) JsonPath.read(conf, "$.deck36_storm.rabbitmq.heartbeat"));

    ConsumerConfig spoutConfig = new ConsumerConfigBuilder().connection(connectionConfig).queue(rabbitQueueName)
            .prefetch((Integer) JsonPath.read(conf, "$.deck36_storm.rabbitmq.prefetch")).requeueOnFail()
            .build();

    // add global parameters to topology config - the RabbitMQSpout will read them from there
    conf.putAll(spoutConfig.asMap());

    // For production, set the spout pending value to the same value as the RabbitMQ pre-fetch
    // see: https://github.com/ppat/storm-rabbitmq/blob/master/README.md
    if ("prod".equals(env)) {
        conf.put(Config.TOPOLOGY_MAX_SPOUT_PENDING,
                (Integer) JsonPath.read(conf, "$.deck36_storm.rabbitmq.prefetch"));
    }

    // Add RabbitMQ spout to topology
    builder.setSpout("incoming", new RabbitMQSpout(rabbitScheme, rabbitDeclarator), parallelism_hint)
            .setNumTasks((Integer) JsonPath.read(conf, "$.deck36_storm.rabbitmq.spout_tasks"));

    // construct command to invoke the external bolt implementation
    ArrayList<String> command = new ArrayList<String>(15);

    // Add main execution program (php, hhvm, zend, ..) and parameters
    command.add((String) JsonPath.read(conf, "$.deck36_storm.php.executor"));
    command.addAll((List<String>) JsonPath.read(conf, "$.deck36_storm.php.executor_params"));

    // Add main command to be executed (app/console, the phar file, etc.) and global context parameters (environment etc.)
    command.add((String) JsonPath.read(conf, "$.deck36_storm.php.main"));
    command.addAll((List<String>) JsonPath.read(conf, "$.deck36_storm.php.main_params"));

    // Add main route to be invoked and its parameters
    command.add((String) JsonPath.read(conf, "$.deck36_storm.HighFiveBolt.main"));
    List boltParams = (List<String>) JsonPath.read(conf, "$.deck36_storm.HighFiveBolt.params");
    if (boltParams != null)
        command.addAll(boltParams);

    // Log the final command
    log.info("Command to start bolt for HighFive badge: " + Arrays.toString(command.toArray()));

    // Add constructed external bolt command to topology using MultilangAdapterBolt
    builder.setBolt("badge", new MultilangAdapterBolt(command, "badge"), parallelism_hint)
            .setNumTasks(num_tasks).shuffleGrouping("incoming");

    builder.setBolt("rabbitmq_router", new Plan9RabbitMQRouterBolt(
            (String) JsonPath.read(conf, "$.deck36_storm.HighFiveBolt.rabbitmq.target_exchange"), "HighFive" // RabbitMQ routing key
    ), parallelism_hint).setNumTasks(num_tasks).shuffleGrouping("badge");

    builder.setBolt("rabbitmq_producer", new Plan9RabbitMQPushBolt(), parallelism_hint).setNumTasks(num_tasks)
            .shuffleGrouping("rabbitmq_router");

    if ("dev".equals(env)) {
        LocalCluster cluster = new LocalCluster();
        cluster.submitTopology(badgeName + System.currentTimeMillis(), conf, builder.createTopology());
        Thread.sleep(2000000);
    }

    if ("prod".equals(env)) {
        StormSubmitter.submitTopology(badgeName + "-" + System.currentTimeMillis(), conf,
                builder.createTopology());
    }

}

From source file:ch.epfl.lsir.xin.test.GlobalMeanTest.java

/**
 * @param args
 */
public static void main(String[] args) throws Exception {
    // TODO Auto-generated method stub

    PrintWriter logger = new PrintWriter(".//results//GlobalMean");

    PropertiesConfiguration config = new PropertiesConfiguration();
    config.setFile(new File("conf//GlobalMean.properties"));
    try {
        config.load();
    } catch (ConfigurationException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }

    logger.println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + " Read rating data...");
    DataLoaderFile loader = new DataLoaderFile(".//data//MoveLens100k.txt");
    loader.readSimple();
    DataSetNumeric dataset = loader.getDataset();
    System.out.println("Number of ratings: " + dataset.getRatings().size() + " Number of users: "
            + dataset.getUserIDs().size() + " Number of items: " + dataset.getItemIDs().size());
    logger.println("Number of ratings: " + dataset.getRatings().size() + ", Number of users: "
            + dataset.getUserIDs().size() + ", Number of items: " + dataset.getItemIDs().size());

    double totalMAE = 0;
    double totalRMSE = 0;
    int F = 5;
    logger.println(F + "- folder cross validation.");
    logger.flush();
    ArrayList<ArrayList<NumericRating>> folders = new ArrayList<ArrayList<NumericRating>>();
    for (int i = 0; i < F; i++) {
        folders.add(new ArrayList<NumericRating>());
    }
    while (dataset.getRatings().size() > 0) {
        int index = new Random().nextInt(dataset.getRatings().size());
        int r = new Random().nextInt(F);
        folders.get(r).add(dataset.getRatings().get(index));
        dataset.getRatings().remove(index);
    }
    for (int folder = 1; folder <= F; folder++) {
        System.out.println("Folder: " + folder);
        logger.println("Folder: " + folder);
        ArrayList<NumericRating> trainRatings = new ArrayList<NumericRating>();
        ArrayList<NumericRating> testRatings = new ArrayList<NumericRating>();
        for (int i = 0; i < folders.size(); i++) {
            if (i == folder - 1)//test data
            {
                testRatings.addAll(folders.get(i));
            } else {//training data
                trainRatings.addAll(folders.get(i));
            }
        }

        //create rating matrix
        HashMap<String, Integer> userIDIndexMapping = new HashMap<String, Integer>();
        HashMap<String, Integer> itemIDIndexMapping = new HashMap<String, Integer>();
        for (int i = 0; i < dataset.getUserIDs().size(); i++) {
            userIDIndexMapping.put(dataset.getUserIDs().get(i), i);
        }
        for (int i = 0; i < dataset.getItemIDs().size(); i++) {
            itemIDIndexMapping.put(dataset.getItemIDs().get(i), i);
        }
        RatingMatrix trainRatingMatrix = new RatingMatrix(dataset.getUserIDs().size(),
                dataset.getItemIDs().size());
        for (int i = 0; i < trainRatings.size(); i++) {
            trainRatingMatrix.set(userIDIndexMapping.get(trainRatings.get(i).getUserID()),
                    itemIDIndexMapping.get(trainRatings.get(i).getItemID()), trainRatings.get(i).getValue());
        }
        RatingMatrix testRatingMatrix = new RatingMatrix(dataset.getUserIDs().size(),
                dataset.getItemIDs().size());
        for (int i = 0; i < testRatings.size(); i++) {
            testRatingMatrix.set(userIDIndexMapping.get(testRatings.get(i).getUserID()),
                    itemIDIndexMapping.get(testRatings.get(i).getItemID()), testRatings.get(i).getValue());
        }
        System.out.println("Training: " + trainRatingMatrix.getTotalRatingNumber() + " vs Test: "
                + testRatingMatrix.getTotalRatingNumber());

        logger.println("Initialize a recommendation model based on global average method.");
        GlobalAverage algo = new GlobalAverage(trainRatingMatrix);
        algo.setLogger(logger);
        algo.build();
        algo.saveModel(".//localModels//" + config.getString("NAME"));
        logger.println("Save the model.");
        logger.flush();

        System.out.println(trainRatings.size() + " vs. " + testRatings.size());

        double RMSE = 0;
        double MAE = 0;
        int count = 0;
        for (int i = 0; i < testRatings.size(); i++) {
            NumericRating rating = testRatings.get(i);
            double prediction = algo.predict(rating.getUserID(), rating.getItemID());
            if (Double.isNaN(prediction)) {
                System.out.println("no prediction");
                continue;
            }
            MAE = MAE + Math.abs(rating.getValue() - prediction);
            RMSE = RMSE + Math.pow((rating.getValue() - prediction), 2);
            count++;
        }
        MAE = MAE / count;
        RMSE = Math.sqrt(RMSE / count);

        //         System.out.println("MAE: " + MAE + " RMSE: " + RMSE);
        logger.println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + " MAE: " + MAE
                + " RMSE: " + RMSE);
        logger.flush();
        totalMAE = totalMAE + MAE;
        totalRMSE = totalRMSE + RMSE;
    }

    System.out.println("MAE: " + totalMAE / F + " RMSE: " + totalRMSE / F);
    logger.println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + " Final results: MAE: "
            + totalMAE / F + " RMSE: " + totalRMSE / F);
    logger.flush();
    logger.close();
    //MAE: 0.9338607074893257 RMSE: 1.1170971131112037 (MovieLens1M)
    //MAE: 0.9446876509332618 RMSE: 1.1256517870920375 (MovieLens100K)

}

From source file:ch.epfl.lsir.xin.test.ItemAverageTest.java

/**
 * @param args
 */
public static void main(String[] args) throws Exception {
    // TODO Auto-generated method stub

    PrintWriter logger = new PrintWriter(".//results//ItemAverage");
    PropertiesConfiguration config = new PropertiesConfiguration();
    config.setFile(new File(".//conf//ItemAverage.properties"));
    try {
        config.load();
    } catch (ConfigurationException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }

    logger.println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + " Read rating data...");
    DataLoaderFile loader = new DataLoaderFile(".//data//MoveLens100k.txt");
    loader.readSimple();
    DataSetNumeric dataset = loader.getDataset();
    System.out.println("Number of ratings: " + dataset.getRatings().size() + " Number of users: "
            + dataset.getUserIDs().size() + " Number of items: " + dataset.getItemIDs().size());
    logger.println("Number of ratings: " + dataset.getRatings().size() + ", Number of users: "
            + dataset.getUserIDs().size() + ", Number of items: " + dataset.getItemIDs().size());
    logger.flush();

    double totalMAE = 0;
    double totalRMSE = 0;
    int F = 5;
    logger.println(F + "- folder cross validation.");
    ArrayList<ArrayList<NumericRating>> folders = new ArrayList<ArrayList<NumericRating>>();
    for (int i = 0; i < F; i++) {
        folders.add(new ArrayList<NumericRating>());
    }
    while (dataset.getRatings().size() > 0) {
        int index = new Random().nextInt(dataset.getRatings().size());
        int r = new Random().nextInt(F);
        folders.get(r).add(dataset.getRatings().get(index));
        dataset.getRatings().remove(index);
    }
    for (int folder = 1; folder <= F; folder++) {
        logger.println("Folder: " + folder);
        logger.flush();
        System.out.println("Folder: " + folder);
        ArrayList<NumericRating> trainRatings = new ArrayList<NumericRating>();
        ArrayList<NumericRating> testRatings = new ArrayList<NumericRating>();
        for (int i = 0; i < folders.size(); i++) {
            if (i == folder - 1)//test data
            {
                testRatings.addAll(folders.get(i));
            } else {//training data
                trainRatings.addAll(folders.get(i));
            }
        }

        //create rating matrix
        HashMap<String, Integer> userIDIndexMapping = new HashMap<String, Integer>();
        HashMap<String, Integer> itemIDIndexMapping = new HashMap<String, Integer>();
        for (int i = 0; i < dataset.getUserIDs().size(); i++) {
            userIDIndexMapping.put(dataset.getUserIDs().get(i), i);
        }
        for (int i = 0; i < dataset.getItemIDs().size(); i++) {
            itemIDIndexMapping.put(dataset.getItemIDs().get(i), i);
        }
        RatingMatrix trainRatingMatrix = new RatingMatrix(dataset.getUserIDs().size(),
                dataset.getItemIDs().size());
        for (int i = 0; i < trainRatings.size(); i++) {
            trainRatingMatrix.set(userIDIndexMapping.get(trainRatings.get(i).getUserID()),
                    itemIDIndexMapping.get(trainRatings.get(i).getItemID()), trainRatings.get(i).getValue());
        }
        trainRatingMatrix.calculateGlobalAverage();
        RatingMatrix testRatingMatrix = new RatingMatrix(dataset.getUserIDs().size(),
                dataset.getItemIDs().size());
        for (int i = 0; i < testRatings.size(); i++) {
            testRatingMatrix.set(userIDIndexMapping.get(testRatings.get(i).getUserID()),
                    itemIDIndexMapping.get(testRatings.get(i).getItemID()), testRatings.get(i).getValue());
        }
        System.out.println("Training: " + trainRatingMatrix.getTotalRatingNumber() + " vs Test: "
                + testRatingMatrix.getTotalRatingNumber());

        logger.println("Initialize a recommendation model based on item average method.");
        ItemAverage algo = new ItemAverage(trainRatingMatrix);
        algo.setLogger(logger);
        algo.build();
        algo.saveModel(".//localModels//" + config.getString("NAME"));
        logger.println("Save the model.");
        logger.flush();
        System.out.println(trainRatings.size() + " vs. " + testRatings.size());

        double RMSE = 0;
        double MAE = 0;
        int count = 0;
        for (int i = 0; i < testRatings.size(); i++) {
            NumericRating rating = testRatings.get(i);
            double prediction = algo.predict(userIDIndexMapping.get(rating.getUserID()),
                    itemIDIndexMapping.get(rating.getItemID()));
            if (Double.isNaN(prediction)) {
                System.out.println("no prediction");
                continue;
            }
            MAE = MAE + Math.abs(rating.getValue() - prediction);
            RMSE = RMSE + Math.pow((rating.getValue() - prediction), 2);
            count++;
        }
        MAE = MAE / count;
        RMSE = Math.sqrt(RMSE / count);

        logger.println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + " MAE: " + MAE
                + " RMSE: " + RMSE);
        logger.flush();
        //         System.out.println("MAE: " + MAE + " RMSE: " + RMSE);
        totalMAE = totalMAE + MAE;
        totalRMSE = totalRMSE + RMSE;
    }

    System.out.println("MAE: " + totalMAE / F + " RMSE: " + totalRMSE / F);
    logger.println(new SimpleDateFormat("yyyy-MM-dd HH:mm:ss").format(new Date()) + " Final results: MAE: "
            + totalMAE / F + " RMSE: " + totalRMSE / F);
    logger.flush();
    //MAE: 0.8173633324758338 RMSE: 1.0251973503888645 (MovieLens 100K)

}