Example usage for java.util.Map.get()

List of usage examples for java.util.Map.get()

Introduction

On this page you can find usage examples for java.util.Map.get().

Prototype

V get(Object key);
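
Note that the parameter type is Object rather than the map's key type K, so a lookup with a key of the wrong type compiles and simply finds no mapping in hash-based implementations such as HashMap (a sorted map such as TreeMap may instead throw ClassCastException). A minimal sketch, with invented names, illustrating this:

import java.util.HashMap;
import java.util.Map;

public class MapGetPrototypeSketch {
    public static void main(String[] args) {
        Map<String, Integer> counts = new HashMap<String, Integer>();
        counts.put("a", 1);

        // Compiles because get(Object) accepts any reference type;
        // an Integer key matches nothing in a Map<String, Integer>.
        System.out.println(counts.get(Integer.valueOf(1))); // null
        System.out.println(counts.get("a"));                // 1
    }
}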

Documentation

Returns the value to which the specified key is mapped, or null if this map contains no mapping for the key.
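
The null return is therefore ambiguous: get returns null both when the key is absent and when the key is explicitly mapped to null, so containsKey is needed to tell the two cases apart. A minimal sketch of this contract (the class name and keys are invented for illustration):

import java.util.HashMap;
import java.util.Map;

public class MapGetNullSketch {
    public static void main(String[] args) {
        Map<String, String> map = new HashMap<String, String>();
        map.put("present", "value");
        map.put("nullValue", null);

        System.out.println(map.get("present"));    // value
        System.out.println(map.get("missing"));    // null -- no mapping for the key
        System.out.println(map.get("nullValue"));  // null -- key is mapped to null

        // containsKey distinguishes "no mapping" from "mapped to null"
        System.out.println(map.containsKey("missing"));    // false
        System.out.println(map.containsKey("nullValue"));  // true

        // On Java 8+, getOrDefault returns the fallback only when there is no mapping
        System.out.println(map.getOrDefault("missing", "fallback"));    // fallback
        System.out.println(map.getOrDefault("nullValue", "fallback"));  // null
    }
}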

Usage

From source file:Main.java

public static void main(String[] args) {
    Map<Integer, String> grades = new HashMap<Integer, String>();

    grades.put(1, "A");
    grades.put(2, "B");
    grades.put(3, "C");
    grades.put(4, "D");
    grades.put(5, "E");

    String value = grades.get(1); // "A"

    List<String> dayNames = new ArrayList<String>();
    dayNames.add("Sunday");
    dayNames.add("Monday");
    dayNames.add("Tuesday");
    dayNames.add("Wednesday");

    String firstDay = dayNames.get(0); // "Sunday"
}

From source file:Counter.java

public static void main(String[] args) {
    // rand refers to a java.util.Random field defined elsewhere in this source file
    Map<Integer, Counter> hm = new HashMap<Integer, Counter>();
    for (int i = 0; i < 10000; i++) {
        // Produce a number between 0 and 19:
        Integer r = rand.nextInt(20);
        if (hm.containsKey(r))
            hm.get(r).i++;
        else
            hm.put(r, new Counter());
    }
    System.out.println(hm);
}

From source file:Main.java

public static final void main(String[] args) {
    Map<String, String> map = new HashMap<>();
    map.put("one", "a");
    System.out.println("Size = " + map.size());
    map.put(null, "b");
    System.out.println("Size = " + map.size());
    System.out.println("map.get(null) = " + map.get(null));
}

From source file:drpc.KMeansDrpcQuery.java

public static void main(final String[] args)
        throws IOException, TException, DRPCExecutionException, DecoderException, ClassNotFoundException {
    if (args.length < 3) {
        System.err.println("Where are the arguments? args -- DrpcServer DrpcFunctionName folder");
        return;
    }

    final DRPCClient client = new DRPCClient(args[0], 3772, 1000000 /*timeout*/);
    final Queue<String> featureFiles = new ArrayDeque<String>();
    SpoutUtils.listFilesForFolder(new File(args[2]), featureFiles);

    Scanner scanner = new Scanner(featureFiles.peek());
    int i = 0;
    while (scanner.hasNextLine() && i++ < 10) {
        List<Map<String, List<Double>>> dict = SpoutUtils.pythonDictToJava(scanner.nextLine());
        for (Map<String, List<Double>> map : dict) {
            i++;

            Double[] features = map.get("chi2").toArray(new Double[0]);
            Double[] moreFeatures = map.get("chi1").toArray(new Double[0]);
            Double[] rmsd = map.get("rmsd").toArray(new Double[0]);
            Double[] both = (Double[]) ArrayUtils.addAll(features, moreFeatures);
            String parameters = serializeFeatureVector(ArrayUtils.toPrimitive(both));

            String centroidsSerialized = runQuery(args[1], parameters, client);

            Gson gson = new Gson();
            Object[] deserialized = gson.fromJson(centroidsSerialized, Object[].class);

            for (Object obj : deserialized) {
                // result we get is of the form List<result>
                List l = ((List) obj);
                centroidsSerialized = (String) l.get(0);

                String[] centroidSerializedArrays = centroidsSerialized
                        .split(MlStormClustererQuery.KmeansClustererQuery.CENTROID_DELIM);
                List<double[]> centroids = new ArrayList<double[]>();
                for (String centroid : centroidSerializedArrays) {
                    centroids.add(MlStormFeatureVectorUtils.deserializeToFeatureVector(centroid));
                }

                double[] rmsdPrimitive = ArrayUtils.toPrimitive(both);
                double[] rmsdKmeans = new double[centroids.size()];

                for (int k = 0; k < centroids.size(); k++) {
                    System.out.println("centroid        -- " + Arrays.toString(centroids.get(k)));
                    double[] centroid = centroids.get(k);
                    rmsdKmeans[k] = computeRootMeanSquare(centroid);
                }

                System.out.println("1 rmsd original -- " + Arrays.toString(rmsd));
                System.out.println("2 rmsd k- Means -- " + Arrays.toString(rmsdKmeans));
                System.out.println();
            }

        }
    }
    client.close();
}

From source file:BeanUtilsExampleV3.java

public static void main(String[] args) throws Exception {
    BeanUtilsExampleV3 diff = new BeanUtilsExampleV3();
    Actor actor = diff.prepareData();

    Map describedData = BeanUtils.describe(actor);

    // check the map
    System.err.println(describedData.get("name"));

    // change this value
    describedData.put("name", "Robert Redford");

    // create a new Actor Bean
    Actor newActor = new Actor();
    BeanUtils.populate(newActor, describedData);

    System.err.println(BeanUtils.getProperty(newActor, "name"));

}

From source file:Main.java

public static void main(String[] args) {

    String sentence = "is this a sentence this is a test";
    String[] myStringArray = sentence.split("\\s"); // split the sentence on whitespace

    Map<String, Integer> wordOccurrences = new HashMap<String, Integer>(myStringArray.length);

    for (String word : myStringArray) {
        if (wordOccurrences.containsKey(word)) {
            wordOccurrences.put(word, wordOccurrences.get(word) + 1);
        } else {
            wordOccurrences.put(word, 1);
        }
    }
    for (String word : wordOccurrences.keySet()) {
        if (wordOccurrences.get(word) > 1) {
            System.out.println("1b. - Tokens that occurs more than once: " + word + "\n");
        }
    }
}

From source file:com.github.jramos.snowplow.RedshiftSink.java

public static void main(String[] args) {
    try {
        // make sure the PostgreSQL driver is on the classpath
        Class.forName(POSTGRES_DRIVER);
    } catch (ClassNotFoundException e) {
        throw new IllegalStateException("Could not load PostgreSQL driver from classpath");
    }

    Map<String, String> argMap = validateArgs(args);
    if (argMap != null) {
        String configFile = argMap.get(CMD_ARG_CONFIG_FILE);
        KinesisConnectorExecutorBase<SnowplowEventModel, byte[]> redshiftExecutor = new RedshiftSink(
                configFile);
        redshiftExecutor.run();
    }
}

From source file:cat.tv3.eng.rec.recomana.lupa.visualization.ClustersToJson.java

public static void main(String[] args) throws IOException {

    String host = args[0];
    int port = Integer.parseInt(args[1]);
    Jedis jedis = new Jedis(host, port, 20000);

    // Cluster to binary-tree visualization
    Map<String, String> attr_cluster = jedis.hgetAll("ClusterBinaryTree-Arrel");
    String cluster_name = attr_cluster.get("cluster_ids_name");

    JSONObject cluster;
    if (!cluster_name.equals("cluster_splited")) {
        cluster = new JSONObject();
        cluster.put("name", "arrel");
    } else {
        String id_left_centroid = attr_cluster.get("id_left_centroid");
        String id_right_centroid = attr_cluster.get("id_right_centroid");

        String hash_left = attr_cluster.get("hash_left");
        String hash_right = attr_cluster.get("hash_right");

        cluster = new JSONObject();
        cluster.put("name", "arrel");
        cluster.put("children", hashToJSONArrayRepresentationBinaryTree(id_left_centroid, hash_left, jedis,
                id_right_centroid, hash_right));
    }
    jedis.disconnect();

    Writer out = new BufferedWriter(
            new OutputStreamWriter(new FileOutputStream("data_toVisualize/cluster.json"), "UTF-8"));
    try {
        out.write(cluster.toJSONString());
    } finally {
        out.close();
    }
}

From source file:common.ReverseWordsCount.java

public static void main(String[] args) throws IOException {
    List<String> readLines = FileUtils.readLines(new File("G:\\\\LTNMT\\LTNMT\\sougou\\sougou2500.txt"));
    Map<String, Integer> words = new HashMap<>();

    for (String line : readLines) {
        String[] split = line.split(" ");
        for (String word : split) {
            Integer count = words.get(word);
            if (count == null) {
                words.put(word, 1);
            } else {
                words.put(word, count + 1);
            }
        }
    }
    Set<Map.Entry<String, Integer>> entrySet = words.entrySet();
    List<Map.Entry<String, Integer>> reverseLists = new ArrayList<>(entrySet);
    Collections.sort(reverseLists, new Comparator<Map.Entry<String, Integer>>() {
        @Override
        public int compare(Map.Entry<String, Integer> o1, Map.Entry<String, Integer> o2) {
            return o2.getValue().compareTo(o1.getValue());
        }
    });
    PrintStream ps = new PrintStream("c:/reverseWords.txt");
    for (Map.Entry<String, Integer> teEntry : reverseLists) {
        ps.println(teEntry.getKey() + " " + teEntry.getValue());
    }
    ps.close();
}

From source file:com.doculibre.constellio.utils.SolrSchemaUtils.java

public static void main(String[] args) throws Exception {
    ConnectorType connectorType = new ConnectorType();
    connectorType.setName("mail");
    IndexSchema schema = getSchema(connectorType);
    Map<String, FieldType> fieldTypes = schema.getFieldTypes();
    for (String key : fieldTypes.keySet()) {
        FieldType fieldType = fieldTypes.get(key);
        System.out.print(key + ":");
        System.out.print(fieldType.getClass());
        System.out.print("\n");
        System.out.print(fieldType.getAnalyzer().getClass());
    }
}