List of usage examples for the java.util.LinkedHashMap constructors
public LinkedHashMap(Map<? extends K, ? extends V> m)
public LinkedHashMap(int initialCapacity)
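Before the collected examples, a minimal orientation sketch of the Map-based constructor: it copies all entries from the given map, and the resulting LinkedHashMap iterates them in the order the source map's iterator returned them. The class and variable names below are illustrative only and do not come from any of the source files listed on this page:

import java.util.LinkedHashMap;
import java.util.Map;
import java.util.TreeMap;

public class CopyConstructorSketch {
    public static void main(String[] args) {
        Map<String, Integer> source = new TreeMap<>(); // TreeMap iterates keys in sorted order
        source.put("banana", 2);
        source.put("apple", 1);
        source.put("cherry", 3);

        // The copy constructor preserves the source map's iteration order,
        // so this prints {apple=1, banana=2, cherry=3}.
        LinkedHashMap<String, Integer> copy = new LinkedHashMap<>(source);
        System.out.println(copy);
    }
}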
From source file:Main.java
public static void main(String[] args) { LinkedHashMap<String, Integer> map = new LinkedHashMap<String, Integer>(5); // add some values in the map map.put("One", 1); map.put("Two", 2); map.put("Three", 3); System.out.println(map);//from w ww.j a va2 s . c o m // get key "Three" System.out.println(map.get("Three")); // get key "Five" System.out.println(map.get("Five")); }
From source file:Main.java
public static void main(String[] args) { LinkedHashMap<String, Integer> map = new LinkedHashMap<String, Integer>(5); // add some values in the map map.put("One", 1); map.put("Two", 2); map.put("Three", 3); System.out.println(map);// w w w. ja v a2 s . c om // clear the map map.clear(); System.out.println(map); }
From source file:Main.java
public static void main(String[] args) {
    Map<String, Integer> map = new HashMap<String, Integer>();
    // add some values to the map
    map.put("One", 1);
    map.put("Two", 2);
    map.put("Three", 3);
    // copy the entries into a LinkedHashMap
    LinkedHashMap<String, Integer> linkMap = new LinkedHashMap<String, Integer>(map);
    System.out.println(linkMap);
}
From source file:com.ikanow.aleph2.enrichment.utils.services.JsScriptEngineTestService.java
/** Entry point
 * @param args
 * @throws IOException
 */
public static void main(String[] args) throws IOException {
    if (args.length < 3) {
        System.out.println("ARGS: <script-file> <input-file> <output-prefix> [{[len: <LEN>], [group: <GROUP>]}]");
    }

    // STEP 1: load script file
    final String user_script = Files.toString(new File(args[0]), Charsets.UTF_8);

    // STEP 2: get a stream for the JSON file
    final InputStream io_stream = new FileInputStream(new File(args[1]));

    // STEP 3: set up control if applicable
    Optional<JsonNode> json = Optional.of("").filter(__ -> args.length > 3).map(__ -> args[3])
            .map(Lambdas.wrap_u(j -> _mapper.readTree(j)));

    // STEP 4: set up the various objects
    final DataBucketBean bucket = Mockito.mock(DataBucketBean.class);

    final JsScriptEngineService service_under_test = new JsScriptEngineService();

    final LinkedList<ObjectNode> emitted = new LinkedList<>();
    final LinkedList<JsonNode> grouped = new LinkedList<>();
    final LinkedList<JsonNode> externally_emitted = new LinkedList<>();

    final IEnrichmentModuleContext context = Mockito.mock(IEnrichmentModuleContext.class, new Answer<Void>() {
        @SuppressWarnings("unchecked")
        public Void answer(InvocationOnMock invocation) {
            try {
                Object[] args = invocation.getArguments();
                if (invocation.getMethod().getName().equals("emitMutableObject")) {
                    final Optional<JsonNode> grouping = (Optional<JsonNode>) args[3];
                    if (grouping.isPresent()) {
                        grouped.add(grouping.get());
                    }
                    emitted.add((ObjectNode) args[1]);
                } else if (invocation.getMethod().getName().equals("externalEmit")) {
                    final DataBucketBean to = (DataBucketBean) args[0];
                    final Either<JsonNode, Map<String, Object>> out = (Either<JsonNode, Map<String, Object>>) args[1];
                    externally_emitted.add(((ObjectNode) out.left().value()).put("__a2_bucket", to.full_name()));
                }
            } catch (Exception e) {
                e.printStackTrace();
            }
            return null;
        }
    });

    final EnrichmentControlMetadataBean control = BeanTemplateUtils.build(EnrichmentControlMetadataBean.class)
            .with(EnrichmentControlMetadataBean::config,
                    new LinkedHashMap<String, Object>(
                            ImmutableMap.<String, Object>builder().put("script", user_script).build()))
            .done().get();

    service_under_test.onStageInitialize(context, bucket, control,
            Tuples._2T(ProcessingStage.batch, ProcessingStage.grouping), Optional.empty());

    final BeJsonParser json_parser = new BeJsonParser();

    // Run the file through
    final Stream<Tuple2<Long, IBatchRecord>> json_stream = StreamUtils
            .takeUntil(Stream.generate(() -> json_parser.getNextRecord(io_stream)), i -> null == i)
            .map(j -> Tuples._2T(0L, new BatchRecord(j)));

    service_under_test.onObjectBatch(json_stream, json.map(j -> j.get("len")).map(j -> (int) j.asLong(0L)),
            json.map(j -> j.get("group")));

    System.out.println("RESULTS: ");
    System.out.println("emitted: " + emitted.size());
    System.out.println("grouped: " + grouped.size());
    System.out.println("externally emitted: " + externally_emitted.size());
    Files.write(emitted.stream().map(j -> j.toString()).collect(Collectors.joining(";")),
            new File(args[2] + "emit.json"), Charsets.UTF_8);
    Files.write(grouped.stream().map(j -> j.toString()).collect(Collectors.joining(";")),
            new File(args[2] + "group.json"), Charsets.UTF_8);
    Files.write(externally_emitted.stream().map(j -> j.toString()).collect(Collectors.joining(";")),
            new File(args[2] + "external_emit.json"), Charsets.UTF_8);
}
From source file:act.installer.pubchem.PubchemSynonymFinder.java
public static void main(String[] args) throws Exception {
    org.apache.commons.cli.Options opts = new org.apache.commons.cli.Options();
    for (Option.Builder b : OPTION_BUILDERS) {
        opts.addOption(b.build());
    }

    CommandLine cl = null;
    try {
        CommandLineParser parser = new DefaultParser();
        cl = parser.parse(opts, args);
    } catch (ParseException e) {
        System.err.format("Argument parsing failed: %s\n", e.getMessage());
        HELP_FORMATTER.printHelp(PubchemSynonymFinder.class.getCanonicalName(), HELP_MESSAGE, opts, null, true);
        System.exit(1);
    }

    if (cl.hasOption("help")) {
        HELP_FORMATTER.printHelp(PubchemSynonymFinder.class.getCanonicalName(), HELP_MESSAGE, opts, null, true);
        return;
    }

    File rocksDBFile = new File(cl.getOptionValue(OPTION_INDEX_PATH));
    if (!rocksDBFile.isDirectory()) {
        System.err.format("Index directory does not exist or is not a directory at '%s'",
                rocksDBFile.getAbsolutePath());
        HELP_FORMATTER.printHelp(PubchemSynonymFinder.class.getCanonicalName(), HELP_MESSAGE, opts, null, true);
        System.exit(1);
    }

    List<String> compoundIds = null;
    if (cl.hasOption(OPTION_PUBCHEM_COMPOUND_ID)) {
        compoundIds = Collections.singletonList(cl.getOptionValue(OPTION_PUBCHEM_COMPOUND_ID));
    } else if (cl.hasOption(OPTION_IDS_FILE)) {
        File idsFile = new File(cl.getOptionValue(OPTION_IDS_FILE));
        if (!idsFile.exists()) {
            System.err.format("Cannot find Pubchem CIDs file at %s", idsFile.getAbsolutePath());
            HELP_FORMATTER.printHelp(PubchemSynonymFinder.class.getCanonicalName(), HELP_MESSAGE, opts, null, true);
            System.exit(1);
        }

        compoundIds = getCIDsFromFile(idsFile);

        if (compoundIds.size() == 0) {
            System.err.format("Found zero Pubchem CIDs to process in file at '%s', exiting",
                    idsFile.getAbsolutePath());
            HELP_FORMATTER.printHelp(PubchemSynonymFinder.class.getCanonicalName(), HELP_MESSAGE, opts, null, true);
            System.exit(1);
        }
    } else {
        System.err.format("Must specify one of '%s' or '%s'; index is too big to print all synonyms.",
                OPTION_PUBCHEM_COMPOUND_ID, OPTION_IDS_FILE);
        HELP_FORMATTER.printHelp(PubchemSynonymFinder.class.getCanonicalName(), HELP_MESSAGE, opts, null, true);
        System.exit(1);
    }

    // Run a quick check to warn users of malformed ids.
    compoundIds.forEach(x -> {
        if (!PC_CID_PATTERN.matcher(x).matches()) { // Use matches() for complete matching.
            LOGGER.warn("Specified compound id does not match expected format: %s", x);
        }
    });

    LOGGER.info("Opening DB and searching for %d Pubchem CIDs", compoundIds.size());
    Pair<RocksDB, Map<PubchemTTLMerger.COLUMN_FAMILIES, ColumnFamilyHandle>> dbAndHandles = null;
    Map<String, PubchemSynonyms> results = new LinkedHashMap<>(compoundIds.size());
    try {
        dbAndHandles = PubchemTTLMerger.openExistingRocksDB(rocksDBFile);
        RocksDB db = dbAndHandles.getLeft();
        ColumnFamilyHandle cidToSynonymsCfh = dbAndHandles.getRight()
                .get(PubchemTTLMerger.COLUMN_FAMILIES.CID_TO_SYNONYMS);

        for (String cid : compoundIds) {
            PubchemSynonyms synonyms = null;
            byte[] val = db.get(cidToSynonymsCfh, cid.getBytes(UTF8));
            if (val != null) {
                ObjectInputStream oi = new ObjectInputStream(new ByteArrayInputStream(val));
                // We're relying on our use of a one-value-type per index model here so we can skip the instanceof check.
                synonyms = (PubchemSynonyms) oi.readObject();
            } else {
                LOGGER.warn("No synonyms available for compound id '%s'", cid);
            }
            results.put(cid, synonyms);
        }
    } finally {
        if (dbAndHandles != null) {
            dbAndHandles.getLeft().close();
        }
    }

    try (OutputStream outputStream = cl.hasOption(OPTION_OUTPUT)
            ? new FileOutputStream(cl.getOptionValue(OPTION_OUTPUT))
            : System.out) {
        OBJECT_MAPPER.writerWithDefaultPrettyPrinter().writeValue(outputStream, results);
        new OutputStreamWriter(outputStream).append('\n');
    }
    LOGGER.info("Done searching for Pubchem synonyms");
}
From source file:Main.java
public static <K, V> Map<K, V> map(Entry<K, V>... entries) {
    Map<K, V> map = new LinkedHashMap<>(entries.length);
    for (Entry<K, V> entry : entries) {
        map.put(entry.getKey(), entry.getValue());
    }
    return map;
}
From source file:Main.java
public static <K, V> LinkedHashMap<K, V> createLinkedHashMap(int initialCapacity) {
    return new LinkedHashMap<>(initialCapacity);
}
From source file:Main.java
public static <T> List<T> mapToList(Map<String, T> map, String... excludeKey) {
    Map<String, T> tmpMap = new LinkedHashMap<String, T>(map);
    for (String exclude : excludeKey) {
        tmpMap.remove(exclude);
    }
    List<T> list = new ArrayList<>();
    for (Map.Entry<String, T> entry : tmpMap.entrySet()) {
        list.add(entry.getValue());
    }
    return list;
}
From source file:Main.java
public static <K, V> LinkedHashMap<K, V> createLinkedHashMap(int initialCapacity) {
    return new LinkedHashMap<K, V>(initialCapacity);
}
From source file:Main.java
public static <K, V> Map<K, V> map(Collection<Entry<K, V>> entries) {
    Map<K, V> map = new LinkedHashMap<>(entries.size());
    for (Entry<K, V> entry : entries) {
        map.put(entry.getKey(), entry.getValue());
    }
    return map;
}