List of usage examples for java.util.Map.values()
Collection<V> values();
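The returned collection is a live view backed by the map: changes to the map are reflected in the collection and vice versa. The view supports element removal (which removes the corresponding mapping from the map) but not add or addAll. Before the project samples below, here is a minimal self-contained sketch of these semantics (the class name and map contents are made up for illustration):

import java.util.Collection;
import java.util.HashMap;
import java.util.Map;

public class MapValuesDemo {
    public static void main(String[] args) {
        Map<String, Integer> counts = new HashMap<>();
        counts.put("a", 1);
        counts.put("b", 2);
        counts.put("c", 2);

        // values() returns a live view backed by the map, not a snapshot
        Collection<Integer> values = counts.values();
        System.out.println(values); // [1, 2, 2] (iteration order unspecified for HashMap)

        // removals from the map are visible through the view...
        counts.remove("a");
        System.out.println(values); // [2, 2]

        // ...and removals through the view drop the corresponding map entries
        values.removeIf(v -> v == 2);
        System.out.println(counts); // {}

        // the view does not support adding elements
        try {
            values.add(42);
        } catch (UnsupportedOperationException expected) {
            System.out.println("add() is unsupported on a values view");
        }
    }
}

The project samples that follow rely on these semantics in different ways, for example copying the view into an ArrayList before sorting it (StarTreeSerDe) or iterating it directly (AisReaders).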
From source file:org.training.fulfilmentprocess.test.PaymentIntegrationTest.java
@AfterClass
public static void removeProcessDefinitions() {
    LOG.debug("cleanup...");
    final ApplicationContext appCtx = Registry.getGlobalApplicationContext();
    assertTrue(
            "Application context of type " + appCtx.getClass() + " is not a subclass of "
                    + ConfigurableApplicationContext.class,
            appCtx instanceof ConfigurableApplicationContext);
    final ConfigurableApplicationContext applicationContext = (ConfigurableApplicationContext) appCtx;
    final ConfigurableListableBeanFactory beanFactory = applicationContext.getBeanFactory();
    assertTrue(
            "Bean Factory of type " + beanFactory.getClass() + " is not of type "
                    + BeanDefinitionRegistry.class,
            beanFactory instanceof BeanDefinitionRegistry);
    final XmlBeanDefinitionReader xmlReader = new XmlBeanDefinitionReader((BeanDefinitionRegistry) beanFactory);
    xmlReader.loadBeanDefinitions(new ClassPathResource(
            "/trainingfulfilmentprocess/test/trainingfulfilmentprocess-spring-testcleanup.xml"));

    // cleanup command factory
    final Map<String, CommandFactory> commandFactoryList = applicationContext
            .getBeansOfType(CommandFactory.class);
    commandFactoryList.remove("mockupCommandFactory");
    final DefaultCommandFactoryRegistryImpl commandFactoryReg = appCtx
            .getBean(DefaultCommandFactoryRegistryImpl.class);
    commandFactoryReg.setCommandFactoryList(commandFactoryList.values());

    // if (definitonFactory != null)
    // {
    //     TODO this test seems to let processes run after method completion - therefore we cannot
    //     remove definitions !!!
    //     definitonFactory.remove("testPlaceorder");
    //     definitonFactory.remove("testConsignmentFulfilmentSubprocess");
    // }
    processService.setTaskService(appCtx.getBean(DefaultTaskService.class));
    definitonFactory = null;
    processService = null;
}
From source file:com.vmware.photon.controller.common.dcp.ServiceHostUtils.java
public static <H extends ServiceHost> void deleteAllDocuments(H host, String referrer, long timeout,
        TimeUnit timeUnit) throws Throwable {
    QueryTask.Query selfLinkClause = new QueryTask.Query()
            .setTermPropertyName(ServiceDocument.FIELD_NAME_SELF_LINK)
            .setTermMatchValue("*")
            .setTermMatchType(QueryTask.QueryTerm.MatchType.WILDCARD);

    QueryTask.QuerySpecification querySpecification = new QueryTask.QuerySpecification();
    querySpecification.query.addBooleanClause(selfLinkClause);
    QueryTask queryTask = QueryTask.create(querySpecification).setDirect(true);

    NodeGroupBroadcastResponse queryResponse = ServiceHostUtils.sendBroadcastQueryAndWait(host, referrer,
            queryTask);

    Set<String> documentLinks = QueryTaskUtils.getBroadcastQueryResults(queryResponse);
    if (documentLinks == null || documentLinks.size() <= 0) {
        return;
    }

    CountDownLatch latch = new CountDownLatch(1);
    OperationJoin.JoinedCompletionHandler handler = new OperationJoin.JoinedCompletionHandler() {
        @Override
        public void handle(Map<Long, Operation> ops, Map<Long, Throwable> failures) {
            if (failures != null && !failures.isEmpty()) {
                for (Throwable e : failures.values()) {
                    logger.error("deleteAllDocuments failed", e);
                }
            }
            latch.countDown();
        }
    };

    Collection<Operation> deletes = new LinkedList<>();
    for (String documentLink : documentLinks) {
        Operation deleteOperation = Operation.createDelete(UriUtils.buildUri(host, documentLink))
                .setBody("{}")
                .setReferer(UriUtils.buildUri(host, referrer));
        deletes.add(deleteOperation);
    }

    OperationJoin join = OperationJoin.create(deletes);
    join.setCompletion(handler);
    join.sendWith(host);
    if (!latch.await(timeout, timeUnit)) {
        throw new TimeoutException(String.format(
                "Deletion of all documents timed out. Timeout:{%s}, TimeUnit:{%s}", timeout, timeUnit));
    }
}
From source file:com.qwarz.graph.process.GraphProcess.java
public static List<GraphNodeResult> request(GraphBase base, GraphRequest request) throws Exception {
    GraphProcessInterface graphProcess = getImplementation(base.data);
    List<GraphNodeResult> resultList = new ArrayList<GraphNodeResult>(request.getRowsOrDefault());

    // Execute the request
    graphProcess.request(base, request, resultList);

    Map<String, GraphNodeResult> nodeResultMap = new TreeMap<String, GraphNodeResult>();
    for (GraphNodeResult nodeResult : resultList) {
        nodeResult.node_id = nodeResult.node_id.intern();
        nodeResultMap.put(nodeResult.node_id, nodeResult);
    }

    // Retrieve the nodes from the database
    Map<String, GraphNode> graphNodeMap = graphProcess.getNodes(base, nodeResultMap.keySet());
    if (graphNodeMap != null) {
        for (GraphNodeResult graphNodeResult : nodeResultMap.values()) {
            GraphNode graphNode = graphNodeMap.get(graphNodeResult.node_id);
            if (graphNode != null) {
                graphNodeResult.edges = graphNode.edges;
                graphNodeResult.properties = graphNode.properties;
            }
        }
    }
    return resultList;
}
From source file:Main.java
/**
 * Constructs a new StartElement that merges the attributes and namespaces
 * found in the specified StartElement with the provided attributes. The
 * returned StartElement will contain all the attributes and namespaces of
 * the original, plus those defined in the map.
 *
 * @param tag The original StartElement
 * @param attrs An iterator of Attributes to add to the element.
 * @return A new StartElement that contains all the original attributes and
 *         namespaces, plus the provided attributes.
 */
public static StartElement mergeAttributes(StartElement tag, Iterator attrs, XMLEventFactory factory) {
    // create Attribute map
    Map attributes = new HashMap();

    // iterate through start tag's attributes
    for (Iterator i = tag.getAttributes(); i.hasNext();) {
        Attribute attr = (Attribute) i.next();
        attributes.put(attr.getName(), attr);
    }

    // iterate through new attributes
    while (attrs.hasNext()) {
        Attribute attr = (Attribute) attrs.next();
        attributes.put(attr.getName(), attr);
    }

    factory.setLocation(tag.getLocation());

    QName tagName = tag.getName();
    return factory.createStartElement(tagName.getPrefix(), tagName.getNamespaceURI(), tagName.getLocalPart(),
            attributes.values().iterator(), tag.getNamespaces(), tag.getNamespaceContext());
}
From source file:com.linkedin.pinot.core.startree.StarTreeSerDe.java
/**
 * Helper method that returns a list of children for the given node, sorted based on
 * the dimension value.
 *
 * @param node
 * @return A list of sorted child nodes (empty if no children).
 */
private static List<StarTreeIndexNode> getSortedChildren(StarTreeIndexNode node) {
    Map<Integer, StarTreeIndexNode> children = node.getChildren();
    if (children == null) {
        return Collections.emptyList();
    }

    List<StarTreeIndexNode> sortedChildren = new ArrayList<>();
    sortedChildren.addAll(children.values());

    Collections.sort(sortedChildren, new Comparator<StarTreeIndexNode>() {
        @Override
        public int compare(StarTreeIndexNode node1, StarTreeIndexNode node2) {
            // sort ascending by dimension value
            int v1 = node1.getDimensionValue();
            int v2 = node2.getDimensionValue();
            if (v1 < v2) {
                return -1;
            } else if (v1 > v2) {
                return 1;
            } else {
                return 0;
            }
        }
    });
    return sortedChildren;
}
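If the project targets Java 8 or later (an assumption; the code above is written in a pre-Java-8 style), the hand-written comparator can be replaced with a one-liner, given that getDimensionValue() returns an int as shown above:

sortedChildren.sort(Comparator.comparingInt(StarTreeIndexNode::getDimensionValue));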
From source file:dk.dma.ais.reader.AisReaders.java
/**
 * Equivalent to {@link #parseSource(String)} except that it parses a list of sources,
 * making sure that all source names are unique.
 */
public static AisReaderGroup createGroup(String name, List<String> sources) {
    Map<String, AisTcpReader> readers = new HashMap<>();
    for (String s : sources) {
        AisTcpReader r = parseSource(s);
        if (readers.put(r.getSourceId(), r) != null) {
            // Make sure it's unique
            throw new Error("More than one reader specified with the same source id (id =" + r.getSourceId()
                    + "), source string = " + sources);
        }
    }
    AisReaderGroup g = new AisReaderGroup(name);
    for (AisTcpReader r : readers.values()) {
        g.add(r);
    }
    return g;
}
From source file:com.newatlanta.appengine.datastore.CachingDatastoreService.java
@SuppressWarnings("unchecked")
private static void doWriteBehindTask(HttpServletRequest req, HttpServletResponse res) throws IOException {
    Object payload = null;
    try {
        payload = deserialize(req);
        if (payload == null) {
            return;
        }
    } catch (Exception e) {
        log.warning(e.toString());
        return;
    }

    MemcacheService memcache = getMemcacheService();
    List<Key> keys;
    if (payload instanceof Key) {
        keys = new ArrayList<Key>();
        keys.add((Key) payload);
        // delete flag that prevents multiple tasks from being queued
        memcache.delete(keyToString((Key) payload));
    } else if (payload instanceof List) {
        keys = (List) payload;
    } else {
        log.warning(payload.getClass().getName());
        return;
    }

    Map<String, Entity> entityMap = (Map) memcache.getAll((List) keys);
    if ((entityMap != null) && !entityMap.isEmpty()) {
        try {
            if (getDatastoreService().put(entityMap.values()).size() != entityMap.size()) {
                log.info("failed to write all entities - retrying");
                res.sendError(HttpServletResponse.SC_PARTIAL_CONTENT);
            }
        } catch (DatastoreTimeoutException e) {
            // retry task
            log.info(e.getMessage());
            res.sendError(HttpServletResponse.SC_REQUEST_TIMEOUT);
        } catch (ConcurrentModificationException e) {
            // retry task
            log.info(e.getMessage());
            res.sendError(HttpServletResponse.SC_CONFLICT);
        } catch (Exception e) {
            // don't retry
            log.warning(e.toString());
        }
    }
}
From source file:at.molindo.dbcopy.Database.java
private static Map<String, Table> readTables(Connection connection) throws SQLException {
    Map<String, Table> tables = new HashMap<String, Table>();

    DatabaseMetaData meta = connection.getMetaData();
    String catalog = connection.getCatalog();

    // for each table in current catalog
    ResultSet rs = meta.getTables(catalog, null, null, null);
    try {
        while (rs.next()) {
            String tableName = rs.getString("TABLE_NAME");
            Table.Builder table = Table.builder(tableName);

            // columns
            String columnsQuery = "select COLUMN_NAME,COLLATION_NAME from information_schema.COLUMNS where TABLE_SCHEMA=? and TABLE_NAME=? order by ORDINAL_POSITION";
            Map<String, Column> columns = Utils.executePrepared(connection, columnsQuery, new ColumnHandler(),
                    catalog, tableName);
            if (columns.isEmpty()) {
                throw new IllegalStateException("table (" + tableName + ") without columns?");
            }
            table.addColumns(columns.values());

            // unique keys
            String uniqueKeysQuery = "show keys from `" + tableName + "` in `" + catalog
                    + "` where `Non_unique` = 0 and `Null` = ''";
            List<Map<String, Object>> uniqueKeyColumns = Utils.executePrepared(connection, uniqueKeysQuery,
                    new MapListHandler());

            ListMap<String, Column> uniqueKeys = new ListMap<String, Column>();
            for (Map<String, Object> keyColumn : uniqueKeyColumns) {
                String name = (String) keyColumn.get("INDEX_NAME");
                String columnName = (String) keyColumn.get("COLUMN_NAME");

                if (name == null) {
                    throw new IllegalStateException("INDEX_NAME must not be null");
                }
                if (columnName == null) {
                    throw new IllegalStateException("COLUMN_NAME must not be null");
                }

                Column column = columns.get(columnName);
                if (column == null) {
                    throw new IllegalStateException("COLUMN_NAME unknown: " + columnName);
                }
                uniqueKeys.add(name, column);
            }
            for (Map.Entry<String, List<Column>> e : uniqueKeys.entrySet()) {
                table.addUniqueKey(e.getKey(), e.getValue());
            }

            if (uniqueKeys.isEmpty()) {
                log.warn("table without primary key not supported: " + tableName);
            } else {
                tables.put(tableName, table.build());
            }
        }
    } finally {
        Utils.close(rs);
    }

    return tables;
}
From source file:com.microsoft.azure.shortcuts.resources.samples.NetworkInterfacesSample.java
private static void printNetworkInterface(NetworkInterface nic) throws Exception {
    StringBuilder output = new StringBuilder();
    NetworkInterfaceIpConfiguration ipConfig = nic.inner().getIpConfigurations().get(0);
    Map<String, PublicIpAddress> pips = nic.publicIpAddresses();
    output.append(String.format("Network interface ID: %s\n", nic.id()))
            .append(String.format("\tName: %s\n", nic.name()))
            .append(String.format("\tGroup: %s\n", nic.resourceGroup()))
            .append(String.format("\tRegion: %s\n", nic.region()))
            .append(String.format("\tPrimary subnet ID: %s\n", ipConfig.getSubnet().getId()))
            .append(String.format("\tPrimary private IP: %s\n", ipConfig.getPrivateIpAddress()))
            .append(String.format("\tPublic IPs:\n"));
    for (PublicIpAddress pip : pips.values()) {
        output.append(String.format("\t\tName:%s\n", pip.name()))
                .append(String.format("\t\tLeaf domain label:%s\n", pip.leafDomainLabel()))
                .append(String.format("\t\tIP address:%s\n", pip.ipAddress()));
    }
    System.out.println(output.toString());
}
From source file:edu.stanford.muse.graph.directed.Digraph.java
/**
 * Input is a map of docid -> collections of terms in the doc.
 * @throws FileNotFoundException
 */
public static void doIt(Map<Integer, Collection<Collection<String>>> docMap, String outfile)
        throws FileNotFoundException {
    // index stores for each term, count of how many times it co-occurs with another in a doc.
    Map<String, Map<String, Integer>> index = new LinkedHashMap<String, Map<String, Integer>>();
    Map<String, Integer> termFreq = new LinkedHashMap<String, Integer>();

    // compute index
    for (Integer num : docMap.keySet()) {
        Collection<Collection<String>> paras = docMap.get(num);
        for (Collection<String> paraNames : paras) {
            System.out.println(num + ". " + paraNames.size() + " names, prev index size " + index.size()
                    + " term freq size " + termFreq.size());
            if (paraNames.size() > 100) {
                log.warn("skipping long para: " + paraNames);
                continue;
            }
            for (String s : paraNames) {
                String lc = s.toLowerCase();
                // bump up term freq for this term
                Integer X = termFreq.get(lc);
                termFreq.put(lc, (X == null) ? 1 : X + 1);

                // bump up counts for co-occurring terms...
                // unfortunately n^2 operation here
                for (String s1 : paraNames) {
                    // skip the element itself (identity check against the original,
                    // un-lowercased string, since toLowerCase may return a new object)
                    if (s == s1)
                        continue;
                    Map<String, Integer> termMap = index.get(lc);
                    if (termMap == null) {
                        // allocate termMap if this is the first time we've seen this term
                        termMap = new LinkedHashMap<String, Integer>(1);
                        index.put(lc, termMap);
                    }
                    // bump the count
                    Integer I = termMap.get(s1);
                    termMap.put(s1, (I == null) ? 1 : I + 1);
                }
            }
        }
    }

    // process index and store it as a graph structure
    Digraph<String> graph = new Digraph<String>();
    for (String term : index.keySet()) {
        Map<String, Integer> map = index.get(term);
        if (map == null) {
            // no edges, just add it to the graph and continue
            graph.add(term);
            continue;
        }

        // compute total co-occurrence across all other terms this term is associated with
        int total = 0;
        for (Integer x : map.values())
            total += x;

        // proportionately allocate weight
        for (String x : map.keySet())
            graph.addEdge(term, x, ((float) map.get(x)));
        // graph.addEdge(term, x, ((float) map.get(x)) / total);
    }

    String s = graph.dump();
    PrintStream pw = new PrintStream(new FileOutputStream(outfile));
    pw.print(s);
    pw.close();
}
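As a side note, on Java 8 or later (an assumption; this code predates those APIs) the two get-then-put counting sequences above can each be collapsed into a single call, using the variable names from the method above:

termFreq.merge(lc, 1, Integer::sum);
index.computeIfAbsent(lc, k -> new LinkedHashMap<String, Integer>(1)).merge(s1, 1, Integer::sum);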