List of usage examples for the java.util Map.values() method.
Collection<V> values();
From source file:com.mitre.fulfilmentprocess.test.PaymentIntegrationTest.java
@AfterClass public static void removeProcessDefinitions() { LOG.debug("cleanup..."); final ApplicationContext appCtx = Registry.getGlobalApplicationContext(); assertTrue("Application context of type " + appCtx.getClass() + " is not a subclass of " + ConfigurableApplicationContext.class, appCtx instanceof ConfigurableApplicationContext); final ConfigurableApplicationContext applicationContext = (ConfigurableApplicationContext) appCtx; final ConfigurableListableBeanFactory beanFactory = applicationContext.getBeanFactory(); assertTrue("Bean Factory of type " + beanFactory.getClass() + " is not of type " + BeanDefinitionRegistry.class, beanFactory instanceof BeanDefinitionRegistry); final XmlBeanDefinitionReader xmlReader = new XmlBeanDefinitionReader((BeanDefinitionRegistry) beanFactory); xmlReader.loadBeanDefinitions(new ClassPathResource( "/mitrefulfilmentprocess/test/mitrefulfilmentprocess-spring-testcleanup.xml")); //cleanup command factory final Map<String, CommandFactory> commandFactoryList = applicationContext .getBeansOfType(CommandFactory.class); commandFactoryList.remove("mockupCommandFactory"); final DefaultCommandFactoryRegistryImpl commandFactoryReg = appCtx .getBean(DefaultCommandFactoryRegistryImpl.class); commandFactoryReg.setCommandFactoryList(commandFactoryList.values()); // if (definitonFactory != null) // {//from w ww . j av a 2 s .c o m // TODO this test seems to let processes run after method completion - therefore we cannot // remove definitions !!! // definitonFactory.remove("testPlaceorder"); // definitonFactory.remove("testConsignmentFulfilmentSubprocess"); // } processService.setTaskService(appCtx.getBean(DefaultTaskService.class)); definitonFactory = null; processService = null; }
From source file:eu.itesla_project.modules.validation.OfflineValidationTool.java
private static void writeCsv(Map<String, Map<RuleId, ValidationStatus>> statusPerRulePerCase, Map<String, Map<RuleId, Map<HistoDbAttributeId, Object>>> valuesPerRulePerCase, Path outputDir) throws IOException { Set<RuleId> rulesIds = new TreeSet<>(); statusPerRulePerCase.values().stream().forEach(e -> rulesIds.addAll(e.keySet())); writeComparisonFiles(rulesIds, statusPerRulePerCase, outputDir); writeAttributesFiles(rulesIds, valuesPerRulePerCase, outputDir); List<String> categories = Arrays.asList(toCategory(OK_S, OK_R), toCategory(OK_S, NOK_R), toCategory(NOK_S, OK_R), toCategory(NOK_S, NOK_R), toCategory(OK_S, UNDEF_R), toCategory(NOK_S, UNDEF_R), toCategory(UNDEF_S, OK_R), toCategory(UNDEF_S, NOK_R), toCategory(UNDEF_S, UNDEF_R)); Map<RuleId, Map<String, AtomicInteger>> synthesisPerRule = new HashMap<>(); for (RuleId ruleId : rulesIds) { synthesisPerRule.put(ruleId,//from w w w. ja v a2s .c om categories.stream().collect(Collectors.toMap(Function.identity(), e -> new AtomicInteger()))); } for (Map.Entry<String, Map<RuleId, ValidationStatus>> e : statusPerRulePerCase.entrySet()) { Map<RuleId, ValidationStatus> statusPerRule = e.getValue(); for (RuleId ruleId : rulesIds) { ValidationStatus status = statusPerRule.get(ruleId); String category = toCategory(status.isSimulationOkToStr(), status.isRuleOkToStr()); synthesisPerRule.get(ruleId).get(category).incrementAndGet(); } } writeSynthesisFile(synthesisPerRule, categories, outputDir.resolve("synthesis.csv")); }
From source file:gridool.db.partitioning.phihash.csv.distmm.InMemoryIndexHelper.java
/** * Synchronization is required./*from ww w .j a va 2s .com*/ */ public static void writeToFile(final InputStream in) throws IOException { final byte[] recordBuf = new byte[2048]; // big buffer enough for a record final Map<String, OutputStream> outputMap = new HashMap<String, OutputStream>(12); while (in.available() > 0) { String fkIdxName = IOUtils.readString(in); int bucket = IOUtils.readInt(in); int recordlen = IOUtils.readInt(in); IOUtils.readFully(in, recordBuf, 0, recordlen); OutputStream out = prepareFkOutputStream(fkIdxName, bucket, outputMap); out.write(recordBuf, 0, recordlen); } for (OutputStream out : outputMap.values()) { out.flush(); out.close(); } }
From source file:com.sirma.itt.cmf.integration.workflow.WorkflowUtil.java
/** * Prepares the given node for persistence in the workflow engine. * * @param node//from ww w.ja va 2 s . c o m * The node to package up for persistence * @return The map of data representing the node */ @SuppressWarnings("unchecked") public static Map<QName, Serializable> prepareTaskParams(Node node) { Map<QName, Serializable> params = new HashMap<QName, Serializable>(); // marshal the properties and associations captured by the property // sheet // back into a Map to pass to the workflow service // go through all the properties in the transient node and add them to // params map Map<String, Object> props = node.getProperties(); for (String propName : props.keySet()) { QName propQName = resolveToQName(propName); params.put(propQName, (Serializable) props.get(propName)); } // go through any associations that have been added to the start task // and build a list of NodeRefs representing the targets Map<String, Map<String, AssociationRef>> assocs = node.getAddedAssociations(); for (String assocName : assocs.keySet()) { QName assocQName = resolveToQName(assocName); // get the associations added and create list of targets Map<String, AssociationRef> addedAssocs = assocs.get(assocName); List<AssociationRef> originalAssocRefs = (List<AssociationRef>) node.getAssociations().get(assocName); List<NodeRef> targets = new ArrayList<NodeRef>(addedAssocs.size()); if (originalAssocRefs != null) { for (AssociationRef assoc : originalAssocRefs) { targets.add(assoc.getTargetRef()); } } for (AssociationRef assoc : addedAssocs.values()) { targets.add(assoc.getTargetRef()); } params.put(assocQName, (Serializable) targets); } // go through the removed associations and either setup or adjust the // parameters map accordingly assocs = node.getRemovedAssociations(); for (String assocName : assocs.keySet()) { QName assocQName = resolveToQName(assocName); // get the associations removed and create list of targets Map<String, AssociationRef> removedAssocs = assocs.get(assocName); 
List<NodeRef> targets = (List<NodeRef>) params.get(assocQName); if (targets == null) { // if there weren't any assocs of this type added get the // current // set of assocs from the node List<AssociationRef> originalAssocRefs = (List<AssociationRef>) node.getAssociations() .get(assocName); targets = new ArrayList<NodeRef>(originalAssocRefs.size()); for (AssociationRef assoc : originalAssocRefs) { targets.add(assoc.getTargetRef()); } } // remove the assocs the user deleted for (AssociationRef assoc : removedAssocs.values()) { targets.remove(assoc.getTargetRef()); } params.put(assocQName, (Serializable) targets); } // TODO: Deal with child associations if and when we need to support // them for workflow tasks, for now warn that they are being used Map<?, ?> childAssocs = node.getAddedChildAssociations(); if (childAssocs.size() > 0) { if (logger.isWarnEnabled()) logger.warn("Child associations are present but are not supported for workflow tasks, ignoring..."); } return params; }
From source file:contestTabulation.Main.java
private static void tabulateCategorySweepstakesWinners(Map<String, School> schools, Map<Subject, List<School>> categorySweepstakesWinners) { for (final Subject subject : Subject.values()) { ArrayList<School> schoolList = new ArrayList<School>(schools.values()); Collections.sort(schoolList, School.getScoreComparator(subject)); Collections.reverse(schoolList); categorySweepstakesWinners.put(subject, schoolList); }//from w w w. ja v a 2s.c om }
From source file:net.ontopia.topicmaps.db2tm.Utils.java
/** * INTERNAL: Returns a map where the keys are data sources and each * entry is a collection of their individual relations. Before * returning all relations will be verified against the relations * declared in the mapping. If relations are missing an error is * issued indicating which ones are missing. *//*from w w w . j ava 2 s . c om*/ public static Map<DataSourceIF, Collection<Relation>> verifyRelationsForMapping(RelationMapping rmapping) { // build return value Collection<DataSourceIF> ds = rmapping.getDataSources(); Map<DataSourceIF, Collection<Relation>> foundRelations = new HashMap<DataSourceIF, Collection<Relation>>( ds.size()); for (DataSourceIF datasource : ds) { foundRelations.put(datasource, datasource.getRelations()); } // detect missing relations List<Relation> missingRelations = new ArrayList<Relation>(); for (Relation relation : rmapping.getRelations()) { boolean relationMapped = false; for (Collection<Relation> frels : foundRelations.values()) { if (frels.contains(relation)) { relationMapped = true; break; } ; } if (!relationMapped) missingRelations.add(relation); } // complain if found mappings without relations int size = missingRelations.size(); if (size > 1) { String[] relnames = new String[size]; for (int i = 0; i < relnames.length; i++) { relnames[i] = missingRelations.get(i).getName(); } throw new DB2TMException("No relations found for mappings: " + StringUtils.join(relnames, ", ")); } else if (size == 1) { throw new DB2TMException("No relation found for mapping: " + missingRelations.get(0).getName()); } return foundRelations; }
From source file:ch.cyberduck.ui.cocoa.odb.EditorFactory.java
/**
 * Collects all statically registered (but possibly not installed) editors,
 * honoring the ODB and kqueue feature preferences, and ensures the configured
 * default editor is present in the result.
 *
 * @return editor identifiers keyed by application name
 */
public static Map<String, String> getSupportedEditors() {
    if (log.isTraceEnabled()) {
        log.trace("getSupportedEditors");
    }
    Map<String, String> supported = new HashMap<String, String>();
    if (Preferences.instance().getBoolean("editor.odb.enable")) {
        supported.putAll(ODBEditor.getSupportedEditors());
    }
    if (Preferences.instance().getBoolean("editor.kqueue.enable")) {
        supported.putAll(WatchEditor.getSupportedEditors());
    }
    final String defaultEditor = defaultEditor();
    if (null == defaultEditor) {
        return supported;
    }
    // Fix (idiom): containsValue() instead of values().contains() — same
    // semantics, the direct Map API call.
    if (!supported.containsValue(defaultEditor)) {
        supported.put(EditorFactory.getApplicationName(defaultEditor), defaultEditor);
    }
    return supported;
}
From source file:org.bigtester.ate.GlobalUtils.java
/** * Gets the case data files./*from w w w . j ava 2 s . c om*/ * * @param appCtx * the app ctx * @return the case data files */ @Nullable public static List<Resource> getCaseDataFiles(ApplicationContext appCtx) { Map<String, Homepage> homepages = appCtx.getBeansOfType(Homepage.class, true, true); Map<String, Lastpage> lastpages = appCtx.getBeansOfType(Lastpage.class); Map<String, RegularPage> regularpages = appCtx.getBeansOfType(RegularPage.class); List<Resource> dataFiles = new ArrayList<Resource>(); for (int i = 0; i < homepages.size(); i++) { if (null != homepages.values().iterator().next().getDataFile()) dataFiles.add(homepages.values().iterator().next().getDataFile()); } for (int i = 0; i < lastpages.size(); i++) { if (null != lastpages.values().iterator().next().getDataFile()) dataFiles.add(lastpages.values().iterator().next().getDataFile()); } for (int i = 0; i < regularpages.size(); i++) { if (null != regularpages.values().iterator().next().getDataFile()) dataFiles.add(regularpages.values().iterator().next().getDataFile()); } return dataFiles; }
From source file:com.netflix.spinnaker.clouddriver.kubernetes.v2.caching.agent.KubernetesCacheDataConverter.java
/**
 * Deduplicates cache entries by id, combining entries that share an id via
 * {@code mergeCacheData}.
 *
 * @param input cache entries, possibly containing duplicate ids
 * @return one entry per id, duplicates merged
 */
public static Collection<CacheData> dedupCacheData(Collection<CacheData> input) {
    Map<String, CacheData> cacheDataById = new HashMap<>();
    for (CacheData cd : input) {
        // Map.merge replaces the containsKey/get/put dance. Argument order is
        // preserved: the original called mergeCacheData(newEntry, existingEntry).
        cacheDataById.merge(cd.getId(), cd, (existing, incoming) -> mergeCacheData(incoming, existing));
    }
    return cacheDataById.values();
}
From source file:de.escalon.hypermedia.spring.uber.UberUtils.java
/**
 * Recursively converts {@code object} into uber-data nodes appended to
 * {@code objectNode}.
 * <p>
 * Handling order matters: Resource/Resources/ResourceSupport (links first),
 * then Collection, then Map entries, then bean properties and public fields.
 * Properties named in {@code FILTER_RESOURCE_SUPPORT} are skipped. Any
 * reflection failure is rethrown as a RuntimeException.
 *
 * @param objectNode node to append to
 * @param object     object to convert; {@code null} is a no-op
 */
public static void toUberData(AbstractUberNode objectNode, Object object) {
    Set<String> filtered = FILTER_RESOURCE_SUPPORT;
    if (object == null) {
        return;
    }
    try {
        // TODO: move all returns to else branch of property descriptor handling
        if (object instanceof Resource) {
            // Resource: emit its links, then recurse into the wrapped content.
            Resource<?> resource = (Resource<?>) object;
            objectNode.addLinks(resource.getLinks());
            toUberData(objectNode, resource.getContent());
            return;
        } else if (object instanceof Resources) {
            Resources<?> resources = (Resources<?>) object;
            // TODO set name using EVO see HypermediaSupportBeanDefinitionRegistrar
            objectNode.addLinks(resources.getLinks());
            Collection<?> content = resources.getContent();
            toUberData(objectNode, content);
            return;
        } else if (object instanceof ResourceSupport) {
            ResourceSupport resource = (ResourceSupport) object;
            objectNode.addLinks(resource.getLinks());
            // wrap object attributes below to avoid endless loop
        } else if (object instanceof Collection) {
            // Collection: one child node per item, each converted recursively.
            Collection<?> collection = (Collection<?>) object;
            for (Object item : collection) {
                // TODO name must be repeated for each collection item
                UberNode itemNode = new UberNode();
                objectNode.addData(itemNode);
                toUberData(itemNode, item);
            }
            return;
        }
        if (object instanceof Map) {
            // Map: one named child per entry; scalar values are set directly,
            // everything else recurses.
            Map<?, ?> map = (Map<?, ?>) object;
            for (Entry<?, ?> entry : map.entrySet()) {
                String key = entry.getKey().toString();
                Object content = entry.getValue();
                Object value = getContentAsScalarValue(content);
                UberNode entryNode = new UberNode();
                objectNode.addData(entryNode);
                entryNode.setName(key);
                if (value != null) {
                    entryNode.setValue(value);
                } else {
                    toUberData(entryNode, content);
                }
            }
        } else {
            // Plain bean: walk readable properties (minus the filtered set) ...
            Map<String, PropertyDescriptor> propertyDescriptors = PropertyUtils.getPropertyDescriptors(object);
            for (PropertyDescriptor propertyDescriptor : propertyDescriptors.values()) {
                String name = propertyDescriptor.getName();
                if (filtered.contains(name)) {
                    continue;
                }
                UberNode propertyNode = new UberNode();
                Object content = propertyDescriptor.getReadMethod().invoke(object);
                if (isEmptyCollectionOrMap(content, propertyDescriptor.getPropertyType())) {
                    continue;
                }
                Object value = getContentAsScalarValue(content);
                propertyNode.setName(name);
                objectNode.addData(propertyNode);
                if (value != null) {
                    // for each scalar property of a simple bean, add valuepair nodes to data
                    propertyNode.setValue(value);
                } else {
                    toUberData(propertyNode, content);
                }
            }
            // ... then public fields not already covered by a property.
            // NOTE(review): getFields() also returns public static fields —
            // presumably beans here have none; confirm if statics appear.
            Field[] fields = object.getClass().getFields();
            for (Field field : fields) {
                String name = field.getName();
                if (!propertyDescriptors.containsKey(name)) {
                    Object content = field.get(object);
                    Class<?> type = field.getType();
                    if (isEmptyCollectionOrMap(content, type)) {
                        continue;
                    }
                    UberNode propertyNode = new UberNode();
                    Object value = getContentAsScalarValue(content);
                    propertyNode.setName(name);
                    objectNode.addData(propertyNode);
                    if (value != null) {
                        // for each scalar property of a simple bean, add valuepair nodes to data
                        propertyNode.setValue(value);
                    } else {
                        toUberData(propertyNode, content);
                    }
                }
            }
        }
    } catch (Exception ex) {
        throw new RuntimeException("failed to transform object " + object, ex);
    }
}