Usage examples for the java.util.Map method values()
Collection<V> values();
From source file:imperial.modaclouds.monitoring.sda.weka.CreateArff.java
/**
 * Creates an ARFF file from several metric time series.
 *
 * <p>All series are clipped to their common time window, the non-target series
 * are spline-interpolated onto the target series' timestamps, and the result is
 * written as a Weka ARFF file under the current working directory.
 *
 * @param timestamps_str per-metric timestamp lists (as decimal strings), parallel to {@code data}
 * @param data per-metric value lists; index 0 is the target metric
 * @param metricName attribute names for the generated relation (timestamp, target, others...)
 * @param fileName the file name for the ARFF output
 */
public static void create(ArrayList<ArrayList<String>> timestamps_str, ArrayList<ArrayList<String>> data,
        ArrayList<String> metricName, String fileName) {
    System.out.println("data: " + data.get(0));

    // Common time window = intersection of every series' [min, max] range.
    // NOTE(review): Collections.min/max compare the timestamp STRINGS
    // lexicographically, which is only correct when all timestamps have the
    // same digit count - confirm upstream format.
    long min_timestamp = Long.valueOf(Collections.min(timestamps_str.get(0)));
    long max_timestamp = Long.valueOf(Collections.max(timestamps_str.get(0)));
    for (int i = 1; i < timestamps_str.size(); i++) {
        long min_temp = Long.valueOf(Collections.min(timestamps_str.get(i)));
        long max_temp = Long.valueOf(Collections.max(timestamps_str.get(i)));
        if (max_temp < max_timestamp) {
            max_timestamp = max_temp;
        }
        if (min_temp > min_timestamp) {
            min_timestamp = min_temp;
        }
    }

    // Drop samples outside the common window, keeping each timestamp list and
    // its value list in lock-step.
    // FIX: the value iterator must advance on EVERY step, not only when a
    // sample is removed; the original advanced iter_data only inside the
    // removal branch, so the two iterators drifted and wrong values were
    // removed (or NoSuchElementException was thrown).
    for (int i = 0; i < timestamps_str.size(); i++) {
        Iterator<String> iter_time = timestamps_str.get(i).iterator();
        Iterator<String> iter_data = data.get(i).iterator();
        while (iter_time.hasNext()) {
            long temp_timestamps = Long.valueOf(iter_time.next());
            iter_data.next();
            if (temp_timestamps < min_timestamp || temp_timestamps > max_timestamp) {
                iter_time.remove();
                iter_data.remove();
            }
        }
    }

    double[] timestamps = convertDoubles(timestamps_str.get(0));
    double[] targetData = convertDoubles(data.get(0));
    double[][] otherData = new double[data.size() - 1][timestamps.length];
    for (int i = 0; i < data.size() - 1; i++) {
        // NOTE(review): row i of otherData is presumably the (i+1)-th metric
        // (metric 0 is the target), yet series i is read here - confirm whether
        // timestamps_str.get(i + 1) / data.get(i + 1) was intended.
        double[] timestamps_temp = convertDoubles(timestamps_str.get(i));
        double[] targetData_temp = convertDoubles(data.get(i));
        SplineInterpolator spline = new SplineInterpolator();
        // TreeMap both de-duplicates timestamps and yields their indices sorted
        // by time, as SplineInterpolator needs strictly increasing knots.
        Map<Double, Integer> map = new TreeMap<Double, Integer>();
        for (int j = 0; j < timestamps_temp.length; j++) {
            map.put(timestamps_temp[j], j);
        }
        Collection<Integer> indices = map.values();
        int[] indices_int = ArrayUtils.toPrimitive(indices.toArray(new Integer[indices.size()]));
        double[] timestamps_temp_new = new double[indices_int.length];
        double[] targetData_temp_new = new double[indices_int.length];
        for (int j = 0; j < indices_int.length; j++) {
            timestamps_temp_new[j] = timestamps_temp[indices_int[j]];
            targetData_temp_new[j] = targetData_temp[indices_int[j]];
        }
        PolynomialSplineFunction polynomical = spline.interpolate(timestamps_temp_new, targetData_temp_new);
        for (int j = 0; j < timestamps.length; j++) {
            try {
                otherData[i][j] = polynomical.value(timestamps[j]);
            } catch (Exception ex) {
                // Query outside the interpolation range: fall back to the raw
                // sample. NOTE(review): j indexes the target series and may
                // exceed targetData_temp_new.length - confirm.
                otherData[i][j] = targetData_temp_new[j];
            }
        }
    }

    // Build the Weka data set: one numeric attribute per metric name.
    ArrayList<Attribute> attributes = new ArrayList<Attribute>();
    for (String metric : metricName) {
        attributes.add(new Attribute(metric));
    }
    Instances dataSet = new Instances("data", attributes, 0);
    for (int i = 0; i < timestamps.length; i++) {
        double[] instanceValue1 = new double[dataSet.numAttributes()];
        instanceValue1[0] = timestamps[i];
        instanceValue1[1] = targetData[i];
        for (int j = 0; j < data.size() - 1; j++) {
            instanceValue1[2 + j] = otherData[j][i];
        }
        dataSet.add(new DenseInstance(1.0, instanceValue1));
    }

    ArffSaver saver = new ArffSaver();
    saver.setInstances(dataSet);
    try {
        String workingDir = System.getProperty("user.dir");
        System.out.println("workingDir: " + workingDir);
        saver.setFile(new File(workingDir + "/" + fileName));
        saver.writeBatch();
    } catch (IOException e) {
        e.printStackTrace();
    }
}
From source file:com.autentia.intra.util.SpringUtils.java
/** * Configure this class/*www . j a v a 2s . co m*/ * * @param appCtx */ public synchronized static void configure(ApplicationContext ctx) { // Do not let configure more than once if (appCtx != null) { throw new IllegalStateException("Spring's application context cannot be set more than once"); } // Store application context appCtx = ctx; // Find AclService Map map = appCtx.getBeansOfType(AclService.class); if (map.size() != 1) { throw new IllegalStateException( "Found incorrect number of AclService instances in application context - you must have only have one!"); } aclService = (AclService) map.values().iterator().next(); // Find SidRetrievalStrategy map = appCtx.getBeansOfType(SidRetrievalStrategy.class); if (map.size() == 0) { sidRetrievalStrategy = new SidRetrievalStrategyImpl(); } else if (map.size() == 1) { sidRetrievalStrategy = (SidRetrievalStrategy) map.values().iterator().next(); } else { throw new IllegalStateException( "Found incorrect number of SidRetrievalStrategy instances in application context - you must have only have one!"); } // Find ObjectIdentityRetrievalStrategy map = appCtx.getBeansOfType(ObjectIdentityRetrievalStrategy.class); if (map.size() == 0) { objectIdentityRetrievalStrategy = new ObjectIdentityRetrievalStrategyImpl(); } else if (map.size() == 1) { objectIdentityRetrievalStrategy = (ObjectIdentityRetrievalStrategy) map.values().iterator().next(); } else { throw new IllegalStateException( "Found incorrect number of ObjectIdentityRetrievalStrategy instances in application context - you must have only have one!"); } }
From source file:com.gargoylesoftware.htmlunit.source.JQueryExtractor.java
/**
 * Flattens per-browser expectations into one {@link Test} per test name,
 * accumulating every expectation line, and returns the tests sorted.
 *
 * @param browserExpectations expectations keyed by browser name
 * @return the aggregated tests in natural (sorted) order
 */
private static List<Test> computeTestsList(final Map<String, Expectations> browserExpectations) {
    final Map<String, Test> map = new HashMap<>();
    for (final Expectations expectations : browserExpectations.values()) {
        for (final Expectation expectation : expectations) {
            // computeIfAbsent replaces the manual get/null-check/put dance.
            final Test test = map.computeIfAbsent(expectation.getTestName(), Test::new);
            test.addLine(expectation.getLine());
        }
    }
    final List<Test> tests = new ArrayList<>(map.values());
    Collections.sort(tests);
    return tests;
}
From source file:com.alibaba.jstorm.daemon.worker.Worker.java
/** * get current task's output task list/* w ww .ja va 2 s. c o m*/ */ public static Set<Integer> worker_output_tasks(WorkerData workerData) { ContextMaker context_maker = workerData.getContextMaker(); Set<Integer> task_ids = workerData.getTaskids(); StormTopology topology = workerData.getSysTopology(); Set<Integer> rtn = new HashSet<Integer>(); for (Integer taskid : task_ids) { TopologyContext context = context_maker.makeTopologyContext(topology, taskid, null); // <StreamId, <ComponentId, Grouping>> Map<String, Map<String, Grouping>> targets = context.getThisTargets(); for (Map<String, Grouping> e : targets.values()) { for (String componentId : e.keySet()) { List<Integer> tasks = context.getComponentTasks(componentId); rtn.addAll(tasks); } } } return rtn; }
From source file:com.opengamma.financial.security.cds.CDSIndexComponentBundle.java
/**
 * Keeps a single component per obligor RED code: a later component with the
 * same RED code replaces any earlier one.
 *
 * @param components the components, possibly containing RED-code duplicates
 * @return the de-duplicated components (iteration order unspecified)
 */
private static Iterable<CreditDefaultSwapIndexComponent> deduplicate(
        Iterable<CreditDefaultSwapIndexComponent> components) {
    Map<ExternalId, CreditDefaultSwapIndexComponent> byRedCode = Maps.newHashMap();
    for (CreditDefaultSwapIndexComponent component : components) {
        byRedCode.put(component.getObligorRedCode(), component);
    }
    return byRedCode.values();
}
From source file:gr.abiss.calipso.tiers.specifications.GenericSpecifications.java
protected static void addNestedJunctionPredicates(final Class clazz, Root<Persistable> root, CriteriaBuilder cb, LinkedList<Predicate> predicates, Map<String, Map<String, String[]>> andJunctions, String mode) { if (!CollectionUtils.isEmpty(andJunctions)) { String[] searchMode = { mode }; for (Map<String, String[]> params : andJunctions.values()) { params.put(SEARCH_MODE, searchMode); // TODO Predicate nestedPredicate = buildRootPredicate(clazz, params, root, cb/*, true*/); if (nestedPredicate != null) { predicates.add(nestedPredicate); }//from w w w . ja va 2 s.c om } } }
From source file:com.vmware.admiral.compute.container.volume.VolumeUtil.java
/**
 * Creates additional affinity rules between container descriptions which share
 * local volumes. Each container group should be deployed on a single host.
 */
public static void applyLocalNamedVolumeConstraints(Collection<ComponentDescription> componentDescriptions) {
    // Names of volumes using the default (host-local) driver.
    Map<String, ContainerVolumeDescription> volumes = filterDescriptions(ContainerVolumeDescription.class,
            componentDescriptions);
    List<String> localVolumes = volumes.values().stream().filter(v -> DEFAULT_VOLUME_DRIVER.equals(v.driver))
            .map(v -> v.name).collect(Collectors.toList());
    if (localVolumes.isEmpty()) {
        return;
    }

    Map<String, ContainerDescription> containers = filterDescriptions(ContainerDescription.class,
            componentDescriptions);

    // sort containers by local volume: each set is a group of container names
    // that share a particular local volume
    List<Set<String>> localVolumeContainers = localVolumes.stream()
            .map(v -> filterByVolume(v, containers.values())).filter(s -> !s.isEmpty())
            .collect(Collectors.toList());
    if (localVolumeContainers.isEmpty()) {
        return;
    }

    /** Merge sets of containers sharing local volumes
     *
     * C1 C2 C3 C4 C5 C6
     *  \ /\ /  |  \ /
     *  L1  L2  L3  L4
     *
     * Input: [C1, C2], [C2, C3], [C4], [C5, C6]
     * Output: [C1, C2, C3], [C4], [C5, C6]
     */
    localVolumeContainers = mergeSets(localVolumeContainers);

    // container name -> the volume descriptions it mounts.
    Map<String, List<ContainerVolumeDescription>> containerToVolumes = containers.values().stream()
            .collect(Collectors.toMap(cd -> cd.name, cd -> filterVolumes(cd, volumes.values())));

    // container name -> number of DISTINCT volume drivers it requires.
    Map<String, Integer> containerToDriverCount = containerToVolumes.entrySet().stream()
            .collect(Collectors.toMap(e -> e.getKey(),
                    e -> e.getValue().stream().map(vd -> vd.driver).collect(Collectors.toSet()).size()));

    // Within each merged group, anchor on the container needing the most
    // drivers and point every other member's affinity at it.
    for (Set<String> s : localVolumeContainers) {
        if (s.size() > 1) {
            // find the container with highest number of required drivers
            int max = s.stream().map(cn -> containerToDriverCount.get(cn))
                    .max((vc1, vc2) -> Integer.compare(vc1, vc2)).get();
            Set<String> maxDrivers = s.stream().filter(cn -> containerToDriverCount.get(cn) == max)
                    .collect(Collectors.toSet());
            // Ties broken arbitrarily: any container with the max count works.
            String maxCont = maxDrivers.iterator().next();
            s.remove(maxCont);
            s.stream().forEach(cn -> addAffinity(maxCont, containers.get(cn)));
        }
    }
}
From source file:freenet.client.async.ContainerInserter.java
@SuppressWarnings("unchecked") public static void resumeMetadata(Map<String, Object> map, ClientContext context) throws ResumeFailedException { Map<String, Object> manifestElements = (Map<String, Object>) map; for (Object o : manifestElements.values()) { if (o instanceof HashMap) { resumeMetadata((Map<String, Object>) o, context); } else if (o instanceof ManifestElement) { ManifestElement e = (ManifestElement) o; e.onResume(context);//from w w w.ja va2 s . com } else if (o instanceof Metadata) { // Ignore } else if (o instanceof PutHandler) { PutHandler handler = (PutHandler) o; handler.onResume(context); } else if (o instanceof ManifestElement) { ((ManifestElement) o).onResume(context); } else throw new IllegalArgumentException("Unknown manifest element: " + o); } }
From source file:com.opengamma.analytics.financial.riskfactor.TaylorExpansionMultiplierCalculator.java
/**
 * Computes the value contribution of an underlying in a Taylor-expansion
 * risk-factor calculation.
 *
 * <p>For an {@code NthOrderUnderlying} of order n this returns
 * {@code underlyingData[type]^n * getMultiplier(underlying)}; order 0 returns
 * 1. For a {@code MixedOrderUnderlying} it returns the product of the values
 * of its component orders.
 *
 * @param underlyingData market values keyed by underlying type; not null, not
 *        empty, no null keys or values
 * @param underlying the underlying descriptor, not null
 * @return the computed value
 * @throws IllegalArgumentException if the underlying is of an unknown type, or
 *         is a mixed order with no component orders (the null result falls
 *         through to the throw)
 */
public static double getValue(final Map<UnderlyingType, Double> underlyingData, final Underlying underlying) {
    Validate.notNull(underlying, "underlying");
    Validate.notNull(underlyingData, "underlying data");
    Validate.notEmpty(underlyingData, "underlying data");
    Validate.noNullElements(underlyingData.keySet(), "underlying data keys");
    Validate.noNullElements(underlyingData.values(), "underlying data values");
    if (underlying instanceof NthOrderUnderlying) {
        final NthOrderUnderlying nthOrder = (NthOrderUnderlying) underlying;
        final int n = nthOrder.getOrder();
        if (n == 0) {
            // x^0 == 1 regardless of the underlying's market value.
            return 1;
        }
        final UnderlyingType type = nthOrder.getUnderlying();
        Validate.isTrue(underlyingData.containsKey(type));
        final double value = Math.pow(underlyingData.get(type), n);
        return value * getMultiplier(underlying);
    } else if (underlying instanceof MixedOrderUnderlying) {
        final MixedOrderUnderlying mixedOrder = (MixedOrderUnderlying) underlying;
        // Product over all component orders. result stays null (and control
        // falls through to the exception below) only when there are no
        // component orders.
        Double result = null;
        double multiplier;
        for (final NthOrderUnderlying underlyingOrder : mixedOrder.getUnderlyingOrders()) {
            if (result == null) {
                result = getValue(underlyingData, underlyingOrder);
            } else {
                multiplier = getValue(underlyingData, underlyingOrder);
                result = result * multiplier;
            }
        }
        if (result != null) {
            return result;
        }
    }
    throw new IllegalArgumentException(
            "Order was neither NthOrderUnderlying nor MixedOrderUnderlying: have " + underlying.getClass());
}
From source file:com.exxonmobile.ace.hybris.storefront.servlets.util.FilterSpringUtil.java
/** * The same as {@link #getSpringBean(HttpServletRequest, String, Class)} but uses ServletContext as the first * parameter. It might be used in places, where HttpServletRequest is not available, but ServletContext is. *///from ww w . j av a 2 s . com public static <T> T getSpringBean(final ServletContext servletContext, final String beanName, final Class<T> beanClass) { T ret = null; final WebApplicationContext appContext = WebApplicationContextUtils .getRequiredWebApplicationContext(servletContext); if (StringUtils.isNotBlank(beanName)) { try { ret = (T) appContext.getBean(beanName); } catch (final NoSuchBeanDefinitionException ex) { LOG.warn("No bean found with the specified name. Trying to resolve bean using type..."); } } if (ret == null) { if (beanClass == null) { LOG.warn("No bean could be resolved. Reason: No type specified."); } else { final Map<String, T> beansOfType = appContext.getBeansOfType(beanClass); if (beansOfType != null && !beansOfType.isEmpty()) { if (beansOfType.size() > 1) { LOG.warn("More than one matching bean found of type " + beanClass.getSimpleName() + ". Returning the first one found."); } ret = beansOfType.values().iterator().next(); } } } return ret; }