Example usage for java.util Map.Entry get

List of usage examples for java.util Map.Entry get

Introduction

On this page you can find example usage for java.util Map.Entry get. Note that get(Object key) is declared on java.util.Map rather than on Map.Entry itself; the examples below call it on maps and nested maps reached through Map.Entry, typically while iterating a map's entrySet().

Prototype

V get(Object key);

Document

Returns the value to which the specified key is mapped, or null if this map contains no mapping for the key.
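
Before the full examples, here is a minimal, self-contained sketch of that behavior (class and variable names are invented for illustration). It shows get returning null for an absent key, and the entry.getValue().get(key) pattern that recurs in the usage examples below:

import java.util.HashMap;
import java.util.Map;

public class MapGetSketch {
    public static void main(String[] args) {
        Map<String, Map<String, Integer>> scoresByTeam = new HashMap<>();
        Map<String, Integer> redScores = new HashMap<>();
        redScores.put("alice", 3);
        scoresByTeam.put("red", redScores);

        // get returns the mapped value, or null when no mapping exists
        System.out.println(redScores.get("alice")); // 3
        System.out.println(redScores.get("bob"));   // null

        // Common pattern: call get on a value obtained from a Map.Entry
        // while iterating entrySet()
        for (Map.Entry<String, Map<String, Integer>> entry : scoresByTeam.entrySet()) {
            Integer score = entry.getValue().get("alice");
            System.out.println(entry.getKey() + " -> " + score);
        }
    }
}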

Usage

From source file:org.apache.phoenix.log.TableLogWriter.java

@Override
public void write(RingBufferEvent event) throws SQLException, IOException, ClassNotFoundException {
    if (isClosed()) {
        LOG.warn("Unable to commit query log as Log committer is already closed");
        return;
    }
    if (connection == null) {
        synchronized (this) {
            if (connection == null) {
                connection = QueryUtil.getConnectionForQueryLog(this.config);
                this.upsertStatement = buildUpsertStatement(connection);
            }
        }
    }

    ImmutableMap<QueryLogInfo, Object> queryInfoMap = event.getQueryInfo();
    for (QueryLogInfo info : QueryLogInfo.values()) {
        if (queryInfoMap.containsKey(info)
                && info.logLevel.ordinal() <= event.getConnectionLogLevel().ordinal()) {
            upsertStatement.setObject(info.ordinal() + 1, queryInfoMap.get(info));
        } else {
            upsertStatement.setObject(info.ordinal() + 1, null);
        }
    }
    Map<MetricType, Long> overAllMetrics = event.getOverAllMetrics();
    Map<String, Map<MetricType, Long>> readMetrics = event.getReadMetrics();

    for (MetricType metric : MetricType.values()) {
        if (overAllMetrics != null && overAllMetrics.containsKey(metric)
                && metric.isLoggingEnabled(event.getConnectionLogLevel())) {
            upsertStatement.setObject(metricOrdinals.get(metric), overAllMetrics.get(metric));
        } else {
            if (metric.logLevel() != LogLevel.OFF) {
                upsertStatement.setObject(metricOrdinals.get(metric), null);
            }
        }
    }

    if (readMetrics != null && !readMetrics.isEmpty()) {
        for (Map.Entry<String, Map<MetricType, Long>> entry : readMetrics.entrySet()) {
            upsertStatement.setObject(QueryLogInfo.TABLE_NAME_I.ordinal() + 1, entry.getKey());
            for (MetricType metric : entry.getValue().keySet()) {
                if (metric.isLoggingEnabled(event.getConnectionLogLevel())) {
                    upsertStatement.setObject(metricOrdinals.get(metric), entry.getValue().get(metric));
                }
            }
            upsertStatement.executeUpdate();
        }
    } else {
        upsertStatement.executeUpdate();
    }
    connection.commit();
}

From source file:eu.europa.ec.fisheries.uvms.exchange.dao.bean.ExchangeLogDaoBean.java

private void setQueryParameters(Query query, HashMap<ExchangeSearchField, List<SearchValue>> orderedValues) {
    for (Map.Entry<ExchangeSearchField, List<SearchValue>> criteria : orderedValues.entrySet()) {
        if (criteria.getValue().size() > 1) {
            query.setParameter(criteria.getKey().getSQLReplacementToken(), criteria.getValue());
        } else {
            query.setParameter(criteria.getKey().getSQLReplacementToken(), SearchFieldMapper
                    .buildValueFromClassType(criteria.getValue().get(0), criteria.getKey().getClazz()));
        }
    }
}

From source file:keel.Algorithms.UnsupervisedLearning.AssociationRules.Visualization.keelassotiationrulesbarchart.ResultsProccessor.java

public void writeToFile(String outName)
        throws FileNotFoundException, UnsupportedEncodingException, IOException {
    calcMeans();
    calcAvgRulesBySeed();

    // Create JFreeChart Dataset
    DefaultCategoryDataset dataset = new DefaultCategoryDataset();

    HashMap<String, Double> measuresFirst = algorithmMeasures.entrySet().iterator().next().getValue();
    for (Map.Entry<String, Double> measure : measuresFirst.entrySet()) {
        String measureName = measure.getKey();
        //Double measureValue = measure.getValue();
        dataset.clear();

        for (Map.Entry<String, HashMap<String, Double>> entry : algorithmMeasures.entrySet()) {
            String alg = entry.getKey();
            Double measureValue = entry.getValue().get(measureName);

            // Parse algorithm name to show it correctly
            String aName = alg.substring(0, alg.length() - 1);
            int startAlgName = aName.lastIndexOf("/");
            aName = aName.substring(startAlgName + 1);

            dataset.addValue(measureValue, aName, measureName);

            ChartFactory.setChartTheme(StandardChartTheme.createLegacyTheme());
            JFreeChart barChart = ChartFactory.createBarChart("Assotiation Rules Measures", measureName,
                    measureName, dataset, PlotOrientation.VERTICAL, true, true, false);
            StandardChartTheme.createLegacyTheme().apply(barChart);

            CategoryItemRenderer renderer = barChart.getCategoryPlot().getRenderer();

            // Black and White
            int numItems = algorithmMeasures.size();
            for (int i = 0; i < numItems; i++) {
                Color color = Color.DARK_GRAY;
                if (i % 2 == 1) {
                    color = Color.LIGHT_GRAY;
                }
                renderer.setSeriesPaint(i, color);
                renderer.setSeriesOutlinePaint(i, Color.BLACK);
            }

            int width = 640 * 2; /* Width of the image */
            int height = 480 * 2; /* Height of the image */

            // JPEG
            File BarChart = new File(outName + "_" + measureName + "_barchart.jpg");
            ChartUtilities.saveChartAsJPEG(BarChart, barChart, width, height);

            // SVG
            SVGGraphics2D g2 = new SVGGraphics2D(width, height);
            Rectangle r = new Rectangle(0, 0, width, height);
            barChart.draw(g2, r);
            File BarChartSVG = new File(outName + "_" + measureName + "_barchart.svg");
            SVGUtils.writeToSVG(BarChartSVG, g2.getSVGElement());
        }
    }
    /*
    for (Map.Entry<String, HashMap<String, Double>> entry : algorithmMeasures.entrySet())
    {
    String alg = entry.getKey();
    HashMap<String, Double> measures = entry.getValue();
            
    for (Map.Entry<String, Double> entry1 : measures.entrySet())
    {
        String measureName = entry1.getKey();
        Double measureValue = entry1.getValue();
                
        dataset.addValue(measureValue, alg, measureName);
    }
    }
        */

}

From source file:io.github.retz.scheduler.RetzScheduler.java

@Override
public void resourceOffers(SchedulerDriver driver, List<Protos.Offer> offers) {
    LOG.debug("Resource offer: {}", offers.size());

    // Merge fresh offers from Mesos and offers in stock here, declining duplicate offers
    Stanchion.schedule(() -> {
        List<Protos.Offer> available = new LinkedList<>();
        synchronized (OFFER_STOCK) {
            // TODO: cleanup this code, optimize for max.stock = 0 case
            Map<String, List<Protos.Offer>> allOffers = new HashMap<>();
            for (Protos.Offer offer : OFFER_STOCK.values()) {
                String key = offer.getSlaveId().getValue();
                List<Protos.Offer> list = allOffers.getOrDefault(key, new LinkedList<>());
                list.add(offer);
                allOffers.put(offer.getSlaveId().getValue(), list);
            }
            for (Protos.Offer offer : offers) {
                String key = offer.getSlaveId().getValue();
                List<Protos.Offer> list = allOffers.getOrDefault(key, new LinkedList<>());
                list.add(offer);
                allOffers.put(offer.getSlaveId().getValue(), list);
            }

            int declined = 0;
            for (Map.Entry<String, List<Protos.Offer>> e : allOffers.entrySet()) {
                if (e.getValue().size() == 1) {
                    available.add(e.getValue().get(0));
                } else {
                    for (Protos.Offer dup : e.getValue()) {
                        driver.declineOffer(dup.getId(), filters);
                        declined += 1;
                    }
                }
            }
            if (conf.fileConfig.getMaxStockSize() > 0) {
                LOG.info("Offer stock renewal: {} offers available ({} declined from stock)", available.size(),
                        declined);
            }
            OFFER_STOCK.clear();
        }

        ResourceQuantity total = new ResourceQuantity();
        for (Protos.Offer offer : available) {
            LOG.debug("offer: {}", offer);
            Resource resource = ResourceConstructor.decode(offer.getResourcesList());
            total.add(resource);
        }

        // TODO: change findFit to consider not only CPU and Memory, but GPUs and Ports
        List<Job> jobs = JobQueue.findFit(PLANNER.orderBy(), total);
        handleAll(available, jobs, driver);
        // Because this whole section is serialized by Stanchion, it is safe to fetch jobs
        // from the database and to update their state from queued => starting in
        // separate transactions
    });
}
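
The merge step above follows a common idiom: accumulate offers into a Map<String, List<...>> keyed by slave id, then treat entries whose list holds exactly one element as usable and decline the rest. A minimal standalone sketch of that idiom, with invented names and plain strings standing in for Protos.Offer:

import java.util.Arrays;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;

public class GroupBySlaveSketch {
    public static void main(String[] args) {
        List<String[]> offers = Arrays.asList(new String[] { "slave-1", "offer-a" },
                new String[] { "slave-2", "offer-b" }, new String[] { "slave-2", "offer-c" });

        // Group offers by slave id, as resourceOffers does with stocked and fresh offers
        Map<String, List<String>> bySlave = new HashMap<>();
        for (String[] offer : offers) {
            bySlave.computeIfAbsent(offer[0], k -> new LinkedList<>()).add(offer[1]);
        }

        // Keep singletons; anything duplicated for a slave would be declined
        List<String> available = new LinkedList<>();
        int declined = 0;
        for (Map.Entry<String, List<String>> e : bySlave.entrySet()) {
            if (e.getValue().size() == 1) {
                available.add(e.getValue().get(0));
            } else {
                declined += e.getValue().size();
            }
        }
        System.out.println("available=" + available + ", declined=" + declined);
    }
}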

From source file:org.apache.nifi.minifi.c2.provider.nifi.rest.NiFiRestConfigurationProvider.java

@Override
public Configuration getConfiguration(String contentType, Integer version, Map<String, List<String>> parameters)
        throws ConfigurationProviderException {
    if (!CONTENT_TYPE.equals(contentType)) {
        throw new ConfigurationProviderException(
                "Unsupported content type: " + contentType + " supported value is " + CONTENT_TYPE);
    }
    String filename = templateNamePattern;
    for (Map.Entry<String, List<String>> entry : parameters.entrySet()) {
        if (entry.getValue().size() != 1) {
            throw new InvalidParameterException(
                    "Multiple values for same parameter not supported in this provider.");
        }
        filename = filename.replaceAll(Pattern.quote("${" + entry.getKey() + "}"), entry.getValue().get(0));
    }
    int index = filename.indexOf("${");
    while (index != -1) {
        int endIndex = filename.indexOf("}", index);
        if (endIndex == -1) {
            break;
        }
        String variable = filename.substring(index + 2, endIndex);
        if (!"version".equals(variable)) {
            throw new InvalidParameterException("Found unsubstituted parameter " + variable);
        }
        index = endIndex + 1;
    }

    String id = null;
    if (version == null) {
        String filenamePattern = Arrays.stream(filename.split(Pattern.quote("${version}"), -1))
                .map(Pattern::quote).collect(Collectors.joining("([0-9]+)"));
        Pair<String, Integer> maxIdAndVersion = getMaxIdAndVersion(filenamePattern);
        id = maxIdAndVersion.getFirst();
        version = maxIdAndVersion.getSecond();
    }
    filename = filename.replaceAll(Pattern.quote("${version}"), Integer.toString(version));
    WriteableConfiguration configuration = configurationCache.getCacheFileInfo(contentType, parameters)
            .getConfiguration(version);
    if (configuration.exists()) {
        if (logger.isDebugEnabled()) {
            logger.debug(
                    "Configuration " + configuration + " exists and can be served from configurationCache.");
        }
    } else {
        if (logger.isDebugEnabled()) {
            logger.debug("Configuration " + configuration
                    + " doesn't exist, will need to download and convert template.");
        }
        if (id == null) {
            try {
                String tmpFilename = templateNamePattern;
                for (Map.Entry<String, List<String>> entry : parameters.entrySet()) {
                    if (entry.getValue().size() != 1) {
                        throw new InvalidParameterException(
                                "Multiple values for same parameter not supported in this provider.");
                    }
                    tmpFilename = tmpFilename.replaceAll(Pattern.quote("${" + entry.getKey() + "}"),
                            entry.getValue().get(0));
                }
                Pair<Stream<Pair<String, String>>, Closeable> streamCloseablePair = getIdAndFilenameStream();
                try {
                    String finalFilename = filename;
                    id = streamCloseablePair.getFirst().filter(p -> finalFilename.equals(p.getSecond()))
                            .map(Pair::getFirst).findFirst().orElseThrow(() -> new InvalidParameterException(
                                    "Unable to find template named " + finalFilename));
                } finally {
                    streamCloseablePair.getSecond().close();
                }
            } catch (IOException | TemplatesIteratorException e) {
                throw new ConfigurationProviderException("Unable to retrieve template list", e);
            }
        }

        HttpURLConnection urlConnection = httpConnector.get("/templates/" + id + "/download");

        try (InputStream inputStream = urlConnection.getInputStream()) {
            ConfigSchema configSchema = ConfigMain.transformTemplateToSchema(inputStream);
            SchemaSaver.saveConfigSchema(configSchema, configuration.getOutputStream());
        } catch (IOException e) {
            throw new ConfigurationProviderException(
                    "Unable to download template from url " + urlConnection.getURL(), e);
        } catch (JAXBException e) {
            throw new ConfigurationProviderException("Unable to convert template to yaml", e);
        } finally {
            urlConnection.disconnect();
        }
    }
    return configuration;
}

From source file:org.apache.falcon.hive.DefaultPartitioner.java

public ReplicationEventMetadata partition(final HiveDROptions drOptions, final String databaseName,
        final Iterator<ReplicationTask> taskIter) throws Exception {
    long lastCounter = 0;
    String dbName = databaseName.toLowerCase();
    // init filtering before partitioning
    this.eventFilter = new EventFilter(drOptions.getSourceMetastoreUri(), drOptions.getTargetMetastoreUri(),
            drOptions.getJobName(), dbName);
    String srcStagingDirProvider = drOptions.getSourceStagingPath();
    String dstStagingDirProvider = drOptions.getTargetStagingPath();

    List<Command> dbSrcEventList = Lists.newArrayList();
    List<Command> dbTgtEventList = Lists.newArrayList();

    Map<String, List<String>> eventMetaFileMap = new HashMap<>();
    Map<String, List<OutputStream>> outputStreamMap = new HashMap<>();

    String srcFilename = null;
    String tgtFilename = null;
    OutputStream srcOutputStream = null;
    OutputStream tgtOutputStream = null;

    while (taskIter.hasNext()) {
        ReplicationTask task = taskIter.next();
        if (task.needsStagingDirs()) {
            task.withSrcStagingDirProvider(
                    new StagingDirectoryProvider.TrivialImpl(srcStagingDirProvider, HiveDRUtils.SEPARATOR));
            task.withDstStagingDirProvider(
                    new StagingDirectoryProvider.TrivialImpl(dstStagingDirProvider, HiveDRUtils.SEPARATOR));
        }

        if (task.isActionable()) {
            Scope eventScope = task.getEvent().getEventScope();
            String tableName = task.getEvent().getTableName();
            if (StringUtils.isNotEmpty(tableName)) {
                tableName = tableName.toLowerCase();
            }

            boolean firstEventForTable = (eventScope == Scope.TABLE)
                    && isFirstEventForTable(eventMetaFileMap, tableName);
            if (firstEventForTable && (task.getSrcWhCommands() != null || task.getDstWhCommands() != null)) {
                ++lastCounter;
            }
            Iterable<? extends org.apache.hive.hcatalog.api.repl.Command> srcCmds = task.getSrcWhCommands();
            if (srcCmds != null) {
                if (eventScope == Scope.DB) {
                    processDBScopeCommands(dbSrcEventList, srcCmds, outputStreamMap, CMDTYPE.SRC_CMD_TYPE);
                } else if (eventScope == Scope.TABLE) {
                    OutputStream srcOut;
                    if (firstEventForTable) {
                        srcFilename = eventSourcerUtils.getSrcFileName(String.valueOf(lastCounter)).toString();
                        srcOutputStream = eventSourcerUtils.getFileOutputStream(srcFilename);
                        srcOut = srcOutputStream;
                    } else {
                        srcOut = outputStreamMap.get(tableName).get(0);
                    }
                    processTableScopeCommands(srcCmds, eventMetaFileMap, tableName, dbSrcEventList, srcOut);
                } else {
                    throw new Exception("Event scope is not DB or Table");
                }
            }

            Iterable<? extends org.apache.hive.hcatalog.api.repl.Command> dstCmds = task.getDstWhCommands();
            if (dstCmds != null) {
                if (eventScope == Scope.DB) {
                    processDBScopeCommands(dbTgtEventList, dstCmds, outputStreamMap, CMDTYPE.TGT_CMD_TYPE);
                } else if (eventScope == Scope.TABLE) {
                    OutputStream tgtOut;
                    if (firstEventForTable) {
                        tgtFilename = eventSourcerUtils.getTargetFileName(String.valueOf(lastCounter))
                                .toString();
                        tgtOutputStream = eventSourcerUtils.getFileOutputStream(tgtFilename);
                        tgtOut = tgtOutputStream;
                    } else {
                        tgtOut = outputStreamMap.get(tableName).get(1);
                    }
                    processTableScopeCommands(dstCmds, eventMetaFileMap, tableName, dbTgtEventList, tgtOut);
                } else {
                    throw new Exception("Event scope is not DB or Table");
                }
            }

            // If first table event, update the state data at the end
            if (firstEventForTable) {
                updateStateDataIfFirstTableEvent(tableName, srcFilename, tgtFilename, srcOutputStream,
                        tgtOutputStream, eventMetaFileMap, outputStreamMap);
            }
        } else {
            LOG.error("Task is not actionable with event Id : {}", task.getEvent().getEventId());
        }
    }

    ReplicationEventMetadata eventMetadata = new ReplicationEventMetadata();
    // If there were only DB events for this run
    if (eventMetaFileMap.isEmpty()) {
        ++lastCounter;
        if (!dbSrcEventList.isEmpty()) {
            srcFilename = eventSourcerUtils.getSrcFileName(String.valueOf(lastCounter)).toString();
            srcOutputStream = eventSourcerUtils.getFileOutputStream(srcFilename);
            eventSourcerUtils.persistReplicationEvents(srcOutputStream, dbSrcEventList);
        }

        if (!dbTgtEventList.isEmpty()) {
            tgtFilename = eventSourcerUtils.getTargetFileName(String.valueOf(lastCounter)).toString();
            tgtOutputStream = eventSourcerUtils.getFileOutputStream(tgtFilename);
            eventSourcerUtils.persistReplicationEvents(tgtOutputStream, dbTgtEventList);
        }

        // Close the stream
        eventSourcerUtils.closeOutputStream(srcOutputStream);
        eventSourcerUtils.closeOutputStream(tgtOutputStream);
        EventSourcerUtils.updateEventMetadata(eventMetadata, dbName, null, srcFilename, tgtFilename);
    } else {
        closeAllStreams(outputStreamMap);
        for (Map.Entry<String, List<String>> entry : eventMetaFileMap.entrySet()) {
            String srcFile = null;
            String tgtFile = null;
            if (entry.getValue() != null) {
                srcFile = entry.getValue().get(0);
                tgtFile = entry.getValue().get(1);
            }
            EventSourcerUtils.updateEventMetadata(eventMetadata, dbName, entry.getKey(), srcFile, tgtFile);
        }
    }

    return eventMetadata;
}

From source file:org.apache.storm.daemon.supervisor.SyncSupervisorEvent.java

protected Map<String, String> readStormCodeLocations(Map<String, Map<String, Object>> assignmentsSnapshot) {
    Map<String, String> stormcodeMap = new HashMap<>();
    for (Map.Entry<String, Map<String, Object>> entry : assignmentsSnapshot.entrySet()) {
        Assignment assignment = (Assignment) (entry.getValue().get(IStateStorage.DATA));
        if (assignment != null) {
            stormcodeMap.put(entry.getKey(), assignment.get_master_code_dir());
        }
    }
    return stormcodeMap;
}

From source file:org.apache.atlas.model.instance.EntityMutationResponse.java

public StringBuilder toString(StringBuilder sb) {
    if (sb == null) {
        sb = new StringBuilder();
    }

    if (MapUtils.isNotEmpty(entitiesMutated)) {
        int i = 0;
        for (Map.Entry<EntityMutations.EntityOperation, List<AtlasEntityHeader>> e : entitiesMutated
                .entrySet()) {
            if (i > 0) {
                sb.append(",");
            }
            sb.append(e.getKey()).append(":");
            if (CollectionUtils.isNotEmpty(e.getValue())) {
                for (int j = 0; j < e.getValue().size(); j++) {
                    if (j > 0) {
                        sb.append(",");
                    }
                    e.getValue().get(j).toString(sb);
                }
            }
            i++;
        }
    }

    return sb;
}

From source file:com.coverity.report.analysis.ProtecodeSCToolProcessor.java

@Override
public void getAggregatedData(Report report) {
    Report.Tools.ProtecodeSC protecodeSC = new Report.Tools.ProtecodeSC();
    report.getTools().setProtecodeSC(protecodeSC);

    // Collapse the components data by removing non-unique entries.  This is needed in
    // order to make the results match what the Protecode SC GUI shows.
    Map<String, ProtecodeSCComponentsData> uniqueComponentsMap = new TreeMap<>();
    componentsDataList.stream()
            .forEach(data -> uniqueComponentsMap.put(data.getComponent() + "-" + data.getVersion(), data));

    // Similarly, collapse the vulnerabilities data using the name, version and CVE of each vulnerability
    // as the key
    Map<String, ProtecodeSCVulnerabilitiesData> uniqueVulnerabilitiesMap = new TreeMap<>();
    vulnerabilitiesDataList.stream().forEach(data -> uniqueVulnerabilitiesMap
            .put(data.getComponent() + "-" + data.getVersion() + "-" + data.getCve(), data));

    // Make a map for counting the criticalities 
    int knownVulns = uniqueVulnerabilitiesMap.size();
    int vulnerableComponents = 0, totCriticalVulns = 0, totMajorVulns = 0, totMinorVulns = 0;
    Map<String, Map<Criticality, Integer>> componentVulnerabilityMap = new HashMap<>();

    // Fill the map with zeroes
    uniqueComponentsMap.values().stream().forEach(comp -> {
        Map<Criticality, Integer> severityMap = new HashMap<>();
        Arrays.stream(Criticality.values()).forEach(c -> severityMap.put(c, 0));
        componentVulnerabilityMap.put(makeKey(comp.getComponent(), comp.getVersion()), severityMap);
    });
    // Accumulate the count for each criticality level
    for (ProtecodeSCVulnerabilitiesData vulnerabilitiesData : uniqueVulnerabilitiesMap.values()) {
        Criticality criticality = Criticality.fromScore(vulnerabilitiesData.getCvss());
        Map<Criticality, Integer> severityMap = componentVulnerabilityMap
                .get(makeKey(vulnerabilitiesData.getComponent(), vulnerabilitiesData.getVersion()));
        severityMap.put(criticality, severityMap.get(criticality) + 1);
    }

    for (Map.Entry<String, Map<Criticality, Integer>> map : componentVulnerabilityMap.entrySet()) {
        totCriticalVulns += map.getValue().get(Criticality.Critical);
        totMajorVulns += map.getValue().get(Criticality.Major);
        totMinorVulns += map.getValue().get(Criticality.Minor);
    }

    // Count the number of licenses of each type.
    // First collapse the list of components by license
    Map<String, String> licenseTypeMap = new TreeMap<>();
    uniqueComponentsMap.values().forEach(c -> licenseTypeMap.put(c.getLicense(), c.getLicenseType()));
    // Then count the number of each type.
    Map<String, Integer> licenseTypeCountMap = new HashMap<>();
    licenseTypeMap.values().forEach(type -> {
        Integer licenseCount = licenseTypeCountMap.get(type);
        if (licenseCount == null) {
            licenseCount = 1;
        } else {
            ++licenseCount;
        }
        licenseTypeCountMap.put(type, licenseCount);
    });

    { // Policy elements
        for (ProtecodeSCComponentsData componentsData : uniqueComponentsMap.values()) {
            if (componentsData.getVulnerabilityCount() > 0) {
                vulnerableComponents++;
            }
        }

        protecodeSC.setPolicyElements(new PolicyElements());
        List<PolicyElements.Point> policyElements = protecodeSC.getPolicyElements().getPoint();
        policyElements.add(makePolicyElement(1, "Components with Vulnerabilities", vulnerableComponents));
        policyElements.add(makePolicyElement(2, "Known Vulnerabilities", knownVulns));
        policyElements.add(makePolicyElement(3, "Critical Vulnerabilities", totCriticalVulns));
    }

    { // Components
        int totalComponents = 0, noKnownVulnerablitiesCount = 0;
        totalComponents = uniqueComponentsMap.size();
        noKnownVulnerablitiesCount = totalComponents - vulnerableComponents;

        protecodeSC.setComponents(new Components());
        List<Components.Point> components = protecodeSC.getComponents().getPoint();
        components.add(makeComponents(1, "Vulnerable", vulnerableComponents));
        components.add(makeComponents(2, "No known vulnerabilities", noKnownVulnerablitiesCount));
    }

    { // Vulnerabilities
        protecodeSC.setVulnerabilities(new Vulnerabilities());
        List<Vulnerabilities.Point> vulnerabilities = protecodeSC.getVulnerabilities().getPoint();
        vulnerabilities.add(makeVulnerabilities(1, "Critical", totCriticalVulns));
        vulnerabilities.add(makeVulnerabilities(2, "Major", totMajorVulns));
        vulnerabilities.add(makeVulnerabilities(3, "Minor", totMinorVulns));
    }

    { // Licenses
        protecodeSC.setLicenses(new Licenses());
        List<Licenses.Point> licenses = protecodeSC.getLicenses().getPoint();
        int order = 0;
        for (Map.Entry<String, Integer> entry : licenseTypeCountMap.entrySet()) {
            Licenses.Point point = new Licenses.Point();
            point.setCount(entry.getValue());
            point.setLabel(entry.getKey());
            point.setOrder(order++);
            licenses.add(point);
        }
    }

    { // Top 10 Vulnerable Components
      // Roll up
        final int ROLLUP_INDEX = 10;
        List<ComponentCriticalities> pairs = makeRolledUpList(componentVulnerabilityMap, ROLLUP_INDEX);

        // Transform to a list of points
        int order = pairs.size();
        protecodeSC.setTop10VulnerableComponents(new Top10VulnerableComponents());
        List<Top10VulnerableComponents.Component> top10VulnerableComponents = protecodeSC
                .getTop10VulnerableComponents().getComponent();
        for (ComponentCriticalities pair : pairs) {
            Top10VulnerableComponents.Component component = new Top10VulnerableComponents.Component();
            component.setOrder(order--);
            component.setLabel(pair.name);

            List<Top10VulnerableComponents.Component.Point> points = component.getPoint();
            pair.map.forEach((k, v) -> {
                Top10VulnerableComponents.Component.Point point = new Top10VulnerableComponents.Component.Point();
                point.setCount(v);
                point.setSeverity(k.name());
                points.add(point);
            });
            top10VulnerableComponents.add(component);
        }
    }
}
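
The two "collapse" steps at the top of this method rely on the fact that Map.put overwrites any earlier value stored under the same key, so building a map keyed by a composite string deduplicates the list. A minimal standalone sketch of that idiom, with invented names and string arrays standing in for the data classes:

import java.util.Arrays;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;

public class CollapseByKeySketch {
    public static void main(String[] args) {
        List<String[]> components = Arrays.asList(new String[] { "openssl", "1.0.2" },
                new String[] { "openssl", "1.0.2" }, // duplicate entry
                new String[] { "zlib", "1.2.11" });

        // Later puts with the same composite key overwrite earlier ones,
        // leaving exactly one entry per component-version pair
        Map<String, String[]> unique = new TreeMap<>();
        for (String[] c : components) {
            unique.put(c[0] + "-" + c[1], c);
        }
        System.out.println(unique.size()); // 2
    }
}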

From source file:org.apache.ctakes.ytex.kernel.FoldGeneratorImpl.java

@Override
public SortedMap<String, SortedMap<Integer, SortedMap<Integer, SortedMap<Boolean, SortedMap<Long, String>>>>> generateRuns(
        SortedMap<String, SortedMap<Integer, SortedMap<Integer, SortedMap<Boolean, SortedMap<Long, String>>>>> labelToInstanceMap,
        int nFolds, int nMinPerClass, Integer nSeed, int nRuns) {
    // allocate map to return
    SortedMap<String, SortedMap<Integer, SortedMap<Integer, SortedMap<Boolean, SortedMap<Long, String>>>>> labelToInstanceFoldMap = new TreeMap<String, SortedMap<Integer, SortedMap<Integer, SortedMap<Boolean, SortedMap<Long, String>>>>>();
    // initialize random seed
    Random r = new Random(nSeed != null ? nSeed : System.currentTimeMillis());
    // iterate over labels
    for (Map.Entry<String, SortedMap<Integer, SortedMap<Integer, SortedMap<Boolean, SortedMap<Long, String>>>>> labelRun : labelToInstanceMap
            .entrySet()) {
        String label = labelRun.getKey();
        // extract the instance id - class map
        SortedMap<Long, String> instanceClassMap = labelRun.getValue().get(0).get(0).get(true);
        // allocate the run to fold map
        SortedMap<Integer, SortedMap<Integer, SortedMap<Boolean, SortedMap<Long, String>>>> runMap = new TreeMap<Integer, SortedMap<Integer, SortedMap<Boolean, SortedMap<Long, String>>>>();
        labelToInstanceFoldMap.put(label, runMap);
        // iterate over runs
        for (int run = 1; run <= nRuns; run++) {
            // generate folds for run
            List<Set<Long>> folds = createFolds(nFolds, nMinPerClass, r, instanceClassMap);
            SortedMap<Integer, SortedMap<Boolean, SortedMap<Long, String>>> foldMap = new TreeMap<Integer, SortedMap<Boolean, SortedMap<Long, String>>>();
            // add the fold map to the run map
            runMap.put(run, foldMap);
            // iterate over folds
            for (int trainFoldNum = 1; trainFoldNum <= folds.size(); trainFoldNum++) {
                // add train/test sets for the fold
                SortedMap<Boolean, SortedMap<Long, String>> trainTestMap = new TreeMap<Boolean, SortedMap<Long, String>>();
                foldMap.put(trainFoldNum, trainTestMap);
                trainTestMap.put(true, new TreeMap<Long, String>());
                trainTestMap.put(false, new TreeMap<Long, String>());
                // populate the train/test sets
                Set<Long> testIds = folds.get(trainFoldNum - 1);
                // iterate over all instances
                for (Map.Entry<Long, String> instanceClass : instanceClassMap.entrySet()) {
                    long instanceId = instanceClass.getKey();
                    String clazz = instanceClass.getValue();
                    // add the instance to the test set if it is in testIds,
                    // else to the train set
                    trainTestMap.get(!testIds.contains(instanceId)).put(instanceId, clazz);
                }
            }
        }
    }
    return labelToInstanceFoldMap;
}