Example usage for java.util Map.Entry get

List of usage examples for java.util Map.Entry get

Introduction

On this page you can find example usage for java.util Map.Entry get.

Prototype

V get(Object key);

Document

Returns the value to which the specified key is mapped, or null if this map contains no mapping for the key.
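Before the full examples below, here is a minimal, self-contained sketch of the pattern they all share: iterating a map's entries and calling get, either on the map itself or on a collection stored as an entry's value. The class name and data are illustrative only.

import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class MapEntryGetExample {
    public static void main(String[] args) {
        // illustrative data: metric name -> samples
        Map<String, List<Integer>> samples = new HashMap<>();
        samples.put("Bytes", List.of(10, 20, 30));
        samples.put("Records", List.of(1, 2, 3));

        // entry.getValue() returns the mapped value;
        // List.get(index) then reads one element from it
        for (Map.Entry<String, List<Integer>> entry : samples.entrySet()) {
            Integer first = entry.getValue().get(0);
            System.out.printf("%s: first sample = %d%n", entry.getKey(), first);
        }

        // Map.get(Object key) returns the mapped value,
        // or null if the map contains no mapping for the key
        List<Integer> missing = samples.get("Latency");
        System.out.println(missing); // prints: null
    }
}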

Usage

From source file:com.amazonaws.services.kinesis.scaling.auto.StreamMonitor.java

protected ScalingOperationReport processCloudwatchMetrics(
        Map<KinesisOperationType, Map<StreamMetric, Map<Datapoint, Double>>> currentUtilisationMetrics,
        Map<KinesisOperationType, StreamMetrics> streamMaxCapacity, int cwSampleDuration, DateTime now) {
    ScalingOperationReport report = null;
    ScaleDirection finalScaleDirection = null;

    // for each type of operation that the customer has requested profiling
    // (PUT, GET)
    Map<KinesisOperationType, ScaleDirection> scaleVotes = new HashMap<>();

    for (Map.Entry<KinesisOperationType, Map<StreamMetric, Map<Datapoint, Double>>> entry : currentUtilisationMetrics
            .entrySet()) {
        // set the default scaling vote to 'do nothing'
        scaleVotes.put(entry.getKey(), ScaleDirection.NONE);

        Map<StreamMetric, Triplet<Integer, Integer, Double>> perMetricSamples = new HashMap<>();
        StreamMetric higherUtilisationMetric;
        Double higherUtilisationPct;

        // process each metric type, including Records and Bytes
        for (StreamMetric metric : StreamMetric.values()) {
            double currentMax = 0D;
            double currentPct = 0D;
            double latestPct = 0d;
            double latestMax = 0d;
            double latestAvg = 0d;
            DateTime lastTime = null;
            int lowSamples = 0;
            int highSamples = 0;

            Map<Datapoint, Double> metrics = new HashMap<>();

            if (!currentUtilisationMetrics.containsKey(entry.getKey())
                    || !entry.getValue().containsKey(metric)) {
                // we have no samples for this type of metric which is ok -
                // they'll later be counted as low metrics
            } else {
                metrics = entry.getValue().get(metric);
            }

            // if we got nothing back, then there are no operations of the
            // given type happening, so this is a full 'low sample'
            if (metrics.size() == 0) {
                lowSamples = this.config.getScaleDown().getScaleAfterMins();
            }

            // process the data point aggregates retrieved from CloudWatch
            // and log scale up/down votes by period
            for (Map.Entry<Datapoint, Double> datapointEntry : metrics.entrySet()) {
                currentMax = datapointEntry.getValue();
                currentPct = currentMax / streamMaxCapacity.get(entry.getKey()).get(metric);
                // keep track of the last measures
                if (lastTime == null
                        || new DateTime(datapointEntry.getKey().getTimestamp()).isAfter(lastTime)) {
                    latestPct = currentPct;
                    latestMax = currentMax;

                    // latest average is a simple moving average
                    latestAvg = latestAvg == 0d ? currentPct : (latestAvg + currentPct) / 2;
                }
                lastTime = new DateTime(datapointEntry.getKey().getTimestamp());

                // if the pct for the datapoint exceeds or is below the
                // thresholds, then add low/high samples
                if (currentPct > new Double(this.config.getScaleUp().getScaleThresholdPct()) / 100) {
                    LOG.debug(String.format("%s %s: Cached High Alarm Condition for %.2f %s/Second (%.2f%%)",
                            entry.getKey(), metric, currentMax, metric, currentPct * 100));
                    highSamples++;
                } else if (currentPct < new Double(this.config.getScaleDown().getScaleThresholdPct()) / 100) {
                    LOG.debug(String.format("%s %s: Cached Low Alarm Condition for %.2f %s/Second (%.2f%%)",
                            entry.getKey(), metric, currentMax, metric, currentPct * 100));
                    lowSamples++;
                }
            }

            // add low samples for the periods in which we didn't get any
            // data points, if there are any
            if (metrics.size() < cwSampleDuration) {
                lowSamples += cwSampleDuration - metrics.size();
            }

            LOG.info(String.format(
                    metric + ": Stream %s Used %s[%s] Capacity ~ %.2f%% (%,.0f " + metric + " of %d)",
                    config.getStreamName(), entry.getKey(), metric, latestAvg * 100, latestMax,
                    streamMaxCapacity.get(entry.getKey()).get(metric)));

            // merge the per-stream metric samples together for the
            // operation
            if (!perMetricSamples.containsKey(metric)) {
                // create a new sample entry
                perMetricSamples.put(metric, new Triplet<>(highSamples, lowSamples, latestAvg));
            } else {
                // merge the samples
                Triplet<Integer, Integer, Double> previousHighLow = perMetricSamples.get(metric);
                Triplet<Integer, Integer, Double> newHighLow = new Triplet<>(
                        previousHighLow.getValue0() + highSamples, previousHighLow.getValue1() + lowSamples,
                        (previousHighLow.getValue2() + latestAvg) / 2);
                perMetricSamples.put(metric, newHighLow);
            }
        }

        /*-
         * we now have per metric samples for this operation type
         * 
         * For Example: 
         * 
         * Metric  | High Samples | Low Samples | Pct Used
         * Bytes   | 3            | 0           | .98
         * Records | 0            | 10          | .2
         * 
         * Check these values against the provided configuration. If we have
         * been above the 'scaleAfterMins' with high samples for either
         * metric, then we scale up. If not, then if we've been below the
         * scaleAfterMins with low samples, then we scale down. Otherwise
         * the vote stays as NONE
         */

        // first find out which of the dimensions of stream utilisation is
        // higher - we'll use the higher of the two for time checks
        if (perMetricSamples.get(StreamMetric.Bytes).getValue2() >= perMetricSamples.get(StreamMetric.Records)
                .getValue2()) {
            higherUtilisationMetric = StreamMetric.Bytes;
            higherUtilisationPct = perMetricSamples.get(StreamMetric.Bytes).getValue2();
        } else {
            higherUtilisationMetric = StreamMetric.Records;
            higherUtilisationPct = perMetricSamples.get(StreamMetric.Records).getValue2();
        }

        LOG.info(String.format(
                "Will decide scaling action based on metric %s[%s] due to higher utilisation metric %.2f%%",
                entry.getKey(), higherUtilisationMetric, higherUtilisationPct * 100));

        if (perMetricSamples.get(higherUtilisationMetric).getValue0() >= config.getScaleUp()
                .getScaleAfterMins()) {
            scaleVotes.put(entry.getKey(), ScaleDirection.UP);
        } else if (perMetricSamples.get(higherUtilisationMetric).getValue1() >= config.getScaleDown()
                .getScaleAfterMins()) {
            scaleVotes.put(entry.getKey(), ScaleDirection.DOWN);
        }
    }

    // process the scaling votes
    ScaleDirection getVote = scaleVotes.get(KinesisOperationType.GET);
    ScaleDirection putVote = scaleVotes.get(KinesisOperationType.PUT);

    // check if we have both get and put votes - if we have both then
    // implement the decision matrix
    if (getVote != null && putVote != null) {
        // if either of the votes is to scale up, then do so. If both are
        // NONE, then do nothing. Otherwise scale down
        if (getVote == ScaleDirection.UP || putVote == ScaleDirection.UP) {
            finalScaleDirection = ScaleDirection.UP;
        } else if (getVote == ScaleDirection.NONE && putVote == ScaleDirection.NONE) {
            finalScaleDirection = ScaleDirection.NONE;
        } else {
            finalScaleDirection = ScaleDirection.DOWN;
        }
    } else {
        // we only have get or put votes, so use the non-null one
        finalScaleDirection = (getVote == null ? putVote : getVote);
    }

    try {
        int currentShardCount = this.scaler.getOpenShardCount(this.config.getStreamName());

        // if the metric stats indicate a scale up or down, then do the
        // action
        if (finalScaleDirection.equals(ScaleDirection.UP)) {
            // submit a scale up task
            Integer scaleUpCount = this.config.getScaleUp().getScaleCount();

            LOG.info(String.format(
                    "Requesting Scale Up of Stream %s by %s as %s has been above %s%% for %s Minutes",
                    this.config.getStreamName(),
                    (scaleUpCount != null) ? scaleUpCount : this.config.getScaleUp().getScalePct() + "%",
                    this.config.getScaleOnOperations().toString(),
                    this.config.getScaleUp().getScaleThresholdPct(),
                    this.config.getScaleUp().getScaleAfterMins()));

            // TODO migrate this block to UpdateShardCount API
            if (scaleUpCount != null) {
                report = this.scaler.updateShardCount(this.config.getStreamName(), currentShardCount,
                        currentShardCount + scaleUpCount, this.config.getMinShards(),
                        this.config.getMaxShards());
            } else {
                report = this.scaler.updateShardCount(this.config.getStreamName(), currentShardCount,
                        new Double(
                                currentShardCount * (new Double(this.config.getScaleUp().getScalePct()) / 100))
                                        .intValue(),
                        this.config.getMinShards(), this.config.getMaxShards());

            }

            // send SNS notifications
            if (this.config.getScaleUp().getNotificationARN() != null && this.snsClient != null) {
                StreamScalingUtils.sendNotification(this.snsClient,
                        this.config.getScaleUp().getNotificationARN(), "Kinesis Autoscaling - Scale Up",
                        (report == null ? "No Changes Made" : report.asJson()));
            }
        } else if (finalScaleDirection.equals(ScaleDirection.DOWN)) {
            // check the cool down interval
            if (lastScaleDown != null
                    && now.minusMinutes(this.config.getScaleDown().getCoolOffMins()).isBefore(lastScaleDown)) {
                LOG.info(String.format(
                        "Stream %s: Deferring Scale Down until Cool Off Period of %s Minutes has elapsed",
                        this.config.getStreamName(), this.config.getScaleDown().getCoolOffMins()));
            } else {
                // submit a scale down
                Integer scaleDownCount = this.config.getScaleDown().getScaleCount();
                LOG.info(String.format(
                        "Requesting Scale Down of Stream %s by %s as %s has been below %s%% for %s Minutes",
                        this.config.getStreamName(),
                        (scaleDownCount != null) ? scaleDownCount
                                : this.config.getScaleDown().getScalePct() + "%",
                        config.getScaleOnOperations().toString(),
                        this.config.getScaleDown().getScaleThresholdPct(),
                        this.config.getScaleDown().getScaleAfterMins()));
                try {
                    if (scaleDownCount != null) {
                        report = this.scaler.updateShardCount(this.config.getStreamName(), currentShardCount,
                                currentShardCount - scaleDownCount, this.config.getMinShards(),
                                this.config.getMaxShards());
                    } else {
                        report = this.scaler.updateShardCount(this.config.getStreamName(), currentShardCount,
                                new Double(currentShardCount
                                        - (new Double(this.config.getScaleDown().getScalePct()) / 100))
                                                .intValue(),
                                this.config.getMinShards(), this.config.getMaxShards());
                    }

                    lastScaleDown = new DateTime(System.currentTimeMillis());

                    // send SNS notifications
                    if (this.config.getScaleDown().getNotificationARN() != null && this.snsClient != null) {
                        StreamScalingUtils.sendNotification(this.snsClient,
                                this.config.getScaleDown().getNotificationARN(),
                                "Kinesis Autoscaling - Scale Down",
                                (report == null ? "No Changes Made" : report.asJson()));
                    }
                } catch (AlreadyOneShardException aose) {
                    // do nothing - we're already at 1 shard
                    LOG.info(String.format("Stream %s: Not Scaling Down - Already at Minimum of 1 Shard",
                            this.config.getStreamName()));
                }
            }
        } else {
            // scale direction not set, so we're not going to scale
            // up or down - everything fine
            LOG.info("No Scaling required - Stream capacity within specified tolerances");
            return this.scaler.reportFor(ScalingCompletionStatus.NoActionRequired, this.config.getStreamName(),
                    0, finalScaleDirection);
        }
    } catch (Exception e) {
        LOG.error("Failed to process stream " + this.config.getStreamName(), e);
    }

    return report;
}

From source file:org.apache.storm.daemon.supervisor.SyncSupervisorEvent.java

protected Map<Integer, LocalAssignment> readMyExecutors(String stormId, String assignmentId,
        Assignment assignment) {
    Map<Integer, LocalAssignment> portTasks = new HashMap<>();
    Map<Long, WorkerResources> slotsResources = new HashMap<>();
    Map<NodeInfo, WorkerResources> nodeInfoWorkerResourcesMap = assignment.get_worker_resources();
    if (nodeInfoWorkerResourcesMap != null) {
        for (Map.Entry<NodeInfo, WorkerResources> entry : nodeInfoWorkerResourcesMap.entrySet()) {
            if (entry.getKey().get_node().equals(assignmentId)) {
                Set<Long> ports = entry.getKey().get_port();
                for (Long port : ports) {
                    slotsResources.put(port, entry.getValue());
                }
            }
        }
    }
    Map<List<Long>, NodeInfo> executorNodePort = assignment.get_executor_node_port();
    if (executorNodePort != null) {
        for (Map.Entry<List<Long>, NodeInfo> entry : executorNodePort.entrySet()) {
            if (entry.getValue().get_node().equals(assignmentId)) {
                for (Long port : entry.getValue().get_port()) {
                    LocalAssignment localAssignment = portTasks.get(port.intValue());
                    if (localAssignment == null) {
                        List<ExecutorInfo> executors = new ArrayList<ExecutorInfo>();
                        localAssignment = new LocalAssignment(stormId, executors);
                        if (slotsResources.containsKey(port)) {
                            localAssignment.set_resources(slotsResources.get(port));
                        }
                        portTasks.put(port.intValue(), localAssignment);
                    }
                    List<ExecutorInfo> executorInfoList = localAssignment.get_executors();
                    executorInfoList.add(new ExecutorInfo(entry.getKey().get(0).intValue(),
                            entry.getKey().get(entry.getKey().size() - 1).intValue()));
                }
            }
        }
    }
    return portTasks;
}

From source file:net.firejack.platform.core.cache.CacheProcessor.java

public void loadData(boolean forceReload) {
    if (/*cachedData.isEmpty() || */forceReload) {
        //                cachedData.clear();
        List<RoleModel> roles = roleStore.findAll();
        if (roles != null) {
            Map<String, PackageModel> pkgByLookup = retrievePackageByLookup();
            List<String> packageLookupList = new LinkedList<String>(pkgByLookup.keySet());

            Map<String, List<NavigationElement>> navigationElementsByPackage = retrieveNavigationElementsByPackage(
                    packageLookupList);
            Map<String, SortedSet<Action>> actionsByPackage = retrieveActionsByPackage();
            Map<String, List<ResourceLocation>> resourceLocationsByPackage = retrieveResourceLocationsByPackage(
                    packageLookupList);
            Map<Long, SecuredRecordNode> securedRecords = retrieveSecuredRecords();
            Map<Long, List<UserPermission>> permissionsByRoles = retrieveGlobalPermissions(roles);
            Map<Long, SecuredRecordPermissions> securedRecordPermissions = retrieveSecuredRecordPermissions();

            Map<Long, List<Long>> userRoles = userStore.findAllRolesByUsers();
            Map<Long, Map<String, IdFilter>> readActionFiltersByUsers = populateReadActionFiltersInfo(
                    securedRecordPermissions, permissionsByRoles, userRoles);
            Map<String, List<Entity>> entitiesMap = retrieveEntityUpHierarchies();
            Map<String, Map<Long, UserContextPermissions>> userPermissionsByPackage = retrievePackageLevelUserPermissions();
            List<ConfigModel> configModels = configStore.findAll();

            CacheManager cacheManager = CacheManager.getInstance();
            for (Map.Entry<String, SortedSet<Action>> packageActions : actionsByPackage.entrySet()) {
                List<Action> actionList = new LinkedList<Action>(packageActions.getValue());
                cacheManager.setActions(packageActions.getKey(), actionList);
            }
            for (Map.Entry<String, List<NavigationElement>> navigationElementsEntry : navigationElementsByPackage
                    .entrySet()) {
                cacheManager.setNavigationList(navigationElementsEntry.getKey(),
                        navigationElementsEntry.getValue());
            }
            for (Map.Entry<String, List<ResourceLocation>> packageResourceLocations : resourceLocationsByPackage
                    .entrySet()) {
                cacheManager.setResourceLocations(packageResourceLocations.getKey(),
                        packageResourceLocations.getValue());
            }
            //
            for (Map.Entry<Long, List<UserPermission>> permissionsByRole : permissionsByRoles.entrySet()) {
                cacheManager.setRolePermissions(permissionsByRole.getKey(), permissionsByRole.getValue());
            }
            for (Map.Entry<Long, SecuredRecordPermissions> securedRecordPermission : securedRecordPermissions
                    .entrySet()) {
                cacheManager.setSecuredRecordPermissions(securedRecordPermission.getKey(),
                        securedRecordPermission.getValue());
            }
            for (Map.Entry<Long, Map<String, IdFilter>> readActionFiltersByUser : readActionFiltersByUsers
                    .entrySet()) {
                cacheManager.setIdFiltersForUser(readActionFiltersByUser.getKey(),
                        readActionFiltersByUser.getValue());
            }
            for (Map.Entry<String, List<Entity>> entry : entitiesMap.entrySet()) {
                cacheManager.setTypeWithSubclasses(entry.getKey(), entry.getValue());
                Boolean securityEnabled = entry.getValue().get(0).getSecurityEnabled();
                cacheManager.setEntityAsSecurityEnabled(entry.getKey(),
                        securityEnabled != null && securityEnabled);
            }
            for (Map.Entry<String, Map<Long, UserContextPermissions>> entry : userPermissionsByPackage
                    .entrySet()) {
                Map<Long, UserContextPermissions> userPermissions = entry.getValue();
                if (userPermissions != null) {
                    for (Map.Entry<Long, UserContextPermissions> userPermissionsEntry : userPermissions
                            .entrySet()) {
                        cacheManager.setPackageLevelUserPermissions(entry.getKey(),
                                userPermissionsEntry.getKey(), userPermissionsEntry.getValue());
                    }
                }
            }
            List<Config> configs = modelFactory.convertTo(Config.class, configModels);
            cacheManager.setConfigs(configs);

            cacheManager.setEntityTypes(new ArrayList<String>(entitiesMap.keySet()));
            cacheManager.setSecuredRecords(securedRecords);
            cacheManager.setUserRoles(userRoles);
            cacheManager.initializeGuestData(this);

            configCacheProcessor.initConfigs();
        }
    }
}

From source file:org.apache.slider.providers.agent.AgentProviderService.java

private void publishModifiedExportGroups(Set<String> modifiedGroups) {
    for (String groupName : modifiedGroups) {
        Map<String, List<ExportEntry>> entries = this.exportGroups.get(groupName);

        // Publish in old format for the time being
        Map<String, String> simpleEntries = new HashMap<String, String>();
        for (Map.Entry<String, List<ExportEntry>> entry : entries.entrySet()) {
            List<ExportEntry> exports = entry.getValue();
            if (exports != null && exports.size() > 0) {
                // there is no support for multiple exports per name - so extract only the first one
                simpleEntries.put(entry.getKey(), entry.getValue().get(0).getValue());
            }
        }
        publishApplicationInstanceData(groupName, groupName, simpleEntries.entrySet());

        PublishedExports exports = new PublishedExports(groupName);
        exports.setUpdated(new Date().getTime());
        exports.putValues(entries.entrySet());
        getAmState().getPublishedExportsSet().put(groupName, exports);
    }
}

From source file:semaforo.Semaforo.java

public static <K extends Comparable, V extends Comparable> Map<K, List<Integer>> sortByValues(
        Map<K, List<Integer>> map) {
    List<Map.Entry<K, List<Integer>>> entries = new LinkedList<Map.Entry<K, List<Integer>>>(map.entrySet());

    Collections.sort(entries, new Comparator<Map.Entry<K, List<Integer>>>() {

        @Override
        public int compare(Map.Entry<K, List<Integer>> o1, Map.Entry<K, List<Integer>> o2) {
            int comparation = o1.getValue().get(tablaSelectedIndex)
                    .compareTo(o2.getValue().get(tablaSelectedIndex));
            if (comparation == 0) {
                return o1.getKey().compareTo(o2.getKey());
            }
            return comparation;
        }
    });
    Map<K, List<Integer>> sortedMap = new LinkedHashMap<K, List<Integer>>();

    for (Map.Entry<K, List<Integer>> entry : entries) {
        sortedMap.put(entry.getKey(), entry.getValue());
    }
    return sortedMap;
}
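A hypothetical call site for the sorter above (assuming the usual java.util imports, and that tablaSelectedIndex, a static field of Semaforo, is 0, so the first element of each list drives the ordering):

Map<String, List<Integer>> scores = new LinkedHashMap<>();
scores.put("rojo", Arrays.asList(3, 7));
scores.put("verde", Arrays.asList(1, 9));

// ordered by list element 0; ties fall back to comparing the keys
Map<String, List<Integer>> ordered = Semaforo.sortByValues(scores);
System.out.println(ordered); // {verde=[1, 9], rojo=[3, 7]}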

From source file:org.aprilis.jrest.compile.Compile.java

/**
 * @param sJsonDef
 * @return
 */
@SuppressWarnings("unchecked")
private boolean loadJsonDefinition(String sJsonDef) {
    try {
        String commentsStrippedJson = sJsonDef.replaceAll(Constants.gsStripCommentLineRegEx,
                Constants.gsEmptyString);
        String newLineTrimmedJson = commentsStrippedJson.replaceAll(Constants.gsTrimFindeString,
                Constants.gsEmptyString);
        String spaceRemovedJson = newLineTrimmedJson.replaceAll(Constants.gsRemoveSpacesExcludingQuotes,
                Constants.gsEmptyString);

        mLogger.info(String.format(Exceptions.gsTrimmedJsonString, spaceRemovedJson));

        mhmapDefinitionDetails = (JSONObject) moJsonParser.parse(spaceRemovedJson);

        for (Map.Entry<String, HashMap<String, Object>> jsonEntry : mhmapDefinitionDetails.entrySet()) {
            // 1. Check whether definition key is valid (not null or empty) - Done
            // 2. Check whether the value associated with the key is valid
            // (not null, since it's an object) - Done
            //
            if (jsonEntry.getKey() != null && jsonEntry.getKey().length() >= Constants.gshMinDefinitionKeyLength
                    && jsonEntry.getValue() != null) {

                String sJsonKey = jsonEntry.getKey().toUpperCase();
                if ((sJsonKey.equals(Constants.gsAuthDefinitionKey) == true)
                        || (sJsonKey.equals(Constants.gsJdbcDefinitionKey) == true)) {
                    mLogger.warn(Exceptions.gsReservedKeywordNotAllowed);

                    continue;
                } // if ((sJsonKey.equals(Constants.gsAuthDefinitionKey) == true) ...
                  // )

                // 3. Check for mandatory keywords Type - Done
                //
                // if ( jsonEntry.getValue().containsKey(Constants.gsLangTokenQuery)
                // &&
                if (jsonEntry.getValue().containsKey(Constants.gsLangTokenType)) {

                    Definition apiDefinition = new Definition();

                    String sQueryType = jsonEntry.getValue().get(Constants.gsLangTokenType).toString().trim();

                    String sQueryValue = (String) jsonEntry.getValue().get(Constants.gsLangTokenQuery);
                    if ((sQueryType.equals(Constants.gsLangDefTypeGet)
                            || sQueryType.equals(Constants.gsLangDefTypeSet)) && sQueryValue == null) {
                        mLogger.error(Exceptions.gsMissingMandatoryKeywordsInDefFile);

                        return false;
                    }

                    if (sQueryValue != null) {
                        sQueryValue = jsonEntry.getValue().get(Constants.gsLangTokenQuery).toString().trim();

                        // 5. Check for value of keyword Query is not null or empty - Done
                        // 6. Check if Query value is terminated with a semicolon - Done
                        //
                        if (sQueryValue.length() > Constants.gshMinQueryLength) {
                            if (sQueryValue.charAt(
                                    sQueryValue.length() - Constants.gshOne) == Constants.gcDelimSemiColon) {
                                apiDefinition.setQuery(sQueryValue);
                            } else {
                                mLogger.error(Exceptions.gsQuerySyntaxError);

                                return false;
                            } // if (sQuery.charAt(sQuery.length() - Constants.gshOne) ==
                              // Constants.gcDelimSemiColon)
                        } else {
                            mLogger.error(Exceptions.gsEmptyOrInvalidQueryGiven);

                            return false;
                        } // end of if (sQueryValue != null && ... )
                    } // if (jsonEntry.getValue().get(Constants.gsLangTokenQuery) !=
                      // null)

                    // 9. Check for values of Before and After if they are mentioned -
                    // Done
                    //
                    if (jsonEntry.getValue().containsKey(Constants.gsLangTokenBefore)) {
                        try {
                            HashMap<String, String> hmapBeforeMapping = (JSONObject) jsonEntry.getValue()
                                    .get(Constants.gsLangTokenBefore);

                            if (loadJsonBeforeTagInfo(jsonEntry.getKey(), hmapBeforeMapping,
                                    apiDefinition) == false) {
                                return false;
                            } // if (loadJsonBeforeTagInfo(jsonEntry.getKey(), ... )
                        } catch (ClassCastException e) {
                            mLogger.error(String.format(Exceptions.gsMalformedDefinition,
                                    Constants.gsLangTokenBefore, jsonEntry.getKey()));

                            return false;
                        } // end of try ... catch block

                    } // end of
                      // if(jsonEntry.getValue().containsKey(Constants.gsLangTokenBefore))

                    if (jsonEntry.getValue().containsKey(Constants.gsLangTokenAfter)) {
                        try {
                            HashMap<String, String> hmapAfterMapping = (JSONObject) jsonEntry.getValue()
                                    .get(Constants.gsLangTokenAfter);

                            if (loadJsonAfterTagInfo(jsonEntry.getKey(), hmapAfterMapping,
                                    apiDefinition) == false) {
                                return false;
                            } // if (loadJsonAfterTagInfo(jsonEntry.getKey(), ... )
                        } catch (ClassCastException e) {
                            mLogger.error(String.format(Exceptions.gsMalformedDefinition,
                                    Constants.gsLangTokenAfter, jsonEntry.getKey()));

                            return false;
                        } // end of try ... catch block
                    } // end of if
                      // (jsonEntry.getValue().containsKey(Constants.gsLangTokenAfter))

                    loadRoles(jsonEntry.getKey(), (JSONObject) jsonEntry.getValue(), apiDefinition);

                    // 6. Check for value of keyword Type is either GET or SET or Upload
                    if (sQueryType != null) {
                        if (sQueryType.equals(Constants.gsLangDefTypeGet)) {
                            moStore.addGetDefinition(jsonEntry.getKey(), apiDefinition);
                        } else if (sQueryType.equals(Constants.gsLangDefTypeSet)) {
                            moStore.addSetDefinition(jsonEntry.getKey(), apiDefinition);
                        } else {
                            mLogger.error(Exceptions.gsInvalidDefinitionTypeGiven);

                            return false;
                        } // end of if (defType.equals(Constants.gsLangDefTypeGet))

                        return true;
                    } // if(jsonEntry.getValue().containsKey(Constants.gsLangTokenType))
                } else {
                    mLogger.error(Exceptions.gsMissingMandatoryKeywordsInDefFile);
                } // end of if
                  // (jsonEntry.getValue().containsKey(Constants.gsLangTokenQuery)
            } else {
                mLogger.error(Exceptions.gsEmptyDefinition);
            } // end of if (jsonEntry.getKey() != null && .... )
        } // for (Map.Entry<String, HashMap<String, Object>> jsonEntry
    } catch (Exception e) {
        e.printStackTrace();

        e.printStackTrace(moPrintWriter);

        mLogger.error(moStringWriter.toString());
    } // end of try ... catch block

    return false;
}

From source file:org.openhab.binding.amazonechocontrol.internal.Connection.java

public HttpsURLConnection makeRequest(String verb, String url, @Nullable String postData, boolean json,
        boolean autoredirect, @Nullable Map<String, String> customHeaders)
        throws IOException, URISyntaxException {
    String currentUrl = url;
    for (int i = 0; i < 30; i++) // loop for handling redirects; automatic redirects cannot be used
                                 // because all response headers must be caught
    {
        int code;
        HttpsURLConnection connection = null;
        try {
            logger.debug("Make request to {}", url);
            connection = (HttpsURLConnection) new URL(currentUrl).openConnection();
            connection.setRequestMethod(verb);
            connection.setRequestProperty("Accept-Language", "en-US");
            if (customHeaders == null || !customHeaders.containsKey("User-Agent")) {
                connection.setRequestProperty("User-Agent", userAgent);
            }
            connection.setRequestProperty("Accept-Encoding", "gzip");
            connection.setRequestProperty("DNT", "1");
            connection.setRequestProperty("Upgrade-Insecure-Requests", "1");
            if (customHeaders != null) {
                for (String key : customHeaders.keySet()) {
                    String value = customHeaders.get(key);
                    if (StringUtils.isNotEmpty(value)) {
                        connection.setRequestProperty(key, value);
                    }
                }
            }
            connection.setInstanceFollowRedirects(false);

            // add cookies
            URI uri = connection.getURL().toURI();

            if (customHeaders == null || !customHeaders.containsKey("Cookie")) {

                StringBuilder cookieHeaderBuilder = new StringBuilder();
                for (HttpCookie cookie : cookieManager.getCookieStore().get(uri)) {
                    if (cookieHeaderBuilder.length() > 0) {
                        cookieHeaderBuilder.append(";");
                    }
                    cookieHeaderBuilder.append(cookie.getName());
                    cookieHeaderBuilder.append("=");
                    cookieHeaderBuilder.append(cookie.getValue());
                    if (cookie.getName().equals("csrf")) {
                        connection.setRequestProperty("csrf", cookie.getValue());
                    }

                }
                if (cookieHeaderBuilder.length() > 0) {
                    String cookies = cookieHeaderBuilder.toString();
                    connection.setRequestProperty("Cookie", cookies);
                }
            }
            if (postData != null) {

                logger.debug("{}: {}", verb, postData);
                // post data
                byte[] postDataBytes = postData.getBytes(StandardCharsets.UTF_8);
                int postDataLength = postDataBytes.length;

                connection.setFixedLengthStreamingMode(postDataLength);

                if (json) {
                    connection.setRequestProperty("Content-Type", "application/json; charset=UTF-8");
                } else {
                    connection.setRequestProperty("Content-Type", "application/x-www-form-urlencoded");
                }
                connection.setRequestProperty("Content-Length", Integer.toString(postDataLength));
                if ("POST".equals(verb)) {
                    connection.setRequestProperty("Expect", "100-continue");
                }

                connection.setDoOutput(true);
                OutputStream outputStream = connection.getOutputStream();
                outputStream.write(postDataBytes);
                outputStream.close();
            }
            // handle result
            code = connection.getResponseCode();
            String location = null;

            // handle response headers
            Map<String, List<String>> headerFields = connection.getHeaderFields();
            for (Map.Entry<String, List<String>> header : headerFields.entrySet()) {
                String key = header.getKey();
                if (StringUtils.isNotEmpty(key)) {
                    if (key.equalsIgnoreCase("Set-Cookie")) {
                        // store cookie
                        for (String cookieHeader : header.getValue()) {
                            if (StringUtils.isNotEmpty(cookieHeader)) {

                                List<HttpCookie> cookies = HttpCookie.parse(cookieHeader);
                                for (HttpCookie cookie : cookies) {
                                    cookieManager.getCookieStore().add(uri, cookie);
                                }
                            }
                        }
                    }
                    if (key.equalsIgnoreCase("Location")) {
                        // get redirect location
                        location = header.getValue().get(0);
                        if (StringUtils.isNotEmpty(location)) {
                            location = uri.resolve(location).toString();
                            // check for https
                            if (location.toLowerCase().startsWith("http://")) {
                                // always use https
                                location = "https://" + location.substring(7);
                                logger.debug("Redirect corrected to {}", location);
                            }
                        }
                    }
                }
            }
            if (code == 200) {
                logger.debug("Call to {} succeeded", url);
                return connection;
            }
            if (code == 302 && location != null) {
                logger.debug("Redirected to {}", location);
                currentUrl = location;
                if (autoredirect) {
                    continue;
                }
                return connection;
            }
        } catch (IOException e) {

            if (connection != null) {
                connection.disconnect();
            }
            logger.warn("Request to url '{}' fails with unkown error", url, e);
            throw e;
        } catch (Exception e) {
            if (connection != null) {
                connection.disconnect();
            }
            throw e;
        }
        if (code != 200) {
            throw new HttpException(code,
                    verb + " url '" + url + "' failed: " + connection.getResponseMessage());
        }
    }
    throw new ConnectionException("Too many redirects");
}

From source file:org.kles.m3.service.M3ConnectorBuilderService.java

@Override
protected Task<Void> createTask() {
    return new Task<Void>() {

        @Override
        protected Void call() throws MAKException, InterruptedException {
            beginTime = System.currentTimeMillis();
            updateMessage("Initialisation des donnes de " + m3Connector.getEnv().getName());
            m3Connector.getMapEntityByConfiguration().entrySet().stream()
                    .forEach((Map.Entry<M3ConfigurationInfo, M3ConfigurationDetail> key) -> {

                        if (key.getKey().getName().equals(m3Connector.getMainConfig().getName())
                                && !m3Connector.getMainConfig().getName().equals("MVX")) {
                            ArrayList<M3ClassPathEntry> l = new ArrayList<>();
                            for (Map.Entry<String, M3Component> component : key.getValue().getListComponent()
                                    .entrySet()) {
                                if (m3Connector.getListComponentSelect().contains(component.getValue())
                                        && !component.getValue().getNameComponent().equals("MVX")) {
                                    l.addAll(component.getValue().getListComponentPath());
                                }
                            }
                            key.getValue().setConfigClassPath(l);
                        } else {
                            key.getValue().setConfigClassPath(key.getValue().getCompleteClassPath());
                        }
                        ArrayList<Path> arrayPath = new ArrayList<>();
                        ProcessFile fileProcessor = new ProcessFile();
                        fileProcessor.setExtFilter(".class");
                        for (M3ClassPathEntry cl1 : key.getValue().getConfigClassPath()) {
                            try {
                                if (new File(cl1.getPath().toOSString()).exists()) {
                                    updateMessage("Recherche d'objets dans " + cl1.getPath().toOSString());
                                    Files.walkFileTree(Paths.get(cl1.getPath().toString()), fileProcessor);
                                }
                            } catch (IOException ex) {
                                Logger.getLogger(Process.class.getName()).log(Level.SEVERE, null, ex);
                            }
                            arrayPath.addAll(fileProcessor.getList());
                        }

                        int cpt = 0;
                        for (Path p : arrayPath) {
                            updateMessage("Traitement des objets de " + p.toString());
                            LinkedHashMap<String, SourceObject> entity = key.getValue().getMapEntity()
                                    .get(M3Utils.getPgmType(p.toFile().getAbsolutePath()));
                            if (entity != null) {
                                String name = FilenameUtils.getBaseName(p.toString());
                                if (!entity.containsKey(name)) {
                                    if (M3Utils.getPgmType(p.toFile().getAbsolutePath()).equals(Resource.DB)
                                            && name.contains("$")) {
                                        continue;
                                    }
                                    SourceObject obj = new SourceObject();
                                    obj.setObjectPath(p);
                                    entity.put(name, obj);
                                    cpt++;
                                }
                            }
                        }
                        updateMessage(cpt + " objets trouvs appartenant  la configuration "
                                + key.getKey().getName() + "(" + key.getKey().getName() + ")");
                    });
            updateMessage("Fin de la recherche d'objet");
            Thread.sleep(2000);
            return null;
            /*beginTime = System.currentTimeMillis();
             updateMessage("Initialisation des donnes");
             m3Connector.getMapEntityByConfiguration().entrySet().stream().forEach((key) -> {
                    
             ArrayList<Path> arrayPath = new ArrayList<>();
             ProcessFile fileProcessor = new ProcessFile();
             fileProcessor.setExtFilter(".class");
             for (M3ClassPathEntry cl1 : key.getValue().getConfigClassPath()) {
             try {
             if (new File(cl1.getPath().toOSString()).exists()) {
             updateMessage("Recherche d'objets dans " + cl1.getPath().toOSString());
             Files.walkFileTree(Paths.get(cl1.getPath().toString()), fileProcessor);
             }
             } catch (IOException ex) {
             Logger.getLogger(Process.class.getName()).log(Level.SEVERE, null, ex);
             }
             arrayPath.addAll(fileProcessor.getList());
             }
                    
             int cpt = 0;
             for (Path p : arrayPath) {
             updateMessage("Traitement des objets de " + p.toString());
             LinkedHashMap<String, SourceObject> entity = key.getValue().getMapEntity().get(M3Utils.getPgmType(p.toFile().getAbsolutePath()));
             if (entity != null) {
             String name = FilenameUtils.getBaseName(p.toString());
             if (!entity.containsKey(name)) {
             if (M3Utils.getPgmType(p.toFile().getAbsolutePath()).equals(Resource.DB) && name.contains("$")) {
             continue;
             }
             SourceObject obj = new SourceObject();
             obj.setObjectPath(p);
             entity.put(name, obj);
             cpt++;
             }
             }
             }
             updateMessage(cpt + " objets trouvs appartenant  la configuration " + key.getKey().getName() + "(" + key.getKey().getName() + ")");
             });
             updateMessage("Fin de la recherche d'objet");
             Thread.sleep(2000);
             return null;*/
        }
    };
}

From source file:org.openecomp.sdc.be.model.operations.impl.ResourceInstanceOperationTest.java

public void addResourceInstancesAndRelation(String serviceId) {

    String rootName = "tosca.nodes.test.root";
    String softwareCompName = "tosca.nodes.test.softwarecomponent";
    String capabilityTypeName = "myCapability";
    String reqName = "host";
    String computeNodeName = "tosca.nodes.test.compute";
    String reqRelationship = "myRelationship";

    ResourceOperationTest resourceOperationTest = new ResourceOperationTest();
    resourceOperationTest.setOperations(titanDao, resourceOperation, propertyOperation);

    Resource rootResource = createRootResource(rootName, resourceOperationTest);
    // Create Capability type
    CapabilityTypeOperationTest capabilityTypeOperationTest = new CapabilityTypeOperationTest();
    capabilityTypeOperationTest.setOperations(titanDao, capabilityTypeOperation);
    CapabilityTypeDefinition createCapabilityDef = capabilityTypeOperationTest
            .createCapability(capabilityTypeName);

    Resource softwareComponentResource = createSoftwareComponentWithReq(softwareCompName, resourceOperationTest,
            rootResource, capabilityTypeName, reqName, reqRelationship, computeNodeName);
    Resource compute = createComputeWithCapability(capabilityTypeName, computeNodeName, resourceOperationTest,
            rootResource);

    // resource1
    ComponentInstance myresourceInstance = buildResourceInstance(softwareComponentResource.getUniqueId(), "1",
            "tosca.nodes.test.root");

    Either<ComponentInstance, TitanOperationStatus> myinstanceRes = resourceInstanceOperation
            .addComponentInstanceToContainerComponent(serviceId, NodeTypeEnum.Service, "1", true,
                    myresourceInstance, NodeTypeEnum.Resource, false);

    assertTrue("check instance added to service", myinstanceRes.isLeft());

    // resource2
    ComponentInstance computeInstance = buildResourceInstance(compute.getUniqueId(), "2",
            "tosca.nodes.test.compute");
    ComponentInstance value1 = myinstanceRes.left().value();

    Either<ComponentInstance, TitanOperationStatus> computeInstTes = resourceInstanceOperation
            .addComponentInstanceToContainerComponent(serviceId, NodeTypeEnum.Service, "2", true,
                    computeInstance, NodeTypeEnum.Resource, false);
    assertTrue("check instance added to service", computeInstTes.isLeft());
    ComponentInstance value2 = computeInstTes.left().value();

    RequirementAndRelationshipPair relationPair = new RequirementAndRelationshipPair();
    relationPair.setRequirement(reqName);
    relationPair.setCapability(capabilityTypeName);

    String capId = "";
    Map<String, List<CapabilityDefinition>> capabilities = compute.getCapabilities();
    for (Map.Entry<String, List<CapabilityDefinition>> entry : capabilities.entrySet()) {
        capId = entry.getValue().get(0).getUniqueId();
    }
    relationPair.setCapabilityUid(capId);
    Map<String, List<RequirementDefinition>> requirements = softwareComponentResource.getRequirements();
    String reqId = "";
    for (Map.Entry<String, List<RequirementDefinition>> entry : requirements.entrySet()) {
        reqId = entry.getValue().get(0).getUniqueId();
    }
    relationPair.setRequirementUid(reqId);
    relationPair.setRequirementOwnerId(value1.getUniqueId());
    relationPair.setCapabilityOwnerId(value2.getUniqueId());
    relationPair.setCapabilityUid(capId);

    Either<RelationshipInstData, TitanOperationStatus> connectResourcesInService = resourceInstanceOperation
            .connectResourcesInService(serviceId, NodeTypeEnum.Service, value1.getUniqueId(),
                    value2.getUniqueId(), relationPair);

    assertTrue("check relation created", connectResourcesInService.isLeft());

}

From source file:ro.uaic.info.nlptools.ggs.engine.core.StateMachine.java

protected void compileMirrored() throws GGSException {
    Grammar mirroredGrammar = new Grammar();
    mapToMirrored = new HashMap<GraphNode, GraphNode>();

    for (Map.Entry<String, Graph> entry : grammar.getGraphs().entrySet()) {
        Graph mirroredGraph = new Graph(grammar, entry.getKey());
        mirroredGrammar.getGraphs().put(mirroredGraph.getId(), mirroredGraph);

        for (GraphNode gn : entry.getValue().getGraphNodes().values()) {
            if (gn.nodeType == GraphNode.NodeType.Comment)
                continue;

            GraphNode mirroredNode = gn.clone();
            mapToMirrored.put(gn, mirroredNode);
            mirroredGraph.addGraphNode(mirroredNode, gn.getIndex());
            if (gn.isEnd) {
                mirroredNode.isStart = true;
                mirroredGraph.setStartNode(mirroredNode);

            } else if (gn.isStart) {
                mirroredNode.isEnd = true;
                mirroredGraph.setEndNode(mirroredNode);
            }

            if (mirroredNode.getTokenMatchingCode().startsWith("?<")) {
                mirroredNode.setTokenMatchingCode("?" + mirroredNode.getTokenMatchingCode().substring(2));
            } else if (mirroredNode.getTokenMatchingCode().startsWith("?")) {
                mirroredNode.setTokenMatchingCode("?<" + mirroredNode.getTokenMatchingCode().substring(1));
            }
        }
        for (GraphNode gn : entry.getValue().getGraphNodes().values()) {
            if (gn == null)
                continue;
            if (gn.nodeType == GraphNode.NodeType.Comment)
                continue;
            if (gn.nodeType == GraphNode.NodeType.CrossReference)
                for (int childNodeIndex : gn.getChildNodesIndexes()) {
                    mirroredGraph.getGraphNodes().get(gn.getIndex()).getChildNodesIndexes().add(childNodeIndex);
                }
            else
                for (int childNodeIndex : gn.getChildNodesIndexes()) {
                    if (entry.getValue().getGraphNodes()
                            .get(childNodeIndex).nodeType == GraphNode.NodeType.CrossReference)
                        mirroredGraph.getGraphNodes().get(gn.getIndex()).getChildNodesIndexes()
                                .add(childNodeIndex);
                    else
                        mirroredGraph.getGraphNodes().get(childNodeIndex).getChildNodesIndexes()
                                .add(gn.getIndex());
                }
        }
    }

    mirroredMachine = new StateMachine(compiledGrammar);
    mirroredMachine.mirroredMachine = this;
    mirroredMachine.mapToMirrored = new HashMap<GraphNode, GraphNode>();
    for (Map.Entry<GraphNode, GraphNode> entry : mapToMirrored.entrySet())
        mirroredMachine.mapToMirrored.put(entry.getValue(), entry.getKey());

    mirroredMachine.compile(mirroredGrammar, true);
    mirroredMachine.jsEngine = jsEngine;

}