Example usage for java.util LinkedHashMap entrySet

Introduction

On this page you can find example usage of java.util.LinkedHashMap.entrySet().

Prototype

public Set<Map.Entry<K, V>> entrySet() 

Document

Returns a Set view of the mappings contained in this map.
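
A minimal, self-contained sketch of the call itself (the class name and map contents below are illustrative only): entrySet() returns the mappings in insertion order, and the returned Set is backed by the map, so removing an entry through the view also removes the mapping from the map.

import java.util.LinkedHashMap;
import java.util.Map;

public class EntrySetExample {
    public static void main(String[] args) {
        LinkedHashMap<String, Integer> counts = new LinkedHashMap<>();
        counts.put("alpha", 1);
        counts.put("beta", 2);
        counts.put("gamma", 3);

        // Iteration follows insertion order: alpha, beta, gamma
        for (Map.Entry<String, Integer> entry : counts.entrySet()) {
            System.out.println(entry.getKey() + " = " + entry.getValue());
        }

        // The entry set is a view: removing through it updates the map
        counts.entrySet().removeIf(entry -> entry.getValue() > 2);
        System.out.println(counts); // {alpha=1, beta=2}
    }
}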

Usage

From source file:gov.llnl.lc.smt.command.event.SmtEvent.java

private void dumpAllEvents(OsmEvent e) {
    // dump event data for all OMS records
    // if the OsmEvent parameter is NOT null, then just list that event
    //  otherwise, list all events in OSM_EventStats

    // using an SMT_UpdateService, and the file has already been read, so just fetch the collection
    SMT_UpdateService sus = (SMT_UpdateService) UpdateService;
    OMS_Collection collection = sus.getCollection();
    LinkedHashMap<String, OpenSmMonitorService> omsHistory = collection.getOSM_History();

    // display all the Events, or just a single one?

    // loop through the history and dump the event stats for each timestamp
    for (Map.Entry<String, OpenSmMonitorService> entry : omsHistory.entrySet()) {
        OpenSmMonitorService oms = entry.getValue();
        OSM_Fabric fabric = oms.getFabric();
        OSM_EventStats EventStats = fabric.getOsmEventStats();
        if (e == null)
            System.out.println(fabric.getTimeStamp() + ": " + EventStats.toString());
        else {
            System.out.println(fabric.getTimeStamp() + ": " + EventStats.toEventString(e));
            //       System.out.println(fabric.getTimeStamp() + ": " + EventStats.getCounter(e));
        }
    }
}

From source file:com.sillelien.dollar.api.types.DollarMap.java

@NotNull
private LinkedHashMap<var, var> deepClone(@NotNull LinkedHashMap<var, var> o) {
    LinkedHashMap<var, var> result = new LinkedHashMap<>();
    for (Map.Entry<var, var> entry : o.entrySet()) {
        result.put(entry.getKey(), entry.getValue());
    }
    return result;
}

From source file:gov.llnl.lc.smt.command.utilize.SmtUtilize.java

private void dumpAllUtilization() {
    // iterate through the history, and display the analysis
    boolean exit = (UpdateService == null) || (OMService == null);
    if (!exit) {
        if (UpdateService instanceof SMT_UpdateService) {
            SMT_UpdateService sus = ((SMT_UpdateService) UpdateService);
            OMS_Collection collection = sus.getCollection();
            LinkedHashMap<String, OpenSmMonitorService> omsHistory = collection.getOSM_History();

            // loop through the history, and display the utilization (key is timestamp, value is OMS)
            for (Map.Entry<String, OpenSmMonitorService> entry : omsHistory.entrySet()) {
                OpenSmMonitorService oms = entry.getValue();

                try {
                    // trigger a new analysis
                    Analysis_Mgr.osmServiceUpdate(sus, oms);

                    // give the analyzer some time to complete, and signal the smtAnalysisUpdate, before continuing
                    TimeUnit.SECONDS.sleep(1);
                } catch (Exception e) {
                    System.err.println("Error doing the delta analysis, or time delay");
                }
            }
        } else
            System.err.println("Using the original OMS_UpdateService");
    }
}

From source file:org.mycard.net.network.RequestQueue.java

/** Helper: removes and returns the first request queued for the first host; drops the host entry once its request list becomes empty. */
private Request removeFirst(LinkedHashMap<HttpHost, LinkedList<Request>> requestQueue) {
    Request ret = null;
    Iterator<Map.Entry<HttpHost, LinkedList<Request>>> iter = requestQueue.entrySet().iterator();
    if (iter.hasNext()) {
        Map.Entry<HttpHost, LinkedList<Request>> entry = iter.next();
        LinkedList<Request> reqList = entry.getValue();
        ret = reqList.removeFirst();
        if (reqList.isEmpty()) {
            requestQueue.remove(entry.getKey());
        }
    }
    return ret;
}

From source file:nl.nn.adapterframework.webcontrol.api.ShowIbisstoreSummary.java

@POST
@RolesAllowed({ "ObserverAccess", "IbisTester" })
@Path("/jdbc/summary")
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
public Response execute(LinkedHashMap<String, Object> json) throws ApiException {
    initBase(servletConfig);

    Response.ResponseBuilder response = Response.noContent(); //PUT defaults to no content

    String query = AppConstants.getInstance().getProperty(SHOWIBISSTOREQUERYKEY);
    String realm = null;

    for (Entry<String, Object> entry : json.entrySet()) {
        String key = entry.getKey();
        if (key.equalsIgnoreCase("realm")) {
            realm = entry.getValue().toString();
        }
        if (key.equalsIgnoreCase("query")) {
            query = entry.getValue().toString();
        }
    }

    if (realm == null)
        return response.status(Response.Status.BAD_REQUEST).build();

    String result = "";
    try {
        IbisstoreSummaryQuerySender qs;
        qs = (IbisstoreSummaryQuerySender) ibisManager.getIbisContext()
                .createBeanAutowireByName(IbisstoreSummaryQuerySender.class);
        qs.setSlotmap(getSlotmap());
        try {
            qs.setName("QuerySender");
            qs.setJmsRealm(realm);
            qs.setQueryType("select");
            qs.setBlobSmartGet(true);
            qs.configure(true);
            qs.open();
            result = qs.sendMessage("dummy", query);
        } catch (Throwable t) {
            throw new ApiException("An error occured on executing jdbc query: " + t.toString());
        } finally {
            qs.close();
        }
    } catch (Exception e) {
        throw new ApiException("An error occured on creating or closing the connection: " + e.toString());
    }

    List<Map<String, String>> resultMap = null;
    if (XmlUtils.isWellFormed(result)) {
        resultMap = XmlQueryResult2Map(result);
    }
    if (resultMap == null)
        throw new ApiException("Invalid query result.");

    Map<String, Object> resultObject = new HashMap<String, Object>();
    resultObject.put("query", query);
    resultObject.put("result", resultMap);

    return Response.status(Response.Status.CREATED).entity(resultObject).build();
}

From source file:org.duracloud.sync.mgmt.ChangedList.java

/**
 * Restores the state of the ChangedList using the given backup file
 *
 * @param persistFile file containing previous state
 * @param contentDirs content directories currently configured.
 */
public synchronized void restore(File persistFile, List<File> contentDirs) {
    try {
        FileInputStream fileStream = new FileInputStream(persistFile);
        ObjectInputStream oStream = new ObjectInputStream(fileStream);
        log.info("Restoring changed list from backup: {}", persistFile.getAbsolutePath());
        synchronized (this) {
            LinkedHashMap<String, ChangedFile> fileListFromDisk = (LinkedHashMap<String, ChangedFile>) oStream
                    .readObject();

            //remove files in change list that are not in the content dir list.
            if (contentDirs != null && !contentDirs.isEmpty()) {

                Iterator<Entry<String, ChangedFile>> entries = fileListFromDisk.entrySet().iterator();
                while (entries.hasNext()) {
                    Entry<String, ChangedFile> entry = entries.next();
                    ChangedFile file = entry.getValue();
                    boolean watched = false;
                    for (File contentDir : contentDirs) {
                        if (file.getFile().getAbsolutePath().startsWith(contentDir.getAbsolutePath())
                                && !this.fileExclusionManager.isExcluded(file.getFile())) {
                            watched = true;
                            break;
                        }
                    }

                    if (!watched) {
                        entries.remove();
                    }
                }
            }

            this.fileList = fileListFromDisk;
        }
        oStream.close();
    } catch (Exception e) {
        throw new RuntimeException("Unable to restore File Changed List:" + e.getMessage(), e);
    }
}

From source file:org.apache.hadoop.hive.ql.parse.PartitionPruner.java

/** Prune the partitions using the table metadata and return the partitions that remain. */
@SuppressWarnings("nls")
public Set<Partition> prune() throws HiveException {
    LOG.trace("Started pruning partiton");
    LOG.trace("tabname = " + this.tab.getName());
    LOG.trace("prune Expression = " + this.prunerExpr);

    HashSet<Partition> ret_parts = new HashSet<Partition>();
    try {
        StructObjectInspector rowObjectInspector = (StructObjectInspector) this.tab.getDeserializer()
                .getObjectInspector();
        Object[] rowWithPart = new Object[2];
        InspectableObject inspectableObject = new InspectableObject();

        ExprNodeEvaluator evaluator = ExprNodeEvaluatorFactory.get(this.prunerExpr);
        for (Partition part : Hive.get().getPartitions(this.tab)) {
            // Set all the variables here
            LinkedHashMap<String, String> partSpec = part.getSpec();

            // Create the row object
            ArrayList<String> partNames = new ArrayList<String>();
            ArrayList<String> partValues = new ArrayList<String>();
            ArrayList<ObjectInspector> partObjectInspectors = new ArrayList<ObjectInspector>();
            for (Map.Entry<String, String> entry : partSpec.entrySet()) {
                partNames.add(entry.getKey());
                partValues.add(entry.getValue());
                partObjectInspectors
                        .add(ObjectInspectorFactory.getStandardPrimitiveObjectInspector(String.class));
            }
            StructObjectInspector partObjectInspector = ObjectInspectorFactory
                    .getStandardStructObjectInspector(partNames, partObjectInspectors);

            rowWithPart[1] = partValues;
            ArrayList<StructObjectInspector> ois = new ArrayList<StructObjectInspector>(2);
            ois.add(rowObjectInspector);
            ois.add(partObjectInspector);
            StructObjectInspector rowWithPartObjectInspector = ObjectInspectorFactory
                    .getUnionStructObjectInspector(ois);

            // evaluate the expression tree
            evaluator.evaluate(rowWithPart, rowWithPartObjectInspector, inspectableObject);
            LOG.trace("prune result for partition " + partSpec + ": " + inspectableObject.o);
            if (!Boolean.FALSE.equals(inspectableObject.o)) {
                LOG.debug("retained partition: " + partSpec);
                ret_parts.add(part);
            } else {
                LOG.trace("pruned partition: " + partSpec);
            }
        }
    } catch (Exception e) {
        throw new HiveException(e);
    }

    // Now return the set of partitions
    return ret_parts;
}

From source file:fingerprint.MyStructureFingerprint.java

public void computePercentageAndHistogram(MyChainIfc peptide, int splittedSequenceLength) {

    splittedPercentageOccupied = new ArrayList<>();
    splittedHistogramD2OccupiedSolidAngle = new ArrayList<>();

    // Compute the distribution relative to the peptide.
    // The results are now lists, with one entry per segment of the length used in the DockingPeptide algorithm.

    // The peptide can be the query, which might be longer than splittedSequenceLength,

    // or a split peptide, in which case its length matches splittedSequenceLength.

    System.out.println("solid angle peptide length = " + peptide.getMyMonomers().length);

    // If the peptide already has the requested length, process it directly:
    // get the barycenter of the peptide.

    if (peptide.getMyMonomers().length == splittedSequenceLength) {
        List<PointIfc> pointsPeptide = PointsTools.createListOfPointIfcFromPeptide(peptide);
        PointIfc barycenter = ShapeReductorTools.computeLigandBarycenter(pointsPeptide);

        LinkedHashMap<PhiThetaInterval, List<PointIfc>> groupedResidues = groupResiduesAccordingToSolidAngleAccordingToLocalStructureBarycenter(
                barycenter, myStructure);
        int countOccupied = 0;
        for (Entry<PhiThetaInterval, List<PointIfc>> entry : groupedResidues.entrySet()) {
            if (entry.getValue().size() > 0) {
                countOccupied += 1;
            }
        }
        double percentageOccupied = (double) countOccupied / groupedResidues.size();
        List<Integer> histogramD2OccupiedSolidAngle = computeHistogramD2(groupedResidues);
        splittedPercentageOccupied.add(percentageOccupied);
        splittedHistogramD2OccupiedSolidAngle.add(histogramD2OccupiedSolidAngle);
    } else {

        // Remember that the docking peptide class using this method docks the peptide chain that is already bound in the structure.
        for (int i = 0; i < peptide.getMyMonomers().length; i++) {

            int startId = i;
            int endId = i + splittedSequenceLength;
            if (endId < peptide.getMyMonomers().length + 1) {

                // I need to shrink myStructure to the environment of this sub-peptide
                StructureLocalToBuildAnyShape structureLocalToBuildAnyShape = null;
                try {
                    structureLocalToBuildAnyShape = new StructureLocalToBuildAnyShape(myStructure,
                            peptide.getChainId(), startId, splittedSequenceLength, algoParameters);
                } catch (ShapeBuildingException e) {
                    continue;
                }
                MyStructureIfc myStructureLocal = structureLocalToBuildAnyShape.getMyStructureLocal();
                if (myStructureLocal == null) {
                    continue;
                }

                // build the sub-peptide and compute its barycenter
                MyChainIfc subPeptide = peptide.makeSubchain(startId, splittedSequenceLength);
                List<PointIfc> pointsPeptide = PointsTools.createListOfPointIfcFromPeptide(subPeptide);
                PointIfc barycenter = ShapeReductorTools.computeLigandBarycenter(pointsPeptide);

                LinkedHashMap<PhiThetaInterval, List<PointIfc>> groupedResidues = groupResiduesAccordingToSolidAngleAccordingToLocalStructureBarycenter(
                        barycenter, myStructureLocal);
                int countOccupied = 0;
                for (Entry<PhiThetaInterval, List<PointIfc>> entry : groupedResidues.entrySet()) {
                    if (entry.getValue().size() > 0) {
                        countOccupied += 1;
                    }
                }
                double percentageOccupied = (double) countOccupied / groupedResidues.size();
                List<Integer> histogramD2OccupiedSolidAngle = computeHistogramD2(groupedResidues);
                splittedPercentageOccupied.add(percentageOccupied);
                splittedHistogramD2OccupiedSolidAngle.add(histogramD2OccupiedSolidAngle);
            }
        }
    }
}

From source file:org.openscore.lang.compiler.utils.ExecutableBuilder.java

private Workflow compileWorkFlow(LinkedHashMap<String, Map<String, Object>> workFlowRawData,
        Map<String, String> imports, Workflow onFailureWorkFlow, boolean onFailureSection) {

    Deque<Task> tasks = new LinkedList<>();

    Validate.notEmpty(workFlowRawData, "Flow must have tasks in its workflow");

    PeekingIterator<Map.Entry<String, Map<String, Object>>> iterator = new PeekingIterator<>(
            workFlowRawData.entrySet().iterator());

    boolean isOnFailureDefined = onFailureWorkFlow != null;

    String defaultFailure = isOnFailureDefined ? onFailureWorkFlow.getTasks().getFirst().getName()
            : FAILURE_RESULT;

    while (iterator.hasNext()) {
        Map.Entry<String, Map<String, Object>> taskRawData = iterator.next();
        Map.Entry<String, Map<String, Object>> nextTaskData = iterator.peek();
        String taskName = taskRawData.getKey();
        Map<String, Object> taskRawDataValue;
        try {
            taskRawDataValue = taskRawData.getValue();
        } catch (ClassCastException ex) {
            throw new RuntimeException("Task: " + taskName
                    + " syntax is illegal.\nBelow task name, there should be a map of values in the format:\ndo:\n\top_name:");
        }

        String defaultSuccess;
        if (nextTaskData != null) {
            defaultSuccess = nextTaskData.getKey();
        } else {
            defaultSuccess = onFailureSection ? FAILURE_RESULT : SUCCESS_RESULT;
        }
        Task task = compileTask(taskName, taskRawDataValue, defaultSuccess, imports, defaultFailure);
        tasks.add(task);
    }

    if (isOnFailureDefined) {
        tasks.addAll(onFailureWorkFlow.getTasks());
    }

    return new Workflow(tasks);
}

From source file:com.my.batch.controller.JobOperationsController.java

@RequestMapping(value = "job/{jobName}", method = RequestMethod.POST)
public JsonResponse launch(@RequestBody LinkedHashMap<String, String> jobParams, @PathVariable String jobName)
        throws NoSuchJobException, JobInstanceAlreadyExistsException, JobParametersInvalidException,
        JobExecutionAlreadyRunningException, JobRestartException, JobInstanceAlreadyCompleteException,
        JobParametersNotFoundException {
    try {
        //         LinkedHashMap<String, Object> jobParams = new LinkedHashMap<>();
        Job job = jobRegistry.getJob(jobName);
        JobParametersBuilder jobParametersBuilder = new JobParametersBuilder();
        for (Map.Entry<String, String> entry : jobParams.entrySet()) {
            jobParametersBuilder.addString(entry.getKey(), String.valueOf(entry.getValue()));
        }
        jobLauncher.run(job, jobParametersBuilder.toJobParameters());
    } catch (NoSuchJobException e) {
        log.error("job start fail, jobName " + jobName, e);
        return JsonResponse.createFailMsg(e.getMessage());
    }
    return JsonResponse.createSuccess();
}