Example usage for java.util List listIterator

List of usage examples for java.util List listIterator

Introduction

On this page you can find example usage for java.util List listIterator.

Prototype

ListIterator<E> listIterator(int index);

Document

Returns a list iterator over the elements in this list (in proper sequence), starting at the specified position in the list.

Usage

From source file: org.apache.ambari.server.scheduler.ExecutionScheduleManager.java

private JobDetail persistBatch(RequestExecution requestExecution) throws AmbariException {

    Batch batch = requestExecution.getBatch();
    JobDetail jobDetail = null;

    if (batch != null) {
        List<BatchRequest> batchRequests = batch.getBatchRequests();
        if (batchRequests != null) {
            Collections.sort(batchRequests);
            ListIterator<BatchRequest> iterator = batchRequests.listIterator(batchRequests.size());
            String nextJobName = null;
            while (iterator.hasPrevious()) {
                BatchRequest batchRequest = iterator.previous();

                String jobName = getJobName(requestExecution.getId(), batchRequest.getOrderId());

                Integer separationSeconds = requestExecution.getBatch().getBatchSettings()
                        .getBatchSeparationInSeconds();

                // Create Job and store properties to get next batch request details
                jobDetail = newJob(BatchRequestJob.class)
                        .withIdentity(jobName, ExecutionJob.LINEAR_EXECUTION_JOB_GROUP)
                        .usingJobData(ExecutionJob.NEXT_EXECUTION_JOB_NAME_KEY, nextJobName)
                        .usingJobData(ExecutionJob.NEXT_EXECUTION_JOB_GROUP_KEY,
                                ExecutionJob.LINEAR_EXECUTION_JOB_GROUP)
                        .usingJobData(BatchRequestJob.BATCH_REQUEST_EXECUTION_ID_KEY, requestExecution.getId())
                        .usingJobData(BatchRequestJob.BATCH_REQUEST_BATCH_ID_KEY, batchRequest.getOrderId())
                        .usingJobData(BatchRequestJob.BATCH_REQUEST_CLUSTER_NAME_KEY,
                                requestExecution.getClusterName())
                        .usingJobData(BatchRequestJob.NEXT_EXECUTION_SEPARATION_SECONDS,
                                separationSeconds != null ? separationSeconds : 0)
                        .storeDurably().build();

                try {
                    executionScheduler.addJob(jobDetail);
                } catch (SchedulerException e) {
                    LOG.error("Failed to add job detail. " + batchRequest, e);
                }

                nextJobName = jobName;
            }
        }
    }
    return jobDetail;
}

From source file: com.neatresults.mgnltweaks.ui.field.ComponentTemplateSelectFieldFactory.java

private Map<String, TemplateDefinition> getAreaHierarchy(Node parentArea)
        throws RepositoryException, RegistrationException {
    Map<String, TemplateDefinition> areaHierarchy = new LinkedHashMap<String, TemplateDefinition>();
    List<String> areaNamesHierarchy = new ArrayList<String>();
    Node parentParentArea = parentArea;
    while (parentParentArea != null) {
        String areaName = parentParentArea.getName();
        areaNamesHierarchy.add(areaName);
        parentParentArea = NodeUtil.getNearestAncestorOfType(parentParentArea, NodeTypes.Area.NAME);
    }

    Node parentPage = NodeUtil.getNearestAncestorOfType(parentArea, NodeTypes.Page.NAME);
    templateId = parentPage.getProperty(NodeTypes.Renderable.TEMPLATE).getString();
    TemplateDefinition templateDef = registry.getTemplateDefinition(templateId);

    templateDef = mergeDefinition(templateDef);

    ListIterator<String> iter = areaNamesHierarchy.listIterator(areaNamesHierarchy.size());
    Node componentOrArea = parentPage;
    while (iter.hasPrevious()) {
        String name = iter.previous();
        // a "component" subnode typically indicates an area of type single
        if (!componentOrArea.hasNode(name)
                && (componentOrArea.hasNode("component") || (templateDef instanceof AreaDefinition
                        && "single".equals(((AreaDefinition) templateDef).getType())))) {
            componentOrArea = componentOrArea.getNode("component/" + name);
            // so we know the component is single, and we need to look whether it has any sub areas
            String id = componentOrArea.getParent().getProperty(NodeTypes.Renderable.TEMPLATE).getString();
            TemplateDefinition componentDef = registry.getTemplateDefinition(id);
            if (componentDef != null) {
                templateDef = componentDef;
            }
        } else {
            componentOrArea = componentOrArea.getNode(name);
        }
        // do we really need to merge here already?
        AreaDefinition area = templateDef.getAreas().get(name);
        if (area != null) {
            AreaDefinition areaDef = (AreaDefinition) mergeDefinition(area);
            templateDef = areaDef;
        } else {
            AreaDefinition maybeHit = templateDef.getAreas().get(name);
            if (maybeHit != null) {
                areaHierarchy.put(name, maybeHit);
                templateDef = maybeHit;
            } else {
                // get subareas of the area? what the heck was I thinking when writing this? How does it work anyway?
                for (Entry<String, AreaDefinition> tempAreaEntry : templateDef.getAreas().entrySet()) {
                    AreaDefinition tempArea = tempAreaEntry.getValue();
                    maybeHit = tempArea.getAreas().get(name);
                    if (maybeHit != null) {
                        areaHierarchy.put(tempAreaEntry.getKey(), tempAreaEntry.getValue());
                        templateDef = maybeHit;
                    }
                }
            }
            // noComponent area ... how do i read those?
        }
        areaHierarchy.put(name, templateDef);
    }

    return areaHierarchy;
}

From source file: hudson.model.AbstractItem.java

/**
 * Deletes this item.
 * Note on the funny name: for reasons of historical compatibility, this URL is {@code /doDelete}
 * since it predates {@code <l:confirmationLink>}. {@code /delete} goes to a Jelly page
 * which should now be unused by core but is left in case plugins are still using it.
 */
@RequirePOST
public void doDoDelete(StaplerRequest req, StaplerResponse rsp)
        throws IOException, ServletException, InterruptedException {
    delete();
    if (req == null || rsp == null) { // CLI
        return;
    }
    List<Ancestor> ancestors = req.getAncestors();
    ListIterator<Ancestor> it = ancestors.listIterator(ancestors.size());
    String url = getParent().getUrl(); // fallback but we ought to get to Jenkins.instance at the root
    while (it.hasPrevious()) {
        Object a = it.previous().getObject();
        if (a instanceof View) {
            url = ((View) a).getUrl();
            break;
        } else if (a instanceof ViewGroup && a != this) {
            url = ((ViewGroup) a).getUrl();
            break;
        }
    }
    rsp.sendRedirect2(req.getContextPath() + '/' + url);
}

From source file: net.sourceforge.fenixedu.domain.degreeStructure.DegreeModule.java

public ICurricularRule getMostRecentActiveCurricularRule(final CurricularRuleType ruleType,
        final CourseGroup parentCourseGroup, final ExecutionYear executionYear) {
    final List<ICurricularRule> curricularRules = new ArrayList<ICurricularRule>(
            getCurricularRules(ruleType, parentCourseGroup, (ExecutionYear) null));
    Collections.sort(curricularRules, ICurricularRule.COMPARATOR_BY_BEGIN);

    if (curricularRules.isEmpty()) {
        return null;
    }

    if (executionYear == null) {
        final ListIterator<ICurricularRule> iter = curricularRules.listIterator(curricularRules.size());
        while (iter.hasPrevious()) {
            final ICurricularRule curricularRule = iter.previous();
            if (curricularRule.isActive()) {
                return curricularRule;
            }
        }

        return null;
    }

    ICurricularRule result = null;
    for (final ICurricularRule curricularRule : curricularRules) {
        if (curricularRule.isValid(executionYear)) {
            if (result != null) {
                // TODO: remove this throw when curricular rule ensures
                // that it can be only one active for execution period
                // and replace by: return curricularRule
                throw new DomainException(
                        "error.degree.module.has.more.than.one.credits.limit.for.executionYear", getName());
            }
            result = curricularRule;
        }
    }

    return result;
}

From source file: com.haulmont.cuba.core.sys.MetadataImpl.java

protected void invokePostConstructMethods(Entity entity)
        throws InvocationTargetException, IllegalAccessException {
    List<Method> postConstructMethods = new ArrayList<>(4);
    List<String> methodNames = new ArrayList<>(4);
    Class clazz = entity.getClass();
    while (clazz != Object.class) {
        Method[] classMethods = clazz.getDeclaredMethods();
        for (Method method : classMethods) {
            if (method.isAnnotationPresent(PostConstruct.class) && !methodNames.contains(method.getName())) {
                postConstructMethods.add(method);
                methodNames.add(method.getName());
            }
        }
        clazz = clazz.getSuperclass();
    }

    ListIterator<Method> iterator = postConstructMethods.listIterator(postConstructMethods.size());
    while (iterator.hasPrevious()) {
        Method method = iterator.previous();
        if (!method.isAccessible()) {
            method.setAccessible(true);
        }
        method.invoke(entity);
    }
}

From source file: edu.umn.msi.tropix.persistence.dao.hibernate.TropixObjectDaoImpl.java

public TropixObject getHomeDirectoryPath(final String userId, final List<String> pathParts) {
    if (LOG.isDebugEnabled()) {
        LOG.debug(String.format("getPath called with userId %s and path parts %s", userId,
                Iterables.toString(pathParts)));
    }
    final StringBuilder joins = new StringBuilder(), wheres = new StringBuilder();
    final ListIterator<String> pathPartsIter = pathParts.listIterator(pathParts.size());
    final LinkedList<String> parameters = Lists.newLinkedList();
    while (pathPartsIter.hasPrevious()) {
        int index = pathPartsIter.previousIndex() + 1;
        final String pathPart = pathPartsIter.previous();

        int nextObjectBackIndex = pathPartsIter.previousIndex() + 1;
        joins.append(String.format(" inner join o%d.permissionParents as o%d ", index, nextObjectBackIndex));
        wheres.append(String.format(" and o%d.deletedTime is null", index));
        wheres.append(String.format(" and o%d.committed is true", index));
        addConstraintForPathPart(pathPart, index, wheres, parameters);
    }

    final String queryString = String.format(
            "User u, TropixObject o%d %s where u.cagridId = :userId %s and u.homeFolder.id = o0.id",
            pathParts.size(), joins.toString(), wheres.toString());
    return executePathQuery(userId, String.format("o%d", pathParts.size()), queryString, 1, parameters);
}

From source file: org.apache.atlas.model.typedef.AtlasStructDef.java

public void setAttributeDefs(List<AtlasAttributeDef> attributeDefs) {
    if (this.attributeDefs != null && this.attributeDefs == attributeDefs) {
        return;
    }

    if (CollectionUtils.isEmpty(attributeDefs)) {
        this.attributeDefs = new ArrayList<>();
    } else {
        // if multiple attributes with same name are present, keep only the last entry
        List<AtlasAttributeDef> tmpList = new ArrayList<>(attributeDefs.size());
        Set<String> attribNames = new HashSet<>();

        ListIterator<AtlasAttributeDef> iter = attributeDefs.listIterator(attributeDefs.size());
        while (iter.hasPrevious()) {
            AtlasAttributeDef attributeDef = iter.previous();
            String attribName = attributeDef != null ? attributeDef.getName() : null;

            if (attribName != null) {
                attribName = attribName.toLowerCase();

                if (!attribNames.contains(attribName)) {
                    tmpList.add(new AtlasAttributeDef(attributeDef));

                    attribNames.add(attribName);
                }
            }
        }
        Collections.reverse(tmpList);

        this.attributeDefs = tmpList;
    }
}

From source file: com.bwc.ora.models.Lrp.java

public List<XYSeries> getFWHMForLRPPeaks(XYSeries lrpPeaks, XYSeries lrpSeries) {
    LinkedList<XYSeries> seriesList = new LinkedList<>();
    List<XYDataItem> pointList = (List<XYDataItem>) lrpSeries.getItems();
    List<XYDataItem> peakList = (List<XYDataItem>) lrpPeaks.getItems();
    //iterate through the peaks, process FWHM for each peak
    for (XYDataItem peak : peakList) {
        //grab index of the closest point to the peak
        int peakIndex = -1;
        for (XYDataItem pnt : pointList) {
            peakIndex++;
            if (Math.abs(pnt.getXValue() - peak.getXValue()) < 0.6D) {
                break;
            }
        }
        //calculate point with Y value of valley to the left of peak
        XYDataItem leftValleyPoint = null;
        ListIterator<XYDataItem> it = pointList.listIterator(peakIndex);
        double prevY = peak.getYValue();
        while (it.hasPrevious()) {
            XYDataItem leftPoint = it.previous();
            if (leftPoint.getYValue() <= prevY) {
                prevY = leftPoint.getYValue();
                leftValleyPoint = leftPoint;
            } else {
                break;
            }
        }
        //calculate point with Y value of valley to the right of peak
        XYDataItem rightValleyPoint = null;
        it = pointList.listIterator(peakIndex);
        prevY = peak.getYValue();
        while (it.hasNext()) {
            XYDataItem rightPoint = it.next();
            if (rightPoint.getYValue() <= prevY) {
                prevY = rightPoint.getYValue();
                rightValleyPoint = rightPoint;
            } else {
                break;
            }
        }
        //determine half max Y value
        double halfMaxYValue;
        if (rightValleyPoint.getYValue() == leftValleyPoint.getYValue()) {
            halfMaxYValue = peak.getYValue() - ((peak.getYValue() - leftValleyPoint.getYValue()) / 2D);
        } else if (rightValleyPoint.getYValue() > leftValleyPoint.getYValue()) {
            halfMaxYValue = peak.getYValue() - ((peak.getYValue() - rightValleyPoint.getYValue()) / 2D);
        } else {
            halfMaxYValue = peak.getYValue() - ((peak.getYValue() - leftValleyPoint.getYValue()) / 2D);
        }
        //determine the X value on both sides of the peak that corresponds to the half max Y value
        double leftX = pointList.get(0).getXValue(), rightX = pointList.get(pointList.size() - 1).getXValue();
        XYDataItem prevPoint = pointList.get(peakIndex);
        it = pointList.listIterator(peakIndex);
        while (it.hasPrevious()) {
            XYDataItem leftPoint = it.previous();
            if (leftPoint.getYValue() == halfMaxYValue) {
                leftX = leftPoint.getXValue();
                break;
            } else {
                if (leftPoint.getYValue() < halfMaxYValue) {
                    //                        System.out.println("Left X for peak (" + peak.getXValue() + "," + peak.getYValue() + "): ");
                    leftX = calculateXFromYForLineWithTwoPoints(leftPoint, prevPoint, halfMaxYValue);
                    //                        System.out.println("    Left X: (" + leftX + "," + halfMaxYValue + "): ");
                    break;
                } else {
                    prevPoint = leftPoint;
                }
            }
        }
        prevPoint = pointList.get(peakIndex);
        it = pointList.listIterator(peakIndex);
        while (it.hasNext()) {
            XYDataItem rightPoint = it.next();
            if (rightPoint.getYValue() == halfMaxYValue) {
                rightX = rightPoint.getXValue();
                break;
            } else {
                if (rightPoint.getYValue() < halfMaxYValue) {
                    //                        System.out.println("Right X for peak (" + peak.getXValue() + "," + peak.getYValue() + "): ");
                    rightX = calculateXFromYForLineWithTwoPoints(rightPoint, prevPoint, halfMaxYValue);
                    //                        System.out.println("    Right X: (" + leftX + "," + halfMaxYValue + "): ");
                    break;
                } else {
                    prevPoint = rightPoint;
                }
            }
        }
        //store the two points for the half max full width line for this peak
        XYSeries peakSeries = new XYSeries("(" + peak.getXValue() + "," + peak.getYValue() + ")FWHM");
        peakSeries.add(leftX, halfMaxYValue);
        peakSeries.add(rightX, halfMaxYValue);
        seriesList.add(peakSeries);
    }
    return seriesList;
}

From source file: mekhq.Utilities.java

public static Map<String, Integer> sortMapByValue(Map<String, Integer> unsortMap, boolean highFirst) {

    // Convert Map to List
    List<Map.Entry<String, Integer>> list = new LinkedList<Map.Entry<String, Integer>>(unsortMap.entrySet());

    // Sort list with comparator, to compare the Map values
    Collections.sort(list, new Comparator<Map.Entry<String, Integer>>() {
        @Override
        public int compare(Map.Entry<String, Integer> o1, Map.Entry<String, Integer> o2) {
            return (o1.getValue()).compareTo(o2.getValue());
        }
    });

    // Convert sorted map back to a Map
    Map<String, Integer> sortedMap = new LinkedHashMap<String, Integer>();
    if (highFirst) {
        ListIterator<Map.Entry<String, Integer>> li = list.listIterator(list.size());
        while (li.hasPrevious()) {
            Map.Entry<String, Integer> entry = li.previous();
            sortedMap.put(entry.getKey(), entry.getValue());
        }
    } else {
        for (Iterator<Map.Entry<String, Integer>> it = list.iterator(); it.hasNext();) {
            Map.Entry<String, Integer> entry = it.next();
            sortedMap.put(entry.getKey(), entry.getValue());
        }
    }

    return sortedMap;
}

From source file: de.uni_potsdam.hpi.asg.logictool.mapping.SequenceBasedAndGateDecomposer.java

private List<Boolean> evaluateBDD(BDD bddParam, State startState, List<Transition> sequence) {
    List<Boolean> retVal = new ArrayList<>();
    for (int i = sequence.size() - 1; i >= 0; i--) {
        BDD bdd = bddParam.or(factory.zero());
        ListIterator<Transition> it = sequence.listIterator(sequence.size() - i);
        List<Signal> alreadyset = new ArrayList<>();
        while (it.hasPrevious()) {
            BDD sigbdd = null;
            Transition t = it.previous();
            if (!alreadyset.contains(t.getSignal())) {
                switch (t.getEdge()) {
                case falling:
                    sigbdd = getNegBDD(t.getSignal());
                    break;
                case rising:
                    sigbdd = getPosBDD(t.getSignal());
                    break;
                }
                bdd = bdd.restrictWith(sigbdd);
                alreadyset.add(t.getSignal());
            }
        }

        for (Entry<Signal, Value> entry : startState.getStateValues().entrySet()) {
            if (!alreadyset.contains(entry.getKey())) {
                BDD sigbdd = null;
                switch (entry.getValue()) {
                case falling:
                case high:
                    sigbdd = getPosBDD(entry.getKey());
                    break;
                case low:
                case rising:
                    sigbdd = getNegBDD(entry.getKey());
                    break;
                }
                bdd = bdd.restrictWith(sigbdd);
            }
        }
        for (Entry<NetlistVariable, Boolean> entry : netlist.getQuasiSignals().entrySet()) {
            BDD sigbdd = null;
            if (entry.getValue()) {
                //true => Normally 1
                sigbdd = getPosBDD(quasimap.get(entry.getKey()));
            } else {
                sigbdd = getNegBDD(quasimap.get(entry.getKey()));
            }
            bdd = bdd.restrictWith(sigbdd);
        }

        if (bdd.isOne()) {
            retVal.add(true);
        } else if (bdd.isZero()) {
            retVal.add(false);
        } else {
            logger.error("BDD not restricted enough?!");
            return null;
        }
    }

    return retVal;
}