Example usage for java.util.ListIterator.remove()

List of usage examples for java.util.ListIterator.remove()

Introduction

On this page you can find example usages of java.util.ListIterator.remove().

Prototype

void remove();

Document

Removes from the list the last element that was returned by next() or previous() (optional operation).
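
Before the project examples, here is a minimal, self-contained sketch (not taken from any of the sources below) illustrating the typical pattern: call next() to obtain an element, then remove() to delete that element safely while iterating; modifying the list directly during iteration would instead fail with a ConcurrentModificationException.

import java.util.ArrayList;
import java.util.List;
import java.util.ListIterator;

public class ListIteratorRemoveExample {
    public static void main(String[] args) {
        List<Integer> numbers = new ArrayList<>(List.of(1, 2, 3, 4, 5, 6));

        ListIterator<Integer> it = numbers.listIterator();
        while (it.hasNext()) {
            int value = it.next();
            if (value % 2 == 0) {
                // Removes the element last returned by next(); unlike
                // numbers.remove(...), this is safe during iteration.
                it.remove();
            }
        }

        System.out.println(numbers); // prints [1, 3, 5]
    }
}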

Usage

From source file:org.netflux.core.task.compose.SplitTask.java

@Override
protected RecordMetadata computeMetadata(String outputPortName, InputPort changedInputPort,
        RecordMetadata newMetadata) {
    int insertionPoint = Integer.MAX_VALUE;
    RecordMetadata inputMetadata = this.inputPorts.get("input").getMetadata();

    if (inputMetadata != null && inputMetadata.getFieldCount() > 0) {
        List<FieldMetadata> fieldMetadata = new LinkedList<FieldMetadata>(inputMetadata.getFieldMetadata());
        ListIterator<FieldMetadata> fieldMetadataIterator = fieldMetadata.listIterator();
        while (fieldMetadataIterator.hasNext()) {
            FieldMetadata currentFieldMetadata = fieldMetadataIterator.next();
            if (this.fieldNamesToSplitAsSet.contains(currentFieldMetadata.getName())) {
                insertionPoint = Math.min(insertionPoint, fieldMetadataIterator.previousIndex());
                fieldMetadataIterator.remove();
            }
        }

        List<FieldMetadata> splittedMetadata = new LinkedList<FieldMetadata>();
        Iterator<String> splittedFieldNameIterator = this.splittedFieldNames.iterator();
        for (String fieldName : this.fieldNamesToSplit.get(0)) {
            // FIXME: Do this properly: getFieldMetadata should throw an Exception
            if (inputMetadata.getFieldIndex(fieldName) != -1) {
                FieldMetadata currentSplittedMetadata = inputMetadata.getFieldMetadata(fieldName).clone();
                currentSplittedMetadata.setName(splittedFieldNameIterator.next());
                splittedMetadata.add(currentSplittedMetadata);
            } else {
                splittedMetadata.clear();
                insertionPoint = fieldMetadata.size();
                break;
            }
        }

        fieldMetadata.addAll(insertionPoint, splittedMetadata);

        return new RecordMetadata(fieldMetadata);
    } else {
        List<FieldMetadata> emptyMetadata = Collections.emptyList();
        return new RecordMetadata(emptyMetadata);
    }
}

From source file:org.fao.geonet.kernel.security.ldap.LdapUserDetailsManager.java

public void updateUser(UserDetails user) {
    DistinguishedName dn = usernameMapper.buildDn(user.getUsername());

    logger.debug("Updating user '" + user.getUsername() + "' with DN '" + dn + "'");

    List<GrantedAuthority> authorities = getUserAuthorities(dn, user.getUsername());

    DirContextAdapter ctx = loadUserAsContext(dn, user.getUsername());
    ctx.setUpdateMode(true);
    copyToContext(user, ctx);

    // Remove the objectclass attribute from the list of mods (if present).
    List<ModificationItem> mods = new LinkedList<ModificationItem>(Arrays.asList(ctx.getModificationItems()));
    ListIterator<ModificationItem> modIt = mods.listIterator();

    while (modIt.hasNext()) {
        ModificationItem mod = (ModificationItem) modIt.next();
        Attribute a = mod.getAttribute();
        if ("objectclass".equalsIgnoreCase(a.getID())) {
            modIt.remove();
        }
    }

    template.modifyAttributes(dn, mods.toArray(new ModificationItem[mods.size()]));

    // template.rebind(dn, ctx, null);
    // Remove the old authorities and replace them with the new one
    removeAuthorities(dn, authorities);
    addAuthorities(dn, user.getAuthorities());
}

From source file:org.squashtest.tm.service.internal.repository.hibernate.HibernateIterationDao.java

@Override
public void removeFromCampaign(Iteration iteration) {

    Campaign campaign = findCampaignByIterationId(iteration.getId());

    if (campaign == null) {
        return;
    }

    ListIterator<Iteration> iterator = campaign.getIterations().listIterator();
    while (iterator.hasNext()) {
        Iteration ts = iterator.next();
        if (ts.getId().equals(iteration.getId())) {
            iterator.remove();
            break;
        }
    }

}

From source file:org.evosuite.ga.metaheuristics.SPEA2.java

/**
 * Performs the SPEA2 environmental selection step on the union of the archive and
 * the current population.
 *
 * @param union the combined archive and population
 * @return the archive (population) for the next generation
 */
protected List<T> environmentalSelection(List<T> union) {

    List<T> populationCopy = new ArrayList<T>(union.size());
    populationCopy.addAll(union);

    // First step is to copy all nondominated individuals, i.e., those
    // which have a fitness lower than one, from archive and population
    // to the archive of the next generation
    List<T> tmpPopulation = new ArrayList<T>(populationCopy.size());
    Iterator<T> it = populationCopy.iterator();
    while (it.hasNext()) {
        T individual = it.next();
        if (individual.getDistance() < 1.0) {
            tmpPopulation.add(individual);
            it.remove();
        }
    }

    // If the nondominated front fits exactly into the archive, the environmental
    // selection step is completed
    if (tmpPopulation.size() == Properties.POPULATION) {
        return tmpPopulation;
    }
    // If archive is too small, the best dominated individuals in the previous
    // archive and population are copied to the new archive
    else if (tmpPopulation.size() < Properties.POPULATION) {
        Collections.sort(populationCopy, new StrengthFitnessComparator());
        int remain = (union.size() < Properties.POPULATION ? union.size() : Properties.POPULATION)
                - tmpPopulation.size();
        for (int i = 0; i < remain; i++) {
            tmpPopulation.add(populationCopy.get(i));
        }

        return tmpPopulation;
    }

    // when the size of the current nondominated (multi)set exceeds the archive size,
    // an archive truncation procedure is invoked which iteratively removes individuals
    // from the new front until it fits exactly into the archive. The individual which
    // has the minimum distance to another individual is chosen at each stage; if there
    // are several individuals with minimum distance the tie is broken by considering the
    // second smallest distances and so forth.

    double[][] distance = this.euclideanDistanceMatrix(tmpPopulation);

    List<List<Pair<Integer, Double>>> distanceList = new LinkedList<List<Pair<Integer, Double>>>();
    for (int i = 0; i < tmpPopulation.size(); i++) {
        List<Pair<Integer, Double>> distanceNodeList = new LinkedList<Pair<Integer, Double>>();

        for (int j = 0; j < tmpPopulation.size(); j++) {
            if (i != j) {
                distanceNodeList.add(Pair.of(j, distance[i][j]));
            }
        }

        // sort by distance so that later we can just get the first element, i.e.,
        // the one with the smallest distance
        Collections.sort(distanceNodeList, new Comparator<Pair<Integer, Double>>() {
            @Override
            public int compare(Pair<Integer, Double> pair1, Pair<Integer, Double> pair2) {
                if (pair1.getRight() < pair2.getRight()) {
                    return -1;
                } else if (pair1.getRight() > pair2.getRight()) {
                    return 1;
                } else {
                    return 0;
                }
            }
        });

        distanceList.add(distanceNodeList);
    }

    while (tmpPopulation.size() > Properties.POPULATION) {
        double minDistance = Double.POSITIVE_INFINITY;
        int minimumIndex = -1;

        for (int i = 0; i < distanceList.size(); i++) {
            List<Pair<Integer, Double>> distances = distanceList.get(i);
            Pair<Integer, Double> point = distances.get(0);

            // as this list is sorted, we just need to get the first element of it.
            if (point.getRight() < minDistance) {
                minDistance = point.getRight();
                minimumIndex = i;
            } else if (point.getRight() == minDistance) {
                // as there is a tie, the k-th smallest distances have to be searched for

                // find the k-th smallest distance that is not equal to the one just
                // selected. i.e., go through all distances and skip the ones that
                // are equal.
                for (int k = 0; k < distances.size(); k++) {
                    double kdist1 = distances.get(k).getRight();
                    double kdist2 = distanceList.get(minimumIndex).get(k).getRight();

                    if (kdist1 == kdist2) {
                        continue;
                    } else if (kdist1 < kdist2) {
                        minimumIndex = i;
                    }

                    break;
                }
            }
        }

        assert minimumIndex != -1;

        // remove the solution with the smallest distance
        tmpPopulation.remove(minimumIndex);
        distanceList.remove(minimumIndex);

        // remove from the neighbours' list of neighbours, the one we just removed
        for (List<Pair<Integer, Double>> distances : distanceList) {
            ListIterator<Pair<Integer, Double>> iterator = distances.listIterator();
            while (iterator.hasNext()) {
                if (iterator.next().getLeft() == minimumIndex) {
                    iterator.remove();
                    // TODO can we break the loop? is there any chance that 'distances'
                    // has repeated elements?!
                }
            }
        }
    }

    return tmpPopulation;
}

From source file:org.broadleafcommerce.core.order.dao.OrderDaoImpl.java

@Override
@SuppressWarnings("unchecked")
public Order readNamedOrderForCustomer(final Customer customer, final String name) {
    final Query query = em.createNamedQuery("BC_READ_NAMED_ORDER_FOR_CUSTOMER");
    query.setParameter("customerId", customer.getId());
    query.setParameter("orderStatus", OrderStatus.NAMED.getType());
    query.setParameter("orderName", name);
    List<Order> orders = query.getResultList();

    // Filter out orders that don't match the current locale (if one is set)
    if (BroadleafRequestContext.getBroadleafRequestContext() != null) {
        ListIterator<Order> iter = orders.listIterator();
        while (iter.hasNext()) {
            Locale locale = BroadleafRequestContext.getBroadleafRequestContext().getLocale();
            Order order = iter.next();
            if (locale != null && !locale.equals(order.getLocale())) {
                iter.remove();
            }
        }
    }

    // Apply any additional filters that extension modules have registered
    if (orders != null && !orders.isEmpty() && extensionManager != null) {
        extensionManager.getProxy().applyAdditionalOrderLookupFilter(customer, name, orders);
    }

    return orders == null || orders.isEmpty() ? null : orders.get(0);
}

From source file:org.apache.solr.handler.component.HttpShardHandler.java

/**
 * A distributed request is made via {@link LBHttpSolrClient} to the first live server in the URL list.
 * This means it is just as likely to choose current host as any of the other hosts.
 * This function makes sure that the cores of current host are always put first in the URL list.
 * If all nodes prefer local cores, then a bad/heavily-loaded node will receive fewer requests from healthy nodes.
 * This will help prevent a distributed deadlock or timeouts in all the healthy nodes due to one bad node.
 */
private void preferCurrentHostForDistributedReq(final String currentHostAddress, final List<String> urls) {
    if (log.isDebugEnabled())
        log.debug("Trying to prefer local shard on {} among the urls: {}", currentHostAddress,
                Arrays.toString(urls.toArray()));

    ListIterator<String> itr = urls.listIterator();
    while (itr.hasNext()) {
        String url = itr.next();
        if (url.startsWith(currentHostAddress)) {
            // move current URL to the fore-front
            itr.remove();
            urls.add(0, url);

            if (log.isDebugEnabled())
                log.debug("Applied local shard preference for urls: {}", Arrays.toString(urls.toArray()));

            break;
        }
    }
}

From source file:com.xpn.xwiki.plugin.tag.TagPlugin.java

/**
 * Remove a tag from a document. The document is saved (minor edit) after this operation.
 *
 * @param tag tag to remove.
 * @param document the document.
 * @param context XWiki context.
 * @return the {@link TagOperationResult result} of the operation
 * @throws XWikiException if document save fails for some reason (Insufficient rights, DB access, etc).
 */
public TagOperationResult removeTagFromDocument(String tag, XWikiDocument document, XWikiContext context)
        throws XWikiException {
    List<String> tags = getTagsFromDocument(document);
    boolean needsUpdate = false;

    ListIterator<String> it = tags.listIterator();
    while (it.hasNext()) {
        if (tag.equalsIgnoreCase(it.next())) {
            needsUpdate = true;
            it.remove();
        }
    }

    if (needsUpdate) {
        setDocumentTags(document, tags, context);
        List<String> commentArgs = new ArrayList<String>();
        commentArgs.add(tag);
        String comment = context.getMessageTool().get("plugin.tag.editcomment.removed", commentArgs);

        // Since we're changing the document we need to set the new author
        document.setAuthorReference(context.getUserReference());

        context.getWiki().saveDocument(document, comment, true, context);

        return TagOperationResult.OK;
    } else {
        // Document doesn't contain this tag.
        return TagOperationResult.NO_EFFECT;
    }
}

From source file:au.com.dektech.dektalk.MainActivity.java

private boolean havePermissions(ArrayList<String> permissions) {
    boolean allgranted = true;
    ListIterator<String> it = permissions.listIterator();
    while (it.hasNext()) {
        if (ActivityCompat.checkSelfPermission(this, it.next()) != PackageManager.PERMISSION_GRANTED) {
            allgranted = false;
        } else {
            // permission granted, remove it from permissions
            it.remove();
        }
    }
    return allgranted;
}

From source file:com.ivli.roim.controls.ChartControl.java

void removeMarker(DomainMarker aM) {
    ListIterator<Interpolation> it = iInterpolations.listIterator();

    while (it.hasNext()) {
        Interpolation i = it.next();
        if (i.iLhs == aM || i.iRhs == aM) {
            it.remove();
            i.close();
        }
    }

    getChart().getXYPlot().removeRangeMarker(aM.getLinkedMarker(), Layer.FOREGROUND);
    getChart().getXYPlot().removeDomainMarker(aM, Layer.FOREGROUND);
}

From source file:org.netflux.core.task.compose.CombineTask.java

@Override
protected RecordMetadata computeMetadata(String outputPortName, InputPort changedInputPort,
        RecordMetadata newMetadata) {
    int insertionPoint = Integer.MAX_VALUE;
    RecordMetadata inputMetadata = this.inputPorts.get("input").getMetadata();

    if (inputMetadata != null && inputMetadata.getFieldCount() > 0) {
        List<FieldMetadata> fieldMetadata = new LinkedList<FieldMetadata>(inputMetadata.getFieldMetadata());
        ListIterator<FieldMetadata> fieldMetadataIterator = fieldMetadata.listIterator();
        while (fieldMetadataIterator.hasNext()) {
            FieldMetadata currentFieldMetadata = fieldMetadataIterator.next();
            if (this.fieldNamesToCombineAsSet.contains(currentFieldMetadata.getName())) {
                insertionPoint = Math.min(insertionPoint, fieldMetadataIterator.previousIndex());
                fieldMetadataIterator.remove();
            }
        }

        this.groupingKeyFieldNames = new ArrayList<String>(new RecordMetadata(fieldMetadata).getFieldNames());

        List<FieldMetadata> combinedMetadata = new LinkedList<FieldMetadata>();
        for (List<String> groupOfCombinedFieldNames : this.getCombinedFieldNames()) {
            Iterator<String> fieldNameToCombineIterator = this.getFieldNamesToCombine().iterator();
            for (String combinedFieldName : groupOfCombinedFieldNames) {
                // FIXME: Do this properly: getFieldMetadata should throw an Exception
                String fieldNameToCombine = fieldNameToCombineIterator.next();
                FieldMetadata currentFieldMetadata = inputMetadata.getFieldMetadata(fieldNameToCombine);
                if (currentFieldMetadata != null) {
                    FieldMetadata currentCombinedMetadata = currentFieldMetadata.clone();
                    currentCombinedMetadata.setName(combinedFieldName);
                    combinedMetadata.add(currentCombinedMetadata);
                } else {
                    combinedMetadata.clear();
                    insertionPoint = fieldMetadata.size();
                    break;
                }
            }
        }

        fieldMetadata.addAll(insertionPoint, combinedMetadata);

        return new RecordMetadata(fieldMetadata);
    } else {
        this.groupingKeyFieldNames = Collections.emptyList();
        List<FieldMetadata> emptyMetadata = Collections.emptyList();
        return new RecordMetadata(emptyMetadata);
    }
}