Example usage for java.util List listIterator

List of usage examples for java.util List listIterator

Introduction

On this page you can find example usage for java.util List listIterator.

Prototype

ListIterator<E> listIterator();

Document

Returns a list iterator over the elements in this list (in proper sequence).
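
As a quick illustration (a minimal sketch with made-up data, not taken from any of the source files below), the returned ListIterator supports forward and backward traversal as well as in-place replacement and removal:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.ListIterator;

public class ListIteratorExample {
    public static void main(String[] args) {
        List<String> names = new ArrayList<>(Arrays.asList("alpha", "beta", "gamma"));
        ListIterator<String> it = names.listIterator();

        // Forward traversal with in-place replacement and removal.
        while (it.hasNext()) {
            String name = it.next();
            if (name.startsWith("b")) {
                it.set(name.toUpperCase()); // replaces the element last returned by next()
            } else if (name.startsWith("g")) {
                it.remove();                // removes the element last returned by next()
            }
        }
        // names is now [alpha, BETA]

        // The same iterator can then walk backwards to the front of the list.
        while (it.hasPrevious()) {
            System.out.println(it.previous()); // prints BETA, then alpha
        }
    }
}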

Usage

From source file:org.apache.hadoop.hive.ql.parse.ImportSemanticAnalyzer.java

public static boolean prepareImport(boolean isLocationSet, boolean isExternalSet, boolean isPartSpecSet,
        boolean waitOnPrecursor, String parsedLocation, String parsedTableName, String parsedDbName,
        LinkedHashMap<String, String> parsedPartSpec, String fromLocn,
        EximUtil.SemanticAnalyzerWrapperContext x, Map<String, Long> dbsUpdated,
        Map<String, Long> tablesUpdated) throws IOException, MetaException, HiveException, URISyntaxException {

    // initialize load path
    URI fromURI = EximUtil.getValidatedURI(x.getConf(), stripQuotes(fromLocn));
    Path fromPath = new Path(fromURI.getScheme(), fromURI.getAuthority(), fromURI.getPath());

    FileSystem fs = FileSystem.get(fromURI, x.getConf());
    x.getInputs().add(toReadEntity(fromPath, x.getConf()));

    MetaData rv = new MetaData();
    try {
        rv = EximUtil.readMetaData(fs, new Path(fromPath, EximUtil.METADATA_NAME));
    } catch (IOException e) {
        throw new SemanticException(ErrorMsg.INVALID_PATH.getMsg(), e);
    }

    ReplicationSpec replicationSpec = rv.getReplicationSpec();
    if (replicationSpec.isNoop()) {
        // nothing to do here, silently return.
        return false;
    }

    String dbname = SessionState.get().getCurrentDatabase();
    if ((parsedDbName != null) && (!parsedDbName.isEmpty())) {
        // If the parsed statement contained a db.tablename specification, prefer that.
        dbname = parsedDbName;
    }
    if (dbsUpdated != null) {
        dbsUpdated.put(dbname, Long.valueOf(replicationSpec.get(ReplicationSpec.KEY.EVENT_ID)));
    }

    // Create table associated with the import
    // Executed if relevant, and used to contain all the other details about the table if not.
    ImportTableDesc tblDesc;
    try {
        tblDesc = getBaseCreateTableDescFromTable(dbname, rv.getTable());
    } catch (Exception e) {
        throw new HiveException(e);
    }

    if ((replicationSpec != null) && replicationSpec.isInReplicationScope()) {
        tblDesc.setReplicationSpec(replicationSpec);
    }

    if (isExternalSet) {
        tblDesc.setExternal(isExternalSet);
        // This condition-check could have been avoided, but to honour the old
        // default of not calling if it wasn't set, we retain that behaviour.
        // TODO:cleanup after verification that the outer if isn't really needed here
    }

    if (isLocationSet) {
        tblDesc.setLocation(parsedLocation);
        x.getInputs().add(toReadEntity(new Path(parsedLocation), x.getConf()));
    }

    if ((parsedTableName != null) && (!parsedTableName.isEmpty())) {
        tblDesc.setTableName(parsedTableName);
    }
    if (tablesUpdated != null) {
        tablesUpdated.put(dbname + "." + tblDesc.getTableName(),
                Long.valueOf(replicationSpec.get(ReplicationSpec.KEY.EVENT_ID)));
    }

    List<AddPartitionDesc> partitionDescs = new ArrayList<AddPartitionDesc>();
    Iterable<Partition> partitions = rv.getPartitions();
    for (Partition partition : partitions) {
        // TODO: this should ideally not create AddPartitionDesc per partition
        AddPartitionDesc partsDesc = getBaseAddPartitionDescFromPartition(fromPath, dbname, tblDesc, partition);
        partitionDescs.add(partsDesc);
    }

    if (isPartSpecSet) {
        // The import specification asked for only a particular partition to be loaded
        // We load only that, and ignore all the others.
        boolean found = false;
        for (Iterator<AddPartitionDesc> partnIter = partitionDescs.listIterator(); partnIter.hasNext();) {
            AddPartitionDesc addPartitionDesc = partnIter.next();
            if (!found && addPartitionDesc.getPartition(0).getPartSpec().equals(parsedPartSpec)) {
                found = true;
            } else {
                partnIter.remove();
            }
        }
        if (!found) {
            throw new SemanticException(
                    ErrorMsg.INVALID_PARTITION.getMsg(" - Specified partition not found in import directory"));
        }
    }

    if (tblDesc.getTableName() == null) {
        // Either we got the tablename from the IMPORT statement (first priority)
        // or from the export dump.
        throw new SemanticException(ErrorMsg.NEED_TABLE_SPECIFICATION.getMsg());
    } else {
        x.getConf().set("import.destination.table", tblDesc.getTableName());
        for (AddPartitionDesc addPartitionDesc : partitionDescs) {
            addPartitionDesc.setTableName(tblDesc.getTableName());
        }
    }

    Warehouse wh = new Warehouse(x.getConf());
    Table table = tableIfExists(tblDesc, x.getHive());
    boolean tableExists = false;

    if (table != null) {
        checkTable(table, tblDesc, replicationSpec, x.getConf());
        x.getLOG().debug("table " + tblDesc.getTableName() + " exists: metadata checked");
        tableExists = true;
    }

    if (!replicationSpec.isInReplicationScope()) {
        createRegularImportTasks(tblDesc, partitionDescs, isPartSpecSet, replicationSpec, table, fromURI, fs,
                wh, x);
    } else {
        createReplImportTasks(tblDesc, partitionDescs, isPartSpecSet, replicationSpec, waitOnPrecursor, table,
                fromURI, fs, wh, x);
    }
    return tableExists;
}

From source file:org.apache.jmeter.report.processor.ExternalSampleSorter.java

private List<Sample> merge(List<Sample> left, List<Sample> right) {
    ArrayList<Sample> out = new ArrayList<>();
    ListIterator<Sample> l = left.listIterator();
    ListIterator<Sample> r = right.listIterator();
    while (l.hasNext() || r.hasNext()) {
        if (l.hasNext() && r.hasNext()) {
            Sample firstLeft = l.next();
            Sample firstRight = r.next();
            if (!revertedSort && sampleComparator.compare(firstLeft, firstRight) < 0
                    || revertedSort && sampleComparator.compare(firstLeft, firstRight) >= 0) {
                out.add(firstLeft);
                r.previous();
            } else {
                out.add(firstRight);
                l.previous();
            }
        } else if (l.hasNext()) {
            out.add(l.next());
        } else if (r.hasNext()) {
            out.add(r.next());
        }
    }
    return out;
}

From source file:com.asakusafw.runtime.directio.hadoop.HadoopDataSourceUtil.java

private static List<Path> consumeStep(LinkedList<Segment> segments) {
    assert segments != null;
    assert segments.isEmpty() == false;
    assert segments.getFirst().isTraverse() == false;
    List<Path> results = new ArrayList<>();

    Segment current = segments.removeFirst();
    for (String segment : resolve(current)) {
        results.add(new Path(segment));
    }
    while (isGlobRequired(current) && segments.isEmpty() == false
            && segments.getFirst().isTraverse() == false) {
        current = segments.removeFirst();
        Set<String> suffixCandidates = resolve(current);
        if (suffixCandidates.size() == 1) {
            String suffix = suffixCandidates.iterator().next();
            for (ListIterator<Path> i = results.listIterator(); i.hasNext();) {
                Path parent = i.next();
                i.set(new Path(parent, suffix));
            }
        } else {
            List<Path> nextResults = new ArrayList<>();
            for (Path parent : results) {
                for (String suffix : suffixCandidates) {
                    nextResults.add(new Path(parent, suffix));
                }
            }
            results = nextResults;
        }
    }

    Set<Path> saw = new HashSet<>();
    for (Iterator<Path> iter = results.iterator(); iter.hasNext();) {
        Path path = iter.next();
        if (saw.contains(path)) {
            iter.remove();
        } else {
            saw.add(path);
        }
    }
    return results;
}

From source file:oct.analysis.application.dat.OCTAnalysisManager.java

public List<LinePoint> findAbsoluteDiff(List<LinePoint> fa, List<LinePoint> fb) {
    ListIterator<LinePoint> faIter, fbIter;
    if (fa.get(0).getX() == fb.get(0).getX()) {
        faIter = fa.listIterator();
        fbIter = fb.listIterator();
    } else if (fa.get(0).getX() > fb.get(0).getX()) {
        faIter = fa.listIterator();
        fbIter = fb.listIterator(fa.get(0).getX() - fb.get(0).getX());
    } else {
        faIter = fa.listIterator(fb.get(0).getX() - fa.get(0).getX());
        fbIter = fb.listIterator();
    }
    LinkedList<LinePoint> retLine = new LinkedList<>();
    while (faIter.hasNext() && fbIter.hasNext()) {
        LinePoint pointA = faIter.next();
        LinePoint pointB = fbIter.next();
        retLine.add(new LinePoint(pointA.getX(), Math.abs(pointA.getY() - pointB.getY())));
    }
    return retLine;
}

From source file:net.sf.jasperreports.engine.fill.PartReportFiller.java

protected void fillDelayedEvaluatedParts(List<DelayedPrintPart> parts, byte evaluation) throws JRException {
    for (ListIterator<DelayedPrintPart> it = parts.listIterator(); it.hasNext();) {
        DelayedPrintPart part = it.next();
        it.remove();

        fillDelayedPart(evaluation, part);
    }
}

From source file:umontreal.iro.lecuyer.charts.HistogramSeriesCollection.java

public String toLatex(double XScale, double YScale, double XShift, double YShift, double xmin, double xmax,
        double ymin, double ymax) {

    // Compute the actual bounds of the chart, taking the axis positions into account
    xmin = Math.min(XShift, xmin);
    xmax = Math.max(XShift, xmax);
    ymin = Math.min(YShift, ymin);
    ymax = Math.max(YShift, ymax);

    CustomHistogramDataset tempSeriesCollection = (CustomHistogramDataset) seriesCollection;
    Formatter formatter = new Formatter(Locale.US);
    double var;
    double margin = ((XYBarRenderer) renderer).getMargin();

    for (int i = tempSeriesCollection.getSeriesCount() - 1; i >= 0; i--) {
        List temp = tempSeriesCollection.getBins(i);
        ListIterator iter = temp.listIterator();

        Color color = (Color) renderer.getSeriesPaint(i);
        String colorString = detectXColorClassic(color);
        if (colorString == null) {
            colorString = "color" + i;
            formatter.format("\\definecolor{%s}{rgb}{%.2f, %.2f, %.2f}%n", colorString, color.getRed() / 255.0,
                    color.getGreen() / 255.0, color.getBlue() / 255.0);
        }

        HistogramBin currentBin = null;
        while (iter.hasNext()) {
            double currentMargin;
            currentBin = (HistogramBin) iter.next();
            currentMargin = ((margin * (currentBin.getEndBoundary() - currentBin.getStartBoundary()))) * XScale;
            if ((currentBin.getStartBoundary() >= xmin && currentBin.getStartBoundary() <= xmax)
                    && (currentBin.getCount() >= ymin && currentBin.getCount() <= ymax)) {
                var = Math.min(currentBin.getEndBoundary(), xmax);
                if (filled[i]) {
                    formatter.format(
                            "\\filldraw [line width=%.2fpt, opacity=%.2f, color=%s] ([xshift=%.4f] %.4f, %.4f) rectangle ([xshift=-%.4f] %.4f, %.4f); %%%n",
                            lineWidth[i], (color.getAlpha() / 255.0), colorString, currentMargin,
                            (currentBin.getStartBoundary() - XShift) * XScale, 0.0, currentMargin,
                            (var - XShift) * XScale, (currentBin.getCount() - YShift) * YScale);
                } else {
                    formatter.format(
                            "\\draw [line width=%.2fpt, color=%s] ([xshift=%.4f] %.4f, %.4f) rectangle ([xshift=-%.4f] %.4f, %.4f); %%%n",
                            lineWidth[i], colorString, currentMargin,
                            (currentBin.getStartBoundary() - XShift) * XScale, 0.0, currentMargin,
                            (var - XShift) * XScale, (currentBin.getCount() - YShift) * YScale);
                }
            } else if ((currentBin.getStartBoundary() >= xmin && currentBin.getStartBoundary() <= xmax)
                    && (currentBin.getCount() >= ymin && currentBin.getCount() > ymax)) { // Case where the rectangle cannot be displayed in full (too tall)
                var = Math.min(currentBin.getEndBoundary(), xmax);
                if (filled[i]) {
                    formatter.format(
                            "\\filldraw [line width=%.2fpt,  opacity=%.2f, color=%s] ([xshift=%.4f] %.4f, %.4f) rectangle ([xshift=-%.4f] %.4f, %.4f); %%%n",
                            lineWidth[i], (color.getAlpha() / 255.0), colorString, currentMargin,
                            (currentBin.getStartBoundary() - XShift) * XScale, 0.0, currentMargin,
                            (var - XShift) * XScale, (ymax - YShift) * YScale);
                    formatter.format(
                            "\\draw [line width=%.2fpt, color=%s, style=dotted] ([xshift=%.4f] %.4f, %.4f) rectangle ([yshift=3mm, xshift=-%.4f] %.4f, %.4f); %%%n",
                            lineWidth[i], colorString, currentMargin,
                            (currentBin.getStartBoundary() - XShift) * XScale, (ymax - YShift) * YScale,
                            currentMargin, (var - XShift) * XScale, (ymax - YShift) * YScale);
                } else {
                    formatter.format(
                            "\\draw [line width=%.2fpt, color=%s] ([xshift=%.4f] %.4f, %.4f) rectangle ([xshift=-%.4f] %.4f, %.4f); %%%n",
                            lineWidth[i], colorString, currentMargin,
                            (currentBin.getStartBoundary() - XShift) * XScale, 0.0, currentMargin,
                            (var - XShift) * XScale, (ymax - YShift) * YScale);

                    formatter.format(
                            "\\draw [line width=%.2fpt, color=%s, style=dotted] ([xshift=%.4f] %.4f, %.4f) rectangle ([yshift=3mm, xshift=-%.4f] %.4f, %.4f); %%%n",
                            lineWidth[i], colorString, currentMargin,
                            (currentBin.getStartBoundary() - XShift) * XScale, (ymax - YShift) * YScale,
                            currentMargin, (var - XShift) * XScale, (ymax - YShift) * YScale);
                }
            }
        }
    }
    return formatter.toString();
}

From source file:de.hofuniversity.iisys.neo4j.websock.util.JSONList.java

@Override
public ListIterator<Object> listIterator() {
    final List<Object> itList = new LinkedList<Object>();
    final int size = fArray.length();

    for (int i = 0; i < size; ++i) {
        try {
            itList.add(fromInternal(fArray.get(i)));
        } catch (JSONException e) {
            e.printStackTrace();
        }
    }

    return itList.listIterator();
}

From source file:com.caricah.iotracah.datastore.ignitecache.internal.impl.SubscriptionFilterHandler.java

public Observable<IotSubscriptionFilter> getTopicFilterTree(String partition,
        List<String> topicFilterTreeRoute) {

    return Observable.create(observer -> {

        List<Long> collectingParentIdList = new ArrayList<>();

        collectingParentIdList.add(0l);

        List<String> growingTitles = new ArrayList<>();

        ListIterator<String> pathIterator = topicFilterTreeRoute.listIterator();

        try {

            while (pathIterator.hasNext()) {

                String topicPart = pathIterator.next();

                log.debug(" getTopicFilterTree : current path in tree is : {}", topicPart);

                growingTitles.add(topicPart);

                List<Long> parentIdList = new ArrayList<>(collectingParentIdList);
                collectingParentIdList.clear();

                for (Long parentId : parentIdList) {

                    log.debug(" getTopicFilterTree : Dealing with parent id : {} and titles is {}", parentId,
                            growingTitles);

                    if (Constant.MULTI_LEVEL_WILDCARD.equals(topicPart)) {

                        getMultiLevelWildCard(observer, partition, parentId);
                    } else if (Constant.SINGLE_LEVEL_WILDCARD.equals(topicPart)) {

                        String query = "partitionId = ? AND parentId = ? ";
                        Object[] params = { partition, parentId };

                        getByQuery(IotSubscriptionFilter.class, query, params).toBlocking()
                                .forEach(subscriptionFilter -> {

                                    log.debug(" getTopicFilterTree : Found matching single level filter : {}",
                                            subscriptionFilter);

                                    if (pathIterator.hasNext()) {
                                        collectingParentIdList.add(subscriptionFilter.getId());

                                    } else {
                                        observer.onNext(subscriptionFilter);
                                    }

                                });

                    } else {

                        String query = "partitionId = ? AND parentId = ? AND name = ? ";

                        String joinedTopicName = String.join(Constant.PATH_SEPARATOR, growingTitles);

                        Object[] params = new Object[] { partition, parentId, joinedTopicName };

                        getByQuery(IotSubscriptionFilter.class, query, params).toBlocking()
                                .forEach(subscriptionFilter -> {

                                    log.debug(" getTopicFilterTree : Found matching point filter : {}",
                                            subscriptionFilter);

                                    if (pathIterator.hasNext()) {
                                        collectingParentIdList.add(subscriptionFilter.getId());
                                    } else {
                                        observer.onNext(subscriptionFilter);
                                    }

                                });

                    }
                }

            }

            observer.onCompleted();

        } catch (Exception e) {
            observer.onError(e);
        }

    });

}

From source file:com.voxeo.moho.remote.impl.CallImpl.java

@Override
public ListIterator<String> getHeaders(String name) {
    List<String> list = new ArrayList<String>();
    String value = _headers.get(name);
    list.add(value);
    return list.listIterator();
}

From source file:org.apache.solr.handler.component.HttpShardHandler.java

/**
 * A distributed request is made via {@link LBHttpSolrClient} to the first live server in the URL list.
 * This means it is just as likely to choose the current host as any of the other hosts.
 * This function makes sure that the cores of the current host are always put first in the URL list.
 * If all nodes prefer local cores, then a bad or heavily loaded node will receive fewer requests from healthy nodes.
 * This helps prevent a distributed deadlock or timeouts in all the healthy nodes due to one bad node.
 */
private void preferCurrentHostForDistributedReq(final String currentHostAddress, final List<String> urls) {
    if (log.isDebugEnabled())
        log.debug("Trying to prefer local shard on {} among the urls: {}", currentHostAddress,
                Arrays.toString(urls.toArray()));

    ListIterator<String> itr = urls.listIterator();
    while (itr.hasNext()) {
        String url = itr.next();
        if (url.startsWith(currentHostAddress)) {
            // move current URL to the fore-front
            itr.remove();
            urls.add(0, url);

            if (log.isDebugEnabled())
                log.debug("Applied local shard preference for urls: {}", Arrays.toString(urls.toArray()));

            break;
        }
    }
}