Example usage for java.util.Queue.isEmpty()

List of usage examples for java.util.Queue.isEmpty()

Introduction

On this page you can find usage examples for java.util.Queue.isEmpty().

Prototype

boolean isEmpty();

Document

Returns true if this collection contains no elements.
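
Before the real-world examples, here is a minimal, self-contained sketch of the most common pattern: offering elements and then draining the queue while isEmpty() returns false. The class name and element values are illustrative only and do not come from the projects listed under Usage.

import java.util.ArrayDeque;
import java.util.Queue;

public class QueueIsEmptyExample {
    public static void main(String[] args) {
        Queue<String> queue = new ArrayDeque<>();
        queue.offer("first");
        queue.offer("second");

        // Drain the queue: isEmpty() becomes true once every element has been polled.
        while (!queue.isEmpty()) {
            System.out.println(queue.poll());
        }

        System.out.println("empty: " + queue.isEmpty()); // prints "empty: true"
    }
}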

Usage

From source file:org.kuali.rice.krad.uif.lifecycle.ViewLifecyclePhaseBase.java

/**
 * {@inheritDoc}
 */
@Override
public String toString() {
    StringBuilder sb = new StringBuilder();
    Queue<ViewLifecyclePhase> toPrint = new LinkedList<ViewLifecyclePhase>();
    toPrint.offer(this);
    while (!toPrint.isEmpty()) {
        ViewLifecyclePhase tp = toPrint.poll();

        if (tp.getElement() == null) {
            sb.append("\n      ");
            sb.append(tp.getClass().getSimpleName());
            sb.append(" (recycled)");
            continue;
        }

        String indent;
        if (tp == this) {
            sb.append("\nProcessed? ");
            sb.append(processed);
            indent = "\n";
        } else {
            indent = "\n    ";
        }
        sb.append(indent);

        sb.append(tp.getClass().getSimpleName());
        sb.append(" ");
        sb.append(System.identityHashCode(tp));
        sb.append(" ");
        sb.append(tp.getViewPath());
        sb.append(" ");
        sb.append(tp.getElement().getClass().getSimpleName());
        sb.append(" ");
        sb.append(tp.getElement().getId());
        sb.append(" ");
        sb.append(pendingSuccessors);

        if (tp == this) {
            sb.append("\nPredecessor Phases:");
        }

        ViewLifecyclePhase tpredecessor = tp.getPredecessor();
        if (tpredecessor != null) {
            toPrint.add(tpredecessor);
        }
    }
    return sb.toString();
}

From source file:sadl.models.pdrta.PDRTA.java

@Override
public int getTransitionCount() {
    int result = 0;
    final Queue<PDRTAState> q = new ArrayDeque<>();
    final Set<PDRTAState> found = new HashSet<>();
    q.add(root);
    found.add(root);
    while (!q.isEmpty()) {
        final PDRTAState s = q.remove();
        for (int i = 0; i < input.getAlphSize(); i++) {
            final Set<Entry<Integer, Interval>> ins = s.getIntervals(i).entrySet();
            for (final Entry<Integer, Interval> eIn : ins) {
                final Interval in = eIn.getValue();
                final PDRTAState t = in.getTarget();
                if (t != null) {
                    result++;
                    // also enqueue unvisited target states so the traversal covers the whole automaton
                    if (found.add(t)) {
                        q.add(t);
                    }
                }
            }
        }
    }
    return result;
}

From source file:de.csw.lucene.ConceptFilter.java

/**
 * advances to the next token in the stream.
 * Takes into account that terms from the ontology might be constructed
 * out of several consecutive tokens.
 * @return false at EOS
 */
@Override
public boolean incrementToken() throws IOException {

    boolean hasMoreToken = innerNextToken();
    if (!hasMoreToken) {
        return false;
    }

    Queue<AttributeSource.State> lookAhead = new LinkedList<AttributeSource.State>();
    List<String> terms = new ArrayList<String>();
    terms.add(String.copyValueOf(charTermAttribute.buffer(), 0, charTermAttribute.length()));

    while (index.isPrefix(terms) && hasMoreToken) {
        lookAhead.add(captureState());
        hasMoreToken = innerNextToken();
        terms.add(String.copyValueOf(charTermAttribute.buffer(), 0, charTermAttribute.length()));
    }

    // if we have a match ...
    if (index.hasExactMatches(StringUtils.join(terms.toArray(), OntologyIndex.PREFIX_SEPARATOR))) {

        // ..then we consume all elements in the look ahead, if present
        if (!lookAhead.isEmpty()) {
            int maxEndOffset = offsetAttribute.endOffset();
            restoreState(lookAhead.poll());
            terms.remove(0); // already present in current token
            for (String term : terms) {
                charTermAttribute.append(OntologyIndex.PREFIX_SEPARATOR);
                charTermAttribute.append(term);
            }

            offsetAttribute.setOffset(offsetAttribute.startOffset(), maxEndOffset);
        }
        typeAttribute.setType(CONCEPT_TYPE);
        if (log.isTraceEnabled()) {
            log.trace("Concept token recognized: "
                    + String.copyValueOf(charTermAttribute.buffer(), 0, charTermAttribute.length()));
        }

    } else {

        // .. else we push back in the queue the tokens already read
        if (!lookAhead.isEmpty()) {
            lookAhead.add(captureState());
            restoreState(lookAhead.poll());
            for (AttributeSource.State laterToken : lookAhead) {
                queue.add(laterToken);
            }
        }
    }

    return hasMoreToken;
}

From source file:eu.stratosphere.runtime.io.channels.InputChannel.java

@Override
public void destroy() {
    final Queue<Buffer> buffersToRecycle = new ArrayDeque<Buffer>();

    synchronized (this.queuedEnvelopes) {
        this.destroyCalled = true;

        while (!this.queuedEnvelopes.isEmpty()) {
            final Envelope envelope = this.queuedEnvelopes.poll();
            if (envelope.getBuffer() != null) {
                buffersToRecycle.add(envelope.getBuffer());
            }
        }
    }

    while (!buffersToRecycle.isEmpty()) {
        buffersToRecycle.poll().recycleBuffer();
    }
}

From source file:org.onebusaway.uk.network_rail.gtfs_realtime.graph.PositionBerthToStanoxGraphMain.java

private void interpolateBerthLocations() {
    int index = 0;
    for (RawBerthNode rootNode : _berthNodesToLocations.keySet()) {
        if (index % 100 == 0) {
            _log.info("node=" + index + "/" + _berthNodesToLocations.keySet().size());
        }
        index++;
        Location fromLocation = _berthNodesToLocations.get(rootNode);
        Queue<OrderedRawBerthNode> queue = new PriorityQueue<OrderedRawBerthNode>();
        queue.add(new OrderedRawBerthNode(rootNode, null, 0.0));

        Map<RawBerthNode, RawBerthNode> parents = new HashMap<RawBerthNode, RawBerthNode>();
        Set<RawBerthNode> visited = new HashSet<RawBerthNode>();

        while (!queue.isEmpty()) {
            OrderedRawBerthNode currentNode = queue.poll();
            RawBerthNode node = currentNode.getNode();
            if (!visited.add(node)) {
                continue;
            }

            parents.put(node, currentNode.getParent());
            Location toLocation = _berthNodesToLocations.get(node);
            if (currentNode.getParent() != null && toLocation != null) {

                List<RawBerthNode> path = new ArrayList<RawBerthNode>();
                RawBerthNode last = node;
                while (last != null) {
                    path.add(last);
                    last = parents.get(last);
                }

                if (path.size() <= 2) {
                    break;
                }
                Collections.reverse(path);
                BerthPath berthPath = new BerthPath(path, currentNode.getDistance());
                double d = fromLocation.getDistance(toLocation);
                if (d > 30000) {
                    continue;
                }
                RailwayPath railwayPath = _railwayShapeService.getPath(fromLocation.getPoint(),
                        toLocation.getPoint());
                if (railwayPath != null) {
                    snapBerthsToRailwayPath(berthPath, railwayPath);
                }
                break;
            } else {
                for (Map.Entry<RawBerthNode, List<Integer>> entry : node.getOutgoing().entrySet()) {
                    RawBerthNode outgoing = entry.getKey();
                    int avgDuration = RawNode.average(entry.getValue());
                    queue.add(new OrderedRawBerthNode(outgoing, node, currentNode.getDistance() + avgDuration));
                }
            }
        }
    }
}

From source file:com.norconex.committer.core.AbstractFileQueueCommitter.java

@Override
public void commit() {

    // Get all files to be committed, relying on natural ordering which 
    // will be in file creation order.
    final Queue<File> filesPending = new ConcurrentLinkedQueue<File>();
    FileUtil.visitAllFiles(new File(queue.getDirectory()), new IFileVisitor() {
        @Override
        public void visit(File file) {
            filesPending.add(file);
        }
    }, REF_FILTER);

    // Nothing left to commit. This happens if multiple threads are 
    // committing at the same time and no more files are available for the 
    // current thread to commit. This should happen rarely in practice.
    if (filesPending.isEmpty()) {
        return;
    }

    // Don't commit more than queue size
    List<ICommitOperation> filesToCommit = new ArrayList<>();
    while (filesToCommit.size() < queueSize) {

        File file = filesPending.poll();

        // If no more files are available, quit the loop. This happens
        // if multiple threads try to commit at once and there are fewer
        // than queueSize files to commit. This should happen rarely in
        // practice.
        if (file == null) {
            break;
        }

        // Current thread tries to own this file. If the file is already owned
        // by another thread, continue and attempt to grab another file.
        if (filesCommitting.putIfAbsent(file, Thread.currentThread()) != null) {
            continue;
        }

        // A file might have already been committed and cleaned up from
        // the map, but still be returned by the directory listing. Ignore
        // those. It is important to make this check AFTER the current
        // thread got ownership of the file.
        if (!file.exists()) {
            continue;
        }

        // Current thread will be committing this file
        if (file.getAbsolutePath().contains(FileSystemCommitter.FILE_SUFFIX_ADD)) {
            filesToCommit.add(new FileAddOperation(file));
        } else if (file.getAbsolutePath().contains(FileSystemCommitter.FILE_SUFFIX_REMOVE)) {
            filesToCommit.add(new FileDeleteOperation(file));
        } else {
            LOG.error("Unsupported file to commit: " + file);
        }
    }

    if (LOG.isInfoEnabled()) {
        LOG.info(String.format("Committing %s files", filesToCommit.size()));
    }
    for (ICommitOperation op : filesToCommit) {
        try {
            if (op instanceof FileAddOperation) {
                prepareCommitAddition((IAddOperation) op);
                commitAddition((IAddOperation) op);
            } else {
                prepareCommitDeletion((IDeleteOperation) op);
                commitDeletion((IDeleteOperation) op);
            }
        } catch (IOException e) {
            throw new CommitterException("Cannot read reference from : " + op, e);
        }
    }

    commitComplete();

    deleteEmptyOldDirs(new File(queue.getDirectory()));

    // Cleanup committed files from map that might have been deleted
    Enumeration<File> en = filesCommitting.keys();
    while (en.hasMoreElements()) {
        File file = (File) en.nextElement();
        if (!file.exists()) {
            filesCommitting.remove(file);
        }
    }
}

From source file:ubic.basecode.dataStructure.graph.DirectedGraph.java

/**
 * Fills in the topoSortOrder for each node.
 */
public void topoSort() {
    Queue<DirectedGraphNode<K, V>> q = new LinkedList<DirectedGraphNode<K, V>>();
    int counter = 0;

    Map<DirectedGraphNode<K, V>, Integer> degrees = new HashMap<DirectedGraphNode<K, V>, Integer>();

    /* Get the degrees of all items, and enqueue zero-indegree nodes */
    for (K element : this.items.keySet()) {
        DirectedGraphNode<K, V> v = items.get(element);
        degrees.put(v, new Integer(v.inDegree()));
        if (degrees.get(v).intValue() == 0) {
            q.offer(v);
        }
    }

    while (!q.isEmpty()) {
        DirectedGraphNode<K, V> v = q.remove();
        v.setTopoSortOrder(++counter);
        for (DirectedGraphNode<K, V> w : v.getChildNodes()) {
            /* decrement the degree of this node */
            int inDegree = degrees.get(w).intValue();
            inDegree--;
            degrees.put(w, new Integer(inDegree));

            /* see if this now is one of the zero-indegree nodes */
            if (inDegree == 0) {
                q.offer(w);
            }
        }
    }

    if (counter != items.size()) {
        throw new IllegalStateException(
                "Graph contains a cycle; " + counter + " items found, " + items.size() + " expected");
    }

}

From source file:com.github.rinde.rinsim.core.model.road.PlaneRoadModel.java

@Override
protected MoveProgress doFollowPath(MovingRoadUser object, Queue<Point> path, TimeLapse time) {
    final long startTimeConsumed = time.getTimeConsumed();
    Point loc = objLocs.get(object);

    double traveled = 0;
    final double speed = min(unitConversion.toInSpeed(object.getSpeed()), maxSpeed);
    if (speed == 0d) {
        // FIXME add test for this case, also check GraphRoadModel
        final Measure<Double, Length> dist = Measure.valueOf(0d, getDistanceUnit());
        final Measure<Long, Duration> dur = Measure.valueOf(0L, time.getTimeUnit());
        return MoveProgress.create(dist, dur, new ArrayList<Point>());
    }

    final List<Point> travelledNodes = new ArrayList<>();
    while (time.hasTimeLeft() && !path.isEmpty()) {
        checkArgument(isPointInBoundary(path.peek()),
                "points in the path must be within the predefined boundary of the " + "plane");

        // distance in internal time unit that can be traveled with timeleft
        final double travelDistance = speed * unitConversion.toInTime(time.getTimeLeft(), time.getTimeUnit());
        final double stepLength = unitConversion.toInDist(Point.distance(loc, path.peek()));

        if (travelDistance >= stepLength) {
            loc = path.remove();
            travelledNodes.add(loc);

            final long timeSpent = DoubleMath.roundToLong(
                    unitConversion.toExTime(stepLength / speed, time.getTimeUnit()), RoundingMode.HALF_DOWN);
            time.consume(timeSpent);
            traveled += stepLength;
        } else {
            final Point diff = Point.diff(path.peek(), loc);

            if (stepLength - travelDistance < DELTA) {
                loc = path.peek();
                traveled += stepLength;
            } else {
                final double perc = travelDistance / stepLength;
                loc = new Point(loc.x + perc * diff.x, loc.y + perc * diff.y);
                traveled += travelDistance;
            }
            time.consumeAll();

        }
    }
    objLocs.put(object, loc);

    // convert to external units
    final Measure<Double, Length> distTraveled = unitConversion.toExDistMeasure(traveled);
    final Measure<Long, Duration> timeConsumed = Measure.valueOf(time.getTimeConsumed() - startTimeConsumed,
            time.getTimeUnit());
    return MoveProgress.create(distTraveled, timeConsumed, travelledNodes);
}

From source file:com.thoughtworks.go.server.service.dd.reporting.ReportingDependencyFanInNode.java

private Pair<StageIdentifier, List<ReportingFaninScmMaterial>> getRevisionNthFor(int n,
        ReportingFanInGraphContext context) {
    List<ReportingFaninScmMaterial> scmMaterials = new ArrayList<>();
    PipelineTimeline pipelineTimeline = context.pipelineTimeline;
    Queue<PipelineTimelineEntry.Revision> revisionQueue = new ConcurrentLinkedQueue<>();
    DependencyMaterialConfig dependencyMaterial = (DependencyMaterialConfig) materialConfig;
    PipelineTimelineEntry entry = pipelineTimeline.instanceFor(dependencyMaterial.getPipelineName(),
            totalInstanceCount - n);

    StageIdentifier dependentStageIdentifier = dependentStageIdentifier(context, entry,
            CaseInsensitiveString.str(dependencyMaterial.getStageName()));
    if (!StageIdentifier.NULL.equals(dependentStageIdentifier)) {
        addToRevisionQueue(entry, revisionQueue, scmMaterials, context);
    } else {
        return null;
    }
    while (!revisionQueue.isEmpty()) {
        PipelineTimelineEntry.Revision revision = revisionQueue.poll();
        DependencyMaterialRevision dmr = DependencyMaterialRevision.create(revision.revision, null);
        PipelineTimelineEntry pte = pipelineTimeline
                .getEntryFor(new CaseInsensitiveString(dmr.getPipelineName()), dmr.getPipelineCounter());
        addToRevisionQueue(pte, revisionQueue, scmMaterials, context);
    }

    return new Pair<>(dependentStageIdentifier, scmMaterials);
}

From source file:org.wso2.carbon.andes.event.core.internal.subscription.registry.TopicManagerServiceImpl.java

/**
 * {@inheritDoc}
 */
@Override
public Subscription[] getSubscriptions(String topicName, boolean withChildren) throws EventBrokerException {

    List<Subscription> subscriptions = new ArrayList<Subscription>();
    Queue<String> pathsQueue = new LinkedList<String>();
    String resourcePath = JavaUtil.getResourcePath(topicName, this.topicStoragePath);

    pathsQueue.add(resourcePath);
    while (!pathsQueue.isEmpty()) {
        addSubscriptions(pathsQueue.remove(), subscriptions, pathsQueue, withChildren);
    }

    return subscriptions.toArray(new Subscription[subscriptions.size()]);

}