Example usage for java.util ArrayDeque ArrayDeque

List of usage examples for java.util ArrayDeque ArrayDeque

Introduction

On this page you can find example usage for the java.util ArrayDeque() constructor.

Prototype

public ArrayDeque() 

Source Link

Document

Constructs an empty array deque with an initial capacity sufficient to hold 16 elements.

Usage

From source file:com.espertech.esper.view.std.GroupByViewReclaimAged.java

// Removes all grouped subviews whose last update is older than the configured
// maximum age, measured against the supplied current time.
private void sweep(long currentTime) {
    // First pass: collect the keys of all groups whose age exceeds the threshold.
    ArrayDeque<Object> expiredKeys = new ArrayDeque<Object>();
    for (Map.Entry<Object, GroupByViewAgedEntry> candidate : subViewsPerKey.entrySet()) {
        long age = currentTime - candidate.getValue().getLastUpdateTime();
        if (age > reclaimMaxAge) {
            expiredKeys.add(candidate.getKey());
        }
    }

    // Second pass: remove each expired group and tear down its subview(s);
    // the holder is either a single View or a List of them.
    for (Object expiredKey : expiredKeys) {
        GroupByViewAgedEntry removedEntry = subViewsPerKey.remove(expiredKey);
        Object holder = removedEntry.getSubviewHolder();
        if (holder instanceof List) {
            for (View subview : (List<View>) holder) {
                removeSubview(subview);
            }
        } else if (holder instanceof View) {
            removeSubview((View) holder);
        }
    }
}

From source file:com.heliosdecompiler.helios.gui.controller.FileTreeController.java

@FXML
public void initialize() {
    // Seed the tree with a synthetic "[root]" node; opened files hang off it.
    this.rootItem = new TreeItem<>(new TreeNode("[root]"));
    this.root.setRoot(this.rootItem);
    this.root.setCellFactory(new TreeCellFactory<>(node -> {
        // Only top-level nodes (direct children of the synthetic root) get a context menu.
        if (node.getParent() == null) {
            ContextMenu export = new ContextMenu();

            MenuItem exportItem = new MenuItem("Export");

            // NOTE(review): the handler is registered on the ContextMenu (fires when any of
            // its items is activated), not on the MenuItem itself — confirm this is intended.
            export.setOnAction(e -> {
                File file = messageHandler.chooseFile().withInitialDirectory(new File("."))
                        .withTitle(Message.GENERIC_CHOOSE_EXPORT_LOCATION_JAR.format())
                        .withExtensionFilter(new FileFilter(Message.FILETYPE_JAVA_ARCHIVE.format(), "*.jar"),
                                true)
                        .promptSave();

                OpenedFile openedFile = (OpenedFile) node.getMetadata().get(OpenedFile.OPENED_FILE);

                // Snapshot the contents so the background task is unaffected by later edits.
                Map<String, byte[]> clone = new HashMap<>(openedFile.getContents());

                backgroundTaskHelper.submit(
                        new BackgroundTask(Message.TASK_SAVING_FILE.format(node.getDisplayName()), true, () -> {
                            try {
                                if (!file.exists()) {
                                    if (!file.createNewFile()) {
                                        throw new IOException("Could not create export file");
                                    }
                                }

                                // Write every snapshotted entry into a fresh zip/jar archive.
                                try (ZipOutputStream zipOutputStream = new ZipOutputStream(
                                        new FileOutputStream(file))) {
                                    for (Map.Entry<String, byte[]> ent : clone.entrySet()) {
                                        ZipEntry zipEntry = new ZipEntry(ent.getKey());
                                        zipOutputStream.putNextEntry(zipEntry);
                                        zipOutputStream.write(ent.getValue());
                                        zipOutputStream.closeEntry();
                                    }
                                }

                                messageHandler.handleMessage(Message.GENERIC_EXPORTED.format());
                            } catch (IOException ex) {
                                messageHandler.handleException(Message.ERROR_IOEXCEPTION_OCCURRED.format(), ex);
                            }
                        }));
            });

            export.getItems().add(exportItem);
            return export;
        }
        return null;
    }));

    // ENTER toggles expansion of branch nodes and opens leaf nodes.
    root.addEventHandler(KeyEvent.KEY_RELEASED, event -> {
        if (event.getCode() == KeyCode.ENTER) {
            TreeItem<TreeNode> selected = this.root.getSelectionModel().getSelectedItem();
            if (selected != null) {
                if (selected.getChildren().size() != 0) {
                    selected.setExpanded(!selected.isExpanded());
                } else {
                    getParentController().getAllFilesViewerController().handleClick(selected.getValue());
                }
            }
        }
    });

    // Type-to-search state: the tooltip displays the current query, searchContext
    // holds the matching tree items, and searchIndex the currently selected match.
    Tooltip tooltip = new Tooltip();
    StringBuilder search = new StringBuilder();

    List<TreeItem<TreeNode>> searchContext = new ArrayList<>();
    AtomicInteger searchIndex = new AtomicInteger();

    // Losing focus cancels any in-progress search.
    root.focusedProperty().addListener((observable, oldValue, newValue) -> {
        if (!newValue) {
            tooltip.hide();
            search.setLength(0);
        }
    });

    // Keep the search tooltip anchored to the tree's top-left screen corner.
    root.boundsInLocalProperty().addListener((observable, oldValue, newValue) -> {
        Bounds bounds = root.localToScreen(newValue);
        tooltip.setAnchorX(bounds.getMinX());
        tooltip.setAnchorY(bounds.getMinY());
    });

    // While the search tooltip is showing, UP/DOWN cycle through matches (wrapping).
    root.addEventHandler(KeyEvent.KEY_PRESSED, event -> {
        if (tooltip.isShowing() && event.getCode() == KeyCode.UP) {
            if (searchIndex.decrementAndGet() < 0) {
                searchIndex.set(searchContext.size() - 1);
            }
        } else if (tooltip.isShowing() && event.getCode() == KeyCode.DOWN) {
            if (searchIndex.incrementAndGet() >= searchContext.size()) {
                searchIndex.set(0);
            }
        } else {
            return;
        }
        event.consume();

        // NOTE(review): if searchContext is empty this throws IndexOutOfBoundsException;
        // presumably a showing tooltip implies at least one prior match — verify.
        root.scrollTo(root.getRow(searchContext.get(searchIndex.get())));
        root.getSelectionModel().select(searchContext.get(searchIndex.get()));
    });

    // Build the query from typed characters and refresh the match list.
    root.addEventHandler(KeyEvent.KEY_TYPED, event -> {
        if (event.getCharacter().charAt(0) == '\b') {
            // Backspace removes the last character of the query.
            if (search.length() > 0) {
                search.setLength(search.length() - 1);
            }
        } else if (event.getCharacter().charAt(0) == '\u001B') { // esc cancels the search
            tooltip.hide();
            search.setLength(0);
            return;
        } else if (search.length() > 0
                || (search.length() == 0 && StringUtils.isAlphanumeric(event.getCharacter()))) {
            // Only an alphanumeric character may start a query; any character may extend one.
            search.append(event.getCharacter());
            if (!tooltip.isShowing()) {
                tooltip.show(root.getScene().getWindow());
            }
        }

        if (!tooltip.isShowing())
            return;

        String str = search.toString();
        tooltip.setText("Search for: " + str);

        searchContext.clear();

        // Breadth-first scan of the expanded portion of the tree for display names
        // containing the query string.
        ArrayDeque<TreeItem<TreeNode>> deque = new ArrayDeque<>();
        deque.addAll(rootItem.getChildren());

        while (!deque.isEmpty()) {
            TreeItem<TreeNode> item = deque.poll();
            if (item.getValue().getDisplayName().contains(str)) {
                searchContext.add(item);
            }
            if (item.isExpanded() && item.getChildren().size() > 0)
                deque.addAll(item.getChildren());
        }

        // Jump to the first match, if any.
        searchIndex.set(0);
        if (searchContext.size() > 0) {
            root.scrollTo(root.getRow(searchContext.get(0)));
            root.getSelectionModel().select(searchContext.get(0));
        }
    });

    // Mirror the map of opened files into the tree: additions update it,
    // removals drop the corresponding top-level item.
    openedFileController.loadedFiles().addListener((MapChangeListener<String, OpenedFile>) change -> {
        if (change.getValueAdded() != null) {
            updateTree(change.getValueAdded());
        }
        if (change.getValueRemoved() != null) {
            this.rootItem.getChildren()
                    .removeIf(ti -> ti.getValue().equals(change.getValueRemoved().getRoot()));
        }
    });
}

From source file:com.espertech.esper.core.start.EPPreparedExecuteSingleStream.java

/**
 * Executes the prepared query against the named window, optionally restricted
 * to a single context partition.
 *
 * @param contextPartitionSelectors optional selectors; at most one is allowed
 * @return query results
 * @throws IllegalArgumentException when more than one selector is passed
 * @throws EPException when the query names a context different from the named window's
 */
public EPPreparedQueryResult execute(ContextPartitionSelector[] contextPartitionSelectors) {
    if (contextPartitionSelectors != null && contextPartitionSelectors.length != 1) {
        throw new IllegalArgumentException("Number of context partition selectors must be one");
    }
    ContextPartitionSelector optionalSingleSelector = contextPartitionSelectors != null
            && contextPartitionSelectors.length > 0 ? contextPartitionSelectors[0] : null;

    // validate context: the query may not specify a different context than the named window
    if (processor.getContextName() != null && statementSpec.getOptionalContextName() != null
            && !processor.getContextName().equals(statementSpec.getOptionalContextName())) {
        throw new EPException("Context for named window is '" + processor.getContextName()
                + "' and query specifies context '" + statementSpec.getOptionalContextName() + "'");
    }

    // handle non-specified context: run against the no-context processor instance if available
    if (statementSpec.getOptionalContextName() == null) {
        NamedWindowProcessorInstance processorInstance = processor.getProcessorInstanceNoContext();
        if (processorInstance != null) {
            EventBean[] rows = executor.execute(processorInstance);
            if (rows.length > 0) {
                dispatch();
            }
            return new EPPreparedQueryResult(processor.getNamedWindowType(), rows);
        }
    }

    // context partition runtime query: resolve the agent instance ids to query
    Collection<Integer> agentInstanceIds = EPPreparedExecuteMethodHelper.getAgentInstanceIds(processor,
            optionalSingleSelector, services.getContextManagementService(), processor.getContextName());

    // no matching partitions: return an empty result
    if (agentInstanceIds.isEmpty()) {
        return new EPPreparedQueryResult(processor.getNamedWindowType(), CollectionUtil.EVENT_PER_STREAM_EMPTY);
    }

    // single partition: return its rows directly without collecting into a deque
    if (agentInstanceIds.size() == 1) {
        int agentInstanceId = agentInstanceIds.iterator().next();
        NamedWindowProcessorInstance processorInstance = processor.getProcessorInstance(agentInstanceId);
        EventBean[] rows = executor.execute(processorInstance);
        if (rows.length > 0) {
            dispatch();
        }
        return new EPPreparedQueryResult(processor.getNamedWindowType(), rows);
    }

    // multiple partitions: execute each instance and concatenate the resulting rows
    ArrayDeque<EventBean> allRows = new ArrayDeque<EventBean>();
    for (int agentInstanceId : agentInstanceIds) {
        NamedWindowProcessorInstance processorInstance = processor.getProcessorInstance(agentInstanceId);
        if (processorInstance != null) {
            EventBean[] rows = executor.execute(processorInstance);
            allRows.addAll(Arrays.asList(rows));
        }
    }
    if (allRows.size() > 0) {
        dispatch();
    }
    return new EPPreparedQueryResult(processor.getNamedWindowType(),
            allRows.toArray(new EventBean[allRows.size()]));
}

From source file:org.finra.herd.service.impl.JobServiceImpl.java

/**
 * Deletes a running job by deleting its Activiti process instance together with
 * all of its active sub-process instances and child executions, in LIFO order.
 *
 * @param jobId the process instance id of the job to delete (whitespace is trimmed)
 * @param jobDeleteRequest the delete request; must carry a non-blank delete reason
 * @return the job state after deletion
 * @throws ObjectNotFoundException when no process instance exists for the given id
 */
@Override
public Job deleteJob(String jobId, JobDeleteRequest jobDeleteRequest) throws Exception {
    Assert.hasText(jobId, "jobId must be specified");
    Assert.notNull(jobDeleteRequest, "jobDeleteRequest must be specified");
    Assert.hasText(jobDeleteRequest.getDeleteReason(), "deleteReason must be specified");

    // Trim input parameters.
    String localJobId = jobId.trim();

    ProcessInstance mainProcessInstance = activitiService.getProcessInstanceById(localJobId);
    if (mainProcessInstance == null) {
        throw new ObjectNotFoundException(
                String.format("Job with ID \"%s\" does not exist or is already completed.", localJobId));
    }

    checkPermissions(mainProcessInstance.getProcessDefinitionKey(),
            new NamespacePermissionEnum[] { NamespacePermissionEnum.EXECUTE });

    // Walk the process hierarchy, pushing every process (main and sub) onto the
    // deletion stack; the traversal stack holds processes still to be expanded.
    Deque<String> deletionStack = new ArrayDeque<>();
    deletionStack.push(mainProcessInstance.getProcessInstanceId());
    Deque<String> traversalStack = new ArrayDeque<>();
    traversalStack.push(mainProcessInstance.getProcessInstanceId());
    while (!traversalStack.isEmpty()) {
        String parentProcessInstanceId = traversalStack.pop();

        // Executions whose parent is the current process are scheduled for deletion too.
        for (Execution execution : activitiRuntimeService.createExecutionQuery()
                .parentId(parentProcessInstanceId).list()) {
            deletionStack.push(execution.getId());
        }

        // Active sub-processes are both scheduled for deletion and expanded further.
        for (ProcessInstance subProcessInstance : activitiRuntimeService.createProcessInstanceQuery()
                .superProcessInstanceId(parentProcessInstanceId).active().list()) {
            deletionStack.push(subProcessInstance.getId());
            traversalStack.push(subProcessInstance.getId());
        }
    }

    // Delete all processes individually in LIFO order (children before parents).
    while (!deletionStack.isEmpty()) {
        activitiService.deleteProcessInstance(deletionStack.pop(), jobDeleteRequest.getDeleteReason());
    }

    return getJob(localJobId, false, false);
}

From source file:de.uni_potsdam.hpi.asg.logictool.mapping.SequenceBasedAndGateDecomposer.java

/**
 * Attempts to decompose the given AND term using a sequence-based approach
 * driven by state-graph simulation.
 *
 * @param term the netlist term to decompose; must drive exactly one signal and
 *             belong to an AOC architecture
 * @return true when a suitable partition was found and applied, false otherwise
 */
public boolean decomposeAND(NetlistTerm term) {

    logger.info("Decomposition of " + term.toString());

    // Precondition: the term must (transitively) drive exactly one signal.
    Set<Signal> signals = netlist.getDrivenSignalsTransitive(term);
    if (signals.isEmpty()) {
        logger.warn("No signal(s) for term " + term + " found");
        return false;
    } else if (signals.size() > 1) {
        logger.warn("Term " + term + " drives more than one signal. This is not supported yet");
        return false;
    }
    Signal origsig = signals.iterator().next();
    if (!isAOC(term, origsig)) {
        logger.warn("Algorithm not applicable for non-AOC architectures");
        return false;
    }

    // Gate size of the undecomposed term, used later to bound the partitions.
    int startgatesize = BDDHelper.numberOfVars(term.getBdd());

    BDD bdd = term.getBdd();
    Set<Signal> origrelevant = findRelevantSigs(bdd);
    if (origrelevant == null) {
        return false;
    }

    // Try to build a reduced state graph over the relevant signals; fall back
    // to the original state graph when generation fails.
    StateGraph sg2 = sghelper.getNewStateGraph(origrelevant, origsig);
    if (sg2 == null) {
        logger.warn("Failed to generate new SG. Using the original one.");
        sg2 = origsg;
    }

    // Map each signal of the original state graph to its counterpart (matched
    // by name) in the new state graph; abort if any signal is missing.
    BiMap<Signal, Signal> sigmap = HashBiMap.create();
    Set<Signal> relevant = new HashSet<>();
    boolean found;
    for (Signal oldSig : origrelevant) {
        found = false;
        for (Signal newSig : sg2.getAllSignals()) {
            if (oldSig.getName().equals(newSig.getName())) {
                sigmap.put(oldSig, newSig);
                found = true;
                break;
            }
        }
        if (!found) {
            logger.error("Signal " + oldSig.getName() + " not found");
            return false;
        }
        relevant.add(sigmap.get(oldSig));
    }
    found = false;
    for (Signal newSig : sg2.getAllSignals()) {
        if (origsig.getName().equals(newSig.getName())) {
            sigmap.put(origsig, newSig);
            found = true;
            break;
        }
    }
    if (!found) {
        logger.error("Signal " + origsig.getName() + " not found");
        return false;
    }
    Signal sig = sigmap.get(origsig);

    // Rebuild the term's BDD over the mapped signals, honouring the positive or
    // negative polarity of each input; quasi signals also count as relevant.
    Map<Signal, Boolean> posnegmap = getInputsPosOrNeg(term, sigmap);
    BDD newbdd = factory.one();
    for (Entry<Signal, Boolean> entry : posnegmap.entrySet()) {
        if (entry.getValue()) {
            newbdd = newbdd.andWith(getPosBDD(entry.getKey()));
        } else {
            newbdd = newbdd.andWith(getNegBDD(entry.getKey()));
        }
        if (entry.getKey() instanceof QuasiSignal) {
            relevant.add(entry.getKey());
        }
    }

    // Start states: all states reached by a transition on the decomposed signal.
    Set<State> startStates = new HashSet<>();
    for (State s : sg2.getStates()) {
        for (Entry<Transition, State> entry2 : s.getNextStates().entrySet()) {
            if (entry2.getKey().getSignal() == sig) {
                startStates.add(entry2.getValue());
            }
        }
    }

    // Initially every relevant signal forms its own singleton falling partition.
    List<List<Signal>> fallingPartitions = new ArrayList<>();
    for (Signal sig2 : relevant) {
        List<Signal> tmp = new ArrayList<>();
        tmp.add(sig2);
        fallingPartitions.add(tmp);
    }

    SortedSet<IOBehaviour> sequencesFront = new TreeSet<>(new SequenceFrontCmp());
    SortedSet<IOBehaviour> sequencesBack = new TreeSet<>(new SequenceBackCmp());
    Set<IOBehaviour> newSequences = new HashSet<>();
    Set<IOBehaviour> rmSequences = new HashSet<>();
    Deque<IOBehaviourSimulationStep> steps = new ArrayDeque<>();

    // Simulation steps are pooled (unbounded) to limit allocation churn.
    pool = new IOBehaviourSimulationStepPool(new IOBehaviourSimulationStepFactory());
    pool.setMaxTotal(-1);

    try {
        root = pool.borrowObject();
    } catch (Exception e) {
        e.printStackTrace();
        logger.error("Could not borrow object");
        return false;
    }

    // Seed the simulation with one step per start state, all children of root.
    IOBehaviourSimulationStep newStep;
    for (State s : startStates) {
        try {
            newStep = pool.borrowObject();
        } catch (Exception e) {
            e.printStackTrace();
            logger.error("Could not borrow object");
            return false;
        }
        root.getNextSteps().add(newStep);
        newStep.setPrevStep(root);
        newStep.setStart(s);
        newStep.setNextState(s);
        steps.add(newStep);
    }

    if (steps.isEmpty()) {
        return false;
    }

    // Prune candidate sequences whenever this many have accumulated.
    final long checkThreshold = 100;

    // Depth-first expansion of simulation steps, gathering candidate sequences.
    long stepsEvaledTotal = 0;
    IOBehaviourSimulationStep step = null;
    while (!steps.isEmpty()) {
        step = steps.removeLast();
        getNewSteps(step, sig, newSequences, steps, relevant);
        stepsEvaledTotal++;
        if (newSequences.size() >= checkThreshold) {
            removeCandidates(sequencesFront, sequencesBack, newSequences, rmSequences);
        }
    }
    removeCandidates(sequencesFront, sequencesBack, newSequences, rmSequences);
    logger.debug("Sequences: " + sequencesFront.size() + " - Tmp Sequences: " + newSequences.size()
            + " - Steps to evaluate: " + steps.size() + " - Steps evaluated: " + stepsEvaledTotal);
    logger.debug("Pool: " + "Created: " + pool.getCreatedCount() + ", Borrowed: " + pool.getBorrowedCount()
            + ", Returned: " + pool.getReturnedCount() + ", Active: " + pool.getNumActive() + ", Idle: "
            + pool.getNumIdle());
    logger.debug("RmSub: " + rmSub + " // RmFall: " + rmFall);

    SortedSet<IOBehaviour> sequences = new TreeSet<>(sequencesFront);
    sequencesFront.clear();
    sequencesBack.clear();

    // Categorise the surviving sequences by their effect relative to the target BDD.
    List<IOBehaviour> falling = new ArrayList<>();
    List<IOBehaviour> rising = new ArrayList<>();
    List<IOBehaviour> constant = new ArrayList<>();
    if (!categoriseSequences(newbdd, sequences, falling, rising, constant)) {
        return false;
    }

    fallingPartitions = getPossiblePartitionsFromFalling(falling, relevant);

    // Generate candidate partitions, then filter them successively against the
    // falling, rising and constant sequence sets.
    Map<Integer, List<Partition>> partitions = getPartitions(relevant, startgatesize);
    if (partitions == null) {
        logger.error("There was a problem while creating partions for signal " + sig.getName());
        return false;
    }

    filterPartitions(partitions, fallingPartitions);
    if (partitions.isEmpty()) {
        logger.error("No suitable partions found");
        return false;
    }

    setPartitionBDDs(partitions, posnegmap);

    if (!checkRising(rising, partitions)) {
        logger.error("Check rising failed");
        return false;
    }
    if (partitions.isEmpty()) {
        logger.error("No suitable partions found");
        return false;
    }

    if (!checkConstant(constant, partitions)) {
        logger.error("Check constant failed");
        return false;
    }
    if (partitions.isEmpty()) {
        logger.error("No suitable partions found");
        return false;
    }

    // Apply a surviving partition to the netlist.
    applyDecoResult(term, partitions, posnegmap, sigmap);
    return true;
}

From source file:io.cloudslang.score.lang.ExecutionRuntimeServices.java

/**
 * Adds an event for score to fire.
 *
 * @param eventType string key that listeners can subscribe to
 * @param eventData the event data payload
 */
public void addEvent(String eventType, Serializable eventData) {
    @SuppressWarnings("unchecked")
    Queue<ScoreEvent> queuedEvents = getFromMap(SCORE_EVENTS_QUEUE);
    if (queuedEvents == null) {
        // Lazily create the queue on first use and register it in the context map.
        queuedEvents = new ArrayDeque<>();
        contextMap.put(SCORE_EVENTS_QUEUE, (ArrayDeque) queuedEvents);
    }
    queuedEvents.add(new ScoreEvent(eventType, getLanguageName(), eventData, getMetaData()));
}

From source file:org.apache.hadoop.hive.ql.parse.ASTNode.java

/**
 * Renders this subtree into the root node's memoized string buffer using an
 * explicit stack (each node is visited twice: once to open, once to close) and
 * returns the memoized substring covering this node. Tokens other than string
 * literals are lower-cased.
 */
private String toStringTree(ASTNode rootNode) {
    Deque<ASTNode> stack = new ArrayDeque<ASTNode>();
    stack.push(this);

    while (!stack.isEmpty()) {
        ASTNode next = stack.peek();
        if (!next.visited) {
            // First visit: emit a separator between siblings of the same parent.
            if (next.parent != null && next.parent.getChildCount() > 1 && next != next.parent.getChild(0)) {
                rootNode.addtoMemoizedString(" ");
            }

            next.rootNode = rootNode;
            next.startIndx = rootNode.getMemoizedStringLen();

            // Leaf: emit its text and finish it immediately.
            if (next.children == null || next.children.size() == 0) {
                String str = next.toString();
                rootNode.addtoMemoizedString(
                        next.getType() != HiveParser.StringLiteral ? str.toLowerCase() : str);
                next.endIndx = rootNode.getMemoizedStringLen();
                stack.pop();
                continue;
            }

            // Non-nil interior node: open a parenthesized group with the node's text.
            if (!next.isNil()) {
                rootNode.addtoMemoizedString("(");
                String str = next.toString();
                rootNode.addtoMemoizedString(
                        (next.getType() == HiveParser.StringLiteral || null == str) ? str : str.toLowerCase());
                rootNode.addtoMemoizedString(" ");
            }

            // Push children in reverse so they are rendered left-to-right.
            if (next.children != null) {
                for (int i = next.children.size() - 1; i >= 0; i--) {
                    stack.push((ASTNode) next.children.get(i));
                }
            }

            next.visited = true;
        } else {
            // Second visit: all children rendered — close the group and record the end.
            if (!next.isNil()) {
                rootNode.addtoMemoizedString(")");
            }
            next.endIndx = rootNode.getMemoizedStringLen();
            next.visited = false; // reset the flag for subsequent traversals
            stack.pop();
        }

    }

    return rootNode.getMemoizedSubString(startIndx, endIndx);
}

From source file:msi.gama.kernel.model.GamlModelSpecies.java

/**
 * Lazily builds and caches the map of all species reachable from this model
 * species (itself plus, transitively, every micro-species it directly owns),
 * keyed by species name.
 */
@Override
public Map<String, ISpecies> getAllSpecies() {
    if (allSpecies == null) {
        allSpecies = new TOrderedHashMap();
        // Depth-first walk starting at this species.
        final Deque<ISpecies> pending = new ArrayDeque<ISpecies>();
        pending.push(this);
        while (!pending.isEmpty()) {
            final ISpecies current = pending.pop();
            allSpecies.put(current.getName(), current);
            for (final ISpecies micro : current.getMicroSpecies()) {
                // Only descend into micro-species whose macro-species is the current one.
                if (micro.getMacroSpecies().equals(current)) {
                    pending.push(micro);
                }
            }
        }
    }
    return allSpecies;
}

From source file:com.espertech.esper.view.window.TimeBatchView.java

/**
 * Posts the current batch of events to child views, rotates the batches, and
 * schedules the next callback when output occurred or may still occur (so that
 * no resources are wasted while no events arrive).
 */
protected void sendBatch() {
    isCallbackScheduled = false;

    if (this.hasViews()) {
        // Convert the current and prior batches to arrays; empty batches post as null.
        EventBean[] newData = currentBatch.isEmpty() ? null
                : currentBatch.toArray(new EventBean[currentBatch.size()]);
        EventBean[] oldData = (lastBatch == null || lastBatch.isEmpty()) ? null
                : lastBatch.toArray(new EventBean[lastBatch.size()]);

        // Post new data (current batch) and old data (prior batch).
        if (viewUpdatedCollection != null) {
            viewUpdatedCollection.update(newData, oldData);
        }
        if (newData != null || oldData != null || isForceOutput) {
            updateChildren(newData, oldData);
        }
    }

    // Schedule a callback only when output is forced or when this or the last
    // interval contained any events.
    boolean anyEvents = !currentBatch.isEmpty() || (lastBatch != null && !lastBatch.isEmpty());
    if (anyEvents || isForceOutput) {
        scheduleCallback();
        isCallbackScheduled = true;
    }

    // Rotate: the current batch becomes the prior batch.
    lastBatch = currentBatch;
    currentBatch = new ArrayDeque<EventBean>();
}

From source file:androidx.navigation.NavDestination.java

/**
 * Builds an array of destination ids along the hierarchy from the root down to
 * this destination. A destination is omitted when its parent graph's start
 * destination is that destination itself.
 *
 * @return the ids from the root to this destination
 */
@NonNull
int[] buildDeepLinkIds() {
    ArrayDeque<NavDestination> path = new ArrayDeque<>();
    // Walk upward, prepending so the result reads root-first.
    for (NavDestination node = this; node != null; node = node.getParent()) {
        NavGraph parent = node.getParent();
        if (parent == null || parent.getStartDestination() != node.getId()) {
            path.addFirst(node);
        }
    }
    int[] ids = new int[path.size()];
    int index = 0;
    for (NavDestination destination : path) {
        ids[index++] = destination.getId();
    }
    return ids;
}