Example usage for java.util Queue remove

Introduction

This page lists usage examples for java.util.Queue.remove().

Prototype

E remove();

Document

Retrieves and removes the head of this queue. Unlike poll(), remove() throws a NoSuchElementException if the queue is empty.
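
Below is a minimal, self-contained sketch of that contract (not taken from any of the projects under Usage): remove() returns and deletes the head, while poll() returns null instead of throwing when the queue is empty.

import java.util.ArrayDeque;
import java.util.NoSuchElementException;
import java.util.Queue;

public class QueueRemoveDemo {
    public static void main(String[] args) {
        Queue<String> queue = new ArrayDeque<>();
        queue.add("first");
        queue.add("second");

        // remove() retrieves and removes the head
        System.out.println(queue.remove()); // first
        System.out.println(queue.remove()); // second

        // poll() returns null on an empty queue
        System.out.println(queue.poll()); // null

        // remove() throws on an empty queue
        try {
            queue.remove();
        } catch (NoSuchElementException e) {
            System.out.println("queue was empty");
        }
    }
}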

Usage

From source file:graphs.Graphs.java
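
Breadth-first traversal of a graph: each iteration dequeues the next vertex with Queue.remove() and enqueues its unseen neighbors.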

public static <V, E> String BreadthSearch(Graph<V, E> g) {
    StringBuilder b = new StringBuilder();

    Queue<V> qu = new LinkedList<V>();
    Set<V> visited = new HashSet<V>();
    Set<V> found = new HashSet<V>();

    V start = (V) g.getVertices().toArray()[0];
    qu.add(start);
    found.add(start);

    while (!qu.isEmpty()) {
        // remove() retrieves and removes the head of the queue
        V vertex = qu.remove();
        for (V neighbor : g.getNeighbors(vertex)) {
            if (!found.contains(neighbor) && !visited.contains(neighbor)) {
                found.add(neighbor);
                qu.add(neighbor);
            }
        }
        b.append(vertex).append(' ');
        visited.add(vertex);
    }

    return b.toString();
}

From source file:main.java.whiteSocket.Area.java
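
Flood fill over a boolean map: points to process are queued, and Queue.remove() pops the next point until the region is filled.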

public static boolean[][] floodBorder(Area area, boolean[][] floodArea, int x, int y) {
    /*
     * Paint-bucket-like algorithm that fills the binary map border. The
     * filled area becomes the area to be filtered through stretch(); its
     * pixels are turned Color.WHITE (unless the user dictates otherwise).
     */

    if (!floodArea[y][x]) {

        Queue<Point> queue = new LinkedList<Point>();
        queue.add(new Point(x, y));

        while (!queue.isEmpty()) {

            Point p = queue.remove();

            // skip points outside the map or already filled
            if (p.y >= 0 && p.y < floodArea.length && p.x >= 0 && p.x < floodArea[p.y].length
                    && !floodArea[p.y][p.x]) {

                floodArea[p.y][p.x] = true;

                if (totalErase != null)
                    totalErase[p.y][p.x] = true;

                queue.add(new Point(p.x + 1, p.y));
                queue.add(new Point(p.x - 1, p.y));
                queue.add(new Point(p.x, p.y + 1));
                queue.add(new Point(p.x, p.y - 1));

            }
        }
    }

    return floodArea;
}

From source file:org.jspringbot.keyword.expression.ELUtils.java
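
Maps a variable to a value by scanning pairs of arguments: Queue.remove() consumes the variable, then each candidate/value pair; a trailing single argument acts as the default.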

public static Object doMap(Object... args) {
    Object defaultValue = null;

    Queue<Object> arguments = new LinkedList<Object>();
    arguments.addAll(Arrays.asList(args));

    Object variable = arguments.remove();

    while (!arguments.isEmpty()) {
        if (arguments.size() > 1) {
            Object variableValue = arguments.remove();
            Object mapValue = arguments.remove();
            if (variable.equals(variableValue)) {
                return mapValue;
            }
        } else {
            // default
            return arguments.remove();
        }
    }

    return defaultValue;
}

From source file:uniol.apt.analysis.isomorphism.IsomorphismLogic.java
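
Breadth-first isomorphism check between two transition systems: Queue.remove() dequeues the next pair of states to compare.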

private static BidiMap<State, State> checkViaDepthSearch(TransitionSystem lts1, TransitionSystem lts2) {
    BidiMap<State, State> result = new DualHashBidiMap<>();
    Set<String> alphabet = lts1.getAlphabet();
    if (!alphabet.equals(lts2.getAlphabet()))
        // Not isomorphic, there is an arc with a label not occurring in the other lts
        return result;

    Queue<Pair<State, State>> unhandled = new ArrayDeque<>();
    visit(result, unhandled, lts1.getInitialState(), lts2.getInitialState());

    while (!unhandled.isEmpty()) {
        InterrupterRegistry.throwIfInterruptRequestedForCurrentThread();

        Pair<State, State> pair = unhandled.remove();
        State state1 = pair.getFirst();
        State state2 = pair.getSecond();

        for (String label : alphabet) {
            State follow1 = follow(state1, label);
            State follow2 = follow(state2, label);

            if (!visit(result, unhandled, follow1, follow2))
                // Not isomorphic
                return new DualHashBidiMap<>();
        }
    }

    return result;
}

From source file:org.jspringbot.keyword.expression.ELUtils.java
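
Switch-like evaluation of condition/value pairs: Queue.remove() consumes a condition and its value in turn; a trailing single argument is the default.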

public static Object doCase(Object... args) {
    Object defaultValue = null;

    Queue<Object> arguments = new LinkedList<Object>();
    arguments.addAll(Arrays.asList(args));

    while (!arguments.isEmpty()) {
        if (arguments.size() > 1) {
            boolean condition = (Boolean) arguments.remove();
            Object value = arguments.remove();
            if (condition) {
                return value;
            }
        } else {
            // default
            return arguments.remove();
        }
    }

    return defaultValue;
}

From source file:com.googlesource.gerrit.plugins.supermanifest.JiriManifestParser.java
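
Breadth-first resolution of Jiri manifests and their imports: Queue.remove() dequeues the next manifest item to parse.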

public static JiriProjects getProjects(GerritRemoteReader reader, String repoKey, String ref, String manifest)
        throws ConfigInvalidException, IOException {

    try (RepoMap<String, Repository> repoMap = new RepoMap<>()) {
        repoMap.put(repoKey, reader.openRepository(repoKey));
        Queue<ManifestItem> q = new LinkedList<>();
        q.add(new ManifestItem(repoKey, manifest, ref, "", false));
        HashMap<String, HashSet<String>> processedRepoFiles = new HashMap<>();
        HashMap<String, JiriProjects.Project> projectMap = new HashMap<>();

        while (!q.isEmpty()) {
            ManifestItem mi = q.remove();
            Repository repo = repoMap.get(mi.repoKey);
            if (repo == null) {
                repo = reader.openRepository(mi.repoKey);
                repoMap.put(mi.repoKey, repo);
            }
            HashSet<String> processedFiles = processedRepoFiles.get(mi.repoKey);
            if (processedFiles == null) {
                processedFiles = new HashSet<String>();
                processedRepoFiles.put(mi.repoKey, processedFiles);
            }
            if (processedFiles.contains(mi.manifest)) {
                continue;
            }
            processedFiles.add(mi.manifest);
            JiriManifest m;
            try {
                m = parseManifest(repo, mi.ref, mi.manifest);
            } catch (JAXBException | XMLStreamException e) {
                throw new ConfigInvalidException("XML parse error", e);
            }

            for (JiriProjects.Project project : m.projects.getProjects()) {
                project.fillDefault();
                if (mi.revisionPinned && project.Key().equals(mi.projectKey)) {
                    project.setRevision(mi.ref);
                }
                if (projectMap.containsKey(project.Key())) {
                    if (!projectMap.get(project.Key()).equals(project))
                        throw new ConfigInvalidException(String.format(
                                "Duplicate conflicting project %s in manifest %s\n%s\n%s", project.Key(),
                                mi.manifest, project.toString(), projectMap.get(project.Key()).toString()));
                } else {
                    projectMap.put(project.Key(), project);
                }
            }

            URI parentURI;
            try {
                parentURI = new URI(mi.manifest);
            } catch (URISyntaxException e) {
                throw new ConfigInvalidException("Invalid parent URI", e);
            }
            for (JiriManifest.LocalImport l : m.imports.getLocalImports()) {
                ManifestItem tw = new ManifestItem(mi.repoKey, parentURI.resolve(l.getFile()).getPath(), mi.ref,
                        mi.projectKey, mi.revisionPinned);
                q.add(tw);
            }

            for (JiriManifest.Import i : m.imports.getImports()) {
                i.fillDefault();
                URI uri;
                try {
                    uri = new URI(i.getRemote());
                } catch (URISyntaxException e) {
                    throw new ConfigInvalidException("Invalid URI", e);
                }
                String iRepoKey = new Project.NameKey(StringUtils.strip(uri.getPath(), "/")).toString();
                String iRef = i.getRevision();
                boolean revisionPinned = true;
                if (iRef.isEmpty()) {
                    iRef = REFS_HEADS + i.getRemotebranch();
                    revisionPinned = false;
                }

                ManifestItem tmi = new ManifestItem(iRepoKey, i.getManifest(), iRef, i.Key(), revisionPinned);
                q.add(tmi);
            }
        }
        return new JiriProjects(projectMap.values().toArray(new JiriProjects.Project[0]));
    }
}

From source file:es.darkhogg.hazelnutt.Hazelnutt.java
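
Graceful shutdown that walks the thread-group tree with one queue and the collected threads with another, draining both via Queue.remove().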

/**
 * Terminates the application in at most <i>time</i> milliseconds for
 * every alive thread.
 * 
 * @param time Number of milliseconds to wait for each thread to terminate
 */
public static void terminate(long time) {
    Logger logger = getLogger();
    logger.info("Terminating application...");

    try {
        getFrame().dispose();

        // Get the root thread group
        ThreadGroup rootThreadGroup = Thread.currentThread().getThreadGroup();
        while (rootThreadGroup.getParent() != null) {
            rootThreadGroup = rootThreadGroup.getParent();
        }

        // Declare some collections
        Queue<ThreadGroup> threadGroups = new LinkedList<ThreadGroup>();
        Queue<Thread> threads = new LinkedList<Thread>();

        // Get ALL groups
        threadGroups.add(rootThreadGroup);
        while (!threadGroups.isEmpty()) {
            ThreadGroup group = threadGroups.remove();

            Thread[] subThreads = new Thread[group.activeCount() * 2];
            // copy this group's active threads into the array (subgroups are walked below)
            group.enumerate(subThreads, false);
            for (Thread subThread : subThreads) {
                if (subThread != null) {
                    threads.add(subThread);
                }
            }

            ThreadGroup[] subThreadGroups = new ThreadGroup[group.activeGroupCount() * 2];
            // copy this group's immediate subgroups into the array
            group.enumerate(subThreadGroups, false);
            for (ThreadGroup subThreadGroup : subThreadGroups) {
                if (subThreadGroup != null) {
                    threadGroups.add(subThreadGroup);
                }
            }
        }

        // Join a maximum of time milliseconds for all non-daemon threads
        while (!threads.isEmpty()) {
            Thread thread = threads.remove();
            LOGGER.trace(thread);

            if (!thread.isDaemon() && thread != Thread.currentThread()) {
                logger.trace("Waiting for thread '" + thread.getName() + "'");
                thread.join(time);
                if (thread.isAlive()) {
                    logger.trace("Interrupting thread '" + thread.getName() + "'");
                    thread.interrupt();
                }
            }
        }

    } catch (Throwable e) {
        LOGGER.warn("Interrupted while terminating application", e);

    } finally {
        // Exit the program
        System.exit(0);
    }
}

From source file:com.cloudera.oryx.rdf.common.pmml.DecisionForestPMML.java
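
Converts a decision tree to a PMML TreeModel: two queues are walked in lockstep, and Queue.remove() keeps the tree nodes and model nodes aligned.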

private static Segment buildTreeModel(DecisionForest forest,
        Map<Integer, BiMap<String, Integer>> columnToCategoryNameToIDMapping,
        MiningFunctionType miningFunctionType, MiningSchema miningSchema, int treeID, DecisionTree tree,
        InboundSettings settings) {

    List<String> columnNames = settings.getColumnNames();
    int targetColumn = settings.getTargetColumn();

    Node root = new Node();
    root.setId("r");

    Queue<Node> modelNodes = new ArrayDeque<Node>();
    modelNodes.add(root);

    Queue<Pair<TreeNode, Decision>> treeNodes = new ArrayDeque<Pair<TreeNode, Decision>>();
    treeNodes.add(new Pair<TreeNode, Decision>(tree.getRoot(), null));

    while (!treeNodes.isEmpty()) {

        Pair<TreeNode, Decision> treeNodePredicate = treeNodes.remove();
        Node modelNode = modelNodes.remove();

        // This is the decision that got us here from the parent, if any; not the predicate at this node
        Predicate predicate = buildPredicate(treeNodePredicate.getSecond(), columnNames,
                columnToCategoryNameToIDMapping);
        modelNode.setPredicate(predicate);

        TreeNode treeNode = treeNodePredicate.getFirst();
        if (treeNode.isTerminal()) {

            TerminalNode terminalNode = (TerminalNode) treeNode;
            modelNode.setRecordCount((double) terminalNode.getCount());

            Prediction prediction = terminalNode.getPrediction();

            if (prediction.getFeatureType() == FeatureType.CATEGORICAL) {

                Map<Integer, String> categoryIDToName = columnToCategoryNameToIDMapping.get(targetColumn)
                        .inverse();
                CategoricalPrediction categoricalPrediction = (CategoricalPrediction) prediction;
                int[] categoryCounts = categoricalPrediction.getCategoryCounts();
                float[] categoryProbabilities = categoricalPrediction.getCategoryProbabilities();
                for (int categoryID = 0; categoryID < categoryProbabilities.length; categoryID++) {
                    int categoryCount = categoryCounts[categoryID];
                    float probability = categoryProbabilities[categoryID];
                    if (categoryCount > 0 && probability > 0.0f) {
                        String categoryName = categoryIDToName.get(categoryID);
                        ScoreDistribution distribution = new ScoreDistribution(categoryName, categoryCount);
                        distribution.setProbability((double) probability);
                        modelNode.getScoreDistributions().add(distribution);
                    }
                }

            } else {

                NumericPrediction numericPrediction = (NumericPrediction) prediction;
                modelNode.setScore(Float.toString(numericPrediction.getPrediction()));
            }

        } else {

            DecisionNode decisionNode = (DecisionNode) treeNode;
            Decision decision = decisionNode.getDecision();

            Node positiveModelNode = new Node();
            positiveModelNode.setId(modelNode.getId() + '+');
            modelNode.getNodes().add(positiveModelNode);
            Node negativeModelNode = new Node();
            negativeModelNode.setId(modelNode.getId() + '-');
            modelNode.getNodes().add(negativeModelNode);
            modelNode.setDefaultChild(
                    decision.getDefaultDecision() ? positiveModelNode.getId() : negativeModelNode.getId());
            modelNodes.add(positiveModelNode);
            modelNodes.add(negativeModelNode);
            treeNodes.add(new Pair<TreeNode, Decision>(decisionNode.getRight(), decision));
            treeNodes.add(new Pair<TreeNode, Decision>(decisionNode.getLeft(), null));

        }

    }

    TreeModel treeModel = new TreeModel(miningSchema, root, miningFunctionType);
    treeModel.setSplitCharacteristic(TreeModel.SplitCharacteristic.BINARY_SPLIT);
    treeModel.setMissingValueStrategy(MissingValueStrategyType.DEFAULT_CHILD);

    Segment segment = new Segment();
    segment.setId(Integer.toString(treeID));
    segment.setPredicate(new True());
    segment.setModel(treeModel);
    segment.setWeight(forest.getWeights()[treeID]);

    return segment;
}

From source file:org.apache.jackrabbit.oak.run.SegmentUtils.java
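
Reachability analysis over store segments: starting from the head, Queue.remove() dequeues segment ids, and whatever is never reached is reported as garbage.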

private static void debugFileStore(FileStore store) {
    Map<SegmentId, List<SegmentId>> idmap = Maps.newHashMap();
    int dataCount = 0;
    long dataSize = 0;
    int bulkCount = 0;
    long bulkSize = 0;

    ((Logger) getLogger(SegmentTracker.class)).setLevel(Level.OFF);
    RecordUsageAnalyser analyser = new RecordUsageAnalyser();

    for (SegmentId id : store.getSegmentIds()) {
        if (id.isDataSegmentId()) {
            Segment segment = id.getSegment();
            dataCount++;
            dataSize += segment.size();
            idmap.put(id, segment.getReferencedIds());
            analyseSegment(segment, analyser);
        } else if (id.isBulkSegmentId()) {
            bulkCount++;
            bulkSize += id.getSegment().size();
            idmap.put(id, Collections.<SegmentId>emptyList());
        }
    }
    System.out.println("Total size:");
    System.out.format("%s in %6d data segments%n", byteCountToDisplaySize(dataSize), dataCount);
    System.out.format("%s in %6d bulk segments%n", byteCountToDisplaySize(bulkSize), bulkCount);
    System.out.println(analyser.toString());

    Set<SegmentId> garbage = newHashSet(idmap.keySet());
    Queue<SegmentId> queue = Queues.newArrayDeque();
    queue.add(store.getHead().getRecordId().getSegmentId());
    while (!queue.isEmpty()) {
        SegmentId id = queue.remove();
        if (garbage.remove(id)) {
            queue.addAll(idmap.get(id));
        }
    }
    dataCount = 0;
    dataSize = 0;
    bulkCount = 0;
    bulkSize = 0;
    for (SegmentId id : garbage) {
        if (id.isDataSegmentId()) {
            dataCount++;
            dataSize += id.getSegment().size();
        } else if (id.isBulkSegmentId()) {
            bulkCount++;
            bulkSize += id.getSegment().size();
        }
    }
    System.out.format("%nAvailable for garbage collection:%n");
    System.out.format("%s in %6d data segments%n", byteCountToDisplaySize(dataSize), dataCount);
    System.out.format("%s in %6d bulk segments%n", byteCountToDisplaySize(bulkSize), bulkCount);
    System.out.format("%n%s", new PCMAnalyser(store).toString());
}

From source file:org.apache.jackrabbit.oak.run.SegmentTarUtils.java
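
The same reachability analysis against the segment-tar store, again draining the queue of segment ids with Queue.remove().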

private static void debugFileStore(FileStore store) {
    Map<SegmentId, List<SegmentId>> idmap = Maps.newHashMap();
    int dataCount = 0;
    long dataSize = 0;
    int bulkCount = 0;
    long bulkSize = 0;

    ((Logger) getLogger(SegmentTracker.class)).setLevel(Level.OFF);
    RecordUsageAnalyser analyser = new RecordUsageAnalyser(store.getReader());

    for (SegmentId id : store.getSegmentIds()) {
        if (id.isDataSegmentId()) {
            Segment segment = id.getSegment();
            dataCount++;
            dataSize += segment.size();
            idmap.put(id, segment.getReferencedIds());
            analyseSegment(segment, analyser);
        } else if (id.isBulkSegmentId()) {
            bulkCount++;
            bulkSize += id.getSegment().size();
            idmap.put(id, Collections.<SegmentId>emptyList());
        }
    }
    System.out.println("Total size:");
    System.out.format("%s in %6d data segments%n", byteCountToDisplaySize(dataSize), dataCount);
    System.out.format("%s in %6d bulk segments%n", byteCountToDisplaySize(bulkSize), bulkCount);
    System.out.println(analyser.toString());

    Set<SegmentId> garbage = newHashSet(idmap.keySet());
    Queue<SegmentId> queue = Queues.newArrayDeque();
    queue.add(store.getRevisions().getHead().getSegmentId());
    while (!queue.isEmpty()) {
        SegmentId id = queue.remove();
        if (garbage.remove(id)) {
            queue.addAll(idmap.get(id));
        }
    }
    dataCount = 0;
    dataSize = 0;
    bulkCount = 0;
    bulkSize = 0;
    for (SegmentId id : garbage) {
        if (id.isDataSegmentId()) {
            dataCount++;
            dataSize += id.getSegment().size();
        } else if (id.isBulkSegmentId()) {
            bulkCount++;
            bulkSize += id.getSegment().size();
        }
    }
    System.out.format("%nAvailable for garbage collection:%n");
    System.out.format("%s in %6d data segments%n", byteCountToDisplaySize(dataSize), dataCount);
    System.out.format("%s in %6d bulk segments%n", byteCountToDisplaySize(bulkSize), bulkCount);
}