Usage examples for the java.util.Deque method add(E e)
boolean add(E e);
From source file:ocr.sapphire.image.EdgeBasedImagePreprocessor.java
private Deque<Point> findConnectedComponent(int[] edgeData, int x, int y) { Deque<Point> points = new LinkedList<Point>(); Deque<Point> queue = new LinkedList<Point>(); edgeData[x + y * width] = WHITE;// w ww . j a va 2 s . c om Point initialPoint = new Point(x, y); points.add(initialPoint); queue.push(initialPoint); while (!queue.isEmpty()) { Point point = queue.removeFirst(); for (int k = 0; k < 8; k++) { int x2 = (int) (point.x + DX[k]); int y2 = (int) (point.y + DY[k]); if (x2 < 0 || y2 < 0 || x2 >= width || y2 >= height) { continue; } if (edgeData[x2 + y2 * width] == BLACK) { edgeData[x2 + y2 * width] = WHITE; Point point2 = new Point(x2, y2); points.add(point2); queue.addLast(point2); } } } return points; }
From source file:org.restheart.test.performance.LoadGetPT.java
/**
 * Fetches one page of collection data directly through the DAO layer and
 * asserts that a non-empty result comes back, optionally printing it.
 */
public void dbdirect() {
    final Database dbsDAO = new DbsDAO();
    DBCollection dbcoll = dbsDAO.getCollection(db, coll);

    // The DAO expects the optional filter wrapped in a deque; null means "no filter".
    Deque<String> filterQueue = null;
    if (filter != null) {
        filterQueue = new ArrayDeque<>();
        filterQueue.add(filter);
    }

    final ArrayList<DBObject> data;
    try {
        data = new DbsDAO().getCollectionData(dbcoll, page, pagesize, null, filterQueue,
                DBCursorPool.EAGER_CURSOR_ALLOCATION_POLICY.NONE);
    } catch (Exception e) {
        System.out.println("error: " + e.getMessage());
        return;
    }

    assertNotNull(data);
    assertFalse(data.isEmpty());
    if (printData) {
        System.out.println(data);
    }
}
From source file:jetbrains.exodus.entitystore.FileSystemBlobVaultOld.java
@Override
public BackupStrategy getBackupStrategy() {
    return new BackupStrategy() {
        /**
         * Lazily walks the blob directory tree, yielding a FileDescriptor for
         * every directory plus every non-empty file whose name ends with
         * blobExtension or equals the version marker file.
         */
        @Override
        public Iterable<FileDescriptor> listFiles() {
            return new Iterable<FileDescriptor>() {
                @Override
                public Iterator<FileDescriptor> iterator() {
                    // Directories still to be expanded; seeded with the vault root.
                    final Deque<FileDescriptor> queue = new LinkedList<>();
                    queue.add(new FileDescriptor(location, blobsDirectory + File.separator));
                    return new Iterator<FileDescriptor>() {
                        int i = 0;            // index into files[] of the directory being scanned
                        int n = 0;            // number of entries in files[]
                        File[] files;         // listing of the current directory
                        FileDescriptor next;  // look-ahead element; null when not yet computed
                        String currentPrefix; // relative path prefix of the current directory

                        @Override
                        public boolean hasNext() {
                            if (next != null) {
                                return true; // look-ahead already holds the next element
                            }
                            // Scan the remainder of the current directory listing.
                            while (i < n) {
                                final File file = files[i++];
                                final String name = file.getName();
                                if (file.isDirectory()) {
                                    // Defer subdirectories for later expansion.
                                    queue.push(new FileDescriptor(file,
                                            currentPrefix + file.getName() + File.separator));
                                } else if (file.isFile()) {
                                    final long fileSize = file.length();
                                    if (fileSize == 0)
                                        continue; // skip empty files entirely
                                    if (name.endsWith(blobExtension) || name.equalsIgnoreCase(VERSION_FILE)) {
                                        next = new FileDescriptor(file, currentPrefix, fileSize);
                                        return true;
                                    }
                                } else {
                                    // something strange with filesystem
                                    throw new EntityStoreException(
                                            "File or directory expected: " + file.toString());
                                }
                            }
                            if (queue.isEmpty()) {
                                return false; // no directories left to expand
                            }
                            // Advance to the next pending directory and yield it as well.
                            final FileDescriptor fd = queue.pop();
                            files = IOUtil.listFiles(fd.getFile());
                            currentPrefix = fd.getPath();
                            i = 0;
                            n = files.length;
                            next = fd;
                            return true;
                        }

                        @Override
                        public FileDescriptor next() {
                            if (!hasNext()) {
                                throw new NoSuchElementException();
                            }
                            final FileDescriptor result = next;
                            next = null; // consume the look-ahead
                            return result;
                        }

                        @Override
                        public void remove() {
                            throw new UnsupportedOperationException();
                        }
                    };
                }
            };
        }
    };
}
From source file:uniol.apt.adt.automaton.FiniteAutomatonUtility.java
/**
 * Performs an iterative depth-first search over the DFA looking for a word
 * that reaches a final state. Each state is entered at most once.
 *
 * @param dfa the automaton to search
 * @return an accepted word, or null if no final state was reached
 */
static private List<String> findAcceptedWord(DeterministicFiniteAutomaton dfa) {
    Set<DFAState> statesSeen = new HashSet<>();
    // Letters on the current DFS path; grows and shrinks with the trace.
    LinkedList<String> word = new LinkedList<>();
    // DFS stack: each entry pairs a state with the iterator over its
    // not-yet-explored outgoing symbols.
    Deque<Pair<DFAState, Iterator<Symbol>>> trace = new LinkedList<>();
    DFAState initial = dfa.getInitialState();
    trace.add(new Pair<>(initial, initial.getDefinedSymbols().iterator()));
    while (!trace.isEmpty()) {
        InterrupterRegistry.throwIfInterruptRequestedForCurrentThread();
        Pair<DFAState, Iterator<Symbol>> pair = trace.peekLast();
        if (!pair.getSecond().hasNext()) {
            // State fully explored: backtrack and drop its letter from the word.
            trace.removeLast();
            word.pollLast();
        } else {
            Symbol symbol = pair.getSecond().next();
            DFAState nextState = pair.getFirst().getFollowingState(symbol);
            // Only follow this state if we haven't followed it yet before
            if (statesSeen.add(nextState)) {
                trace.add(new Pair<>(nextState, nextState.getDefinedSymbols().iterator()));
                word.add(symbol.getEvent());
                if (nextState.isFinalState())
                    return word;
            }
        }
    }
    return null;
}
From source file:uniol.apt.adt.automaton.FiniteAutomatonUtility.java
/**
 * Lazily enumerates all states reachable from the given state, following
 * epsilon edges as well as every defined symbol, in breadth-first order.
 *
 * @param initialState the state to start the exploration from
 * @return an Iterable over every reachable state (each yielded once)
 */
static private <S extends State> Iterable<S> statesIterable(final S initialState) {
    return new Iterable<S>() {
        @Override
        public Iterator<S> iterator() {
            // States discovered but not yet returned to the caller.
            final Deque<State> unhandled = new LinkedList<>();
            // Guards against revisiting states on cyclic automata.
            final Set<State> seen = new HashSet<>();
            unhandled.add(initialState);
            seen.add(initialState);
            return new Iterator<S>() {
                @Override
                public boolean hasNext() {
                    return !unhandled.isEmpty();
                }

                @Override
                public S next() {
                    State state = unhandled.pollFirst();
                    if (state == null)
                        throw new NoSuchElementException();
                    // Discover successors via epsilon transitions...
                    for (State next : state.getFollowingStates(Symbol.EPSILON))
                        if (seen.add(next))
                            unhandled.add(next);
                    // ...and via every symbol defined in this state.
                    for (Symbol symbol : state.getDefinedSymbols())
                        for (State next : state.getFollowingStates(symbol))
                            if (seen.add(next))
                                unhandled.add(next);
                    // NOTE(review): cast assumes every reachable state is of type S,
                    // as in the original — confirm against State's implementations.
                    @SuppressWarnings("unchecked")
                    S ret = (S) state;
                    return ret;
                }

                @Override
                public void remove() {
                    throw new UnsupportedOperationException();
                }
            };
        }
    };
}
From source file:com.spotify.helios.servicescommon.coordination.DefaultZooKeeperClient.java
@Override public List<String> listRecursive(final String path) throws KeeperException { assertClusterIdFlagTrue();//from w w w . java 2s . c o m final Deque<String> queue = newLinkedList(); final List<String> tree = newArrayList(); queue.add(path); tree.add(path); while (!queue.isEmpty()) { final String node = queue.pollFirst(); final List<String> children = getChildren(node); for (final String child : children) { final String childPath = node.replaceAll("/$", "") + "/" + child; queue.add(childPath); tree.add(childPath); } } return tree; }
From source file:org.wso2.carbon.uuf.internal.core.create.DependencyTreeParser.java
public static Result parse(List<String> dependencyTreeLines) {
    // Flattened dependencies map.
    // key = component name
    // value = all dependencies of the 'key'
    SetMultimap<String, String> flattenedDependencies = HashMultimap.create();
    // Leveled dependencies list.
    // index = dependency level, index 0 == root component's dependencies
    // List.get(i) = set of dependencies in level i
    List<Set<ComponentData>> leveledDependencies = new ArrayList<>(6);

    int previousLevel = 0;
    String previousComponentName = null;
    // Stack of (parent component name, accumulated child names); one entry
    // per tree level currently open above the line being processed.
    Deque<Pair<String, List<String>>> parentNodesStack = new LinkedList<>();

    for (int i = 0; i < dependencyTreeLines.size(); i++) {
        String line = dependencyTreeLines.get(i);
        int level = countLevel(line);
        int jump = (level - previousLevel); // level delta relative to the previous line
        ComponentData currentComponent = getComponentData(line);

        // Record the component in its level bucket, creating the bucket on first use.
        if (level < leveledDependencies.size()) {
            leveledDependencies.get(level).add(currentComponent);
        } else {
            Set<ComponentData> set = new HashSet<>();
            set.add(currentComponent);
            leveledDependencies.add(level, set);
        }

        if (i == 0) {
            // Very first leaf dependency.
            previousComponentName = currentComponent.name;
            continue;
        }
        if (jump < 0) {
            // Dependency level decreased, so remove entries from the stack.
            for (int j = Math.abs(jump); j > 0; j--) {
                Pair<String, List<String>> entry = parentNodesStack.removeLast();
                flattenedDependencies.putAll(entry.getKey(), entry.getValue());
            }
        } else if (jump > 0) { // jump == 1
            // Dependency level increased, so add an entry to the stack.
            parentNodesStack.add(new ImmutablePair<>(previousComponentName, new ArrayList<>(3)));
        }
        // (jump == 0): Same dependency level, no need to change the stack.

        // Add current component name to all parent nodes as a dependency.
        for (Pair<String, List<String>> entry : parentNodesStack) {
            entry.getValue().add(currentComponent.name);
        }

        previousLevel = level;
        previousComponentName = currentComponent.name;
    }
    // If there are any remaining stack entries, add them to flattenedDependencies.
    for (Pair<String, List<String>> entry : parentNodesStack) {
        flattenedDependencies.putAll(entry.getKey(), entry.getValue());
    }
    return new Result(flattenedDependencies, leveledDependencies);
}
From source file:org.talend.dataquality.semantic.statistics.SemanticQualityAnalyzer.java
/** * For the validation of a COMPOUND category, we only have to valid the leaves children categories. * This methods find the DICT children categories and the REGEX children categories. * /*from ww w . j av a 2 s . com*/ * @param id, the category from we search the children * @return the DICT children categories and the REGEX children categories with a map. */ private Map<CategoryType, Set<DQCategory>> getChildrenCategories(String id) { Deque<String> catToSee = new ArrayDeque<>(); Set<String> catAlreadySeen = new HashSet<>(); Map<CategoryType, Set<DQCategory>> children = new HashMap<>(); children.put(CategoryType.REGEX, new HashSet<DQCategory>()); children.put(CategoryType.DICT, new HashSet<DQCategory>()); catToSee.add(id); String currentCategory; while (!catToSee.isEmpty()) { currentCategory = catToSee.pop(); DQCategory dqCategory = crm.getCategoryMetadataById(currentCategory); if (dqCategory != null) if (!CollectionUtils.isEmpty(dqCategory.getChildren())) { for (DQCategory child : dqCategory.getChildren()) { if (!catAlreadySeen.contains(child.getId())) { catAlreadySeen.add(child.getId()); catToSee.add(child.getId()); } } } else if (!currentCategory.equals(id)) { children.get(dqCategory.getType()).add(dqCategory); } } return children; }
From source file:uniol.apt.adt.automaton.FiniteAutomatonUtility.java
/**
 * Find a word whose prefixes (including the word) conform to a given predicate and which itself also conforms
 * to a second predicate.
 *
 * This method uses a depth-first search. A breadth-first search would use more memory.
 *
 * @param a The automaton whose accepted words should get checked.
 * @param prefixPredicate The predicate to check the prefixes.
 * @param wordPredicate The predicate to check the words.
 * @return A word which conforms to the predicates, or null if none exists.
 */
static public List<String> findPredicateWord(FiniteAutomaton a, Predicate<List<String>> prefixPredicate,
        Predicate<List<String>> wordPredicate) {
    MinimalDeterministicFiniteAutomaton dfa = minimizeInternal(a);
    // DFS stack: each entry pairs a state with the iterator over its
    // not-yet-explored outgoing symbols.
    Deque<Pair<DFAState, Iterator<Symbol>>> trace = new ArrayDeque<>();
    // Letters on the current DFS path; kept in sync with the trace.
    LinkedList<String> word = new LinkedList<>();
    DFAState initial = dfa.getInitialState();
    // The sink state can never lead to acceptance, so it is pruned below.
    DFAState sinkState = findSinkState(dfa);
    trace.add(new Pair<>(initial, initial.getDefinedSymbols().iterator()));
    while (!trace.isEmpty()) {
        Pair<DFAState, Iterator<Symbol>> pair = trace.peekLast();
        if (!pair.getSecond().hasNext()) {
            // State fully explored: backtrack and drop its letter.
            trace.removeLast();
            word.pollLast();
        } else {
            Symbol symbol = pair.getSecond().next();
            DFAState nextState = pair.getFirst().getFollowingState(symbol);
            if (!nextState.equals(sinkState)) {
                word.add(symbol.getEvent());
                List<String> roWord = ListUtils.unmodifiableList(word);
                if (prefixPredicate.evaluate(roWord)) {
                    trace.addLast(new Pair<>(nextState, nextState.getDefinedSymbols().iterator()));
                    if (nextState.isFinalState() && wordPredicate.evaluate(roWord))
                        return word;
                } else {
                    // Prefix rejected: undo the tentative letter and prune this branch.
                    word.removeLast();
                }
            }
        }
    }
    return null;
}
From source file:org.alfresco.repo.content.transform.TransformerDebugLog.java
/**
 * Appends the message to the most recent existing entry carrying the same
 * request id, or adds a brand-new entry when none matches. Messages without
 * a request id are ignored.
 */
@Override
protected void addOrModify(Deque<DebugEntry> entries, Object message) {
    final String msg = (String) message;
    final String requestId = getRequestId(msg);
    if (requestId == null) {
        return; // nothing to correlate on
    }
    // Search newest-first so the most recent matching entry wins.
    for (Iterator<DebugEntry> it = entries.descendingIterator(); it.hasNext();) {
        final DebugEntry entry = it.next();
        if (requestId.equals(entry.requestId)) {
            entry.addLine(msg);
            return;
        }
    }
    entries.add(new DebugEntry(requestId, msg));
}