Usage examples for the java.util.Deque method boolean isEmpty().
boolean isEmpty();
From source file:org.polymap.model2.store.geotools.FeatureTypeBuilder.java
protected ComplexType buildComplexType(Class<? extends Composite> compositeClass, String indent) throws Exception { // fields -> properties Collection<PropertyDescriptor> properties = new ArrayList(); // super classes and mixins Deque<Class> stack = new ArrayDeque(); stack.push(compositeClass);//from w w w .j a v a 2 s .c om while (!stack.isEmpty()) { Class type = stack.pop(); log.debug(indent + "Composite: " + type); // super class if (type.getSuperclass() != null && !Entity.class.equals(type.getSuperclass()) && !Composite.class.equals(type.getSuperclass())) { stack.push(type.getSuperclass()); } // mixins CompositeInfoImpl typeInfo = new CompositeInfoImpl(type); //log.debug( indent + " " + "Mixins: " + typeInfo.getMixins() ); stack.addAll(typeInfo.getMixins()); // fields for (Field field : type.getDeclaredFields()) { // Property or CollectionProperty if (Property.class.isAssignableFrom(field.getType()) || CollectionProperty.class.isAssignableFrom(field.getType())) { PropertyInfoImpl propInfo = new PropertyInfoImpl(field); Class<?> binding = propInfo.getType(); // attribute if (binding.isPrimitive() || binding.equals(String.class) || Number.class.isAssignableFrom(binding) || Boolean.class.isAssignableFrom(binding) || Date.class.isAssignableFrom(binding) || binding.isEnum()) { if (binding.isEnum()) { binding = String.class; } AttributeType propType = buildAttributeType(field, binding); AttributeDescriptor desc = factory.createAttributeDescriptor(propType, propType.getName(), 0, propInfo.getMaxOccurs(), propInfo.isNullable(), propInfo.getDefaultValue()); properties.add(desc); log.debug(indent + " " + "Attribute: " + desc); } // geometry else if (Geometry.class.isAssignableFrom(binding)) { AttributeType propType = buildAttributeType(field, binding); GeometryType geomType = factory.createGeometryType(propType.getName(), propType.getBinding(), crs, propType.isIdentified(), propType.isAbstract(), propType.getRestrictions(), propType.getSuper(), 
propType.getDescription()); GeometryDescriptor desc = factory.createGeometryDescriptor(geomType, geomType.getName(), 0, 1, propInfo.isNullable(), propInfo.getDefaultValue()); properties.add(desc); log.debug(indent + " " + "Geometry: " + desc); } // complex else if (Composite.class.isAssignableFrom(binding)) { ComplexType propType = buildComplexType((Class<? extends Composite>) binding, indent + " "); AttributeDescriptor desc = factory.createAttributeDescriptor(propType, nameInStore(field), 0, propInfo.getMaxOccurs(), propInfo.isNullable(), propInfo.getDefaultValue()); properties.add(desc); log.debug(indent + " " + "Complex Property: " + desc); } else { throw new RuntimeException("Property value type is not supported: " + binding); } } } } NameInStore nameInStore = compositeClass.getAnnotation(NameInStore.class); Name name = buildName(nameInStore != null ? nameInStore.value() : compositeClass.getSimpleName()); boolean isIdentified = false; boolean isAbstract = false; List<Filter> restrictions = null; AttributeType superType = null; Description annotation = compositeClass.getAnnotation(Description.class); InternationalString description = annotation != null ? SimpleInternationalString.wrap(annotation.value()) : null; return factory.createComplexType(name, properties, isIdentified, isAbstract, restrictions, superType, description); }
From source file:msi.gama.kernel.model.GamlModelSpecies.java
@Override public Map<String, ISpecies> getAllSpecies() { if (allSpecies == null) { allSpecies = new TOrderedHashMap(); final Deque<ISpecies> speciesStack = new ArrayDeque<ISpecies>(); speciesStack.push(this); ISpecies currentSpecies;/*from w ww.j av a 2 s . c om*/ while (!speciesStack.isEmpty()) { currentSpecies = speciesStack.pop(); // scope.getGui().debug("GamlModelSpecies: effectively adding " // + currentSpecies.getName()); allSpecies.put(currentSpecies.getName(), currentSpecies); final List<ISpecies> theMicroSpecies = currentSpecies.getMicroSpecies(); for (final ISpecies microSpec : theMicroSpecies) { if (microSpec.getMacroSpecies().equals(currentSpecies)) { speciesStack.push(microSpec); } } } } return allSpecies; }
From source file:com.cloudera.oryx.rdf.common.tree.DecisionTree.java
/**
 * Renders the decision tree as an indented ASCII diagram, one node per line.
 * <p>
 * Uses an explicit stack of (node, path-from-root) pairs to do an iterative pre-order
 * traversal; the left child is pushed last so it is printed first. The path records
 * left/right turns and drives the per-line prefix drawing.
 * <p>
 * NOTE: {@link LinkedList} is used deliberately instead of {@link java.util.ArrayDeque}:
 * children obtained from {@code getRight()}/{@code getLeft()} may be null (the loop body
 * explicitly checks {@code node != null}), and ArrayDeque rejects null elements.
 */
@Override
public String toString() {
    StringBuilder result = new StringBuilder();
    if (root != null) {
        Deque<Pair<TreeNode, TreePath>> toPrint = new LinkedList<Pair<TreeNode, TreePath>>();
        toPrint.push(new Pair<TreeNode, TreePath>(root, TreePath.EMPTY));
        while (!toPrint.isEmpty()) {
            Pair<TreeNode, TreePath> entry = toPrint.pop();
            TreeNode node = entry.getFirst();
            TreePath path = entry.getSecond();
            int pathLength = path.length();
            // Draw the tree "branches": the last path segment gets the node connector,
            // earlier segments get a vertical bar only when the path went left there.
            for (int i = 0; i < pathLength; i++) {
                if (i == pathLength - 1) {
                    result.append(" +-");
                } else {
                    result.append(path.isLeftAt(i) ? " | " : "   ");
                }
            }
            result.append(node).append('\n');
            // Internal nodes: push right first, then left, so left is printed first.
            if (node != null && !node.isTerminal()) {
                DecisionNode decisionNode = (DecisionNode) node;
                toPrint.push(new Pair<TreeNode, TreePath>(decisionNode.getRight(), path.extendRight()));
                toPrint.push(new Pair<TreeNode, TreePath>(decisionNode.getLeft(), path.extendLeft()));
            }
        }
    }
    return result.toString();
}
From source file:uniol.apt.adt.automaton.FiniteAutomatonUtility.java
/**
 * Computes the epsilon closure of the given states: every state reachable from any of
 * them by following only {@code Symbol.EPSILON} transitions, including the states
 * themselves.
 * <p>
 * Standard worklist algorithm: the closure set doubles as the "seen" set, and a state is
 * only enqueued the first time {@code Set.add} reports it as new, so each state is
 * expanded at most once.
 *
 * @param states the starting states; not modified
 * @return a new mutable set containing the epsilon closure
 */
static private Set<State> followEpsilons(Set<State> states) {
    Set<State> closure = new HashSet<>(states);
    Deque<State> queue = new LinkedList<>(closure);
    while (!queue.isEmpty()) {
        State current = queue.removeFirst();
        for (State successor : current.getFollowingStates(Symbol.EPSILON)) {
            // add() returns true only for states not yet in the closure.
            if (closure.add(successor)) {
                queue.add(successor);
            }
        }
    }
    return closure;
}
From source file:com.darkstar.beanCartography.utils.finder.Finder.java
/**
 * Search through all contained objects. Those matching a filter will have
 * the corresponding interceptor executed.
 * <p>
 * Seeds a work stack with the target wrapped in a {@code BeanContext} and repeatedly
 * delegates to {@code visit(...)}, which is expected to consume the stack and record
 * visited contexts; iteration ends once the stack drains. A {@link LinkedHashSet} keeps
 * the visited set in insertion order.
 *
 * @param target object to search; a null target is a no-op
 */
public void find(Object target) {
    if (target == null) {
        return;
    }
    final Deque<BeanContext> workStack = new LinkedList<>();
    final Set<BeanContext> seen = new LinkedHashSet<>();
    workStack.push(new BeanContext(target));
    while (!workStack.isEmpty()) {
        visit(workStack, seen);
    }
}
From source file:com.spotify.helios.agent.QueueingHistoryWriter.java
/**
 * Pops and returns the oldest queued {@link TaskStatusEvent} across all per-job deques,
 * or null when nothing needs processing.
 * <p>
 * Optimistic-concurrency loop: we first pick the eldest event WITHOUT holding any lock,
 * then lock the owning deque and verify the head is still the event we picked. If the
 * event was rolled off in the meantime (max-size eviction) or the deque vanished, we
 * simply retry. This skews the cost so that producers adding events stay cheap.
 */
private TaskStatusEvent getNext() {
    // Some explanation: We first find the eldest event from amongst the queues (ok, they're
    // deques, but we really use it as a put back queue), and only then to we try to get
    // a lock on the relevant queue from whence we got the event.  Assuming that all worked
    // *and* that the event we have wasn't rolled off due to max-size limitations, we then
    // pull the item off the queue and return it.  We're basically doing optimistic concurrency,
    // and skewing things so that adding to this should be cheap.
    while (true) {
        final TaskStatusEvent current = findEldestEvent();
        // Didn't find anything that needed processing?
        if (current == null) {
            return null;
        }
        final JobId id = current.getStatus().getJob().getId();
        final Deque<TaskStatusEvent> deque = items.get(id);
        if (deque == null) {
            // shouldn't happen because we should be the only one pulling items off, but....
            continue;
        }
        synchronized (deque) {
            if (!deque.peek().equals(current)) {
                // item got rolled off, try again
                continue;
            }
            // Pull it off the queue and be paranoid.
            final TaskStatusEvent newCurrent = deque.poll();
            count.decrementAndGet();
            checkState(current.equals(newCurrent), "current should equal newCurrent");
            // Safe because this is the *only* place we hold these two locks at the same time.
            synchronized (items) {
                // Extra paranoia: curDeque should always == deque
                final Deque<TaskStatusEvent> curDeque = items.get(id);
                if (curDeque != null && curDeque.isEmpty()) {
                    // Drop the now-empty deque so the map doesn't accumulate dead entries.
                    items.remove(id);
                }
            }
            return current;
        }
    }
}
From source file:net.sf.jasperreports.engine.json.expression.member.evaluation.ObjectConstructionExpressionEvaluator.java
private List<JRJsonNode> goAnywhereDown(JRJsonNode jrJsonNode) { List<JRJsonNode> result = new ArrayList<>(); Deque<JRJsonNode> stack = new ArrayDeque<>(); if (log.isDebugEnabled()) { log.debug("initial stack population with: " + jrJsonNode.getDataNode()); }/* w ww. j av a 2 s . c o m*/ // populate the stack initially stack.push(jrJsonNode); while (!stack.isEmpty()) { JRJsonNode stackNode = stack.pop(); JsonNode stackDataNode = stackNode.getDataNode(); addChildrenToStack(stackNode, stack); if (log.isDebugEnabled()) { log.debug("processing stack element: " + stackDataNode); } // process the current stack item if (stackDataNode.isObject()) { JRJsonNode childWithKeys = constructNewObjectNodeWithKeys(stackNode); if (childWithKeys != null) { result.add(childWithKeys); } } } return result; }
From source file:org.talend.dataquality.semantic.recognizer.DefaultCategoryRecognizer.java
/**
 * For the discovery, if a category c matches with the data,
 * it means all the ancestor categories of c have to match too.
 * This method increments the ancestor categories of c.
 *
 * @param categories the category result; ancestor category names are added to it
 * @param id the category ID of the matched category c
 */
private void incrementAncestorsCategories(Set<String> categories, String id) {
    // Worklist of (categoryId, distanceFromC) pairs still to expand.
    // NOTE: elements are enqueued with add() (tail) and dequeued with pop() (head),
    // so despite the stack-like pop() this is FIFO — ancestors are visited in
    // breadth-first order, level by level.
    Deque<Pair<String, Integer>> catToSee = new ArrayDeque<>();
    // Guards against re-counting a parent reachable through multiple paths (DAG).
    Set<String> catAlreadySeen = new HashSet<>();
    catToSee.add(Pair.of(id, 0));
    Pair<String, Integer> currentCategory;
    while (!catToSee.isEmpty()) {
        currentCategory = catToSee.pop();
        DQCategory dqCategory = crm.getCategoryMetadataById(currentCategory.getLeft());
        if (dqCategory != null && !CollectionUtils.isEmpty(dqCategory.getParents())) {
            // Parents sit one level further from the originally matched category.
            int parentLevel = currentCategory.getRight() + 1;
            for (DQCategory parent : dqCategory.getParents()) {
                if (!catAlreadySeen.contains(parent.getId())) {
                    catAlreadySeen.add(parent.getId());
                    catToSee.add(Pair.of(parent.getId(), parentLevel));
                    // Only parents with resolvable metadata are counted and reported.
                    DQCategory meta = crm.getCategoryMetadataById(parent.getId());
                    if (meta != null) {
                        incrementCategory(meta.getName(), meta.getLabel(), parentLevel);
                        categories.add(meta.getName());
                    }
                }
            }
        }
    }
}
From source file:net.sf.jasperreports.engine.json.expression.member.evaluation.ArrayIndexExpressionEvaluator.java
private List<JRJsonNode> goAnywhereDown(JRJsonNode jrJsonNode) { List<JRJsonNode> result = new ArrayList<>(); Deque<JRJsonNode> stack = new ArrayDeque<>(); JsonNode initialDataNode = jrJsonNode.getDataNode(); if (log.isDebugEnabled()) { log.debug("initial stack population with: " + initialDataNode); }//from w ww .ja va 2 s . co m // populate the stack initially stack.push(jrJsonNode); while (!stack.isEmpty()) { JRJsonNode stackNode = stack.pop(); JsonNode stackDataNode = stackNode.getDataNode(); addChildrenToStack(stackNode, stack); // process the current stack item if (stackDataNode.isArray()) { if (log.isDebugEnabled()) { log.debug("processing stack element: " + stackDataNode); } if (expression.getIndex() >= 0 && expression.getIndex() < stackDataNode.size()) { JsonNode nodeAtIndex = stackDataNode.get(expression.getIndex()); JRJsonNode child = stackNode.createChild(nodeAtIndex); if (applyFilter(child)) { result.add(child); } } } } return result; }
From source file:org.apache.hadoop.hive.ql.parse.ASTNode.java
/**
 * Serializes this subtree into the root node's memoized string buffer and returns this
 * node's slice of it.
 * <p>
 * Iterative replacement for the recursive LISP-style tree printer: each node is visited
 * twice via the {@code visited} flag — once on the way down (emit separator space,
 * opening "(" and the node text, push children in reverse so they pop in order) and once
 * on the way up (emit the closing ")"). Start/end indexes into the shared buffer are
 * recorded on every node so later calls can reuse substrings without re-walking.
 * <p>
 * NOTE: mutates traversal state on the visited nodes ({@code visited}, {@code rootNode},
 * {@code startIndx}, {@code endIndx}); {@code visited} is reset to false on the second
 * visit, leaving the tree reusable afterwards.
 */
private String toStringTree(ASTNode rootNode) {
    Deque<ASTNode> stack = new ArrayDeque<ASTNode>();
    stack.push(this);
    while (!stack.isEmpty()) {
        ASTNode next = stack.peek();
        if (!next.visited) {
            // First visit (pre-order): emit this node's opening text.
            // A separator space is needed before any sibling that is not the first child.
            if (next.parent != null && next.parent.getChildCount() > 1 && next != next.parent.getChild(0)) {
                rootNode.addtoMemoizedString(" ");
            }
            next.rootNode = rootNode;
            next.startIndx = rootNode.getMemoizedStringLen();
            // Leaf: emit text, record end index and pop immediately — no second visit.
            if (next.children == null || next.children.size() == 0) {
                String str = next.toString();
                // String literals keep their original case; everything else is lowercased.
                rootNode.addtoMemoizedString(
                        next.getType() != HiveParser.StringLiteral ? str.toLowerCase() : str);
                next.endIndx = rootNode.getMemoizedStringLen();
                stack.pop();
                continue;
            }
            // Non-nil internal node: open the parenthesized form.
            if (!next.isNil()) {
                rootNode.addtoMemoizedString("(");
                String str = next.toString();
                rootNode.addtoMemoizedString(
                        (next.getType() == HiveParser.StringLiteral || null == str) ? str : str.toLowerCase());
                rootNode.addtoMemoizedString(" ");
            }
            // Push children in reverse so they are processed left-to-right.
            if (next.children != null) {
                for (int i = next.children.size() - 1; i >= 0; i--) {
                    stack.push((ASTNode) next.children.get(i));
                }
            }
            next.visited = true;
        } else {
            // Second visit (post-order): close the form and finalize this node's slice.
            if (!next.isNil()) {
                rootNode.addtoMemoizedString(")");
            }
            next.endIndx = rootNode.getMemoizedStringLen();
            // Reset so subsequent traversals see a clean tree.
            next.visited = false;
            stack.pop();
        }
    }
    return rootNode.getMemoizedSubString(startIndx, endIndx);
}