List of usage examples for java.util.Deque#push(E)
void push(E e);
From source file:org.synku4j.wbxml.marshal.impl.DefaultWbxmlMarshaller.java
/**
 * Unmarshals WBXML from the given stream into a new instance of {@code targetClass}.
 *
 * @param cntx        marshalling context (not consumed in this visible snippet)
 * @param is          input stream carrying the WBXML payload
 * @param targetClass type to instantiate and populate; must have a no-arg constructor
 * @param filter      optional element filters
 * @return the newly created target instance
 * @throws WbxmlMarshallerException if the target class cannot be instantiated
 */
@Override
public <T> T unmarshal(WbxmlContext cntx, InputStream is, Class<T> targetClass, String... filter)
        throws IOException, WbxmlMarshallerException {
    T target;
    try {
        // Class.newInstance() is deprecated: it rethrows any checked exception from the
        // no-arg constructor without declaring it. Reflect through the constructor instead;
        // the broad catch below already handles every reflective failure uniformly.
        target = targetClass.getDeclaredConstructor().newInstance();
    } catch (Exception e) {
        if (log.isWarnEnabled()) {
            log.warn("Unable to create an instance of the target class", e);
        }
        throw new WbxmlMarshallerException(
                "Exception raised creating new instance of(" + targetClass + ")", e);
    }
    final ParseStackEntry pse = new ParseStackEntry(target);
    final Deque<ParseStackEntry> parseStack = new ArrayDeque<ParseStackEntry>();
    // Seed the parse stack with the root entry.
    // NOTE(review): the stack is never consumed in this snippet — presumably the parsing
    // loop follows in the full source; verify against the complete file.
    parseStack.push(pse);
    return target;
}
From source file:com.streamsets.datacollector.definition.ConfigDefinitionExtractor.java
/**
 * Resolves the full (direct + transitive) dependency map for a list of config definitions.
 *
 * Phase 1: index the definitions by name and record each definition's direct dependencies
 * (from dependsOn/triggeredByValue and from dependsOnMap) together with the trigger values
 * that activate them.
 * Phase 2: iterative depth-first walk (explicit stack) that folds each child's dependencies
 * into its parents, intersecting trigger sets for shared dependencies; detected cycles are
 * accumulated in the outer {@code cycles} collection.
 * Phase 3: fails if any cycle was detected, then writes the resolved map back into each
 * ConfigDefinition and clears the single-value dependsOn.
 *
 * NOTE(review): {@code configPrefix} is not referenced anywhere in this method body.
 */
void resolveDependencies(String configPrefix, List<ConfigDefinition> defs, Object contextMsg) {
    Map<String, ConfigDefinition> definitionsMap = new HashMap<>();
    // config name -> (dependency name -> trigger values that activate the config)
    Map<String, Map<String, Set<Object>>> dependencyMap = new HashMap<>();
    Map<String, Boolean> isFullyProcessed = new HashMap<>();
    for (ConfigDefinition def : defs) {
        definitionsMap.put(def.getName(), def);
        dependencyMap.put(def.getName(), new HashMap<String, Set<Object>>());
        isFullyProcessed.put(def.getName(), false);
    }
    cycles.clear();
    for (ConfigDefinition def : defs) {
        String dependsOnKey = def.getDependsOn();
        if (!StringUtils.isEmpty(dependsOnKey)) {
            verifyDependencyExists(definitionsMap, def, dependsOnKey, contextMsg);
            ConfigDefinition dependsOnDef = definitionsMap.get(dependsOnKey);
            // evaluate dependsOn triggers declared on the @ConfigDef annotation
            ConfigDef annotation = def.getConfigField().getAnnotation(ConfigDef.class);
            Set<Object> triggers = new HashSet<>();
            for (String trigger : annotation.triggeredByValue()) {
                triggers.add(ConfigValueExtractor.get().extract(dependsOnDef.getConfigField(),
                        dependsOnDef.getType(), trigger, contextMsg, true));
            }
            dependencyMap.get(def.getName()).put(dependsOnDef.getName(), triggers);
        }
        // Add direct dependencies to dependencyMap
        if (!def.getDependsOnMap().isEmpty()) {
            // Copy same as above.
            for (Map.Entry<String, List<Object>> dependsOn : def.getDependsOnMap().entrySet()) {
                dependsOnKey = dependsOn.getKey();
                if (!StringUtils.isEmpty(dependsOnKey)) {
                    verifyDependencyExists(definitionsMap, def, dependsOnKey, contextMsg);
                    Set<Object> triggers = new HashSet<>();
                    ConfigDefinition dependsOnDef = definitionsMap.get(dependsOnKey);
                    for (Object trigger : dependsOn.getValue()) {
                        triggers.add(ConfigValueExtractor.get().extract(dependsOnDef.getConfigField(),
                                dependsOnDef.getType(), (String) trigger, contextMsg, true));
                    }
                    // merge with any triggers already recorded for the same dependency
                    Map<String, Set<Object>> dependencies = dependencyMap.get(def.getName());
                    if (dependencies.containsKey(dependsOnKey)) {
                        dependencies.get(dependsOnKey).addAll(triggers);
                    } else {
                        dependencies.put(dependsOnKey, triggers);
                    }
                }
            }
        }
    }
    for (ConfigDefinition def : defs) {
        if (isFullyProcessed.get(def.getName())) {
            continue;
        }
        // Now find all indirect dependencies
        Deque<StackNode> stack = new ArrayDeque<>();
        stack.push(new StackNode(def, new LinkedHashSet<String>()));
        while (!stack.isEmpty()) {
            StackNode current = stack.peek();
            // We processed this one's dependencies before, don't bother adding its children.
            // The dependencies of this one have all been processed.
            if (current.childrenAddedToStack) {
                // Second visit (post-order): fold each child's resolved dependencies into
                // this node, then mark it done.
                stack.pop();
                Map<String, Set<Object>> currentDependencies = dependencyMap.get(current.def.getName());
                Set<String> children = new HashSet<>(current.def.getDependsOnMap().keySet());
                for (String child : children) {
                    if (StringUtils.isEmpty(child)) {
                        continue;
                    }
                    Map<String, Set<Object>> depsOfChild = dependencyMap.get(child);
                    for (Map.Entry<String, Set<Object>> depOfChild : depsOfChild.entrySet()) {
                        if (currentDependencies.containsKey(depOfChild.getKey())) {
                            // Add only the common trigger values,
                            // since it has to be one of those for both these to be triggered.
                            Set<Object> currentTriggers = currentDependencies.get(depOfChild.getKey());
                            Set<Object> childTriggers = depOfChild.getValue();
                            currentDependencies.put(depOfChild.getKey(),
                                    Sets.intersection(currentTriggers, childTriggers));
                        } else {
                            currentDependencies.put(depOfChild.getKey(),
                                    new HashSet<>(depOfChild.getValue()));
                        }
                    }
                }
                isFullyProcessed.put(current.def.getName(), true);
            } else {
                // First visit (pre-order): push the unprocessed dependencies, skipping any
                // that would close a cycle (detectCycle records those in `cycles`).
                Set<String> children = current.def.getDependsOnMap().keySet();
                String dependsOn = current.def.getDependsOn();
                LinkedHashSet<String> dependencyAncestors = new LinkedHashSet<>(current.ancestors);
                dependencyAncestors.add(current.def.getName());
                if (!StringUtils.isEmpty(dependsOn) && !isFullyProcessed.get(current.def.getDependsOn())
                        && !detectCycle(dependencyAncestors, cycles, dependsOn)) {
                    stack.push(
                            new StackNode(definitionsMap.get(current.def.getDependsOn()), dependencyAncestors));
                }
                for (String child : children) {
                    if (!StringUtils.isEmpty(child) && !isFullyProcessed.get(child)
                            && !detectCycle(dependencyAncestors, cycles, child)) {
                        stack.push(new StackNode(definitionsMap.get(child), dependencyAncestors));
                    }
                }
                current.childrenAddedToStack = true;
            }
        }
    }
    Preconditions.checkState(cycles.isEmpty(),
            "The following cycles were detected in the configuration dependencies:\n"
                    + Joiner.on("\n").join(cycles));
    // Write the resolved dependency map back into the definitions.
    for (Map.Entry<String, Map<String, Set<Object>>> entry : dependencyMap.entrySet()) {
        Map<String, List<Object>> dependencies = new HashMap<>();
        definitionsMap.get(entry.getKey()).setDependsOnMap(dependencies);
        for (Map.Entry<String, Set<Object>> trigger : entry.getValue().entrySet()) {
            List<Object> triggerValues = new ArrayList<>();
            triggerValues.addAll(trigger.getValue());
            dependencies.put(trigger.getKey(), triggerValues);
        }
        definitionsMap.get(entry.getKey()).setDependsOn("");
    }
}
From source file:net.sf.jasperreports.engine.json.expression.member.evaluation.ObjectKeyExpressionEvaluator.java
private List<JRJsonNode> goAnywhereDown(JRJsonNode jrJsonNode) { if (log.isDebugEnabled()) { log.debug("going " + MemberExpression.DIRECTION.ANYWHERE_DOWN + " by " + (expression.isWildcard() ? "wildcard" : "key: [" + expression.getObjectKey() + "]") + " on " + jrJsonNode.getDataNode()); }/*ww w . j a va 2 s . c o m*/ List<JRJsonNode> result = new ArrayList<>(); Deque<JRJsonNode> stack = new ArrayDeque<>(); JsonNode initialDataNode = jrJsonNode.getDataNode(); if (log.isDebugEnabled()) { log.debug("initial stack population with: " + initialDataNode); } // populate the stack initially if (initialDataNode.isArray()) { for (JsonNode deeper : initialDataNode) { stack.addLast(jrJsonNode.createChild(deeper)); } } else { stack.push(jrJsonNode); } while (!stack.isEmpty()) { JRJsonNode stackNode = stack.pop(); JsonNode stackDataNode = stackNode.getDataNode(); addChildrenToStack(stackNode, stack); if (log.isDebugEnabled()) { log.debug("processing stack element: " + stackDataNode); } // process the current stack item if (stackDataNode.isObject()) { if (log.isDebugEnabled()) { log.debug("stack element is object; wildcard: " + expression.isWildcard()); } // if wildcard => only filter the parent; we already added the object keys to the stack if (expression.isWildcard()) { if (applyFilter(stackNode)) { result.add(stackNode); } } // else go down and filter else { JRJsonNode deeperNode = goDeeperIntoObjectNode(stackNode, false); if (deeperNode != null) { result.add(deeperNode); } } } else if (stackDataNode.isValueNode() || stackDataNode.isArray()) { if (log.isDebugEnabled()) { log.debug("stack element is " + (stackDataNode.isValueNode() ? "value node" : "array") + "; wildcard: " + expression.isWildcard()); } if (expression.isWildcard()) { if (applyFilter(stackNode)) { result.add(stackNode); } } } } return result; }
From source file:org.molasdin.wbase.xml.parser.light.basic.BasicParser.java
/**
 * Parses a lenient XML-like string into an element tree.
 *
 * Single pass over the input with a stack of open elements ({@code elements}); the synthetic
 * {@code rootElement} stays at the bottom and is returned. Recoverable problems (stray
 * '&lt;'/'&gt;', empty tags, missing closings) are reported through {@code invokeHandlers};
 * unrecoverable ones (mismatched/unopened closing tags, disallowed tags, characters inside a
 * closing tag) throw.
 *
 * Scanner state:
 *  - leftAngleFound/leftAngleIndex: an unconsumed '&lt;' and its position
 *  - possibleCloseTag/closeSlashIndex: a '/' seen after that '&lt;' and its position
 */
@Override
public Element parse(String value) throws ParserException {
    Deque<BasicElement> elements = new LinkedList<BasicElement>();
    int index = 0;
    boolean leftAngleFound = false;
    int leftAngleIndex = 0;
    boolean possibleCloseTag = false;
    int closeSlashIndex = 0;
    BasicElement rootElement = new BasicElement();
    rootElement.setValid(true);
    elements.push(rootElement);
    while (index < value.length()) {
        if (value.charAt(index) == '<') {
            // if '<' appears before the previous '<' was closed
            if (leftAngleFound) {
                // treat the earlier run as a literal '<' symbol:
                // keep the text from the first '<' up to the current position
                String entry = value.substring(leftAngleIndex, index);
                appendText(entry, elements.peekFirst());
                invokeHandlers(value, leftAngleIndex, ErrorType.LESS_FOUND, "");
            }
            leftAngleFound = true;
            leftAngleIndex = index;
            possibleCloseTag = false;
        } else if (value.charAt(index) == '/') {
            if (leftAngleFound) {
                // slash may belong to a closing tag ("</...") or a self-closing tag (".../>")
                closeSlashIndex = index;
                possibleCloseTag = true;
            } else {
                // '/' outside a tag is plain text
                appendText("/", elements.peekFirst());
            }
        } else if (value.charAt(index) == '>') {
            if (!leftAngleFound) {
                // '>' without a preceding '<': treat as a literal symbol
                appendText(">", elements.peekFirst());
                invokeHandlers(value, index, ErrorType.GREATER_FOUND, "");
            } else {
                leftAngleFound = false;
                BasicElement elem = elements.peekFirst();
                // closing tag: "</tag>" with nothing between '<' and '/'
                if (possibleCloseTag && isEmptyRange(value, leftAngleIndex + 1, closeSlashIndex)) {
                    String tag = StringUtils.trim(value.substring(leftAngleIndex + 2, index));
                    // if tag is most probably a closing tag
                    if (!elem.isValid()) {
                        // check the innermost open element first
                        elem = elements.pop();
                        if (!elem.tagName().equals(tag)) {
                            BasicElement tmp = elem;
                            elem = elements.pop();
                            // check the outer open element
                            if (!elem.tagName().equals(tag)) {
                                throw new BadClosingTag(elem.tagName(), tag);
                            }
                            // inner element was never closed: fold its content into the outer one
                            invokeHandlers(value, -1, ErrorType.NO_CLOSING, tmp.tagName());
                            elem.consumeContent(tmp);
                            elem.setValid(true);
                        }
                    } else {
                        // closing tag without a matching opening tag
                        throw new BadClosingTag("", tag);
                    }
                    elem.setClosed(true);
                    elem.setValid(true);
                    elem.setValidClosing(true);
                } else {
                    // tag is most probably an opening or self-closing tag
                    int rightOffset = index;
                    if (possibleCloseTag) {
                        // check if tag is closing but with characters between "<" and "/"
                        if (!elem.isValid()) {
                            String possibleTag = value.substring(closeSlashIndex + 1, index);
                            if (elem.tagName().equals(possibleTag)) {
                                throw new CharactersInClosing(leftAngleIndex);
                            }
                        }
                        // decide whether "/" is trailing (self-closing) or inside attribute text
                        if (value.substring(closeSlashIndex + 1, rightOffset).trim().length() == 0) {
                            rightOffset = closeSlashIndex;
                        } else {
                            // slash was part of attribute text; tag is not closing
                            possibleCloseTag = false;
                        }
                    }
                    // possible start tag
                    String tagName = value.substring(leftAngleIndex + 1, rightOffset);
                    if (tagName.length() == 0) {
                        // "<>" has no tag name; keep the characters as plain text
                        String entry = value.substring(leftAngleIndex, index + 1);
                        appendText(entry, elem);
                        invokeHandlers(value, leftAngleIndex, ErrorType.EMPTY_TAG_FOUND, entry);
                    } else {
                        Pair<String, List<Pair<String, String>>> tag = extractTag(tagName);
                        if (tag == null || tag.getLeft() == null) {
                            // malformed tag name: report it and keep the raw text
                            invokeHandlers(value, leftAngleIndex, ErrorType.INVALID_TEXT_IN_TAG_NAME,
                                    String.valueOf(index));
                            String entry = value.substring(leftAngleIndex, index + 1);
                            appendText(entry, elements.peekFirst());
                        } else {
                            tagName = tag.getLeft();
                            // reject tags the configured predicate disallows
                            if (!predicate.evaluate(tagName)) {
                                throw new DisallowedTagFound(tagName);
                            }
                            // add a new element for this tag
                            BasicElement newElem = new BasicElement();
                            newElem.setTagName(tagName);
                            newElem.setAttributes(tag.getRight());
                            elements.peekFirst().addChild(newElem);
                            if (possibleCloseTag) {
                                // self-closing: complete immediately, never pushed as open
                                newElem.setClosed(true);
                                newElem.setShortenIfEmpty(true);
                                newElem.setValid(true);
                                newElem.setValidClosing(true);
                            } else {
                                elements.push(newElem);
                            }
                        }
                    }
                }
                possibleCloseTag = false;
            }
        } else if (!leftAngleFound) {
            // characters block: plain text goes to the innermost open element
            BasicElement elem = elements.peekFirst();
            elem.addCharacter(value.charAt(index));
            /* if (elementTextBreak) {
                elementTextBreak = false;
                elem.addCharacter(value.charAt(index));
            } else {
                elem.lastCharacters().append(value.charAt(index));
            }*/
        }
        index++;
    }
    // a trailing unclosed '<' is kept as literal text
    if (leftAngleFound) {
        appendText(value.substring(leftAngleIndex, value.length()), elements.peekFirst());
        invokeHandlers(value, leftAngleIndex, ErrorType.LESS_FOUND, "");
    }
    // fold any still-open (unclosed) elements into their parents
    if (elements.size() > 1) {
        for (BasicElement elem : elements) {
            if (elem == rootElement) {
                continue;
            }
            if (!elem.isClosed() && !elem.isValid()) {
                invokeHandlers(value, -1, ErrorType.NO_CLOSING, elem.tagName());
                ((BasicElement) elem.parent()).consumeContent(elem);
            }
        }
    }
    return rootElement;
}
From source file:RandomChooser.java
private RandomChooser(List<Double> weights, List<T> events, Random random) { double sum = 0.0; for (double prob : weights) sum += prob;//from ww w . ja v a2 s . com this.probs = new double[weights.size()]; for (int i = 0; i < weights.size(); i++) { probs[i] = weights.get(i) * weights.size() / sum; //average = 1.0 } Deque<Integer> smaller = new ArrayDeque<Integer>(weights.size() / 2 + 2); Deque<Integer> greater = new ArrayDeque<Integer>(weights.size() / 2 + 2); for (int i = 0; i < probs.length; i++) { if (probs[i] < 1.0) { smaller.push(i); } else { greater.push(i); } } indexes = new int[weights.size()]; while (!smaller.isEmpty()) { Integer i = smaller.pop(); Integer k = greater.peek(); indexes[i] = k; probs[k] -= (1 - probs[i]); if (probs[k] < 1.0) { greater.pop(); if (greater.isEmpty()) break; smaller.push(k); } } this.events = events; this.random = random; }
From source file:com.grepcurl.random.ObjectGenerator.java
/**
 * Creates a randomly-populated instance of {@code klass}. Enums get one of their declared
 * constants at random; other types are built via the constructor matching
 * {@code constructorArgs}, after which every public method is handed to
 * {@code _processMethod} for population.
 *
 * @throws FailedRandomObjectGenerationException wrapping any reflective failure
 */
@SuppressWarnings("unused")
public <T> T generate(Class<T> klass, Object... constructorArgs) {
    Validate.notNull(klass);
    Validate.notNull(constructorArgs);
    if (verbose) {
        log(String.format("generating object of type: %s, with args: %s", klass,
                Arrays.toString(constructorArgs)));
    }
    try {
        Deque<Object> instanceStack = new ArrayDeque<>();
        Class[] argTypes = _toClasses(constructorArgs);

        T instance;
        if (klass.isEnum()) {
            // enums cannot be instantiated: pick one of the declared constants at random
            T[] constants = klass.getEnumConstants();
            instance = constants[randomInt(0, constants.length - 1)];
        } else {
            instance = klass.getConstructor(argTypes).newInstance(constructorArgs);
        }

        // stack of objects under construction, passed to _processMethod
        // (presumably so nested generation can see ancestors — TODO confirm)
        instanceStack.push(instance);
        for (Method method : klass.getMethods()) {
            _processMethod(method, new SetterOverrides(), instance, instanceStack);
        }
        instanceStack.pop();
        return instance;
    } catch (Exception e) {
        throw new FailedRandomObjectGenerationException(e);
    }
}
From source file:org.lilyproject.repository.impl.AbstractTypeManager.java
private void collectSubTypes(SchemaId recordTypeId, Set<SchemaId> result, Deque<SchemaId> parents, boolean recursive) throws InterruptedException { // the parent-stack is to protect against endless loops in the type hierarchy. If a type is a subtype // of itself, it will not be included in the result. Thus if record type A extends (directly or indirectly) // from A, and we search the subtypes of A, then the resulting set will not include A. parents.push(recordTypeId); Set<SchemaId> subtypes = schemaCache.findDirectSubTypes(recordTypeId); for (SchemaId subtype : subtypes) { if (!parents.contains(subtype)) { result.add(subtype);// w ww . j a v a 2 s . c o m if (recursive) { collectSubTypes(subtype, result, parents, recursive); } } else { // Loop detected in type hierarchy, log a warning about this log.warn(formatSupertypeLoopError(subtype, parents)); } } parents.pop(); }
From source file:edu.byu.nlp.al.EmpiricalAnnotationInstanceManager.java
private void prioritizeMeasurements(Deque<FlatInstance<D, L>> measurementDeque, boolean prioritizeLabelProportions) { if (prioritizeLabelProportions) { // get a list of all the labeled proportion measurements ArrayList<FlatInstance<D, L>> proportions = Lists.newArrayList(); for (FlatInstance<D, L> inst : measurementDeque) { Measurement meas = inst.getMeasurement(); if (meas instanceof BasicClassificationLabelProportionMeasurement) { proportions.add(inst);/*from w w w . ja v a2s . c o m*/ } } // move them to the front of the line measurementDeque.removeAll(proportions); for (FlatInstance<D, L> prop : proportions) { measurementDeque.push(prop); } } }
From source file:de.escalon.hypermedia.spring.hydra.PagedResourcesSerializer.java
protected void serializeContext(Object bean, JsonGenerator jgen, SerializerProvider serializerProvider, Deque<LdContext> contextStack) throws IOException { // TODO: this code is duplicated from JacksonHydraSerializer, see there for considerations if (proxyUnwrapper != null) { bean = proxyUnwrapper.unwrapProxy(bean); }//from w ww .ja v a 2 s . c o m MixinSource mixinSource = new JacksonMixinSource(serializerProvider.getConfig()); final Class<?> mixInClass = mixinSource.findMixInClassFor(bean.getClass()); final LdContext parentContext = contextStack.peek(); LdContext currentContext = new LdContext(parentContext, ldContextFactory.getVocab(mixinSource, bean, mixInClass), ldContextFactory.getTerms(mixinSource, bean, mixInClass)); contextStack.push(currentContext); // check if we need to write a context for the current bean at all // If it is in the same vocab: no context // If the terms are already defined in the context: no context boolean mustWriteContext; if (parentContext == null || !parentContext.contains(currentContext)) { mustWriteContext = true; } else { mustWriteContext = false; } if (mustWriteContext) { // begin context // default context: schema.org vocab or vocab package annotation jgen.writeObjectFieldStart("@context"); // do not repeat vocab if already defined in current context if (parentContext == null || parentContext.vocab == null || (currentContext.vocab != null && !currentContext.vocab.equals(parentContext.vocab))) { jgen.writeStringField(JsonLdKeywords.AT_VOCAB, currentContext.vocab); } for (Map.Entry<String, Object> termEntry : currentContext.terms.entrySet()) { if (termEntry.getValue() instanceof String) { jgen.writeStringField(termEntry.getKey(), termEntry.getValue().toString()); } else { jgen.writeObjectField(termEntry.getKey(), termEntry.getValue()); } } jgen.writeEndObject(); // end context } }