Example usage for java.util Stack push

List of usage examples for java.util Stack push

Introduction

On this page you can find usage examples for java.util Stack push.

Prototype

public E push(E item) 

Document

Pushes an item onto the top of this stack.
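
Before the longer real-world examples below, a minimal self-contained sketch of push together with pop and peek; all names are local to the sketch. Note that push returns the item that was pushed.

import java.util.Stack;

public class StackPushDemo {
    public static void main(String[] args) {
        Stack<String> stack = new Stack<String>();
        stack.push("first");
        String top = stack.push("second");    // push returns the pushed item
        System.out.println(top);              // second
        System.out.println(stack.peek());     // second (still on the stack)
        System.out.println(stack.pop());      // second (now removed)
        System.out.println(stack.pop());      // first
        System.out.println(stack.isEmpty());  // true
    }
}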

Usage

From source file:org.apache.qpid.server.security.access.config.PlainConfiguration.java

@Override
public RuleSet load() {
    RuleSet ruleSet = super.load();

    File file = getFile();
    FileReader fileReader = null;

    try {
        if (_logger.isDebugEnabled()) {
            _logger.debug("About to load ACL file " + file);
        }

        fileReader = new FileReader(file);
        _st = new StreamTokenizer(new BufferedReader(fileReader));
        _st.resetSyntax(); // setup the tokenizer

        _st.commentChar(COMMENT); // single line comments
        _st.eolIsSignificant(true); // return EOL as a token
        _st.ordinaryChar('='); // equals is a token
        _st.ordinaryChar(CONTINUATION); // continuation character (when followed by EOL)
        _st.quoteChar('"'); // double quote
        _st.quoteChar('\''); // single quote
        _st.whitespaceChars('\u0000', '\u0020'); // whitespace (to be ignored) TODO properly
        _st.wordChars('a', 'z'); // unquoted token characters [a-z]
        _st.wordChars('A', 'Z'); // [A-Z]
        _st.wordChars('0', '9'); // [0-9]
        _st.wordChars('_', '_'); // underscore
        _st.wordChars('-', '-'); // dash
        _st.wordChars('.', '.'); // dot
        _st.wordChars('*', '*'); // star
        _st.wordChars('@', '@'); // at
        _st.wordChars(':', ':'); // colon

        // parse the acl file lines
        Stack<String> stack = new Stack<String>();
        int current;
        do {
            current = _st.nextToken();
            switch (current) {
            case StreamTokenizer.TT_EOF:
            case StreamTokenizer.TT_EOL:
                if (stack.isEmpty()) {
                    break; // blank line
                }

                // pull out the first token from the bottom of the stack and check arguments exist
                String first = stack.firstElement();
                stack.removeElementAt(0);
                if (stack.isEmpty()) {
                    throw new IllegalConfigurationException(String.format(NOT_ENOUGH_TOKENS_MSG, getLine()));
                }

                // check for and parse optional initial number for ACL lines
                Integer number = null;
                if (StringUtils.isNumeric(first)) {
                    // set the acl number and get the next element
                    number = Integer.valueOf(first);
                    first = stack.firstElement();
                    stack.removeElementAt(0);
                }

                if (StringUtils.equalsIgnoreCase(ACL, first)) {
                    parseAcl(number, stack);
                } else if (number == null) {
                    if (StringUtils.equalsIgnoreCase("GROUP", first)) {
                        throw new IllegalConfigurationException(String.format(
                                "GROUP keyword not supported. Groups should defined via a Group Provider, not in the ACL file.",
                                getLine()));
                    } else if (StringUtils.equalsIgnoreCase(CONFIG, first)) {
                        parseConfig(stack);
                    } else {
                        throw new IllegalConfigurationException(
                                String.format(UNRECOGNISED_INITIAL_MSG, first, getLine()));
                    }
                } else {
                    throw new IllegalConfigurationException(
                            String.format(NUMBER_NOT_ALLOWED_MSG, first, getLine()));
                }

                // reset stack, start next line
                stack.clear();
                break;
            case StreamTokenizer.TT_NUMBER:
                stack.push(Integer.toString(Double.valueOf(_st.nval).intValue()));
                break;
            case StreamTokenizer.TT_WORD:
                stack.push(_st.sval); // token
                break;
            default:
                if (_st.ttype == CONTINUATION) {
                    int next = _st.nextToken();
                    if (next == StreamTokenizer.TT_EOL) {
                        break; // continue reading next line
                    }

                    // invalid location for continuation character (add one to line because we ate the EOL)
                    throw new IllegalConfigurationException(
                            String.format(PREMATURE_CONTINUATION_MSG, getLine() + 1));
                } else if (_st.ttype == '\'' || _st.ttype == '"') {
                    stack.push(_st.sval); // quoted token
                } else {
                    stack.push(Character.toString((char) _st.ttype)); // single character
                }
            }
        } while (current != StreamTokenizer.TT_EOF);

        if (!stack.isEmpty()) {
            throw new IllegalConfigurationException(String.format(PREMATURE_EOF_MSG, getLine()));
        }
    } catch (IllegalArgumentException iae) {
        throw new IllegalConfigurationException(String.format(PARSE_TOKEN_FAILED_MSG, getLine()), iae);
    } catch (FileNotFoundException fnfe) {
        throw new IllegalConfigurationException(String.format(CONFIG_NOT_FOUND_MSG, file.getName()), fnfe);
    } catch (IOException ioe) {
        throw new IllegalConfigurationException(String.format(CANNOT_LOAD_MSG, file.getName()), ioe);
    } finally {
        if (fileReader != null) {
            try {
                fileReader.close();
            } catch (IOException e) {
                throw new IllegalConfigurationException(String.format(CANNOT_CLOSE_MSG, file.getName()), e);
            }
        }
    }

    return ruleSet;
}
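
The parser above buffers each line's tokens on a Stack and only interprets the buffered line when it reaches EOL or EOF. A stripped-down sketch of that pattern, assuming a hypothetical two-keyword input; the real keyword handling is in the example above:

import java.io.IOException;
import java.io.StreamTokenizer;
import java.io.StringReader;
import java.util.Stack;

public class LineBufferedTokens {
    public static void main(String[] args) throws IOException {
        StreamTokenizer st = new StreamTokenizer(new StringReader("acl allow all\nconfig log = on\n"));
        st.eolIsSignificant(true); // report EOL so we know where each line ends
        Stack<String> stack = new Stack<String>();
        int token;
        do {
            token = st.nextToken();
            switch (token) {
            case StreamTokenizer.TT_EOF:
            case StreamTokenizer.TT_EOL:
                if (!stack.isEmpty()) {
                    System.out.println("line tokens: " + stack); // interpret the buffered line here
                    stack.clear(); // start the next line with an empty buffer
                }
                break;
            case StreamTokenizer.TT_WORD:
                stack.push(st.sval);
                break;
            default:
                stack.push(Character.toString((char) st.ttype)); // single character such as '='
            }
        } while (token != StreamTokenizer.TT_EOF);
    }
}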

From source file:gdt.data.store.Entigrator.java

/**
 * Lists the components of the entity.
 * @param entity the entity.
 * @return the array of keys of components.
 */
public String[] ent_listComponents(Sack entity) {
    try {
        if (entity == null)
            return null;
        Core[] ca = entity.elementGet("component");
        if (ca == null)
            return null;
        Stack<String> s = new Stack<String>();
        for (Core aCa : ca) {
            if (entity.getKey().equals(aCa.value))
                continue;
            s.push(aCa.value);
        }
        int cnt = s.size();
        if (cnt < 1)
            return null;
        String[] sa = new String[cnt];
        for (int i = 0; i < cnt; i++)
            sa[i] = s.pop();
        return sa;
    } catch (Exception e) {
        Logger.getLogger(getClass().getName()).severe(e.toString());
        return null;
    }
}
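
Because the keys are collected with push and then drained with pop, the returned array is in reverse push order (LIFO). A minimal illustration:

import java.util.Arrays;
import java.util.Stack;

public class DrainToArray {
    public static void main(String[] args) {
        Stack<String> s = new Stack<String>();
        s.push("a");
        s.push("b");
        s.push("c");
        String[] sa = new String[s.size()];
        for (int i = 0; i < sa.length; i++) {
            sa[i] = s.pop(); // drains top-down: sa == {"c", "b", "a"}
        }
        System.out.println(Arrays.toString(sa));
    }
}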

From source file:org.apache.tajo.engine.planner.LogicalPlanner.java

@Override
public LogicalNode visitProjection(PlanContext context, Stack<Expr> stack, Projection projection)
        throws PlanningException {

    LogicalPlan plan = context.plan;
    QueryBlock block = context.queryBlock;

    // If a non-from statement is given
    if (!projection.hasChild()) {
        return buildPlanForNoneFromStatement(context, stack, projection);
    }

    String[] referenceNames;
    // In the pre-phase, insert every expression in the target list into the named expression manager.
    // Then get reference names, each of which points to an expression in the target list.
    Pair<String[], ExprNormalizer.WindowSpecReferences[]> referencesPair = doProjectionPrephase(context,
            projection);
    referenceNames = referencesPair.getFirst();

    ////////////////////////////////////////////////////////
    // Visit and Build Child Plan
    ////////////////////////////////////////////////////////
    stack.push(projection);
    LogicalNode child = visit(context, stack, projection.getChild());

    // Check whether implicit aggregation is required; if so, insert a group-by node above the child.
    if (block.isAggregationRequired()) {
        child = insertGroupbyNode(context, child, stack);
    }

    if (block.hasWindowSpecs()) {
        LogicalNode windowAggNode = insertWindowAggNode(context, child, stack, referenceNames,
                referencesPair.getSecond());
        if (windowAggNode != null) {
            child = windowAggNode;
        }
    }
    stack.pop();
    ////////////////////////////////////////////////////////

    ProjectionNode projectionNode;
    Target[] targets;
    targets = buildTargets(context, referenceNames);

    // Set ProjectionNode
    projectionNode = context.queryBlock.getNodeFromExpr(projection);
    projectionNode.setInSchema(child.getOutSchema());
    projectionNode.setTargets(targets);
    projectionNode.setChild(child);

    if (projection.isDistinct() && block.hasNode(NodeType.GROUP_BY)) {
        throw new VerifyException("Cannot support grouping and distinct at the same time yet");
    } else {
        if (projection.isDistinct()) {
            insertDistinctOperator(context, projectionNode, child, stack);
        }
    }

    // For debugging and unit-test purposes only:
    // it sets raw targets, all of which are raw expressions instead of references.
    if (context.debugOrUnitTests) {
        setRawTargets(context, targets, referenceNames, projection);
    }

    verifyProjectedFields(block, projectionNode);
    return projectionNode;
}
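
visitProjection follows a push/visit/pop discipline: the current Expr is pushed before its child is visited and popped afterwards, so at any moment the Stack holds exactly the chain of ancestors of the node being visited. The same idiom appears in recursiveBuild and visitSort further below. A generic sketch of the idiom; the Node type is hypothetical:

import java.util.List;
import java.util.Stack;

// Hypothetical node type, only here to make the sketch compile.
interface Node {
    List<Node> children();
}

class AncestorWalk {
    static void visit(Stack<Node> stack, Node node) {
        stack.push(node);        // node becomes the innermost ancestor
        for (Node child : node.children()) {
            visit(stack, child); // children may inspect the stack for context
        }
        stack.pop();             // restore the stack for node's siblings
    }
}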

From source file:com.sqewd.open.dal.core.persistence.db.EntityHelper.java

@SuppressWarnings({ "rawtypes", "unchecked" })
public static Object getColumnValue(final ResultSet rs, final StructAttributeReflect attr,
        final AbstractEntity entity, final AbstractJoinGraph gr, final Stack<KeyValuePair<Class<?>>> path)
        throws Exception {

    Object value = null;

    KeyValuePair<String> alias = gr.getAliasFor(path, attr.Column, 0);
    String tabprefix = alias.getKey();

    if (EnumPrimitives.isPrimitiveType(attr.Field.getType())) {
        EnumPrimitives prim = EnumPrimitives.type(attr.Field.getType());
        switch (prim) {
        case ECharacter:
            String sv = rs.getString(tabprefix + "." + attr.Column);
            if (!rs.wasNull()) {
                PropertyUtils.setSimpleProperty(entity, attr.Field.getName(), sv.charAt(0));
            }
            break;
        case EShort:
            short shv = rs.getShort(tabprefix + "." + attr.Column);
            if (!rs.wasNull()) {
                PropertyUtils.setSimpleProperty(entity, attr.Field.getName(), shv);
            }
            break;
        case EInteger:
            int iv = rs.getInt(tabprefix + "." + attr.Column);
            if (!rs.wasNull()) {
                PropertyUtils.setSimpleProperty(entity, attr.Field.getName(), iv);
            }
            break;
        case ELong:
            long lv = rs.getLong(tabprefix + "." + attr.Column);
            if (!rs.wasNull()) {
                PropertyUtils.setSimpleProperty(entity, attr.Field.getName(), lv);
            }
            break;
        case EFloat:
            float fv = rs.getFloat(tabprefix + "." + attr.Column);
            if (!rs.wasNull()) {
                PropertyUtils.setSimpleProperty(entity, attr.Field.getName(), fv);
            }
            break;
        case EDouble:
            double dv = rs.getDouble(tabprefix + "." + attr.Column);
            if (!rs.wasNull()) {
                PropertyUtils.setSimpleProperty(entity, attr.Field.getName(), dv);
            }
            break;
        default:
            throw new Exception("Unsupported Data type [" + prim.name() + "]");
        }
    } else if (attr.Convertor != null) {
        // TODO : Not supported at this time.
        value = rs.getString(tabprefix + "." + attr.Column);

    } else if (attr.Field.getType().equals(String.class)) {
        value = rs.getString(tabprefix + "." + attr.Column);
        if (rs.wasNull()) {
            value = null;
        }
    } else if (attr.Field.getType().equals(Date.class)) {
        long lvalue = rs.getLong(tabprefix + "." + attr.Column);
        if (!rs.wasNull()) {
            Date dt = new Date(lvalue);
            value = dt;
        }
    } else if (attr.Field.getType().isEnum()) {
        String svalue = rs.getString(tabprefix + "." + attr.Column);
        if (!rs.wasNull()) {
            Class ecls = attr.Field.getType();
            value = Enum.valueOf(ecls, svalue);
        }
    } else if (attr.Reference != null) {
        Class<?> rt = Class.forName(attr.Reference.Class);
        Object obj = rt.newInstance();
        if (!(obj instanceof AbstractEntity))
            throw new Exception("Unsupported Entity type [" + rt.getCanonicalName() + "]");
        AbstractEntity rentity = (AbstractEntity) obj;
        if (path.size() > 0) {
            path.peek().setKey(attr.Column);
        }

        KeyValuePair<Class<?>> cls = new KeyValuePair<Class<?>>();
        cls.setValue(rentity.getClass());
        path.push(cls);
        setEntity(rentity, rs, gr, path);
        value = rentity;
        path.pop();
    }
    return value;
}
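
Here the Stack carries the join path through the recursion: the current tail is updated with peek().setKey(...), a new KeyValuePair is pushed before descending into the referenced entity, and popped on the way back out, so the path always mirrors the chain of entities currently being resolved.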

From source file:ca.weblite.xmlvm.XMLVM.java

/**
 * Loads all of the dependencies associated with a collection of input
 * files. Dependencies will be loaded as stub files, so that XMLVM can be
 * run without producing a full transitive dependency chain.
 * @deprecated
 * @param dir The directory containing the current .xmlvm files to be
 * converted. Dependent stub files will be copied to this directory by this
 * method.
 * @param files Collection of .xmlvm files to be parsed for dependencies. It
 * is assumed that these files are already located inside the "dir"
 * directory.
 * @throws ParserConfigurationException
 * @throws SAXException
 * @throws IOException
 * @throws FileNotFoundException
 * @throws UnsatisfiedDependencyException If a dependency cannot be found in
 * the cache. If you receive this exception, you should probably just run a
 * recursive dependent build with XMLVM to generate the cache.
 * @throws TransformerException
 */
public void loadDependencies(File dir, Collection<File> files) throws ParserConfigurationException,
        SAXException, IOException, FileNotFoundException, UnsatisfiedDependencyException, TransformerException {
    Set<String> loadedClasses = new HashSet<String>();

    // First find out which classes are already loaded.
    for (File f : dir.listFiles()) {
        if (f.getName().endsWith(".xmlvm")) {
            ClassFile cf = getClassFile(f.getParentFile(), f);
            if (cf == null) {
                throw new RuntimeException("Failed to get class file for " + f);
            }
            loadedClasses.add(cf.getName());
        }
    }

    System.out.println(loadedClasses.size() + " classes loaded.  Looking for dependencies...");

    // Now go through each of the loaded classes, and find dependencies.
    Set<File> newFiles = new HashSet<File>();
    File xmlvmstubDir = getXmlvmCacheDir("xmlvmstubs");
    File xmlvmDir = getXmlvmCacheDir("xmlvm");
    Stack<File> stack = new Stack<File>();
    stack.addAll(files);
    while (!stack.isEmpty()) {
        File f = stack.pop();
        String[] deps = getRequiredClasses(f, true);
        System.out.println(deps.length + " required classes in " + f.getName());
        for (String dep : deps) {
            if (!loadedClasses.contains(dep)) {
                // Check for cached stub
                System.out.println("Unloaded dependency found " + dep + ". Looking for cached version...");
                ClassFile cf = new ClassFile(dep);
                String fileName = cf.getcPrefix() + ".xmlvm";
                File cachedStubFile = new File(xmlvmstubDir, fileName);
                File cachedFullFile = new File(xmlvmDir, fileName);
                if (cachedStubFile.exists()) {
                    File destStubFile = new File(dir, fileName);
                    FileUtils.copyFile(cachedStubFile, destStubFile);
                    stack.push(destStubFile);
                    loadedClasses.add(cf.getName());

                } else if (cachedFullFile.exists()) {
                    File tempStubFile = new File(xmlvmstubDir, fileName);
                    createXMLVMClassStub(cachedFullFile, tempStubFile);
                    File destFile = new File(dir, fileName);
                    FileUtils.copyFile(tempStubFile, destFile);
                    stack.push(destFile);
                    loadedClasses.add(cf.getName());
                } else {

                    // At this point, we'll just give up this foolish mission
                    // If the caller receives this exception, it should
                    // just run xmlvm with transitive dependency loading.
                    throw new UnsatisfiedDependencyException(dep);
                }
            }
        }

    }

}
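
The Stack here acts as a depth-first worklist: a file is popped, its dependencies are computed, and each newly satisfied stub is pushed back for further scanning. A minimal sketch of the worklist idiom, with a hypothetical depsOf function standing in for getRequiredClasses:

import java.util.HashSet;
import java.util.Set;
import java.util.Stack;
import java.util.function.Function;

public class Worklist {
    // Computes the transitive closure of roots under depsOf.
    static Set<String> closure(Set<String> roots, Function<String, Set<String>> depsOf) {
        Set<String> seen = new HashSet<String>(roots);
        Stack<String> stack = new Stack<String>();
        stack.addAll(roots);
        while (!stack.isEmpty()) {
            String item = stack.pop();
            for (String dep : depsOf.apply(item)) {
                if (seen.add(dep)) { // add() returns true only for unseen items
                    stack.push(dep); // schedule newly discovered work
                }
            }
        }
        return seen;
    }
}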

From source file:com.espertech.esper.epl.join.plan.NStreamOuterQueryPlanBuilder.java

/**
 * Recursively builds a substream-per-stream ordered tree graph using the
 * join information supplied for outer joins and from the query graph (where clause).
 * <p>
 * Required streams are considered first and their lookup is placed first in the list
 * to gain performance.
 * @param streamNum is the root stream number that supplies the incoming event to build the tree for
 * @param queryGraph contains where-clause stream relationship info
 * @param outerInnerGraph contains the outer join stream relationship info
 * @param completedStreams is a temporary holder for streams already considered
 * @param substreamsPerStream is the ordered, tree-like structure to be filled
 * @param requiredPerStream indicates which streams are required and which are optional
 * @param streamCallStack the query plan call stack of streams available via cursor
 * @param dependencyGraph dependencies between historical streams
 * @throws ExprValidationException if the query planning failed
 */
protected static void recursiveBuild(int streamNum, Stack<Integer> streamCallStack, QueryGraph queryGraph,
        OuterInnerDirectionalGraph outerInnerGraph, InnerJoinGraph innerJoinGraph,
        Set<Integer> completedStreams, LinkedHashMap<Integer, int[]> substreamsPerStream,
        boolean[] requiredPerStream, DependencyGraph dependencyGraph) throws ExprValidationException {
    // add this stream to the set of completed streams
    completedStreams.add(streamNum);

    // check if the dependencies have been satisfied
    if (dependencyGraph.hasDependency(streamNum)) {
        Set<Integer> dependencies = dependencyGraph.getDependenciesForStream(streamNum);
        for (Integer dependentStream : dependencies) {
            if (!streamCallStack.contains(dependentStream)) {
                throw new ExprValidationException(
                        "Historical stream " + streamNum + " parameter dependency originating in stream "
                                + dependentStream + " cannot or may not be satisfied by the join");
            }
        }
    }

    // Determine the streams we can navigate to from this stream
    Set<Integer> navigableStreams = queryGraph.getNavigableStreams(streamNum);

    // add unqualified navigable streams (since on-expressions in outer joins are optional)
    Set<Integer> unqualifiedNavigable = outerInnerGraph.getUnqualifiedNavigableStreams().get(streamNum);
    if (unqualifiedNavigable != null) {
        navigableStreams.addAll(unqualifiedNavigable);
    }

    // remove those already done
    navigableStreams.removeAll(completedStreams);

    // Which streams are inner streams to this stream (optional), which ones are outer to the stream (required)
    Set<Integer> requiredStreams = getOuterStreams(streamNum, navigableStreams, outerInnerGraph);

    // Add inner joins, if any, unless already completed for this stream
    innerJoinGraph.addRequiredStreams(streamNum, requiredStreams, completedStreams);

    Set<Integer> optionalStreams = getInnerStreams(streamNum, navigableStreams, outerInnerGraph, innerJoinGraph,
            completedStreams);

    // Remove from the required streams the optional streams which places 'full' joined streams
    // into the optional stream category
    requiredStreams.removeAll(optionalStreams);

    // if we are a leaf node, we are done
    if (navigableStreams.isEmpty()) {
        substreamsPerStream.put(streamNum, new int[0]);
        return;
    }

    // First the outer (required) streams to this stream, then the inner (optional) streams
    int[] substreams = new int[requiredStreams.size() + optionalStreams.size()];
    substreamsPerStream.put(streamNum, substreams);
    int count = 0;
    for (int stream : requiredStreams) {
        substreams[count++] = stream;
        requiredPerStream[stream] = true;
    }
    for (int stream : optionalStreams) {
        substreams[count++] = stream;
    }

    // next we look at all the required streams and add their dependent streams
    for (int stream : requiredStreams) {
        completedStreams.add(stream);
    }

    for (int stream : requiredStreams) {
        streamCallStack.push(stream);
        recursiveBuild(stream, streamCallStack, queryGraph, outerInnerGraph, innerJoinGraph, completedStreams,
                substreamsPerStream, requiredPerStream, dependencyGraph);
        streamCallStack.pop();
    }
    // look at all the optional streams and add their dependent streams
    for (int stream : optionalStreams) {
        streamCallStack.push(stream);
        recursiveBuild(stream, streamCallStack, queryGraph, outerInnerGraph, innerJoinGraph, completedStreams,
                substreamsPerStream, requiredPerStream, dependencyGraph);
        streamCallStack.pop();
    }
}

From source file:org.apache.rya.rdftriplestore.inference.InferenceEngine.java

/**
 * Queries domain and range information, then populates the inference engine with direct
 * domain/range relations and any that can be inferred from the subclass graph, subproperty
 * graph, and inverse property map. Should be called after the class and property information
 * has been refreshed.
 *
 * Computes indirect domain/range:
 *  - If p1 has domain c, and p2 is a subproperty of p1, then p2 also has domain c.
 *  - If p1 has range c, and p2 is a subproperty of p1, then p2 also has range c.
 *  - If p1 has domain c, and p2 is the inverse of p1, then p2 has range c.
 *  - If p1 has range c, and p2 is the inverse of p1, then p2 has domain c.
 *  - If p has domain c1, and c1 is a subclass of c2, then p also has domain c2.
 *  - If p has range c1, and c1 is a subclass of c2, then p also has range c2.
 * @throws QueryEvaluationException
 */
private void refreshDomainRange() throws QueryEvaluationException {
    final Map<URI, Set<URI>> domainByTypePartial = new ConcurrentHashMap<>();
    final Map<URI, Set<URI>> rangeByTypePartial = new ConcurrentHashMap<>();
    // First, populate domain and range based on direct domain/range triples.
    CloseableIteration<Statement, QueryEvaluationException> iter = RyaDAOHelper.query(ryaDAO, null, RDFS.DOMAIN,
            null, conf);
    try {
        while (iter.hasNext()) {
            final Statement st = iter.next();
            final Resource property = st.getSubject();
            final Value domainType = st.getObject();
            if (domainType instanceof URI && property instanceof URI) {
                if (!domainByTypePartial.containsKey(domainType)) {
                    domainByTypePartial.put((URI) domainType, new HashSet<>());
                }
                domainByTypePartial.get(domainType).add((URI) property);
            }
        }
    } finally {
        if (iter != null) {
            iter.close();
        }
    }
    iter = RyaDAOHelper.query(ryaDAO, null, RDFS.RANGE, null, conf);
    try {
        while (iter.hasNext()) {
            final Statement st = iter.next();
            final Resource property = st.getSubject();
            final Value rangeType = st.getObject();
            if (rangeType instanceof URI && property instanceof URI) {
                if (!rangeByTypePartial.containsKey(rangeType)) {
                    rangeByTypePartial.put((URI) rangeType, new HashSet<>());
                }
                rangeByTypePartial.get(rangeType).add((URI) property);
            }
        }
    } finally {
        if (iter != null) {
            iter.close();
        }
    }
    // Then combine with the subclass/subproperty graphs and the inverse property map to compute
    // the closure of domain and range per class.
    final Set<URI> domainRangeTypeSet = new HashSet<>(domainByTypePartial.keySet());
    domainRangeTypeSet.addAll(rangeByTypePartial.keySet());
    // Extend to subproperties: make sure that using a more specific form of a property
    // still triggers its domain/range inferences.
    // Mirror for inverse properties: make sure that using the inverse form of a property
    // triggers the inverse domain/range inferences.
    // These two rules can recursively trigger one another.
    for (final URI domainRangeType : domainRangeTypeSet) {
        final Set<URI> propertiesWithDomain = domainByTypePartial.getOrDefault(domainRangeType,
                new HashSet<>());
        final Set<URI> propertiesWithRange = rangeByTypePartial.getOrDefault(domainRangeType, new HashSet<>());
        // Since findParents will traverse the subproperty graph and find all indirect
        // subproperties, the subproperty rule does not need to trigger itself directly.
        // And since no more than one inverseOf relationship is stored for any property, the
        // inverse property rule does not need to trigger itself directly. However, each rule
        // can trigger the other, so keep track of how the inferred domains/ranges were
        // discovered so we can apply only those rules that might yield new information.
        final Stack<URI> domainViaSuperProperty = new Stack<>();
        final Stack<URI> rangeViaSuperProperty = new Stack<>();
        final Stack<URI> domainViaInverseProperty = new Stack<>();
        final Stack<URI> rangeViaInverseProperty = new Stack<>();
        // Start with the direct domain/range assertions, which can trigger any rule.
        domainViaSuperProperty.addAll(propertiesWithDomain);
        domainViaInverseProperty.addAll(propertiesWithDomain);
        rangeViaSuperProperty.addAll(propertiesWithRange);
        rangeViaInverseProperty.addAll(propertiesWithRange);
        // Repeatedly infer domain/range from subproperties/inverse properties until no new
        // information can be generated.
        while (!(domainViaSuperProperty.isEmpty() && rangeViaSuperProperty.isEmpty()
                && domainViaInverseProperty.isEmpty() && rangeViaInverseProperty.isEmpty())) {
            // For a type c and property p, if c is a domain of p, then c is the range of any
            // inverse of p. Would be redundant for properties discovered via inverseOf.
            while (!domainViaSuperProperty.isEmpty()) {
                final URI property = domainViaSuperProperty.pop();
                final URI inverseProperty = findInverseOf(property);
                if (inverseProperty != null && propertiesWithRange.add(inverseProperty)) {
                    rangeViaInverseProperty.push(inverseProperty);
                }
            }
            // For a type c and property p, if c is a range of p, then c is the domain of any
            // inverse of p. Would be redundant for properties discovered via inverseOf.
            while (!rangeViaSuperProperty.isEmpty()) {
                final URI property = rangeViaSuperProperty.pop();
                final URI inverseProperty = findInverseOf(property);
                if (inverseProperty != null && propertiesWithDomain.add(inverseProperty)) {
                    domainViaInverseProperty.push(inverseProperty);
                }
            }
            // For a type c and property p, if c is a domain of p, then c is also a domain of
            // p's subproperties. Would be redundant for properties discovered via this rule.
            while (!domainViaInverseProperty.isEmpty()) {
                final URI property = domainViaInverseProperty.pop();
                final Set<URI> subProperties = getSubProperties(property);
                subProperties.removeAll(propertiesWithDomain);
                propertiesWithDomain.addAll(subProperties);
                domainViaSuperProperty.addAll(subProperties);
            }
            // For a type c and property p, if c is a range of p, then c is also a range of
            // p's subproperties. Would be redundant for properties discovered via this rule.
            while (!rangeViaInverseProperty.isEmpty()) {
                final URI property = rangeViaInverseProperty.pop();
                final Set<URI> subProperties = getSubProperties(property);
                subProperties.removeAll(propertiesWithRange);
                propertiesWithRange.addAll(subProperties);
                rangeViaSuperProperty.addAll(subProperties);
            }
        }
        if (!propertiesWithDomain.isEmpty()) {
            domainByTypePartial.put(domainRangeType, propertiesWithDomain);
        }
        if (!propertiesWithRange.isEmpty()) {
            rangeByTypePartial.put(domainRangeType, propertiesWithRange);
        }
    }
    // Once all properties have been found for each domain/range class, extend to superclasses:
    // make sure that the consequent of a domain/range inference goes on to apply any more
    // general classes as well.
    for (final URI subtype : domainRangeTypeSet) {
        final Set<URI> supertypes = getSuperClasses(subtype);
        final Set<URI> propertiesWithDomain = domainByTypePartial.getOrDefault(subtype, new HashSet<>());
        final Set<URI> propertiesWithRange = rangeByTypePartial.getOrDefault(subtype, new HashSet<>());
        for (final URI supertype : supertypes) {
            // For a property p and its domain c: all of c's superclasses are also domains of p.
            if (!propertiesWithDomain.isEmpty() && !domainByTypePartial.containsKey(supertype)) {
                domainByTypePartial.put(supertype, new HashSet<>());
            }
            for (final URI property : propertiesWithDomain) {
                domainByTypePartial.get(supertype).add(property);
            }
            // For a property p and its range c: all of c's superclasses are also ranges of p.
            if (!propertiesWithRange.isEmpty() && !rangeByTypePartial.containsKey(supertype)) {
                rangeByTypePartial.put(supertype, new HashSet<>());
            }
            for (final URI property : propertiesWithRange) {
                rangeByTypePartial.get(supertype).add(property);
            }
        }
    }
    domainByType = domainByTypePartial;
    rangeByType = rangeByTypePartial;
}
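
The four Stacks above are worklists in the same pop/push style sketched after the XMLVM example: popping from one stack may push onto another, and the outer loop runs until all four are empty, i.e., until the domain/range sets reach a fixed point.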

From source file:org.apache.tajo.engine.planner.LogicalPlanner.java

@Override
public LogicalNode visitSort(PlanContext context, Stack<Expr> stack, Sort sort) throws PlanningException {
    QueryBlock block = context.queryBlock;

    int sortKeyNum = sort.getSortSpecs().length;
    Sort.SortSpec[] sortSpecs = sort.getSortSpecs();
    String[] referNames = new String[sortKeyNum];

    ExprNormalizedResult[] normalizedExprList = new ExprNormalizedResult[sortKeyNum];
    for (int i = 0; i < sortKeyNum; i++) {
        normalizedExprList[i] = normalizer.normalize(context, sortSpecs[i].getKey());
    }
    for (int i = 0; i < sortKeyNum; i++) {
        referNames[i] = block.namedExprsMgr.addExpr(normalizedExprList[i].baseExpr);
        block.namedExprsMgr.addNamedExprArray(normalizedExprList[i].aggExprs);
        block.namedExprsMgr.addNamedExprArray(normalizedExprList[i].scalarExprs);
    }

    ////////////////////////////////////////////////////////
    // Visit and Build Child Plan
    ////////////////////////////////////////////////////////
    stack.push(sort);
    LogicalNode child = visit(context, stack, sort.getChild());
    if (block.isAggregationRequired()) {
        child = insertGroupbyNode(context, child, stack);
    }
    stack.pop();
    ////////////////////////////////////////////////////////

    SortNode sortNode = block.getNodeFromExpr(sort);
    sortNode.setChild(child);
    sortNode.setInSchema(child.getOutSchema());
    sortNode.setOutSchema(child.getOutSchema());

    // Building sort keys
    Column column;
    List<SortSpec> annotatedSortSpecs = Lists.newArrayList();
    for (int i = 0; i < sortKeyNum; i++) {
        String refName = referNames[i];
        if (block.isConstReference(refName)) {
            continue;
        } else if (block.namedExprsMgr.isEvaluated(refName)) {
            column = block.namedExprsMgr.getTarget(refName).getNamedColumn();
        } else {
            throw new IllegalStateException("Unexpected State: " + TUtil.arrayToString(sortSpecs));
        }
        annotatedSortSpecs.add(new SortSpec(column, sortSpecs[i].isAscending(), sortSpecs[i].isNullFirst()));
    }

    if (annotatedSortSpecs.size() == 0) {
        return child;
    } else {
        sortNode.setSortSpecs(annotatedSortSpecs.toArray(new SortSpec[annotatedSortSpecs.size()]));
        return sortNode;
    }
}

From source file:com.square.core.service.implementations.ActionServiceImplementation.java

/**
 * Recursive private method that searches for linked actions.
 * @param pileEnCours the stack of action DTOs being built up
 * @param idActionSource the identifier of the source action
 * @param niveau the level
 * @param idOpportunite the identifier of the opportunity
 * @param filtrerDateEffet boolean indicating whether to filter on the effective date
 */
private void rechercherActionsLiees(Stack<ActionSyntheseDto> pileEnCours, Long idActionSource, int niveau,
        Long idOpportunite, Boolean filtrerDateEffet) {
    final Long idTypeActionRelance = squareMappingService.getIdTypeActionRelance();
    final List<Action> actionsLiees = actionDao.rechercherActionsLiees(idActionSource, idOpportunite,
            filtrerDateEffet);
    for (Action actionLiee : actionsLiees) {
        // Add the linked action to the stack
        final ActionSyntheseDto actionSyntheseDto = mapperDozerBean.map(actionLiee, ActionSyntheseDto.class);
        // Increment the level for actions of type Relance
        int niveauSuivant = 0;
        if (actionLiee.getType() != null && idTypeActionRelance.equals(actionLiee.getType().getId())) {
            niveauSuivant = niveau + 1;
        }
        actionSyntheseDto.setNiveau(niveauSuivant);
        actionSyntheseDto.setAttribueA(genererLibelleAttribueA(actionLiee.getActionAttribution()));
        // Determine whether the action date is editable
        if (actionEditable(actionLiee)) {
            actionSyntheseDto.setDateActionEditable(true);
        }
        pileEnCours.push(actionSyntheseDto);
        rechercherActionsLiees(pileEnCours, actionLiee.getId(), niveauSuivant, idOpportunite, filtrerDateEffet);
    }
}
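
pileEnCours acts as an accumulator threaded through the recursion: each linked action is pushed before recursing into its own linked actions, so the stack ends up holding the whole tree of related actions in depth-first order.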

From source file:gdt.data.store.Entigrator.java

/**
 * Gets keys of entities that have a certain property name assigned.
 * @param propertyName$ the property name.
 * @return the array of entity keys.
 */
public String[] indx_listEntitiesAtPropertyName(String propertyName$) {
    if ("label".equals(propertyName$)) {
        return quickMap.elementListNoSorted("label");
    }
    try {
        String property$ = propertyIndex.getElementItemAt("property", propertyName$);

        if (property$ == null) {
            LOGGER.severe(":indx_listEntitiesAtPropertyName:cannot find property in property index  property ="
                    + propertyName$);
            return null;
        }
        Sack property = getMember("property.base", property$);
        if (property == null) {
            LOGGER.severe(":indx_listEntitiesAtPropertyName:cannot find property =" + property$);
            return null;
        }
        Stack<String> s = new Stack<String>();
        Stack<String> s2 = new Stack<String>();
        if ("label".equals(propertyName$)) {
            Core[] ca = property.elementGet("value");
            if (ca != null)
                for (Core aCa : ca)
                    if (aCa.value != null)
                        s.push(aCa.value);
        } else {
            String[] ma = property.elementList("value");
            if (ma == null) {
                LOGGER.severe(":indx_listEntitiesAtPropertyName:no values in property =" + property$);
                return null;
            }

            Sack map;
            String[] ea;
            for (int i = 0; i < ma.length; i++) {
                s2.clear();
                map = getMember("property.map.base", property.getElementItemAt("value", ma[i]));
                if (map == null) {
                    LOGGER.severe(":indx_listEntitiesAtPropertyName:cannot get map[" + i + "]=" + ma[i]);
                    continue;
                }
                ea = map.elementList("entity");
                if (ea == null) {
                    LOGGER.severe(":indx_listEntitiesAtPropertyName:empty map[" + i + "]=" + ma[i]);
                    continue;
                }
                for (String anEa : ea) {
                    if (!touchEntity(anEa))
                        s.push(anEa);
                }
            }
        }
        int cnt = s.size();
        if (cnt < 1) {
            LOGGER.severe(":indx_listEntitiesAtPropertyName:no entities found");
            return null;
        }
        String[] sa = new String[cnt];
        for (int i = 0; i < cnt; i++)
            sa[i] = s.pop();
        return sa;
    } catch (Exception e) {
        LOGGER.severe(":indx_listEntitiesAtPropertyName:" + e.toString());
        return null;
    }
}