Example usage for java.util Stack Stack

List of usage examples for java.util Stack Stack

Introduction

On this page you can find example usages of the java.util Stack constructor.

Prototype

public Stack() 

Source Link

Document

Creates an empty Stack.

Usage

From source file:edu.ku.brc.specify.ui.db.ResultSetTableModel.java

/**
 * Callback invoked when the asynchronous SQL query has finished executing.
 * Walks the ResultSet and fills the table model's caches ({@code cache},
 * {@code ids}, {@code colNames}, {@code classNames}), handling two special
 * column kinds described below, then fires table-changed events and notifies
 * the property listener with the final row count.
 *
 * NOTE(review): the method name "exectionDone" is misspelled, but it is an
 * interface callback (@Override) — presumably declared on the SQL execution
 * listener interface — so it cannot be renamed here; confirm at the interface.
 *
 * @param process   the processor that ran the query (unused here)
 * @param resultSet the query results; may be null on failure
 */
@Override
public synchronized void exectionDone(final SQLExecutionProcessor process, final ResultSet resultSet) {
    // Bump the status bar counter for this component, if a status bar exists.
    if (statusBar != null) {
        statusBar.incrementValue(getClass().getSimpleName());
    }

    // Nothing to process: log which reference was null, but still notify the
    // listener with a zero row count so the UI can settle.
    if (resultSet == null || results == null) {
        log.error("The " + (resultSet == null ? "resultSet" : "results") + " is null.");
        if (propertyListener != null) {
            propertyListener.propertyChange(new PropertyChangeEvent(this, "rowCount", null, 0));
        }
        return;
    }

    List<ERTICaptionInfo> captions = results.getVisibleCaptionInfo();

    // This can do one of two things:
    // 1) Take multiple columns and create an object and use a DataObjectFormatter to format the object.
    // 2) Take multiple objects that were derived from the columns and roll those up into a single column's value.
    //    This happens when you get back rows of info where part of the columns are duplicated because you really
    //    want those values to be put into a single column.
    //
    // Step One - Is to figure out what type of object needs to be created and what the Columns are 
    //            that need to be set into the object so the dataObjFormatter can do its job.
    //
    // Step Two - If the objects are being aggregated then the object created from the columns are added to a List
    //            and then last formatted as an "aggregation"

    try {
        if (resultSet.next()) {
            ResultSetMetaData metaData = resultSet.getMetaData();

            // Composite: several columns collapsed into one formatted value per row.
            boolean hasCompositeObj = false;
            DataObjSwitchFormatter dataObjFormatter = null;
            UIFieldFormatterIFace formatter = null;
            Object compObj = null;

            // Aggregates: several ROWS (sharing the same primary id) rolled up
            // into one display value. aggListRecycler reuses data objects to
            // avoid re-instantiating one per row.
            ERTICaptionInfo aggCaption = null;
            ERTICaptionInfo compositeCaption = null;
            Vector<Object> aggList = null;
            DataObjectSettable aggSetter = null;
            Stack<Object> aggListRecycler = null;

            DataObjectSettable dataSetter = null; // data setter for Aggregate or the Subclass

            // Loop through the captions to figure out what columns will be displayed.
            // Watch for Captions with an Aggregator or Composite.
            numColumns = captions.size();
            for (ERTICaptionInfo caption : captions) {
                colNames.addElement(caption.getColLabel());

                // JDBC column indexes are 1-based; caption positions are 0-based.
                int inx = caption.getPosIndex() + 1;
                Class<?> cls = null;
                try {
                    cls = Class.forName(metaData.getColumnClassName(inx));
                    // Dates/calendars are rendered as formatted strings in the table.
                    if (cls == Calendar.class || cls == java.sql.Date.class || cls == Date.class) {
                        cls = String.class;
                    }
                } catch (SQLException ex) {
                    // Unknown column class: fall back to String rendering.
                    cls = String.class;
                }
                classNames.addElement(cls);
                caption.setColClass(cls);

                if (caption.getAggregatorName() != null) {
                    // Alright we have an aggregator
                    aggList = new Vector<Object>();
                    aggListRecycler = new Stack<Object>();
                    aggCaption = caption;
                    aggSetter = DataObjectSettableFactory.get(aggCaption.getAggClass().getName(),
                            FormHelper.DATA_OBJ_SETTER);

                    // Now check to see if we are aggregating this type of object or a child object of this object.
                    // For example Collectors use an Agent as part of the aggregation.
                    if (aggCaption.getSubClass() != null) {
                        dataSetter = DataObjectSettableFactory.get(aggCaption.getSubClass().getName(),
                                FormHelper.DATA_OBJ_SETTER);
                    } else {
                        dataSetter = aggSetter;
                    }

                } else if (caption.getColInfoList() != null) {
                    formatter = caption.getUiFieldFormatter();
                    if (formatter != null) {
                        // UI-field-formatted composite: values are combined via formatToUI.
                        compositeCaption = caption;
                    } else {
                        // No aggregation, but we will be rolling up multiple columns into a single object
                        // for formatting. We need the formatter to know the Class of the object.
                        hasCompositeObj = true;
                        aggCaption = caption;
                        dataObjFormatter = caption.getDataObjFormatter();
                        if (dataObjFormatter != null) {
                            if (dataObjFormatter.getDataClass() != null) {
                                aggSetter = DataObjectSettableFactory.get(
                                        dataObjFormatter.getDataClass().getName(),
                                        "edu.ku.brc.af.ui.forms.DataSetterForObj");
                            } else {
                                log.error("formatterObj.getDataClass() was null for " + caption.getColName());
                            }
                        } else {
                            log.error("DataObjFormatter was null for " + caption.getColName());
                        }
                    }

                }
            }

            // aggCaption will be non-null for both an Aggregate AND a Composite.
            if (aggCaption != null) {
                // Dynamically discover the ResultSet column index for each ColInfo by
                // matching the unqualified column name (case-insensitively).
                for (ERTICaptionInfo.ColInfo colInfo : aggCaption.getColInfoList()) {
                    for (int i = 0; i < metaData.getColumnCount(); i++) {
                        String colName = StringUtils.substringAfterLast(colInfo.getColumnName(), ".");
                        if (colName.equalsIgnoreCase(metaData.getColumnName(i + 1))) {
                            colInfo.setPosition(i);
                            break;
                        }
                    }
                }

                // Now check to see if there is an Order Column because the Aggregator 
                // might need it for sorting the Aggregation.
                String ordColName = aggCaption.getOrderCol();
                if (StringUtils.isNotEmpty(ordColName)) {
                    String colName = StringUtils.substringAfterLast(ordColName, ".");
                    for (int i = 0; i < metaData.getColumnCount(); i++) {
                        if (colName.equalsIgnoreCase(metaData.getColumnName(i + 1))) {
                            aggCaption.setOrderColIndex(i);
                            break;
                        }
                    }
                    if (aggCaption.getOrderColIndex() == -1) {
                        log.error("Agg Order Column Index wasn't found [" + ordColName + "]");
                    }
                }
            }

            // Lazily create (or reset) the row-id cache.
            if (ids == null) {
                ids = new Vector<Integer>();
            } else {
                ids.clear();
            }

            // Here is the tricky part.
            // When we are doing a Composite we are just taking multiple columns and 
            // essentially replacing them with a single value from the DataObjFormatter.
            //
            // But when doing an Aggregation we are taking several rows and rolling them up into a
            // single value, so this code knows to only add a new row to the display-able
            // results when the primary id changes.

            DataObjFieldFormatMgr dataObjMgr = DataObjFieldFormatMgr.getInstance();
            Vector<Object> row = null;
            boolean firstTime = true;
            int prevId = Integer.MAX_VALUE; // really can't assume any value but will choose Max

            int numCols = resultSet.getMetaData().getColumnCount();
            do {
                // Column 1 is assumed to be the primary id of the row.
                int id = resultSet.getInt(1);

                // Remember aggCaption is used by both an Aggregation and a Composite.
                if (aggCaption != null && !hasCompositeObj) {
                    if (firstTime) {
                        prevId = id;
                        row = new Vector<Object>();
                        firstTime = false;
                        cache.add(row);
                        ids.add(id);

                    } else if (id != prevId) {
                        // The id changed: flush the accumulated aggList into the previous
                        // row (replacing its placeholder) and start a new display row.
                        if (row != null && aggList != null) {
                            int aggInx = captions.indexOf(aggCaption);
                            row.remove(aggInx);
                            row.insertElementAt(dataObjMgr.aggregate(aggList, aggCaption.getAggClass()),
                                    aggInx);

                            // Recycle the data objects rather than re-creating them per row.
                            if (aggListRecycler != null) {
                                aggListRecycler.addAll(aggList);
                            }
                            aggList.clear();

                            row = new Vector<Object>();
                            cache.add(row);
                            ids.add(id);
                        }
                        prevId = id;

                    } else if (row == null) {
                        row = new Vector<Object>();
                        cache.add(row);
                        ids.add(id);
                    }
                } else {
                    // Non-aggregating query: every ResultSet row is a display row.
                    row = new Vector<Object>();
                    cache.add(row);
                    ids.add(id);
                }

                // Now for each Caption column get a value.
                for (ERTICaptionInfo caption : captions) {
                    int posIndex = caption.getPosIndex();
                    if (caption == aggCaption) // Checks to see if we need to take multiple columns and make one column
                    {
                        if (hasCompositeObj) // just doing a Composite
                        {
                            if (aggSetter != null && row != null && dataObjFormatter != null) {
                                // NOTE(review): newInstance() is deprecated in modern Java;
                                // getDeclaredConstructor().newInstance() is the replacement.
                                if (compObj == null) {
                                    compObj = aggCaption.getAggClass().newInstance();
                                }

                                // Copy the composite's source columns into the scratch object,
                                // then format it into a single display value.
                                for (ERTICaptionInfo.ColInfo colInfo : aggCaption.getColInfoList()) {
                                    setField(aggSetter, compObj, colInfo.getFieldName(),
                                            colInfo.getFieldClass(), resultSet, colInfo.getPosition());
                                }
                                row.add(DataObjFieldFormatMgr.getInstance().format(compObj,
                                        compObj.getClass()));

                            } else if (formatter != null) {
                                // UI-field-formatted composite: gather the raw column values
                                // and let the UIFieldFormatter combine them.
                                int len = compositeCaption.getColInfoList().size();
                                Object[] val = new Object[len];
                                int i = 0;
                                for (ERTICaptionInfo.ColInfo colInfo : compositeCaption.getColInfoList()) {
                                    int colInx = colInfo.getPosition() + posIndex + 1;
                                    if (colInx < numCols) {
                                        val[i++] = resultSet.getObject(colInx);
                                    } else {
                                        val[i++] = "(Missing Data)";
                                    }
                                }
                                row.add(formatter.formatToUI(val));

                            } else {
                                log.error("Aggregator is null! [" + aggCaption.getAggregatorName()
                                        + "] or row or aggList");
                            }
                        } else if (aggSetter != null && row != null && aggList != null) // Doing an Aggregation
                        {
                            // Reuse a recycled data object when available, otherwise create one.
                            Object aggObj;
                            if (aggListRecycler.size() == 0) {
                                aggObj = aggCaption.getAggClass().newInstance();
                            } else {
                                aggObj = aggListRecycler.pop();
                            }
                            Object aggSubObj = aggCaption.getSubClass() != null
                                    ? aggCaption.getSubClass().newInstance()
                                    : null;
                            aggList.add(aggObj);

                            // When a subclass is configured (e.g. Collector -> Agent) the
                            // column values are set on the child object, which is attached
                            // to the parent via the configured field name.
                            Object dataObj;
                            if (aggSubObj != null) {
                                aggSetter.setFieldValue(aggObj, aggCaption.getSubClassFieldName(), aggSubObj);
                                dataObj = aggSubObj;
                            } else {
                                dataObj = aggObj;
                            }

                            for (ERTICaptionInfo.ColInfo colInfo : aggCaption.getColInfoList()) {
                                setField(dataSetter, dataObj, colInfo.getFieldName(), colInfo.getFieldClass(),
                                        resultSet, colInfo.getPosition());
                            }
                            // Placeholder is replaced with the aggregated string once the
                            // id changes (above) or after the loop (below).
                            row.add("PlaceHolder");

                        } else if (aggSetter == null || aggList == null) {
                            log.error("Aggregator is null! [" + aggCaption.getAggregatorName() + "] or aggList["
                                    + aggList + "]");
                        }

                    } else if (row != null) {
                        // Plain column(s): either a multi-column UI-formatted value or a
                        // single processed value.
                        if (caption.getColName() == null && caption.getColInfoList().size() > 0) {
                            int len = caption.getColInfoList().size();
                            Object[] val = new Object[len];
                            for (int i = 0; i < caption.getColInfoList().size(); i++) {
                                int inx = posIndex + 1 + i;
                                val[i] = caption.processValue(resultSet.getObject(inx));
                            }
                            row.add(caption.getUiFieldFormatter().formatToUI(val));

                        } else {
                            Object obj = caption.processValue(resultSet.getObject(posIndex + 1));
                            row.add(obj);
                        }
                    }
                }

            } while (resultSet.next());

            // We were always setting the rolled up data when the ID changed
            // but on the last row we need to do it here manually (so to speak).
            if (aggCaption != null && aggList != null && aggList.size() > 0 && row != null) {
                int aggInx = captions.indexOf(aggCaption);
                row.remove(aggInx);
                String colStr;
                if (StringUtils.isNotEmpty(aggCaption.getAggregatorName())) {
                    colStr = DataObjFieldFormatMgr.getInstance().aggregate(aggList,
                            aggCaption.getAggregatorName());

                } else {
                    colStr = DataObjFieldFormatMgr.getInstance().aggregate(aggList, aggCaption.getAggClass());
                }
                row.insertElementAt(colStr, aggInx);
                aggList.clear();
                aggListRecycler.clear();
            }

            fireTableStructureChanged();
            fireTableDataChanged();
        }

    } catch (Exception ex) {
        // NOTE(review): printStackTrace swallows the failure from the UI's point of
        // view; logging via 'log.error(..., ex)' would be preferable — confirm the
        // project's error-handling convention before changing.
        ex.printStackTrace();
    }

    // Always report the (possibly unchanged) row count to the listener.
    // NOTE(review): 'new Integer(...)' is deprecated; Integer.valueOf is preferred.
    if (propertyListener != null) {
        propertyListener
                .propertyChange(new PropertyChangeEvent(this, "rowCount", null, new Integer(cache.size())));
    }
}

From source file:barrysw19.calculon.icc.ICCInterface.java

/**
 * Parses an ICC level-2 datagram string into its response blocks.
 * Blocks are delimited by the control character Ctrl-Y ({@code 'Y' & 0x1F},
 * i.e. 0x19) followed by '(' to open and ')' to close; blocks may nest,
 * hence the stack of partially-built block buffers.
 *
 * Fixes over the original:
 *  - a marker as the final character no longer reads past the end of the
 *    string (StringIndexOutOfBoundsException on charAt(i + 1));
 *  - characters outside any open block are ignored instead of raising
 *    EmptyStackException from peek();
 *  - an unmatched close marker is skipped instead of popping an empty stack.
 *
 * @param lvl2String the raw level-2 protocol string
 * @return the completed response blocks, in closing order
 */
private List<ResponseBlockLv2> parseBlockResponseLv2(String lvl2String) {
    List<ResponseBlockLv2> rv = new ArrayList<>();

    final char marker = 'Y' & 0x1F; // Ctrl-Y block delimiter prefix
    Stack<StringBuffer> allBlocks = new Stack<>();
    for (int i = 0; i < lvl2String.length(); i++) {
        char ch = lvl2String.charAt(i);
        if (ch == marker && i + 1 < lvl2String.length()) {
            char next = lvl2String.charAt(i + 1);
            if (next == '(') {
                allBlocks.push(new StringBuffer());
                i++; // consume the '(' as part of the delimiter
                continue;
            }
            if (next == ')') {
                if (!allBlocks.isEmpty()) {
                    rv.add(ResponseBlockLv2.createResponseBlock(allBlocks.pop().toString()));
                }
                i++; // consume the ')' as part of the delimiter
                continue;
            }
            // A lone marker not followed by a delimiter falls through and is
            // treated as ordinary block content, matching the original behavior.
        }
        if (!allBlocks.isEmpty()) {
            allBlocks.peek().append(ch);
        }
    }

    LOG.debug(String.valueOf(rv));
    return rv;
}

From source file:it.cnr.icar.eric.server.security.authorization.RegistryAttributeFinderModule.java

/**
 * Parses the attribute names from a URI representation of an Attribute id.
 * For input "urn:oasis:names:tc:ebxml-regrep:3.0:rim:acp:resource:sourceObject:targetObject:objectType"
 * the return value is a Stack holding "sourceObject", "targetObject", "objectType",
 * pushed in reverse so that "sourceObject" is on top.
 *
 * Fix over the original: an attribute id matching none of the known prefixes
 * left {@code relevantSuffix} null and the subsequent {@code split(":")}
 * threw a NullPointerException; such ids now yield an empty stack.
 *
 * @param attributeId the attribute id URI
 * @return the stack of attribute name segments (possibly empty, never null)
 */
private Stack<String> getAttributeStackFromAttributeId(URI attributeId) {
    Stack<String> attrStack = new Stack<String>();

    String attrIdStr = attributeId.toString();

    String relevantSuffix = null;
    if (attrIdStr.startsWith(AuthorizationServiceImpl.RESOURCE_ATTRIBUTE_PREFIX)) {
        relevantSuffix = attrIdStr.substring(AuthorizationServiceImpl.RESOURCE_ATTRIBUTE_PREFIX.length());
    } else if (attrIdStr.startsWith(AuthorizationServiceImpl.SUBJECT_ATTRIBUTE_ID)) {
        //Special case. We should get rid of use of XACML subject-id attribute from spec??
        relevantSuffix = "id";
    } else if (attrIdStr.startsWith(AuthorizationServiceImpl.ACTION_ATTRIBUTE_PREFIX)) {
        //Not clear what to do here but following will preserve old behavior which may not have been right always
        relevantSuffix = getAttributeFromAttributeId(attributeId);
    }

    // Unknown prefix (or helper returned null): nothing to push.
    if (relevantSuffix == null) {
        return attrStack;
    }

    //Now split into attributes and push them onto the stack in reverse order
    String[] attrs = relevantSuffix.split(":");
    for (int i = attrs.length - 1; i >= 0; i--) {
        attrStack.push(attrs[i]);
    }

    return attrStack;
}

From source file:com.flexive.core.storage.genericSQL.GenericTreeStorageSpreaded.java

/**
 * Rewrites the left/right (nested-set) boundaries of the subtree rooted at
 * {@code nodeId}, either updating the source tree in place or, in create mode,
 * copying the nodes into {@code destMode}'s table. If the node's spacing cannot
 * be optimized locally, the method recurses upward via the parent node.
 *
 * @param con                      open JDBC connection (owned by the caller)
 * @param seq                      sequencer used to allocate new ids in create mode
 * @param sourceMode               tree to read from
 * @param destMode                 tree to write to (create mode)
 * @param nodeId                   root of the subtree to reorganize
 * @param includeNodeId            whether the root node itself is rewritten
 * @param overrideSpacing          spacing to use instead of the node's default, if smaller
 *                                 (or whenever overrideLeft is given)
 * @param overrideLeft             explicit starting left boundary, or null
 * @param insertParent             parent node a gap is being opened under, or null
 * @param insertPosition           requested insert position (normalized to >= 0)
 * @param insertSpace              size of the gap to inject past insertBoundaries[0]
 * @param insertBoundaries         [left, right] bounds of the requested gap, or null
 * @param depthDelta               depth offset applied to every written node
 * @param destinationNode          parent id the copied subtree hangs under (create mode)
 * @param createMode               true to INSERT into destMode, false to UPDATE sourceMode
 * @param createKeepIds            in create mode, reuse source ids instead of new ones
 * @param disableSpaceOptimization suppress the upward-recursion optimization
 * @return the id of the first created node (create mode), or -1 when nothing was done
 * @throws FxTreeException on sanity-check failure or database error
 */
protected long _reorganizeSpace(Connection con, SequencerEngine seq, FxTreeMode sourceMode, FxTreeMode destMode,
        long nodeId, boolean includeNodeId, BigInteger overrideSpacing, BigInteger overrideLeft,
        FxTreeNodeInfo insertParent, int insertPosition, BigInteger insertSpace, BigInteger insertBoundaries[],
        int depthDelta, Long destinationNode, boolean createMode, boolean createKeepIds,
        boolean disableSpaceOptimization) throws FxTreeException {
    long firstCreatedNodeId = -1;
    FxTreeNodeInfoSpreaded nodeInfo;
    try {
        nodeInfo = (FxTreeNodeInfoSpreaded) getTreeNodeInfo(con, sourceMode, nodeId);
    } catch (Exception e) {
        // Node lookup failed: signal "nothing reorganized" to the caller.
        return -1;
    }

    if (!nodeInfo.isSpaceOptimizable() && !disableSpaceOptimization) {
        // The Root node can't be optimized any more ... so all we can do is fail :-/
        // This should never really happen.
        if (nodeId == ROOT_NODE) {
            return -1;
        }
        // Not enough room at this level: retry one level up the tree.
        return _reorganizeSpace(con, seq, sourceMode, destMode, nodeInfo.getParentId(), includeNodeId,
                overrideSpacing, overrideLeft, insertParent, insertPosition, insertSpace, insertBoundaries,
                depthDelta, destinationNode, createMode, createKeepIds, false);
    }

    BigInteger spacing = nodeInfo.getDefaultSpacing();
    if (overrideSpacing != null && (overrideSpacing.compareTo(spacing) < 0 || overrideLeft != null)) {
        // override spacing unless it is greater OR overrideLeft is specified (in that case we
        // have to use the spacing for valid tree ranges)  
        spacing = overrideSpacing;
    } else {
        if (spacing.compareTo(GO_UP) < 0 && !createMode && !disableSpaceOptimization) {
            // Default spacing too small to be worth rewriting here: go up a level.
            return _reorganizeSpace(con, seq, sourceMode, destMode, nodeInfo.getParentId(), includeNodeId,
                    overrideSpacing, overrideLeft, insertParent, insertPosition, insertSpace, insertBoundaries,
                    depthDelta, destinationNode, createMode, createKeepIds, false);
        }
    }

    if (insertBoundaries != null && insertPosition == -1) {
        insertPosition = 0; // insertPosition cannot be negative
    }

    Statement stmt = null;
    PreparedStatement ps = null;
    ResultSet rs;
    BigInteger left = overrideLeft == null ? nodeInfo.getLeft() : overrideLeft;
    BigInteger right = null;
    // "=" makes the range checks inclusive so the root node itself is selected.
    String includeNode = includeNodeId ? "=" : "";
    long counter = 0;
    long newId = -1;
    try {
        final long start = System.currentTimeMillis();
        String createProps = createMode ? ",PARENT,REF,NAME,TEMPLATE" : "";
        String sql = " SELECT ID," + StorageManager.getIfFunction( // compute total child count only when the node has children
                "CHILDCOUNT = 0", "0",
                "(SELECT COUNT(*) FROM " + getTable(sourceMode) + " WHERE LFT > NODE.LFT AND RGT < NODE.RGT)") +
        // 3           4             5   6
                ", CHILDCOUNT, LFT AS LFTORD,RGT,DEPTH" + createProps
                + " FROM (SELECT ID,CHILDCOUNT,LFT,RGT,DEPTH" + createProps + " FROM " + getTable(sourceMode)
                + " WHERE " + "LFT>" + includeNode + nodeInfo.getLeft() + " AND LFT<" + includeNode
                + nodeInfo.getRight() + ") NODE " + "ORDER BY LFTORD ASC";
        stmt = con.createStatement();
        rs = stmt.executeQuery(sql);
        if (createMode) {
            //                                                                 1  2      3     4     5   6        7   8
            ps = con.prepareStatement(
                    "INSERT INTO " + getTable(destMode) + " (ID,PARENT,DEPTH,DIRTY,REF,TEMPLATE,LFT,RGT," +
                    //9           10    11
                            "CHILDCOUNT,NAME,MODIFIED_AT) " + "VALUES (?,?,?,?,?,?,?,?,?,?,?)");
        } else {
            ps = con.prepareStatement("UPDATE " + getTable(sourceMode) + " SET LFT=?,RGT=?,DEPTH=? WHERE ID=?");
        }
        long id;
        int total_childs;
        int direct_childs;
        BigInteger nextLeft;
        int lastDepth = nodeInfo.getDepth() + (includeNodeId ? 0 : 1);
        int depth;
        BigInteger _rgt;
        BigInteger _lft;
        Long ref = null;
        String data = null;
        String name = "";

        // In create mode, track the id of the current parent while walking the
        // depth-first (LFT-ordered) result set.
        Stack<Long> currentParent = null;
        if (createMode) {
            currentParent = new Stack<Long>();
            currentParent.push(destinationNode);
        }

        while (rs.next()) {
            id = rs.getLong(1);
            total_childs = rs.getInt(2);
            direct_childs = rs.getInt(3);
            _lft = getNodeBounds(rs, 4);
            _rgt = getNodeBounds(rs, 5);
            depth = rs.getInt(6);
            if (createMode) {
                // Reading these properties is slow, only do it when needed
                ref = rs.getLong(8);
                if (rs.wasNull())
                    ref = null;
                name = rs.getString(9);
                data = rs.getString(10);
                if (rs.wasNull())
                    data = null;
            }
            left = left.add(spacing).add(BigInteger.ONE);

            // Handle depth differences: stepping back up the tree consumes one
            // spacing unit per level left behind.
            if (lastDepth - depth > 0) {
                BigInteger depthDifference = spacing.add(BigInteger.ONE);
                left = left.add(depthDifference.multiply(BigInteger.valueOf(lastDepth - depth)));
            }
            if (createMode) {
                if (lastDepth < depth) {
                    // Descended one level: the previously written node is the new parent.
                    currentParent.push(newId);
                } else if (lastDepth > depth) {
                    for (int p = 0; p < (lastDepth - depth); p++)
                        currentParent.pop();
                }
            }

            right = left.add(spacing).add(BigInteger.ONE);

            // add child space if needed
            // NOTE(review): 'total_childs * 2' is int arithmetic and could overflow
            // for extremely large subtrees before the BigInteger conversion — confirm
            // expected tree sizes.
            if (total_childs > 0) {
                BigInteger childSpace = spacing.multiply(BigInteger.valueOf(total_childs * 2));
                childSpace = childSpace.add(BigInteger.valueOf((total_childs * 2) - 1));
                right = right.add(childSpace);
                nextLeft = left;
            } else {
                nextLeft = right;
            }

            if (insertBoundaries != null) {
                // insert gap at requested position
                // If we're past the gap, keep adding the insert space to left/right because the added
                // space is never "injected" into the loop, i.e. without adding it the left/right boundaries of
                // nodes after the gap would be too far to the left.
                if (_lft.compareTo(insertBoundaries[0]) > 0) {
                    left = left.add(insertSpace);
                }
                if (_rgt.compareTo(insertBoundaries[0]) > 0) {
                    right = right.add(insertSpace);
                }
            }

            // sanity checks
            if (left.compareTo(right) >= 0) {
                throw new FxTreeException(LOG, "ex.tree.reorganize.failed", counter, left, right,
                        "left greater than right");
            }
            if (insertParent != null && right.compareTo((BigInteger) insertParent.getRight()) > 0) {
                throw new FxTreeException(LOG, "ex.tree.reorganize.failed", counter, left, right,
                        "wrote past parent node bounds");
            }

            // Update the node
            if (createMode) {
                newId = createKeepIds ? id : seq.getId(destMode.getSequencer());
                if (firstCreatedNodeId == -1)
                    firstCreatedNodeId = newId;

                // Create the main entry
                ps.setLong(1, newId);
                ps.setLong(2, currentParent.peek());
                ps.setLong(3, depth + depthDelta);
                ps.setBoolean(4, destMode != FxTreeMode.Live); //only flag non-live tree's dirty
                if (ref == null) {
                    ps.setNull(5, java.sql.Types.NUMERIC);
                } else {
                    ps.setLong(5, ref);
                }
                if (data == null) {
                    ps.setNull(6, java.sql.Types.VARCHAR);
                } else {
                    ps.setString(6, data);
                }
                setNodeBounds(ps, 7, left);
                setNodeBounds(ps, 8, right);
                ps.setInt(9, direct_childs);
                ps.setString(10, name);
                ps.setLong(11, System.currentTimeMillis());
                ps.addBatch();
            } else {
                setNodeBounds(ps, 1, left);
                setNodeBounds(ps, 2, right);
                ps.setInt(3, depth + depthDelta);
                ps.setLong(4, id);
                ps.addBatch();
            }

            // Prepare variables for the next node
            left = nextLeft;
            lastDepth = depth;
            counter++;

            // Execute batch every 10000 items to avoid out of memory
            if (counter % 10000 == 0) {
                ps.executeBatch();
                ps.clearBatch();
            }
        }
        // NOTE(review): 'rs' is only closed on the success path; a failure between
        // executeQuery and here leaks it (closing 'stmt' in finally does close its
        // ResultSet per JDBC, so this is benign but worth confirming).
        rs.close();
        stmt.close();
        stmt = null;
        ps.executeBatch();

        if (LOG.isDebugEnabled()) {
            final long time = System.currentTimeMillis() - start;

            LOG.debug("Tree reorganization of " + counter + " items completed in " + time + " ms (spaceLen="
                    + spacing + ")");
        }
        return firstCreatedNodeId;
    } catch (FxApplicationException e) {
        // Preserve FxTreeException subtype; wrap any other application failure.
        throw e instanceof FxTreeException ? (FxTreeException) e : new FxTreeException(e);
    } catch (SQLException e) {
        String next = "";
        if (e.getNextException() != null)
            next = " next:" + e.getNextException().getMessage();
        if (StorageManager.isDuplicateKeyViolation(e))
            throw new FxTreeException(LOG, e, "ex.tree.reorganize.duplicateKey");
        throw new FxTreeException(LOG, e, "ex.tree.reorganize.failed", counter, left, right,
                e.getMessage() + next);
    } catch (Exception e) {
        throw new FxTreeException(e);
    } finally {
        try {
            if (stmt != null)
                stmt.close();
        } catch (Throwable t) {
            /* ignore - close is best-effort */}
        try {
            if (ps != null)
                ps.close();
        } catch (Throwable t) {
            /* ignore - close is best-effort */}
    }
}

From source file:axiom.framework.core.Application.java

/**
 * Get the application ready to run, initializing the evaluators and type manager.
 *
 * @param ignoreDirs comma separated list of directory names to ignore
 *///from   w  w  w  . ja  va  2 s  . com
/**
 * Gets the application ready to run: initializes the type manager, evaluators,
 * database mappings, session manager and optional cluster support.
 *
 * @param ignoreDirs comma separated list of directory names to ignore
 * @throws DatabaseException if the node manager cannot open its database
 * @throws IllegalAccessException if the session manager class cannot be accessed
 * @throws InstantiationException if the cluster, QueryBean or session manager cannot be created
 * @throws ClassNotFoundException if the configured session manager class is missing
 */
public synchronized void init(String ignoreDirs)
        throws DatabaseException, IllegalAccessException, InstantiationException, ClassNotFoundException {

    running = true;

    // create and init type mananger
    typemgr = new TypeManager(this, ignoreDirs);
    // set the context classloader. Note that this must be done before
    // using the logging framework so that a new LogFactory gets created
    // for this app.
    Thread.currentThread().setContextClassLoader(typemgr.getClassLoader());
    try {
        typemgr.createPrototypes();
    } catch (Exception x) {
        logError("Error creating prototypes", x);
    }

    // notify server-level extensions that this application is starting
    if (Server.getServer() != null) {
        for (AxiomExtension ext : Server.getServer().getExtensions()) {
            try {
                ext.applicationStarted(this);
            } catch (ConfigurationException e) {
                // best-effort: a broken extension must not prevent app startup
                logEvent("couldn't init extension " + ext.getName() + ": " + e.toString());
            }
        }
    }

    // create and init evaluator/thread lists
    freeThreads = new Stack<RequestEvaluator>();
    allThreads = new Vector<RequestEvaluator>();

    // Preallocate minThreads request evaluators; fall back to maxThreads/4,
    // then to 0, when the properties are missing or unparseable.
    int minThreads = 0;

    try {
        minThreads = Integer.parseInt(props.getProperty("minThreads"));
    } catch (Exception ignore) {
        try {
            minThreads = Integer.parseInt(props.getProperty("maxThreads"));
            minThreads /= 4;
        } catch (Exception ignoreagain) {
            minThreads = 0;
        }
    }

    if (minThreads > 0) {
        logEvent("Starting " + minThreads + " evaluator(s) for " + name);
    }

    for (int i = 0; i < minThreads; i++) {
        RequestEvaluator ev = new RequestEvaluator(this);

        ev.initScriptingEngine();

        freeThreads.push(ev);
        allThreads.addElement(ev);
    }

    activeRequests = new Hashtable<RequestTrans, RequestEvaluator>();
    activeCronJobs = new Hashtable<String, CronRunner>();
    customCronJobs = new Hashtable<String, CronJob>();

    // read in root id, root prototype, user prototype
    rootId = props.getProperty("rootid", "0");
    String rootPrototype = props.getProperty("rootprototype", "root");
    String userPrototype = props.getProperty("userprototype", "user");

    rootMapping = getDbMapping(rootPrototype);
    if (rootMapping == null)
        throw new RuntimeException("rootPrototype does not exist: " + rootPrototype);
    userMapping = getDbMapping(userPrototype);
    if (userMapping == null)
        throw new RuntimeException("userPrototype does not exist: " + userPrototype);

    // The whole user/userroot handling is basically old
    // ugly obsolete crap. Don't bother.
    ResourceProperties p = new ResourceProperties();
    // userMapping is guaranteed non-null here (fatal check above), so read
    // its name field directly; default to "name" when unset.
    String usernameField = userMapping.getNameField();

    if (usernameField == null) {
        usernameField = "name";
    }

    p.put("_children", "");
    p.put("_children.type", "collection(" + userPrototype + ")");
    p.put("_children.accessname", usernameField);
    p.put("roles", "");
    p.put("roles.type", "String");
    p.put("roles.multivalue", "true");
    userRootMapping = new DbMapping(this, "__userroot__", p);
    userRootMapping.update();

    // add the session mappings for persisting sessions in the object database
    String sessionPrototype = props.getProperty("sessionprototype", "session");
    sessionMapping = getDbMapping(sessionPrototype);
    if (sessionMapping == null)
        throw new RuntimeException("sessionPrototype does not exist: " + sessionPrototype);
    p = new ResourceProperties();
    // sessionMapping is likewise guaranteed non-null at this point.
    String nameField = sessionMapping.getNameField();
    if (nameField == null) {
        nameField = "name";
    }
    p.put("_children", "");
    p.put("_children.type", "collection(" + sessionPrototype + ")");
    p.put("_children.accessname", nameField);
    sessionRootMapping = new DbMapping(this, "__sessionroot__", p);
    sessionRootMapping.update();

    // create/setup the path indexer for this application
    try {
        pathIndexer = new PathIndexer(this);
    } catch (Exception ex) {
        // preserve the original failure as the cause instead of only printing it
        throw new RuntimeException("Could not create the path indexer for the application " + this.name, ex);
    }

    String cluster = this.getProperty("cluster", "false");
    if ("true".equalsIgnoreCase(cluster)) {
        try {
            this.clusterComm = new ClusterCommunicator(this);
        } catch (Exception ex) {
            // InstantiationException has no (String, Throwable) constructor;
            // attach the cause explicitly so the stack trace is not lost.
            InstantiationException ie = new InstantiationException(
                    "Could not initiate the jgroups cluster for " + this.name);
            ie.initCause(ex);
            throw ie;
        }

        this.clusterHost = this.getProperty("cluster.host");
        if (this.clusterHost == null) {
            throw new InstantiationException("ERROR: cluster.host not specified in app.properties");
        }
    }

    // create the node manager
    nmgr = new NodeManager(this);
    nmgr.init(dbDir.getAbsoluteFile(), props);

    this.executionCache = new ExecutionCache(this, "rhino");

    this.talCache = new ExecutionCache(this, "tal");

    // create and init session manager
    String sessionMgrImpl = props.getProperty("sessionManagerImpl", "axiom.framework.core.SessionManager");
    sessionMgr = (SessionManager) Class.forName(sessionMgrImpl).newInstance();
    logEvent("Using session manager class " + sessionMgrImpl);
    sessionMgr.init(this);

    // read the sessions if wanted
    if ("true".equalsIgnoreCase(getProperty("persistentSessions"))) {
        RequestEvaluator ev = getEvaluator();
        try {
            ev.initScriptingEngine();
            sessionMgr.loadSessionData(null, ev.scriptingEngine);
        } finally {
            releaseEvaluator(ev);
        }
    }

    // reset the classloader to the parent/system/server classloader.
    Thread.currentThread().setContextClassLoader(typemgr.getClassLoader().getParent());

    try {
        this.qbean = new QueryBean(this, "query-filter-" + getName());
    } catch (Exception ex) {
        InstantiationException ie = new InstantiationException(
                "Could not instantiate the QueryBean for " + this.name);
        ie.initCause(ex);
        throw ie;
    }

    // collect the set of non-relational external db types referenced in dbProps
    Enumeration<?> e = this.dbProps.keys();
    while (e.hasMoreElements()) {
        String key = (String) e.nextElement();
        if (key.indexOf(".type") != -1) {
            String value = this.dbProps.getProperty(key);
            if (!extDbTypes.contains(value) && !value.equalsIgnoreCase("relational")) {
                extDbTypes.add(value);
            }
        }
    }
    try {
        updateResources();
    } catch (IOException ex) {
        // best-effort: log through the app logger instead of bare stderr
        logError("Error updating resources", ex);
    }
}

From source file:org.esco.grouper.services.GrouperAPIExposerImpl.java

/**
 * Gives the root groups from a given stem.
 * @param key The name of the stem.
 * @return The list of the groups in the specified stem and its child stems.
 */
/**
 * Gives the root groups from a given stem: groups found anywhere under the
 * stem whose member has no immediate memberships of its own.
 *
 * @param key The name of the stem.
 * @return The groups found under the stem, or null if the stem does not exist.
 */
public GrouperDTO[] getAllRootGroupsFromStem(final String key) {
    final GrouperSession session = sessionUtil.createSession();
    Set<GrouperDTO> groups = null;
    try {
        final Stem s = fetchStem(session, key);
        if (s != null) {
            groups = new HashSet<GrouperDTO>();
            // Iterative depth-first traversal of the stem hierarchy.
            Stack<Stem> stems = new Stack<Stem>();
            stems.push(s);
            while (!stems.isEmpty()) {
                final Stem currentStem = stems.pop();
                @SuppressWarnings("rawtypes")
                final Set currentChildGroups = currentStem.getChildGroups();
                @SuppressWarnings("rawtypes")
                final Set currentChildStems = currentStem.getChildStems();

                for (Object o : currentChildGroups) {
                    final Group g = (Group) o;
                    // "Root" groups are those whose member has no immediate memberships.
                    if (g.toMember().getImmediateMemberships().isEmpty()) {
                        groups.add(fetchGrouperData(g));
                    }
                }

                for (Object o : currentChildStems) {
                    stems.push((Stem) o);
                }
            }
        }

        if (LOGGER.isDebugEnabled()) {
            LOGGER.debug("Child groups for stem " + key + ": " + groups);
        }
    } finally {
        // Always release the Grouper session, even when the traversal throws.
        sessionUtil.stopSession(session);
    }

    if (groups == null) {
        return null;
    }

    return groups.toArray(new GrouperDTO[groups.size()]);
}

From source file:com.continusec.client.VerifiableLog.java

/**
 * Utility method for auditors that wish to audit the full content of a log, as well as the log operation.
 * This method will retrieve all entries in batch from the log, and ensure that the root hash in head can be confirmed to accurately represent the contents
 * of all of the log entries. If prev is not null, then additionally it is proven that the root hash in head is consistent with the root hash in prev.
 * @param prev a previous LogTreeHead representing the set of entries that have been previously audited. To indicate that the log has not previously been audited, pass null.
 * @param head the LogTreeHead up to which we wish to audit the log. Upon successful completion the caller should persist this for a future iteration.
 * @param auditor caller should implemented a LogAuditor which is called sequentially for each log entry as it is encountered.
 * @param factory the factory to use for instantiating log entries. Typically this is one of {@link RawDataEntryFactory#getInstance()}, {@link JsonEntryFactory#getInstance()}, {@link RedactedJsonEntryFactory#getInstance()}.
 * @throws ContinusecException upon error
 */
public void verifyEntries(LogTreeHead prev, LogTreeHead head, VerifiableEntryFactory factory,
        LogAuditor auditor) throws ContinusecException {
    // Nothing to audit unless head extends beyond the previously audited size.
    if ((prev == null) || prev.getTreeSize() < head.getTreeSize()) {
        // Stack of Merkle subtree root hashes; merged incrementally as entries arrive.
        Stack<byte[]> merkleTreeStack = new Stack<byte[]>();
        if ((prev != null) && (prev.getTreeSize() > 0)) {
            // Seed the stack from an inclusion proof so the already-audited
            // prefix does not have to be re-fetched entry by entry.
            LogInclusionProof p = this.getInclusionProofByIndex(prev.getTreeSize() + 1, prev.getTreeSize());
            byte[] firstHash = null;
            for (byte[] b : p.getAuditPath()) {
                if (firstHash == null) {
                    firstHash = b;
                } else {
                    firstHash = Util.nodeMerkleTreeHash(b, firstHash);
                }
            }
            // Folding the audit path must reproduce the previous root hash,
            // otherwise the proof does not match what was audited before.
            if (!(Arrays.equals(firstHash, prev.getRootHash()))) {
                throw new VerificationFailedException();
            }
            // Push the path in reverse so the deepest (rightmost) hash is on top.
            for (int i = p.getAuditPath().length - 1; i >= 0; i--) {
                merkleTreeStack.push(p.getAuditPath()[i]);
            }
        }

        // Resume at the first entry not covered by prev (0 when starting fresh).
        int idx = (prev == null) ? 0 : prev.getTreeSize();
        try {
            for (VerifiableEntry e : this.getEntries(idx, head.getTreeSize(), factory)) {
                // do whatever content audit is desired on e
                auditor.auditLogEntry(idx, e);

                // update the merkle tree hash stack:
                merkleTreeStack.add(e.getLeafHash());
                // Merge completed sibling subtrees: one merge per trailing 1-bit of idx.
                for (int z = idx; (z & 1) == 1; z >>= 1) {
                    byte[] right = merkleTreeStack.pop();
                    byte[] left = merkleTreeStack.pop();
                    merkleTreeStack.push(Util.nodeMerkleTreeHash(left, right));
                }
                idx++;
            }
        } catch (RuntimeException e2) {
            // since get entries iterator throws a runtime exception that wraps the real continusec exception...
            Throwable cause = e2.getCause();
            if (cause instanceof ContinusecException) {
                throw (ContinusecException) cause;
            } else {
                throw e2;
            }
        }

        // The server must have returned exactly the entries requested.
        if (idx != head.getTreeSize()) {
            throw new NotAllEntriesReturnedException();
        }

        // Fold the remaining subtree hashes (right to left) into the root hash.
        byte[] headHash = merkleTreeStack.pop();
        while (!merkleTreeStack.empty()) {
            headHash = Util.nodeMerkleTreeHash(merkleTreeStack.pop(), headHash);
        }

        // The computed root must equal the claimed head root.
        if (!(Arrays.equals(headHash, head.getRootHash()))) {
            throw new VerificationFailedException();
        }
    }
}

From source file:fr.inria.oak.paxquery.common.xml.navigation.NavigationTreePatternNode.java

/**
 * Deep-copies the child node of the given edge, marks the copy and its whole
 * subtree as virtual, and attaches the copy to this node.
 * @param edge the edge whose target node is copied
 */
public void copyVirtualChild(NavigationTreePatternEdge edge) {
    NavigationTreePatternNode childCopy = edge.n2.deepCopy();
    // Walk the copied subtree iteratively, marking every node as virtual.
    Stack<NavigationTreePatternNode> pending = new Stack<NavigationTreePatternNode>();
    pending.push(childCopy);
    while (!pending.empty()) {
        NavigationTreePatternNode node = pending.pop();
        // Parameters.logger.info("Set virtual node: " + node.tag);
        node.virtual = true;
        node.nodeCode = NavigationTreePatternNode.globalNodeCounter.getAndIncrement();
        // virtual nodes obtained by navigation cannot store ID
        node.storesID = false;
        Iterator<NavigationTreePatternEdge> childEdges = node.edges.iterator();
        while (childEdges.hasNext()) {
            pending.push(childEdges.next().n2);
        }
    }
    addEdge(childCopy, edge.isParent(), edge.isNested(), edge.isOptional());
}

From source file:com.microsoft.live.sample.skydrive.SkyDriveActivity.java

@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.skydrive);

    // Stack of folder ids used for back-navigation through the folder tree.
    mPrevFolderIds = new Stack<String>();

    ListView lv = getListView();
    lv.setTextFilterEnabled(true);
    // Dispatch item clicks by SkyDrive object type via the visitor pattern.
    lv.setOnItemClickListener(new OnItemClickListener() {
        @Override
        public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
            SkyDriveObject skyDriveObj = (SkyDriveObject) parent.getItemAtPosition(position);

            skyDriveObj.accept(new Visitor() {
                @Override
                public void visit(SkyDriveAlbum album) {
                    // Remember where we came from, then descend into the album.
                    mPrevFolderIds.push(mCurrentFolderId);
                    loadFolder(album.getId());
                }

                @Override
                public void visit(SkyDrivePhoto photo) {
                    ViewPhotoDialog dialog = new ViewPhotoDialog(SkyDriveActivity.this, photo);
                    dialog.setOwnerActivity(SkyDriveActivity.this);
                    dialog.show();
                }

                @Override
                public void visit(SkyDriveFolder folder) {
                    // Same navigation behavior as albums.
                    mPrevFolderIds.push(mCurrentFolderId);
                    loadFolder(folder.getId());
                }

                @Override
                public void visit(SkyDriveFile file) {
                    // Pass the file's identity to the managed download dialog.
                    Bundle b = new Bundle();
                    b.putString(JsonKeys.ID, file.getId());
                    b.putString(JsonKeys.NAME, file.getName());
                    showDialog(DIALOG_DOWNLOAD_ID, b);
                }

                @Override
                public void visit(SkyDriveVideo video) {
                    PlayVideoDialog dialog = new PlayVideoDialog(SkyDriveActivity.this, video);
                    dialog.setOwnerActivity(SkyDriveActivity.this);
                    dialog.show();
                }

                @Override
                public void visit(SkyDriveAudio audio) {
                    PlayAudioDialog audioDialog = new PlayAudioDialog(SkyDriveActivity.this, audio);
                    audioDialog.show();
                }
            });
        }
    });

    // Build a header with "New Folder" and "Upload File" action buttons.
    LinearLayout layout = new LinearLayout(this);
    Button newFolderButton = new Button(this);
    newFolderButton.setText("New Folder");
    newFolderButton.setOnClickListener(new OnClickListener() {
        @Override
        public void onClick(View v) {
            NewFolderDialog dialog = new NewFolderDialog(SkyDriveActivity.this);
            dialog.setOwnerActivity(SkyDriveActivity.this);
            dialog.show();
        }
    });

    layout.addView(newFolderButton);

    Button uploadFileButton = new Button(this);
    uploadFileButton.setText("Upload File");
    uploadFileButton.setOnClickListener(new OnClickListener() {
        @Override
        public void onClick(View v) {
            // Delegate file selection to the FilePicker activity.
            Intent intent = new Intent(getApplicationContext(), FilePicker.class);
            startActivityForResult(intent, FilePicker.PICK_FILE_REQUEST);
        }
    });

    layout.addView(uploadFileButton);
    // NOTE: the header view must be added before setListAdapter is called.
    lv.addHeaderView(layout);

    mPhotoAdapter = new SkyDriveListAdapter(this);
    setListAdapter(mPhotoAdapter);

    LiveSdkSampleApplication app = (LiveSdkSampleApplication) getApplication();
    mClient = app.getConnectClient();
}

From source file:hudson.gridmaven.MavenModuleSet.java

/**
 * Loads this project's child modules, recomputes the transient nest levels,
 * rebuilds the sorted module list, and (re)wires the owners of the various
 * describable lists.
 */
public void onLoad(ItemGroup<? extends Item> parent, String name) throws IOException {
    // Children are loaded below; an empty map is needed while the superclass loads.
    modules = Collections.emptyMap();
    super.onLoad(parent, name);

    modules = loadChildren(this, getModulesDir(), new Function1<ModuleName, MavenModule>() {
        public ModuleName call(MavenModule mod) {
            return mod.getModuleName();
        }
    });

    // Recompute the transient nest levels and the pre-order module list.
    MavenModule root = getRootModule();
    if (root == null || root.getChildren() == null) {
        this.sortedActiveModules = getDisabledModules(false);
    } else {
        List<MavenModule> preOrder = new ArrayList<MavenModule>();
        Stack<MavenModule> dfs = new Stack<MavenModule>();
        root.nestLevel = 0;
        dfs.push(root);
        while (!dfs.isEmpty()) {
            MavenModule current = dfs.pop();
            preOrder.add(current);
            List<MavenModule> children = current.getChildren();
            if (children != null) {
                for (MavenModule child : children) {
                    child.nestLevel = current.nestLevel + 1;
                }
                // Push children in reverse so they pop in their natural order.
                for (int i = children.size() - 1; i >= 0; i--) {
                    dfs.push(children.get(i));
                }
            }
        }
        this.sortedActiveModules = preOrder;
    }

    // Lazily create any missing describable lists, then (re)attach their owner.
    if (reporters == null) {
        reporters = new DescribableList<MavenReporter, Descriptor<MavenReporter>>(this);
    }
    reporters.setOwner(this);
    if (publishers == null) {
        publishers = new DescribableList<Publisher, Descriptor<Publisher>>(this);
    }
    publishers.setOwner(this);
    if (buildWrappers == null) {
        buildWrappers = new DescribableList<BuildWrapper, Descriptor<BuildWrapper>>(this);
    }
    buildWrappers.setOwner(this);
    if (prebuilders == null) {
        prebuilders = new DescribableList<Builder, Descriptor<Builder>>(this);
    }
    prebuilders.setOwner(this);
    if (postbuilders == null) {
        postbuilders = new DescribableList<Builder, Descriptor<Builder>>(this);
    }
    postbuilders.setOwner(this);

    // Default for configurations saved before this flag existed.
    if (perModuleEmail == null) {
        perModuleEmail = Boolean.TRUE;
    }

    // Migrate the legacy usePrivateRepository flag to the locator object.
    if (Boolean.TRUE.equals(usePrivateRepository)) {
        this.localRepository = new PerJobLocalRepositoryLocator();
        usePrivateRepository = null;
    }

    updateTransientActions();
}