Example usage for java.util Stack empty

A list of usage examples for the java.util.Stack.empty() method

Introduction

On this page you can find example usages for the java.util.Stack.empty() method.

Prototype

public boolean empty() 

Document

Tests if this stack is empty.
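
Because Stack extends java.util.Vector, empty() behaves exactly like isEmpty(): it returns true when the stack holds no elements, and it is the usual loop guard before pop(). A minimal, self-contained sketch (class and variable names are illustrative):

import java.util.Stack;

public class StackEmptyDemo {
    public static void main(String[] args) {
        Stack<String> stack = new Stack<String>();
        stack.push("first");
        stack.push("second");
        // Drain in LIFO order; empty() guards the pop() calls.
        while (!stack.empty()) {
            System.out.println(stack.pop()); // prints "second", then "first"
        }
    }
}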

Usage

From source file:com.sm.store.server.QueryIterator.java

private boolean needTableScan() {
    //check tableScan
    if (tableScan)
        return true;
    else {
        if (!sorted) {
            Stack<Predicate> stack = (Stack<Predicate>) predicateStack.clone();
            while (!stack.empty()) {
                if (traverse(stack.pop()))
                    return true;
            }
        }
        return false;
    }
}
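
Note that the snippet clones predicateStack before draining it, so the empty()/pop() loop consumes the copy and leaves the original stack intact. A hedged sketch of that clone-then-drain pattern (names are illustrative, not from the source):

@SuppressWarnings("unchecked")
private static boolean containsMatch(Stack<String> original) {
    // Stack.clone() copies the backing Vector, so popping the copy
    // does not modify the original stack.
    Stack<String> copy = (Stack<String>) original.clone();
    while (!copy.empty()) {
        if (copy.pop().startsWith("x")) {
            return true;
        }
    }
    return false;
}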

From source file:org.jolokia.handler.list.MBeanInfoData.java

private void addPartialMBeanInfo(JSONObject pMBeanMap, MBeanInfo pMBeanInfo, Stack<String> pPathStack) {
    String what = pPathStack.empty() ? null : pPathStack.pop();
    DataUpdater updater = UPDATERS.get(what);
    if (updater != null) {
        updater.update(pMBeanMap, pMBeanInfo, pPathStack);
    } else {
        throw new IllegalArgumentException("Illegal path element " + what);
    }
}
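
The ternary guard on empty() matters here: calling pop() on an empty Stack throws EmptyStackException, so the check turns a hard failure into a null sentinel for the lookup that follows. A minimal illustration (hypothetical values):

Stack<String> path = new Stack<String>();
String head = path.empty() ? null : path.pop(); // null, no exception thrown
path.push("attr");
head = path.empty() ? null : path.pop();        // "attr"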

From source file:org.jolokia.handler.list.MBeanInfoData.java

/**
 * Add information about an MBean as obtained from an {@link MBeanInfo} descriptor. The information added
 * can be restricted by a given path (which has already been prepared as a stack). Also, a max depth as given in the
 * constructor restricts the size of the map from the top.
 *
 * @param mBeanInfo the MBean info
 * @param pName the object name of the MBean
 */
public void addMBeanInfo(MBeanInfo mBeanInfo, ObjectName pName)
        throws InstanceNotFoundException, IntrospectionException, ReflectionException, IOException {

    JSONObject mBeansMap = getOrCreateJSONObject(infoMap, pName.getDomain());
    JSONObject mBeanMap = getOrCreateJSONObject(mBeansMap, getKeyPropertyString(pName));
    // Trim down stack to get rid of domain/property list
    Stack<String> stack = truncatePathStack(2);
    if (stack.empty()) {
        addFullMBeanInfo(mBeanMap, mBeanInfo);
    } else {
        addPartialMBeanInfo(mBeanMap, mBeanInfo, stack);
    }
    // Trim if required
    if (mBeanMap.size() == 0) {
        mBeansMap.remove(getKeyPropertyString(pName));
        if (mBeansMap.size() == 0) {
            infoMap.remove(pName.getDomain());
        }
    }
}

From source file:edu.ucsb.nceas.metacattest.SubTreeTest.java

/**
 * Test the method getSubTreeNodeStack
 */
public void testGetSubTreeNodeStack() {
    Stack nodeStack = null;
    try {
        nodeStack = tree.getSubTreeNodeStack();
    } //try
    catch (Exception e) {
        log.debug("Error in SubTreeTest.suite: " + e.getMessage());
    } //catch

    while (nodeStack != null && !nodeStack.empty()) {
        NodeRecord node = (NodeRecord) nodeStack.pop();
        String nodeType = node.getNodeType();
        if (nodeType != null && nodeType.equals("ELEMENT")) {
            log.debug("Elment: " + node.getNodeName());
        } else if (nodeType != null && nodeType.equals("ATTRIBUTE")) {
            log.debug("Attribute: " + node.getNodeName() + " = " + node.getNodeData());
        } else {
            log.debug("text: " + node.getNodeData());
        }
    }

}

From source file:com.pinterest.hdfsbackup.distcp.DistCp.java

/**
 * Initialize DFSCopyFileMapper specific job-configuration.
 * @param conf : The dfs/mapred configuration.
 * @param jobConf : The handle to the jobConf object to be initialized.
 * @param args Arguments
 */
private static void setup(Configuration conf, JobConf jobConf, final Arguments args) throws IOException {
    jobConf.set(DST_DIR_LABEL, args.dst.toUri().toString());

    //set boolean values
    final boolean update = args.flags.contains(Options.UPDATE);
    final boolean overwrite = !update && args.flags.contains(Options.OVERWRITE);
    jobConf.setBoolean(Options.UPDATE.propertyname, update);
    jobConf.setBoolean(Options.OVERWRITE.propertyname, overwrite);
    jobConf.setBoolean(Options.IGNORE_READ_FAILURES.propertyname,
            args.flags.contains(Options.IGNORE_READ_FAILURES));
    jobConf.setBoolean(Options.PRESERVE_STATUS.propertyname, args.flags.contains(Options.PRESERVE_STATUS));

    final String randomId = getRandomId();
    JobClient jClient = new JobClient(jobConf);
    Path jobDirectory = new Path(jClient.getSystemDir(), NAME + "_" + randomId);
    jobConf.set(JOB_DIR_LABEL, jobDirectory.toString());

    FileSystem dstfs = args.dst.getFileSystem(conf);
    boolean dstExists = dstfs.exists(args.dst);
    boolean dstIsDir = false;
    if (dstExists) {
        dstIsDir = dstfs.getFileStatus(args.dst).isDir();
    }

    // default logPath
    Path logPath = args.log;
    if (logPath == null) {
        String filename = "_distcp_logs_" + randomId;
        if (!dstExists || !dstIsDir) {
            Path parent = args.dst.getParent();
            if (!dstfs.exists(parent)) {
                dstfs.mkdirs(parent);
            }
            logPath = new Path(parent, filename);
        } else {
            logPath = new Path(args.dst, filename);
        }
    }
    FileOutputFormat.setOutputPath(jobConf, logPath);

    // create src list, dst list
    FileSystem jobfs = jobDirectory.getFileSystem(jobConf);

    Path srcfilelist = new Path(jobDirectory, "_distcp_src_files");
    jobConf.set(SRC_LIST_LABEL, srcfilelist.toString());
    SequenceFile.Writer src_writer = SequenceFile.createWriter(jobfs, jobConf, srcfilelist, LongWritable.class,
            FilePair.class, SequenceFile.CompressionType.NONE);

    Path dstfilelist = new Path(jobDirectory, "_distcp_dst_files");
    SequenceFile.Writer dst_writer = SequenceFile.createWriter(jobfs, jobConf, dstfilelist, Text.class,
            Text.class, SequenceFile.CompressionType.NONE);

    Path dstdirlist = new Path(jobDirectory, "_distcp_dst_dirs");
    jobConf.set(DST_DIR_LIST_LABEL, dstdirlist.toString());
    SequenceFile.Writer dir_writer = SequenceFile.createWriter(jobfs, jobConf, dstdirlist, Text.class,
            FilePair.class, SequenceFile.CompressionType.NONE);

    // handle the case where the destination directory doesn't exist
    // and we've only a single src directory OR we're updating/overwriting
    // the contents of the destination directory.
    final boolean special = (args.srcs.size() == 1 && !dstExists) || update || overwrite;
    int srcCount = 0, cnsyncf = 0, dirsyn = 0;
    long fileCount = 0L, byteCount = 0L, cbsyncs = 0L;
    try {
        for (Iterator<Path> srcItr = args.srcs.iterator(); srcItr.hasNext();) {
            final Path src = srcItr.next();
            FileSystem srcfs = src.getFileSystem(conf);
            FileStatus srcfilestat = srcfs.getFileStatus(src);
            Path root = special && srcfilestat.isDir() ? src : src.getParent();
            if (srcfilestat.isDir()) {
                ++srcCount;
            }

            Stack<FileStatus> pathstack = new Stack<FileStatus>();
            for (pathstack.push(srcfilestat); !pathstack.empty();) {
                FileStatus cur = pathstack.pop();
                FileStatus[] children = srcfs.listStatus(cur.getPath());
                for (int i = 0; i < children.length; i++) {
                    boolean skipfile = false;
                    final FileStatus child = children[i];
                    final String dst = makeRelative(root, child.getPath());
                    ++srcCount;

                    if (child.isDir()) {
                        pathstack.push(child);
                    } else {
                        //skip file if the src and the dst files are the same.
                        skipfile = update && sameFile(srcfs, child, dstfs, new Path(args.dst, dst));
                        //skip the file if it exceeds the file limit or the size limit
                        skipfile |= fileCount == args.filelimit || byteCount + child.getLen() > args.sizelimit;

                        if (!skipfile) {
                            ++fileCount;
                            byteCount += child.getLen();

                            if (LOG.isTraceEnabled()) {
                                LOG.trace("adding file " + child.getPath());
                            }

                            ++cnsyncf;
                            cbsyncs += child.getLen();
                            if (cnsyncf > SYNC_FILE_MAX || cbsyncs > BYTES_PER_MAP) {
                                src_writer.sync();
                                dst_writer.sync();
                                cnsyncf = 0;
                                cbsyncs = 0L;
                            }
                        }
                    }

                    if (!skipfile) {
                        src_writer.append(new LongWritable(child.isDir() ? 0 : child.getLen()),
                                new FilePair(child, dst));
                    }

                    dst_writer.append(new Text(dst), new Text(child.getPath().toString()));
                }

                if (cur.isDir()) {
                    String dst = makeRelative(root, cur.getPath());
                    dir_writer.append(new Text(dst), new FilePair(cur, dst));
                    if (++dirsyn > SYNC_FILE_MAX) {
                        dirsyn = 0;
                        dir_writer.sync();
                    }
                }
            }
        }
    } finally {
        checkAndClose(src_writer);
        checkAndClose(dst_writer);
        checkAndClose(dir_writer);
    }

    FileStatus dststatus = null;
    try {
        dststatus = dstfs.getFileStatus(args.dst);
    } catch (FileNotFoundException fnfe) {
        LOG.info(args.dst + " does not exist.");
    }

    // create dest path dir if copying > 1 file
    if (dststatus == null) {
        if (srcCount > 1 && !dstfs.mkdirs(args.dst)) {
            throw new IOException("Failed to create " + args.dst);
        }
    }

    final Path sorted = new Path(jobDirectory, "_distcp_sorted");
    checkDuplication(jobfs, dstfilelist, sorted, conf);

    if (dststatus != null && args.flags.contains(Options.DELETE)) {
        deleteNonexisting(dstfs, dststatus, sorted, jobfs, jobDirectory, jobConf, conf);
    }

    Path tmpDir = new Path(
            (dstExists && !dstIsDir) || (!dstExists && srcCount == 1) ? args.dst.getParent() : args.dst,
            "_distcp_tmp_" + randomId);
    jobConf.set(TMP_DIR_LABEL, tmpDir.toUri().toString());
    LOG.info("srcCount=" + srcCount);
    jobConf.setInt(SRC_COUNT_LABEL, srcCount);
    jobConf.setLong(TOTAL_SIZE_LABEL, byteCount);
    setMapCount(byteCount, jobConf);
}
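
The unusual for loop over pathstack above is a compact spelling of an iterative depth-first traversal: the push is the initializer and empty() is the loop condition. It is equivalent to the following push-then-drain form (sketch only):

pathstack.push(srcfilestat);
while (!pathstack.empty()) {
    FileStatus cur = pathstack.pop();
    // examine cur, pushing any child directories back onto pathstack
}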

From source file:org.exoplatform.wiki.tree.TreeNode.java

private void pushChildren(HashMap<String, Object> context) throws Exception {

    Stack<WikiPageParams> paramsStk = (Stack<WikiPageParams>) context.get(this.STACK_PARAMS);

    if (paramsStk == null) {
        pushChild(context);
    } else {
        if (paramsStk.empty()) {
            this.isSelected = true;
        } else {
            WikiPageParams params = paramsStk.pop();
            context.put(this.STACK_PARAMS, paramsStk);
            if (this instanceof RootTreeNode) {
                SpaceTreeNode spaceNode = new SpaceTreeNode(params.getType());
                pushChild(spaceNode, context);
            } else if (this instanceof SpaceTreeNode) {
                Wiki wiki = (Wiki) Utils.getObjectFromParams(params);
                WikiTreeNode wikiNode = new WikiTreeNode(wiki);
                pushChild(wikiNode, context);
            } else if (this instanceof WikiTreeNode) {
                pushChild(context);
            } else if (this instanceof WikiHomeTreeNode || this instanceof PageTreeNode) {
                PageImpl page = (PageImpl) Utils.getObjectFromParams(params);
                PageTreeNode pageNode = new PageTreeNode(page);
                pushChild(pageNode, context);
            }
        }
    }
}

From source file:org.apache.whirr.service.DryRunModuleTest.java

/**
 * Simple test that tests dry run module and at the same time enforces clear
 * separation of script execution phases.
 */
@Test
public void testNoInitScriptsAfterConfigurationStartedAndNoConfigScriptsAfterDestroy()
        throws ConfigurationException, JSchException, IOException, InterruptedException {

    final List<String> expectedExecutionOrder = ImmutableList.of("bootstrap", "configure", "start", "destroy");

    CompositeConfiguration config = new CompositeConfiguration();
    config.setProperty("whirr.provider", "stub");
    config.setProperty("whirr.cluster-name", "stub-test");
    config.setProperty("whirr.instance-templates", "10 noop+noop3,10 noop2+noop,10 noop3+noop2");
    config.setProperty("whirr.state-store", "memory");

    ClusterSpec clusterSpec = ClusterSpec.withTemporaryKeys(config);
    ClusterController controller = new ClusterController();

    DryRun dryRun = getDryRunInControllerForCluster(controller, clusterSpec);
    dryRun.reset();

    controller.launchCluster(clusterSpec);
    controller.destroyCluster(clusterSpec);

    ListMultimap<NodeMetadata, Statement> perNodeExecutions = dryRun.getExecutions();
    List<StatementOnNode> totalExecutions = dryRun.getTotallyOrderedExecutions();

    // Assert that all nodes executed all three phases and in the right order

    for (Entry<NodeMetadata, Collection<Statement>> entry : perNodeExecutions.asMap().entrySet()) {
        assertSame("An incorrect number of scripts was executed in the node: " + entry.getValue(),
                entry.getValue().size(), expectedExecutionOrder.size());
        List<Statement> asList = Lists.newArrayList(entry.getValue());

        int count = 0;
        for (String phase : expectedExecutionOrder) {
            String scriptName = getScriptName(asList.get(count));
            assertTrue("The '" + phase + "' script was executed in the wrong order, found: " + scriptName,
                    scriptName.startsWith(phase));
            count += 1;
        }
    }

    // This tests the barrier by making sure that once a configure
    // script is executed no more setup scripts are executed

    Stack<String> executedPhases = new Stack<String>();
    for (StatementOnNode script : totalExecutions) {
        String[] parts = getScriptName(script.getStatement()).split("-");
        if (executedPhases.empty() || !executedPhases.peek().equals(parts[0])) {
            executedPhases.push(parts[0]);
        }
    }

    // Assert that all scripts executed in the right order with no overlaps

    assertEquals(expectedExecutionOrder.size(), executedPhases.size());
    for (String phaseName : Lists.reverse(expectedExecutionOrder)) {
        assertEquals(executedPhases.pop(), phaseName);
    }
}
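
The executedPhases stack in this test records each run of identical phase names exactly once: a name is pushed only when the stack is empty or its top differs, which collapses consecutive duplicates. A minimal sketch of that run-collapsing idiom (hypothetical input):

Stack<String> runs = new Stack<String>();
for (String s : new String[] { "bootstrap", "bootstrap", "configure", "start", "start" }) {
    if (runs.empty() || !runs.peek().equals(s)) {
        runs.push(s);
    }
}
// runs now holds [bootstrap, configure, start], bottom to top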

From source file:org.jolokia.converter.json.TabularDataExtractor.java

private Object convertTabularDataDirectly(TabularData pTd, Stack<String> pExtraArgs,
        ObjectToJsonConverter pConverter) throws AttributeNotFoundException {
    if (!pExtraArgs.empty()) {
        throw new IllegalArgumentException("Cannot use a path for converting tabular data with complex keys ("
                + pTd.getTabularType().getRowType() + ")");
    }
    JSONObject ret = new JSONObject();
    JSONArray indexNames = new JSONArray();
    TabularType type = pTd.getTabularType();
    for (String index : type.getIndexNames()) {
        indexNames.add(index);
    }
    ret.put("indexNames", indexNames);

    JSONArray values = new JSONArray();
    // No special handling for wildcard paths here, since paths are not supported for this use case (yet)
    for (CompositeData cd : (Collection<CompositeData>) pTd.values()) {
        values.add(pConverter.extractObject(cd, pExtraArgs, true));
    }
    ret.put("values", values);

    return ret;
}

From source file:org.dspace.app.xmlui.utils.HandleUtil.java

/**
 * Build a list of trail metadata starting with the owning collection and
 * ending with the root level parent. If the Object is an item, a bundle,
 * or a bitstream, then the object is not included, but its collection and
 * community parents are. However, if the object is a community or collection,
 * then it is included along with all of its parents.
 *
 * <p>
 * If the terminal object in the trail is the passed object, do not link to
 * it, because that is (presumably) the page at which the user has arrived.
 *
 * @param context session context.
 * @param dso the DSpace object whose parents we will add to the pageMeta
 * @param pageMeta the object to which we link our trail
 * @param contextPath The context path
 * @param linkOriginalObject whether or not to make a link of the original object
 * @throws java.sql.SQLException passed through.
 * @throws org.dspace.app.xmlui.wing.WingException passed through.
 */
public static void buildHandleTrail(Context context, DSpaceObject dso, PageMeta pageMeta, String contextPath,
        boolean linkOriginalObject) throws SQLException, WingException {
    // Add the trail back to the repository root.
    Stack<DSpaceObject> stack = new Stack<DSpaceObject>();
    DSpaceObject aDso = dso;

    if (aDso instanceof Bitstream) {
        Bitstream bitstream = (Bitstream) aDso;
        List<Bundle> bundles = bitstream.getBundles();

        aDso = bundles.get(0);
    }

    if (aDso instanceof Bundle) {
        Bundle bundle = (Bundle) aDso;
        List<Item> items = bundle.getItems();

        aDso = items.get(0);
    }

    if (aDso instanceof Item) {
        Item item = (Item) aDso;
        Collection collection = item.getOwningCollection();

        aDso = collection;
    }

    if (aDso instanceof Collection) {
        Collection collection = (Collection) aDso;
        stack.push(collection);
        List<Community> communities = collection.getCommunities();

        aDso = communities.get(0);
    }

    if (aDso instanceof Community) {
        Community community = (Community) aDso;
        stack.push(community);

        for (Community parent : communityService.getAllParents(context, community)) {
            stack.push(parent);
        }
    }

    while (!stack.empty()) {
        DSpaceObject pop = stack.pop();

        String target;
        if (pop == dso && !linkOriginalObject)
            target = null; // Do not link "back" to the terminal object
        else
            target = contextPath + "/handle/" + pop.getHandle();

        if (pop instanceof Collection) {
            Collection collection = (Collection) pop;
            String name = collection.getName();
            if (name == null || name.length() == 0) {
                pageMeta.addTrailLink(target, new Message("default", "xmlui.general.untitled"));
            } else {
                pageMeta.addTrailLink(target, name);
            }
        } else if (pop instanceof Community) {
            Community community = (Community) pop;
            String name = community.getName();
            if (name == null || name.length() == 0) {
                pageMeta.addTrailLink(target, new Message("default", "xmlui.general.untitled"));
            } else {
                pageMeta.addTrailLink(target, name);
            }
        }

    }
}
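
The stack doubles as an order-reverser in this example: parents are pushed from the object outward toward the repository root, so popping until empty() yields a root-first breadcrumb trail. A toy illustration (hypothetical names):

Stack<String> trail = new Stack<String>();
trail.push("collection");
trail.push("community");
trail.push("root");
while (!trail.empty()) {
    System.out.println(trail.pop()); // prints root, community, collection
}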

From source file:org.rhq.plugins.jbossas.util.FileContentDelegate.java

/**
 * Compute SHA256 for the content of an exploded war deployment. This method should be used to
 * compute the SHA256 for content deployed outside RHQ or for the initial content delivered
 * with the server.
 *
 * @param deploymentDirectory app deployment folder
 * @return the SHA256 digest string, or null if deploymentDirectory is not a directory
 */
private String computeAndSaveSHA(File deploymentDirectory) {
    String sha = null;
    try {
        if (deploymentDirectory.isDirectory()) {
            MessageDigestGenerator messageDigest = new MessageDigestGenerator(MessageDigestGenerator.SHA_256);

            Stack<File> unvisitedFolders = new Stack<File>();
            unvisitedFolders.add(deploymentDirectory);
            while (!unvisitedFolders.empty()) {
                File[] files = unvisitedFolders.pop().listFiles();
                Arrays.sort(files, new Comparator<File>() {
                    public int compare(File f1, File f2) {
                        try {
                            return f1.getCanonicalPath().compareTo(f2.getCanonicalPath());
                        } catch (IOException e) {
                            //do nothing if the sort fails at this point
                        }

                        return 0;
                    }
                });

                for (File file : files) {
                    if (file.isDirectory()) {
                        unvisitedFolders.add(file);
                    } else {
                        FileInputStream inputStream = null;
                        try {
                            inputStream = new FileInputStream(file);
                            messageDigest.add(inputStream);
                        } finally {
                            if (inputStream != null) {
                                inputStream.close();
                            }
                        }
                    }
                }
            }

            sha = messageDigest.getDigestString();
            writeSHAToManifest(deploymentDirectory, sha);
        }
    } catch (IOException e) {
        throw new RuntimeException("Error creating artifact for contentFile: " + deploymentDirectory, e);
    }

    return sha;
}
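
Here unvisitedFolders serves as an explicit work list that replaces recursion: directories are pushed as they are discovered, and the loop runs until empty() signals the traversal is complete. A stripped-down sketch of the same stack-based directory walk (hypothetical helper, including the null check that listFiles() requires):

import java.io.File;
import java.util.Stack;

static void walk(File rootDir) {
    Stack<File> pending = new Stack<File>();
    pending.push(rootDir);
    while (!pending.empty()) {
        File[] children = pending.pop().listFiles();
        if (children == null) {
            continue; // not a directory, or an I/O error occurred
        }
        for (File child : children) {
            if (child.isDirectory()) {
                pending.push(child); // descend later
            } else {
                System.out.println(child.getPath());
            }
        }
    }
}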