Example usage for java.util.Deque.push

Introduction

On this page you can find usage examples for java.util.Deque.push.

Prototype

void push(E e);

Document

Pushes an element onto the stack represented by this deque (in other words, at the head of this deque) if it is possible to do so immediately without violating capacity restrictions, throwing an IllegalStateException if no space is currently available.
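
A minimal, self-contained sketch of the call in isolation (the class and variable names here are illustrative, not taken from the projects below). Since push(e) is equivalent to addFirst(e), the head of the deque behaves as the top of a stack:

import java.util.ArrayDeque;
import java.util.Deque;

public class DequePushExample {
    public static void main(String[] args) {
        Deque<String> stack = new ArrayDeque<>();
        stack.push("a"); // head is now "a"
        stack.push("b"); // head is now "b"; "a" sits below it
        System.out.println(stack.peek()); // prints "b" without removing it
        System.out.println(stack.pop());  // prints "b" and removes it
        System.out.println(stack.pop());  // prints "a"
    }
}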

Usage

From source file: edu.upenn.cis.orchestra.workloadgenerator.Generator.java

public void findSimpleCycles(List<List<Integer>> cycles, List<List<Object>> mappings) {
    // First, index the edges
    List<List<Integer>> edges = new ArrayList<List<Integer>>();

    for (int i = 0; i < _peers.size(); i++) {
        edges.add(new ArrayList<Integer>());
    }

    for (List<Object> thisMapping : mappings) {
        edges.get((Integer) thisMapping.get(0)).add((Integer) thisMapping.get(1));
    }

    for (List<Integer> thisEdge : edges) {
        Collections.sort(thisEdge);
    }

    // Find simple cycles as follows:
    // - Handle the peers in order
    // - Find simple cycles where the smallest node in the cycle
    // is the peer
    cycles.clear();
    for (int i = 0; i < _peers.size(); i++) {
        Deque<List<Integer>> paths = new ArrayDeque<List<Integer>>();
        paths.push(new ArrayList<Integer>());
        paths.peek().add(i);
        while (0 != paths.size()) {
            List<Integer> path = paths.pop();
            for (Integer j : edges.get(path.get(path.size() - 1))) {
                if (j.equals(i)) {
                    List<Integer> cycle = new ArrayList<Integer>();
                    cycle.addAll(path);
                    cycle.add(j);
                    cycles.add(cycle);
                } else if (j > i && !path.contains(j)) {
                    List<Integer> newPath = new ArrayList<Integer>();
                    newPath.addAll(path);
                    newPath.add(j);
                    paths.push(newPath);
                }
            }
        }
    }
}

From source file: com.puppycrawl.tools.checkstyle.checks.javadoc.JavadocStyleCheck.java

/**
 * Checks the comment for HTML tags that do not have a corresponding close
 * tag or a close tag that has no previous open tag.  This code was
 * primarily copied from the DocCheck checkHtml method.
 *
 * @param ast the node with the Javadoc
 * @param comment the {@code TextBlock} which represents
 *                 the Javadoc comment.
 */
private void checkHtmlTags(final DetailAST ast, final TextBlock comment) {
    final int lineNo = comment.getStartLineNo();
    final Deque<HtmlTag> htmlStack = new ArrayDeque<>();
    final String[] text = comment.getText();

    final TagParser parser = new TagParser(text, lineNo);

    while (parser.hasNextTag()) {
        final HtmlTag tag = parser.nextTag();

        if (tag.isIncompleteTag()) {
            log(tag.getLineNo(), INCOMPLETE_TAG, text[tag.getLineNo() - lineNo]);
            return;
        }
        if (tag.isClosedTag()) {
            //do nothing
            continue;
        }
        if (tag.isCloseTag()) {
            // We have found a close tag.
            if (isExtraHtml(tag.getId(), htmlStack)) {
                // No corresponding open tag was found on the stack.
                log(tag.getLineNo(), tag.getPosition(), EXTRA_HTML, tag);
            } else {
                // See if there are any unclosed tags that were opened
                // after this one.
                checkUnclosedTags(htmlStack, tag.getId());
            }
        } else {
            //We only push html tags that are allowed
            if (isAllowedTag(tag)) {
                htmlStack.push(tag);
            }
        }
    }

    // Identify any tags left on the stack.
    // Skip multiples, like <b>...<b>
    String lastFound = "";
    final List<String> typeParameters = CheckUtils.getTypeParameterNames(ast);
    for (final HtmlTag htmlTag : htmlStack) {
        if (!isSingleTag(htmlTag) && !htmlTag.getId().equals(lastFound)
                && !typeParameters.contains(htmlTag.getId())) {
            log(htmlTag.getLineNo(), htmlTag.getPosition(), UNCLOSED_HTML, htmlTag);
            lastFound = htmlTag.getId();
        }
    }
}

From source file: org.alfresco.repo.content.transform.TransformerDebug.java

private void push(String transformerName, String fromUrl, String sourceMimetype, String targetMimetype,
        long sourceSize, TransformationOptions options, Call callType) {
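    // Track this call on the current thread's stack of debug frames; if the top
    // frame is a pending availability check, upgrade it in place first.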
    Deque<Frame> ourStack = ThreadInfo.getStack();
    Frame frame = ourStack.peek();

    if (callType == Call.TRANSFORM && frame != null && frame.callType == Call.AVAILABLE) {
        frame.setTransformerName(transformerName);
        frame.setSourceSize(sourceSize);
        frame.callType = Call.AVAILABLE_AND_TRANSFORM;
    }

    // Create a new frame. Logging level is set to trace if the file size is 0
    boolean origDebugOutput = ThreadInfo.setDebugOutput(ThreadInfo.getDebugOutput() && sourceSize != 0);
    frame = new Frame(frame, transformerName, fromUrl, sourceMimetype, targetMimetype, sourceSize, options,
            callType, origDebugOutput);
    ourStack.push(frame);

    if (callType == Call.TRANSFORM) {
        // Log the basic info about this transformation
        logBasicDetails(frame, sourceSize, options.getUse(), transformerName, (ourStack.size() == 1));
    }
}

From source file: com.blackberry.logdriver.admin.LogMaintenance.java

@Override
public int run(String[] args) throws Exception {
    Configuration conf = getConf();
    // If run by Oozie, then load the Oozie conf too
    if (System.getProperty("oozie.action.conf.xml") != null) {
        conf.addResource(new URL("file://" + System.getProperty("oozie.action.conf.xml")));
    }

    // For some reason, Oozie needs some options to be set as system properties
    // instead of in the configuration. So copy the configs over.
    {
        Iterator<Entry<String, String>> i = conf.iterator();
        while (i.hasNext()) {
            Entry<String, String> next = i.next();
            System.setProperty(next.getKey(), next.getValue());
        }
    }

    if (args.length < 3) {
        printUsage();
        return 1;
    }

    String userName = args[0];
    String dcNumber = args[1];
    String service = args[2];
    String date = null;
    String hour = null;
    if (args.length >= 4) {
        date = args[3];
    }
    if (args.length >= 5) {
        hour = args[4];
    }

    // Set from environment variables
    String mergeJobPropertiesFile = getConfOrEnv(conf, "MERGEJOB_CONF");
    String filterJobPropertiesFile = getConfOrEnv(conf, "FILTERJOB_CONF");
    String daysBeforeArchive = getConfOrEnv(conf, "DAYS_BEFORE_ARCHIVE");
    String daysBeforeDelete = getConfOrEnv(conf, "DAYS_BEFORE_DELETE");
    String maxConcurrentMR = getConfOrEnv(conf, "MAX_CONCURRENT_MR", "-1");
    String zkConnectString = getConfOrEnv(conf, "ZK_CONNECT_STRING");
    String logdir = getConfOrEnv(conf, "logdriver.logdir.name");
    boolean resetOrphanedJobs = Boolean.parseBoolean(getConfOrEnv(conf, "reset.orphaned.jobs", "true"));
    String rootDir = getConfOrEnv(conf, "service.root.dir");
    String maxTotalMR = getConfOrEnv(conf, "MAX_TOTAL_MR", "-1");

    boolean doMerge = true;
    boolean doArchive = true;
    boolean doDelete = true;

    if (zkConnectString == null) {
        LOG.error("ZK_CONNECT_STRING is not set.  Exiting.");
        return 1;
    }
    if (mergeJobPropertiesFile == null) {
        LOG.info("MERGEJOB_CONF is not set.  Not merging.");
        doMerge = false;
    }
    if (filterJobPropertiesFile == null) {
        LOG.info("FILTERJOB_CONF is not set.  Not archiving.");
        doArchive = false;
    }
    if (daysBeforeArchive == null) {
        LOG.info("DAYS_BEFORE_ARCHIVE is not set.  Not archiving.");
        doArchive = false;
    }
    if (doArchive && Integer.parseInt(daysBeforeArchive) < 0) {
        LOG.info("DAYS_BEFORE_ARCHIVE is negative.  Not archiving.");
        doArchive = false;
    }
    if (daysBeforeDelete == null) {
        LOG.info("DAYS_BEFORE_DELETE is not set.  Not deleting.");
        doDelete = false;
    }
    if (doDelete && Integer.parseInt(daysBeforeDelete) < 0) {
        LOG.info("DAYS_BEFORE_DELETE is negative.  Not deleting.");
        doDelete = false;
    }
    if (logdir == null) {
        LOG.info("LOGDRIVER_LOGDIR_NAME is not set.  Using default value of 'logs'.");
        logdir = "logs";
    }
    if (rootDir == null) {
        LOG.info("SERVICE_ROOT_DIR is not set.  Using default value of 'service'.");
        rootDir = "/service";
    }

    // We can hang if this fails. So make sure we abort if it fails.
    fs = null;
    try {
        fs = FileSystem.get(conf);
        fs.exists(new Path("/")); // Test if it works.
    } catch (IOException e) {
        LOG.error("Error getting filesystem.", e);
        return 1;
    }

    // Create the LockUtil instance
    lockUtil = new LockUtil(zkConnectString);

    // Now it's safe to create our Job Runner
    JobRunner jobRunner = new JobRunner(Integer.parseInt(maxConcurrentMR), Integer.parseInt(maxTotalMR));
    Thread jobRunnerThread = new Thread(jobRunner);
    jobRunnerThread.setName("JobRunner");
    jobRunnerThread.setDaemon(false);
    jobRunnerThread.start();

    // Figure out what date we start filters on.
    String filterCutoffDate = "";
    if (doArchive) {
        Calendar cal = Calendar.getInstance();
        cal.add(Calendar.DAY_OF_MONTH, Integer.parseInt("-" + daysBeforeArchive));
        filterCutoffDate = String.format("%04d%02d%02d%02d", cal.get(Calendar.YEAR),
                (cal.get(Calendar.MONTH) + 1), cal.get(Calendar.DAY_OF_MONTH), cal.get(Calendar.HOUR_OF_DAY));
        LOG.info("Archiving logs from before {}", filterCutoffDate);
    }
    String deleteCutoffDate = "";
    if (doDelete) {
        Calendar cal = Calendar.getInstance();
        cal.add(Calendar.DAY_OF_MONTH, Integer.parseInt("-" + daysBeforeDelete));
        deleteCutoffDate = String.format("%04d%02d%02d%02d", cal.get(Calendar.YEAR),
                (cal.get(Calendar.MONTH) + 1), cal.get(Calendar.DAY_OF_MONTH), cal.get(Calendar.HOUR_OF_DAY));
        LOG.info("Deleting logs from before {}", deleteCutoffDate);
    }

    long now = System.currentTimeMillis();

    // Various exceptions have been popping up here. So make sure I catch them
    // all.
    try {

        // Patterns to recognize hour, day and incoming directories, so that they
        // can be processed.
        Pattern datePathPattern;
        Pattern hourPathPattern;
        Pattern incomingPathPattern;
        Pattern dataPathPattern;
        Pattern archivePathPattern;
        Pattern workingPathPattern;
        if (hour != null) {
            datePathPattern = Pattern.compile(rootDir + "/" + Pattern.quote(dcNumber) + "/"
                    + Pattern.quote(service) + "/" + Pattern.quote(logdir) + "/(" + Pattern.quote(date) + ")");
            hourPathPattern = Pattern.compile(rootDir + "/" + Pattern.quote(dcNumber) + "/"
                    + Pattern.quote(service) + "/" + Pattern.quote(logdir) + "/(" + Pattern.quote(date) + ")/("
                    + Pattern.quote(hour) + ")");
            incomingPathPattern = Pattern.compile(rootDir + "/" + Pattern.quote(dcNumber) + "/"
                    + Pattern.quote(service) + "/" + Pattern.quote(logdir) + "/(" + Pattern.quote(date) + ")/("
                    + Pattern.quote(hour) + ")/([^/]+)/incoming");
            dataPathPattern = Pattern.compile(rootDir + "/" + Pattern.quote(dcNumber) + "/"
                    + Pattern.quote(service) + "/" + Pattern.quote(logdir) + "/(" + Pattern.quote(date) + ")/("
                    + Pattern.quote(hour) + ")/([^/]+)/data");
            archivePathPattern = Pattern.compile(rootDir + "/" + Pattern.quote(dcNumber) + "/"
                    + Pattern.quote(service) + "/" + Pattern.quote(logdir) + "/(" + Pattern.quote(date) + ")/("
                    + Pattern.quote(hour) + ")/([^/]+)/archive");
            workingPathPattern = Pattern.compile(rootDir + "/" + Pattern.quote(dcNumber) + "/"
                    + Pattern.quote(service) + "/" + Pattern.quote(logdir) + "/(" + Pattern.quote(date) + ")/("
                    + Pattern.quote(hour) + ")/([^/]+)/working/([^/]+)_(\\d+)");
        } else if (date != null) {
            datePathPattern = Pattern.compile(rootDir + "/" + Pattern.quote(dcNumber) + "/"
                    + Pattern.quote(service) + "/" + Pattern.quote(logdir) + "/(" + Pattern.quote(date) + ")");
            hourPathPattern = Pattern
                    .compile(rootDir + "/" + Pattern.quote(dcNumber) + "/" + Pattern.quote(service) + "/"
                            + Pattern.quote(logdir) + "/(" + Pattern.quote(date) + ")/(\\d{2})");
            incomingPathPattern = Pattern.compile(rootDir + "/" + Pattern.quote(dcNumber) + "/"
                    + Pattern.quote(service) + "/" + Pattern.quote(logdir) + "/(" + Pattern.quote(date)
                    + ")/(\\d{2})/([^/]+)/incoming");
            dataPathPattern = Pattern
                    .compile(rootDir + "/" + Pattern.quote(dcNumber) + "/" + Pattern.quote(service) + "/"
                            + Pattern.quote(logdir) + "/(" + Pattern.quote(date) + ")/(\\d{2})/([^/]+)/data");
            archivePathPattern = Pattern.compile(rootDir + "/" + Pattern.quote(dcNumber) + "/"
                    + Pattern.quote(service) + "/" + Pattern.quote(logdir) + "/(" + Pattern.quote(date)
                    + ")/(\\d{2})/([^/]+)/archive");
            workingPathPattern = Pattern.compile(rootDir + "/" + Pattern.quote(dcNumber) + "/"
                    + Pattern.quote(service) + "/" + Pattern.quote(logdir) + "/(" + Pattern.quote(date)
                    + ")/(\\d{2})/([^/]+)/working/([^/]+)_(\\d+)");
        } else {
            datePathPattern = Pattern.compile(rootDir + "/" + Pattern.quote(dcNumber) + "/"
                    + Pattern.quote(service) + "/" + Pattern.quote(logdir) + "/(\\d{8})");
            hourPathPattern = Pattern.compile(rootDir + "/" + Pattern.quote(dcNumber) + "/"
                    + Pattern.quote(service) + "/" + Pattern.quote(logdir) + "/(\\d{8})/(\\d{2})");
            incomingPathPattern = Pattern
                    .compile(rootDir + "/" + Pattern.quote(dcNumber) + "/" + Pattern.quote(service) + "/"
                            + Pattern.quote(logdir) + "/(\\d{8})/(\\d{2})/([^/]+)/incoming");
            dataPathPattern = Pattern.compile(rootDir + "/" + Pattern.quote(dcNumber) + "/"
                    + Pattern.quote(service) + "/" + Pattern.quote(logdir) + "/(\\d{8})/(\\d{2})/([^/]+)/data");
            archivePathPattern = Pattern
                    .compile(rootDir + "/" + Pattern.quote(dcNumber) + "/" + Pattern.quote(service) + "/"
                            + Pattern.quote(logdir) + "/(\\d{8})/(\\d{2})/([^/]+)/archive");
            workingPathPattern = Pattern
                    .compile(rootDir + "/" + Pattern.quote(dcNumber) + "/" + Pattern.quote(service) + "/"
                            + Pattern.quote(logdir) + "/(\\d{8})/(\\d{2})/([^/]+)/working/([^/]+)_(\\d+)");
        }

        // Do a depth-first search of the directory, processing anything that
        // looks interesting along the way.
        Deque<Path> paths = new ArrayDeque<Path>();
        Path rootPath = new Path(rootDir + "/" + dcNumber + "/" + service + "/" + logdir + "/");
        paths.push(rootPath);

        while (paths.size() > 0) {
            Path p = paths.pop();
            LOG.debug("{}", p.toString());

            if (!fs.exists(p)) {
                continue;
            }

            FileStatus dirStatus = fs.getFileStatus(p);
            FileStatus[] children = fs.listStatus(p);
            boolean addChildren = true;

            boolean old = dirStatus.getModificationTime() < now - WAIT_TIME;
            LOG.debug("    Was last modified {}ms ago", now - dirStatus.getModificationTime());

            if (!old) {
                LOG.debug("    Skipping, since it's not old enough.");

            } else if ((!rootPath.equals(p)) && (children.length == 0
                    || (children.length == 1 && children[0].getPath().getName().equals(READY_MARKER)))) {
                // old and no children? Delete!
                LOG.info("    Deleting empty directory {}", p.toString());
                fs.delete(p, true);

            } else {
                Matcher matcher = datePathPattern.matcher(p.toUri().getPath());
                if (matcher.matches()) {
                    LOG.debug("Checking date directory");

                    // If this is already done, then skip it. So only process if it
                    // doesn't exist.
                    if (fs.exists(new Path(p, READY_MARKER)) == false) {
                        // Check each subdirectory. If they all have ready markers, then I
                        // guess we're ready.
                        boolean ready = true;
                        for (FileStatus c : children) {
                            if (c.isDirectory() && fs.exists(new Path(c.getPath(), READY_MARKER)) == false) {
                                ready = false;
                                break;
                            }
                        }

                        if (ready) {
                            fs.createNewFile(new Path(p, READY_MARKER));
                        }
                    }
                }

                matcher = hourPathPattern.matcher(p.toUri().getPath());
                if (matcher.matches()) {
                    LOG.debug("Checking hour directory");

                    // If this is already done, then skip it. So only process if it
                    // doesn't exist.
                    if (fs.exists(new Path(p, READY_MARKER)) == false) {
                        // Check each subdirectory. If they all have ready markers, then I
                        // guess we're ready.
                        boolean ready = true;
                        for (FileStatus c : children) {
                            if (c.isDirectory() && fs.exists(new Path(c.getPath(), READY_MARKER)) == false) {
                                ready = false;
                                break;
                            }
                        }

                        if (ready) {
                            fs.createNewFile(new Path(p, READY_MARKER));
                        }
                    }
                }

                // Check to see if we have to run a merge
                matcher = incomingPathPattern.matcher(p.toUri().getPath());
                if (matcher.matches()) {
                    LOG.debug("Checking incoming directory");
                    String matchDate = matcher.group(1);
                    String matchHour = matcher.group(2);
                    String matchComponent = matcher.group(3);

                    String timestamp = matchDate + matchHour;

                    if (doDelete && timestamp.compareTo(deleteCutoffDate) < 0) {
                        LOG.info("Deleting old directory: {}", p);
                        fs.delete(p, true);
                        addChildren = false;
                    } else if (doMerge) {

                        // old, looks right, and has children? Run it!
                        boolean hasMatchingChildren = false;
                        boolean subdirTooYoung = false;

                        for (FileStatus child : children) {
                            if (!hasMatchingChildren) {
                                FileStatus[] grandchildren = fs.listStatus(child.getPath());
                                for (FileStatus gc : grandchildren) {
                                    if (VALID_FILE.matcher(gc.getPath().getName()).matches()) {
                                        hasMatchingChildren = true;
                                        break;
                                    }
                                }
                            }
                            if (!subdirTooYoung) {
                                if (child.getModificationTime() >= now - WAIT_TIME) {
                                    subdirTooYoung = true;
                                    LOG.debug("    Subdir {} is too young.", child.getPath());
                                }
                            }
                        }

                        if (!hasMatchingChildren) {
                            LOG.debug("    No files match the expected pattern ({})", VALID_FILE.pattern());
                        }

                        if (hasMatchingChildren && !subdirTooYoung) {
                            LOG.info("    Run Merge job {} :: {} {} {} {} {}", new Object[] { p.toString(),
                                    dcNumber, service, matchDate, matchHour, matchComponent });

                            Properties jobProps = new Properties();
                            jobProps.load(new FileInputStream(mergeJobPropertiesFile));

                            jobProps.setProperty("jobType", "merge");
                            jobProps.setProperty("rootDir", rootDir);
                            jobProps.setProperty("dcNumber", dcNumber);
                            jobProps.setProperty("service", service);
                            jobProps.setProperty("date", matchDate);
                            jobProps.setProperty("hour", matchHour);
                            jobProps.setProperty("component", matchComponent);
                            jobProps.setProperty("user.name", userName);
                            jobProps.setProperty("logdir", logdir);

                            jobRunner.submit(jobProps);

                            addChildren = false;
                        }
                    }
                }

                // Check to see if we need to run a filter and archive
                matcher = dataPathPattern.matcher(p.toUri().getPath());
                if (matcher.matches()) {
                    String matchDate = matcher.group(1);
                    String matchHour = matcher.group(2);
                    String matchComponent = matcher.group(3);

                    String timestamp = matchDate + matchHour;

                    if (doDelete && timestamp.compareTo(deleteCutoffDate) < 0) {
                        LOG.info("Deleting old directory: {}", p);
                        fs.delete(p, true);
                        addChildren = false;
                    } else if (doArchive && timestamp.compareTo(filterCutoffDate) < 0) {

                        Properties jobProps = new Properties();
                        jobProps.load(new FileInputStream(filterJobPropertiesFile));

                        jobProps.setProperty("jobType", "filter");
                        jobProps.setProperty("rootDir", rootDir);
                        jobProps.setProperty("dcNumber", dcNumber);
                        jobProps.setProperty("service", service);
                        jobProps.setProperty("date", matchDate);
                        jobProps.setProperty("hour", matchHour);
                        jobProps.setProperty("component", matchComponent);
                        jobProps.setProperty("user.name", userName);
                        jobProps.setProperty("logdir", logdir);

                        // Check to see if we should just keep all or delete all here.
                        // The filter file should be here
                        String appPath = jobProps.getProperty("oozie.wf.application.path");
                        appPath = appPath.replaceFirst("\\$\\{.*?\\}", "");
                        Path filterFile = new Path(
                                appPath + "/" + conf.get("filter.definition.file", service + ".yaml"));
                        LOG.info("Filter file is {}", filterFile);
                        if (fs.exists(filterFile)) {
                            List<BoomFilterMapper.Filter> filters = BoomFilterMapper.loadFilters(matchComponent,
                                    fs.open(filterFile));

                            if (filters == null) {
                                LOG.warn(
                                        "    Got null when getting filters.  Not processing. {} :: {} {} {} {} {}",
                                        new Object[] { p.toString(), dcNumber, service, matchDate, matchHour,
                                                matchComponent });
                            } else if (filters.size() == 0) {
                                LOG.warn("    Got no filters.  Not processing. {} :: {} {} {} {} {}",
                                        new Object[] { p.toString(), dcNumber, service, matchDate, matchHour,
                                                matchComponent });
                            } else if (filters.size() == 1
                                    && filters.get(0) instanceof BoomFilterMapper.KeepAllFilter) {
                                LOG.info("    Keeping everything. {} :: {} {} {} {} {}",
                                        new Object[] { p.toString(), dcNumber, service, matchDate, matchHour,
                                                matchComponent });
                                // Move files from data to archive
                                // delete it all!
                                String destination = rootDir + "/" + dcNumber + "/" + service + "/" + logdir
                                        + "/" + matchDate + "/" + matchHour + "/" + matchComponent
                                        + "/archive/";

                                PathInfo pathInfo = new PathInfo();
                                pathInfo.setDcNumber(dcNumber);
                                pathInfo.setService(service);
                                pathInfo.setLogdir(logdir);
                                pathInfo.setDate(matchDate);
                                pathInfo.setHour(matchHour);
                                pathInfo.setComponent(matchComponent);

                                try {
                                    lockUtil.acquireWriteLock(lockUtil.getLockPath(pathInfo));
                                    fs.mkdirs(new Path(destination));
                                    for (FileStatus f : fs.listStatus(p)) {
                                        fs.rename(f.getPath(), new Path(destination));
                                    }
                                } finally {
                                    lockUtil.releaseWriteLock(lockUtil.getLockPath(pathInfo));
                                }
                            } else if (filters.size() == 1
                                    && filters.get(0) instanceof BoomFilterMapper.DropAllFilter) {
                                LOG.info("    Dropping everything. {} :: {} {} {} {} {}",
                                        new Object[] { p.toString(), dcNumber, service, matchDate, matchHour,
                                                matchComponent });

                                PathInfo pathInfo = new PathInfo();
                                pathInfo.setDcNumber(dcNumber);
                                pathInfo.setService(service);
                                pathInfo.setLogdir(logdir);
                                pathInfo.setDate(matchDate);
                                pathInfo.setHour(matchHour);
                                pathInfo.setComponent(matchComponent);

                                try {
                                    lockUtil.acquireWriteLock(lockUtil.getLockPath(pathInfo));
                                    fs.delete(p, true);
                                } finally {
                                    lockUtil.releaseWriteLock(lockUtil.getLockPath(pathInfo));
                                }

                            } else {
                                LOG.info("    Run Filter/Archive job {} :: {} {} {} {} {}",
                                        new Object[] { p.toString(), dcNumber, service, matchDate, matchHour,
                                                matchComponent });
                                jobRunner.submit(jobProps);
                            }
                        } else {
                            LOG.warn("Skipping filter job, since no filter file exists");
                        }

                        addChildren = false;
                    }
                }

                matcher = archivePathPattern.matcher(p.toUri().getPath());
                if (matcher.matches()) {
                    String matchDate = matcher.group(1);
                    String matchHour = matcher.group(2);

                    String timestamp = matchDate + matchHour;

                    if (doDelete && timestamp.compareTo(deleteCutoffDate) < 0) {
                        LOG.info("Deleting old directory: {}", p);
                        fs.delete(p, true);
                        addChildren = false;
                    }
                }

                matcher = workingPathPattern.matcher(p.toUri().getPath());
                if (matcher.matches()) {
                    LOG.info("  Matches working pattern ({})", p);
                    if (resetOrphanedJobs) {
                        String matchDate = matcher.group(1);
                        String matchHour = matcher.group(2);
                        String matchComponent = matcher.group(3);

                        // Move everything from working/xxx/incoming/ to incoming/
                        PathInfo lockPathInfo = new PathInfo(logdir, rootDir + "/" + dcNumber + "/" + service
                                + "/" + logdir + "/" + matchDate + "/" + matchHour + "/" + matchComponent);
                        lockUtil.acquireWriteLock(lockUtil.getLockPath(lockPathInfo));

                        FileStatus[] fileStatuses = fs.listStatus(new Path(p.toUri().getPath() + "/incoming/"));
                        if (fileStatuses != null) {
                            for (FileStatus fileStatus : fileStatuses) {
                                Path toPath = new Path(
                                        fileStatus.getPath().getParent().getParent().getParent().getParent(),
                                        "incoming/" + fileStatus.getPath().getName());

                                LOG.info("  Moving data from {} to {}", fileStatus.getPath(), toPath);
                                LOG.info("    mkdir {}", toPath);
                                fs.mkdirs(toPath);

                                Path fromDir = new Path(p.toUri().getPath(),
                                        "incoming/" + fileStatus.getPath().getName());
                                LOG.info("    moving from {}", fromDir);
                                FileStatus[] files = fs.listStatus(fromDir);
                                if (files == null || files.length == 0) {
                                    LOG.info("    Nothing to move from  {}", fromDir);
                                } else {
                                    for (FileStatus f : files) {
                                        LOG.info("    rename {} {}", f.getPath(),
                                                new Path(toPath, f.getPath().getName()));
                                        fs.rename(f.getPath(), new Path(toPath, f.getPath().getName()));
                                    }
                                }

                                LOG.info("    rm {}", fileStatus.getPath());
                                fs.delete(fileStatus.getPath(), true);
                            }
                            lockUtil.releaseWriteLock(lockUtil.getLockPath(lockPathInfo));

                            fs.delete(new Path(p.toUri().getPath()), true);
                        }
                    }

                    addChildren = false;
                }
            }

            // Add any children which are directories to the stack.
            if (addChildren) {
                for (int i = children.length - 1; i >= 0; i--) {
                    FileStatus child = children[i];
                    if (child.isDirectory()) {
                        paths.push(child.getPath());
                    }
                }
            }
        }

        // Since we may have deleted a bunch of directories, delete any unused
        // locks
        // from ZooKeeper.
        {
            LOG.info("Checking for unused locks in ZooKeeper");
            String scanPath = rootDir + "/" + dcNumber + "/" + service + "/" + logdir;
            if (date != null) {
                scanPath += "/" + date;
                if (hour != null) {
                    scanPath += "/" + hour;
                }
            }

            List<LockInfo> lockInfo = lockUtil.scan(scanPath);

            for (LockInfo li : lockInfo) {
                // Check if the lock path still exists in HDFS. If it doesn't, then
                // delete it from ZooKeeper.
                String path = li.getPath();
                String hdfsPath = path.substring(LockUtil.ROOT.length());
                if (!fs.exists(new Path(hdfsPath))) {
                    ZooKeeper zk = lockUtil.getZkClient();

                    while (!path.equals(LockUtil.ROOT)) {
                        try {
                            zk.delete(path, -1);
                        } catch (KeeperException.NotEmptyException e) {
                            // That's fine. just stop trying then.
                            break;
                        } catch (Exception e) {
                            LOG.error("Caught exception trying to delete from ZooKeeper.", e);
                            break;
                        }
                        LOG.info("Deleted from ZooKeeper: {}", path);
                        path = path.substring(0, path.lastIndexOf('/'));
                    }

                }
            }
        }

        // Now that we're done, wait for the Oozie Runner to stop, and print the
        // results.
        LOG.info("Waiting for Oozie jobs to complete.");
        jobRunner.shutdown();
        jobRunnerThread.join();
        LOG.info("Job Stats : Started={} Succeeded={} failed={} errors={}",
                new Object[] { jobRunner.getStarted(), jobRunner.getSucceeded(), jobRunner.getFailed(),
                        jobRunner.getErrors() });

        lockUtil.close();

    } catch (Exception e) {
        LOG.error("Unexpected exception caught.", e);
        return 1;
    }

    return 0;
}

From source file: loci.formats.in.LIFReader.java

private void populateOriginalMetadata(Element root, Deque<String> nameStack) {
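    // Qualifying element names are pushed onto the deque on the way down the
    // XML tree to build a "parent|child|" metadata key prefix, then popped
    // once the element's children have been processed.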
    String name = root.getNodeName();
    if (root.hasAttributes() && !name.equals("Element") && !name.equals("Attachment")
            && !name.equals("LMSDataContainerHeader")) {
        nameStack.push(name);

        String suffix = root.getAttribute("Identifier");
        String value = root.getAttribute("Variant");
        if (suffix == null || suffix.trim().length() == 0) {
            suffix = root.getAttribute("Description");
        }
        StringBuffer key = new StringBuffer();
        final Iterator<String> nameStackIterator = nameStack.descendingIterator();
        while (nameStackIterator.hasNext()) {
            final String k = nameStackIterator.next();
            key.append(k);
            key.append("|");
        }
        if (suffix != null && value != null && suffix.length() > 0 && value.length() > 0
                && !suffix.equals("HighInteger") && !suffix.equals("LowInteger")) {
            addSeriesMetaList(key.toString() + suffix, value);
        } else {
            NamedNodeMap attributes = root.getAttributes();
            for (int i = 0; i < attributes.getLength(); i++) {
                Attr attr = (Attr) attributes.item(i);
                if (!attr.getName().equals("HighInteger") && !attr.getName().equals("LowInteger")) {
                    addSeriesMeta(key.toString() + attr.getName(), attr.getValue());
                }
            }
        }
    }

    NodeList children = root.getChildNodes();
    for (int i = 0; i < children.getLength(); i++) {
        Object child = children.item(i);
        if (child instanceof Element) {
            populateOriginalMetadata((Element) child, nameStack);
        }
    }

    if (root.hasAttributes() && !name.equals("Element") && !name.equals("Attachment")
            && !name.equals("LMSDataContainerHeader")) {
        nameStack.pop();
    }
}

From source file: cgeo.geocaching.CacheListActivity.java

/**
 * Method to asynchronously refresh the caches' details.
 */
private void loadDetails(final DisposableHandler handler, final List<Geocache> caches,
        final Set<Integer> additionalListIds) {
    final Observable<Geocache> allCaches;
    if (Settings.isStoreOfflineMaps()) {
        allCaches = Observable.create(new ObservableOnSubscribe<Geocache>() {
            private final Disposable disposable = Disposables.empty();

            @Override
            public void subscribe(final ObservableEmitter<Geocache> emitter) throws Exception {
                emitter.setDisposable(disposable);
                final Deque<Geocache> withStaticMaps = new LinkedList<>();
                for (final Geocache cache : caches) {
                    if (disposable.isDisposed()) {
                        return;
                    }
                    if (cache.hasStaticMap()) {
                        withStaticMaps.push(cache);
                    } else {
                        emitter.onNext(cache);
                    }
                }
                for (final Geocache cache : withStaticMaps) {
                    if (disposable.isDisposed()) {
                        return;
                    }
                    emitter.onNext(cache);
                }
                emitter.onComplete();
            }
        }).subscribeOn(Schedulers.io());
    } else {
        allCaches = Observable.fromIterable(caches);
    }
    final Observable<Geocache> loaded = allCaches.flatMap(new Function<Geocache, Observable<Geocache>>() {
        @Override
        public Observable<Geocache> apply(final Geocache cache) {
            return Observable.create(new ObservableOnSubscribe<Geocache>() {
                @Override
                public void subscribe(final ObservableEmitter<Geocache> emitter) throws Exception {
                    cache.refreshSynchronous(null, additionalListIds);
                    detailProgress.incrementAndGet();
                    handler.obtainMessage(DownloadProgress.MSG_LOADED, cache).sendToTarget();
                    emitter.onComplete();
                }
            }).subscribeOn(AndroidRxUtils.refreshScheduler);
        }
    }).doOnComplete(new Action() {
        @Override
        public void run() {
            handler.sendEmptyMessage(DownloadProgress.MSG_DONE);
        }
    });
    handler.add(loaded.subscribe());
}

From source file: jetbrains.exodus.entitystore.PersistentEntityStoreImpl.java

void registerTransaction(@NotNull final PersistentStoreTransaction txn) {
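    // Each thread keeps its own stack of open transactions; create the stack
    // lazily and push the new transaction on top.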
    final Thread thread = Thread.currentThread();
    Deque<PersistentStoreTransaction> stack = txns.get(thread);
    if (stack == null) {
        stack = new ArrayDeque<>(4);
        txns.put(thread, stack);
    }
    stack.push(txn);
}

From source file: org.apache.hadoop.hive.ql.QTestUtil.java

/**
 * Given the current configurations (e.g., hadoop version and execution mode), return
 * the correct file name to compare with the current test run output.
 * @param outDir The directory where the reference log files are stored.
 * @param testName The test file name (terminated by ".out").
 * @return The file name appended with the configuration values if it exists.
 */
public String outPath(String outDir, String testName) {
    String ret = (new File(outDir, testName)).getPath();
    // List of configurations. Currently the list consists of hadoop version and execution mode only
    List<String> configs = new ArrayList<String>();
    configs.add(this.hadoopVer);

    Deque<String> stack = new LinkedList<String>();
    StringBuilder sb = new StringBuilder();
    sb.append(testName);
    stack.push(sb.toString());

    // example file names are input1.q.out_0.20.0_minimr or input2.q.out_0.17
    for (String s : configs) {
        sb.append('_');
        sb.append(s);
        stack.push(sb.toString());
    }
    while (stack.size() > 0) {
        String fileName = stack.pop();
        File f = new File(outDir, fileName);
        if (f.exists()) {
            ret = f.getPath();
            break;
        }
    }
    return ret;
}

From source file: org.betaconceptframework.astroboa.engine.definition.visitor.CmsPropertyVisitor.java

private Deque<XSType> collectAllSuperTypesIncludingProvidedElement(XSElementDecl element) {

    Deque<XSType> types = new ArrayDeque<XSType>();

    XSType currentElement = element.getType();

    boolean elementIsContentObjectType = false;

    while (!elementIsContentObjectType) {

        elementIsContentObjectType = currentElement.getName() != null
                && CmsDefinitionItem.contentObjectType.equals(ItemUtils.createNewItem(null,
                        currentElement.getTargetNamespace(), currentElement.getName()));

        if (elementIsContentObjectType) {

            if (currentElement.isComplexType()) {
                types.push(currentElement);
            }

            break; //Need to go no further
        } else {

            // Method getBaseType is always non-null. According to the documentation,
            // "Note that if this type represents xs:anyType, this method returns itself."
            // Therefore break out of the loop if we come across such a case.
            if (currentElement == currentElement.getBaseType()) {
                break;
            } else {
                if (currentElement.isComplexType()) {
                    types.push(currentElement);
                }

                currentElement = currentElement.getBaseType();
            }
        }
    }

    return types;

}

From source file: org.apache.asterix.app.translator.QueryTranslator.java

private static ARecordType createEnforcedType(ARecordType initialType, List<Index> indexes)
        throws AlgebricksException {
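    // For each enforced secondary-index key, walk down the (possibly nested)
    // record type along the key path, keeping the parent types on a stack so
    // the nested records can be rebuilt bottom-up with the key field added.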
    ARecordType enforcedType = initialType;
    for (Index index : indexes) {
        if (!index.isSecondaryIndex() || !index.isEnforcingKeyFileds()) {
            continue;
        }
        if (index.hasMetaFields()) {
            throw new AlgebricksException("Indexing an open field is only supported on the record part");
        }
        for (int i = 0; i < index.getKeyFieldNames().size(); i++) {
            Deque<Pair<ARecordType, String>> nestedTypeStack = new ArrayDeque<>();
            List<String> splits = index.getKeyFieldNames().get(i);
            ARecordType nestedFieldType = enforcedType;
            boolean openRecords = false;
            String bridgeName = nestedFieldType.getTypeName();
            int j;
            // Build the stack for the enforced type
            for (j = 1; j < splits.size(); j++) {
                nestedTypeStack.push(new Pair<ARecordType, String>(nestedFieldType, splits.get(j - 1)));
                bridgeName = nestedFieldType.getTypeName();
                nestedFieldType = (ARecordType) enforcedType.getSubFieldType(splits.subList(0, j));
                if (nestedFieldType == null) {
                    openRecords = true;
                    break;
                }
            }
            if (openRecords) {
                // create the smallest record
                enforcedType = new ARecordType(splits.get(splits.size() - 2),
                        new String[] { splits.get(splits.size() - 1) },
                        new IAType[] { AUnionType.createUnknownableType(index.getKeyFieldTypes().get(i)) },
                        true);
                // create the open part of the nested field
                for (int k = splits.size() - 3; k > (j - 2); k--) {
                    enforcedType = new ARecordType(splits.get(k), new String[] { splits.get(k + 1) },
                            new IAType[] { AUnionType.createUnknownableType(enforcedType) }, true);
                }
                // Bridge the gap
                Pair<ARecordType, String> gapPair = nestedTypeStack.pop();
                ARecordType parent = gapPair.first;

                IAType[] parentFieldTypes = ArrayUtils.addAll(parent.getFieldTypes().clone(),
                        new IAType[] { AUnionType.createUnknownableType(enforcedType) });
                enforcedType = new ARecordType(bridgeName,
                        ArrayUtils.addAll(parent.getFieldNames(), enforcedType.getTypeName()), parentFieldTypes,
                        true);
            } else {
                //Schema is closed all the way to the field
                //enforced fields are either null or strongly typed
                LinkedHashMap<String, IAType> recordNameTypesMap = createRecordNameTypeMap(nestedFieldType);
                // if an enforced field already exists and the type is correct
                IAType enforcedFieldType = recordNameTypesMap.get(splits.get(splits.size() - 1));
                if (enforcedFieldType != null && enforcedFieldType.getTypeTag() == ATypeTag.UNION
                        && ((AUnionType) enforcedFieldType).isUnknownableType()) {
                    enforcedFieldType = ((AUnionType) enforcedFieldType).getActualType();
                }
                if (enforcedFieldType != null && !ATypeHierarchy.canPromote(enforcedFieldType.getTypeTag(),
                        index.getKeyFieldTypes().get(i).getTypeTag())) {
                    throw new AlgebricksException("Cannot enforce field " + index.getKeyFieldNames().get(i)
                            + " to have type " + index.getKeyFieldTypes().get(i));
                }
                if (enforcedFieldType == null) {
                    recordNameTypesMap.put(splits.get(splits.size() - 1),
                            AUnionType.createUnknownableType(index.getKeyFieldTypes().get(i)));
                }
                enforcedType = new ARecordType(nestedFieldType.getTypeName(),
                        recordNameTypesMap.keySet().toArray(new String[recordNameTypesMap.size()]),
                        recordNameTypesMap.values().toArray(new IAType[recordNameTypesMap.size()]),
                        nestedFieldType.isOpen());
            }

            // Create the enforced type for the nested fields in the schema, from the ground up
            if (!nestedTypeStack.isEmpty()) {
                while (!nestedTypeStack.isEmpty()) {
                    Pair<ARecordType, String> nestedTypePair = nestedTypeStack.pop();
                    ARecordType nestedRecType = nestedTypePair.first;
                    IAType[] nestedRecTypeFieldTypes = nestedRecType.getFieldTypes().clone();
                    nestedRecTypeFieldTypes[nestedRecType.getFieldIndex(nestedTypePair.second)] = enforcedType;
                    enforcedType = new ARecordType(nestedRecType.getTypeName() + "_enforced",
                            nestedRecType.getFieldNames(), nestedRecTypeFieldTypes, nestedRecType.isOpen());
                }
            }
        }
    }
    return enforcedType;
}