Example usage for java.util.Deque.pop()

Introduction

On this page you can find usage examples for java.util.Deque.pop().

Prototype

E pop();

Document

Pops an element from the stack represented by this deque. In other words, removes and returns the first element of this deque.
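
Before the per-project examples below, here is a minimal, self-contained sketch (not taken from any of the listed source files) showing the LIFO behaviour of push() and pop() on an ArrayDeque; note that pop() throws NoSuchElementException when the deque is empty.

import java.util.ArrayDeque;
import java.util.Deque;

public class DequePopExample {
    public static void main(String[] args) {
        Deque<String> stack = new ArrayDeque<>();
        stack.push("first");
        stack.push("second");

        // pop() removes and returns the head of the deque, i.e. the most
        // recently pushed element (LIFO order).
        System.out.println(stack.pop()); // prints "second"
        System.out.println(stack.pop()); // prints "first"

        // pop() on an empty deque throws NoSuchElementException, so the
        // examples below typically guard with isEmpty() or size() first.
        if (!stack.isEmpty()) {
            System.out.println(stack.pop());
        }
    }
}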

Usage

From source file:net.solarnetwork.util.JavaBeanXmlSerializer.java

/**
 * Parse XML into a simple Map structure.
 *
 * @param in
 *        the input stream to parse
 * @return a Map of the XML
 */
public Map<String, Object> parseXml(InputStream in) {
    Deque<Map<String, Object>> stack = new LinkedList<Map<String, Object>>();
    Map<String, Object> result = null;
    XMLStreamReader reader = startParse(in);
    try {
        int eventType;
        boolean parsing = true;
        while (parsing) {
            eventType = reader.next();
            switch (eventType) {
            case XMLStreamConstants.END_DOCUMENT:
                parsing = false;
                break;

            case XMLStreamConstants.START_ELEMENT:
                String name = reader.getLocalName();
                if (stack.isEmpty()) {
                    result = new LinkedHashMap<String, Object>();
                    stack.push(result);
                } else {
                    Map<String, Object> el = new LinkedHashMap<String, Object>();
                    putMapValue(stack.peek(), name, el);
                    stack.push(el);
                }
                parseElement(stack.peek(), reader);
                break;

            case XMLStreamConstants.END_ELEMENT:
                // element closed: pop its Map off the nesting stack
                stack.pop();
                break;

            }
        }
    } catch (XMLStreamException e) {
        throw new RuntimeException(e);
    } finally {
        endParse(reader);
    }
    return result;
}

From source file:edu.upenn.cis.orchestra.workloadgenerator.Generator.java

public void findSimpleCycles(List<List<Integer>> cycles, List<List<Object>> mappings) {
    // First, index the edges
    List<List<Integer>> edges = new ArrayList<List<Integer>>();

    for (int i = 0; i < _peers.size(); i++) {
        edges.add(new ArrayList<Integer>());
    }

    for (List<Object> thisMapping : mappings) {
        edges.get((Integer) thisMapping.get(0)).add((Integer) thisMapping.get(1));
    }

    for (List<Integer> thisEdge : edges) {
        Collections.sort(thisEdge);
    }

    // Find simple cycles as follows:
    // - Handle the peers in order
    // - Find simple cycles where the smallest node in the cycle
    // is the peer
    cycles.clear();
    for (int i = 0; i < _peers.size(); i++) {
        Deque<List<Integer>> paths = new ArrayDeque<List<Integer>>();
        paths.push(new ArrayList<Integer>());
        paths.peek().add(i);
        while (0 != paths.size()) {
            // pop the most recently pushed partial path and try to extend it
            List<Integer> path = paths.pop();
            for (Integer j : edges.get(path.get(path.size() - 1))) {
                if (j.equals(i)) {
                    List<Integer> cycle = new ArrayList<Integer>();
                    cycle.addAll(path);
                    cycle.add(j);
                    cycles.add(cycle);
                } else if (j > i && !path.contains(j)) {
                    List<Integer> newPath = new ArrayList<Integer>();
                    newPath.addAll(path);
                    newPath.add(j);
                    paths.push(newPath);
                }
            }
        }
    }
}

From source file:net.dv8tion.jda.core.entities.impl.ReceivedMessage.java

@Override
public String getContentStripped() {
    if (strippedContent != null)
        return strippedContent;
    synchronized (mutex) {
        if (strippedContent != null)
            return strippedContent;
        String tmp = getContentDisplay();
        //all the formatting keys to keep track of
        String[] keys = new String[] { "*", "_", "`", "~~" };

        //find all tokens (formatting strings described above)
        TreeSet<FormatToken> tokens = new TreeSet<>(Comparator.comparingInt(t -> t.start));
        for (String key : keys) {
            Matcher matcher = Pattern.compile(Pattern.quote(key)).matcher(tmp);
            while (matcher.find())
                tokens.add(new FormatToken(key, matcher.start()));
        }

        //iterate over all tokens, find all matching pairs, and add them to the list toRemove
        Deque<FormatToken> stack = new ArrayDeque<>();
        List<FormatToken> toRemove = new ArrayList<>();
        boolean inBlock = false;
        for (FormatToken token : tokens) {
            if (stack.isEmpty() || !stack.peek().format.equals(token.format)
                    || stack.peek().start + token.format.length() == token.start) {
                //we are at opening tag
                if (!inBlock) {
                    //we are outside of block -> handle normally
                    if (token.format.equals("`")) {
                        //block start... invalidate all previous tags
                        stack.clear();
                        inBlock = true;
                    }
                    stack.push(token);
                } else if (token.format.equals("`")) {
                    //we are inside of a block -> handle only block tag
                    stack.push(token);
                }
            } else if (!stack.isEmpty()) {
                //we found a matching close-tag
                toRemove.add(stack.pop());
                toRemove.add(token);
                if (token.format.equals("`") && stack.isEmpty())
                    //close tag closed the block
                    inBlock = false;
            }
        }

        //sort tags to remove by their start-index and iteratively build the remaining string
        toRemove.sort(Comparator.comparingInt(t -> t.start));
        StringBuilder out = new StringBuilder();
        int currIndex = 0;
        for (FormatToken formatToken : toRemove) {
            if (currIndex < formatToken.start)
                out.append(tmp.substring(currIndex, formatToken.start));
            currIndex = formatToken.start + formatToken.format.length();
        }
        if (currIndex < tmp.length())
            out.append(tmp.substring(currIndex));
        //return the stripped text, escaping all remaining formatting characters (those that did not have a
        // matching open/close tag, or that were left/right of a block)
        return strippedContent = out.toString().replace("*", "\\*").replace("_", "\\_").replace("~", "\\~");
    }
}

From source file:com.blackberry.logdriver.admin.LogMaintenance.java

@Override
public int run(String[] args) throws Exception {
    Configuration conf = getConf();
    // If run by Oozie, then load the Oozie conf too
    if (System.getProperty("oozie.action.conf.xml") != null) {
        conf.addResource(new URL("file://" + System.getProperty("oozie.action.conf.xml")));
    }

    // For some reason, Oozie needs some options to be set as system properties
    // instead of in the configuration. So copy the configs over.
    {
        Iterator<Entry<String, String>> i = conf.iterator();
        while (i.hasNext()) {
            Entry<String, String> next = i.next();
            System.setProperty(next.getKey(), next.getValue());
        }
    }

    if (args.length < 3) {
        printUsage();
        return 1;
    }

    String userName = args[0];
    String dcNumber = args[1];
    String service = args[2];
    String date = null;
    String hour = null;
    if (args.length >= 4) {
        date = args[3];
    }
    if (args.length >= 5) {
        hour = args[4];
    }

    // Set from environment variables
    String mergeJobPropertiesFile = getConfOrEnv(conf, "MERGEJOB_CONF");
    String filterJobPropertiesFile = getConfOrEnv(conf, "FILTERJOB_CONF");
    String daysBeforeArchive = getConfOrEnv(conf, "DAYS_BEFORE_ARCHIVE");
    String daysBeforeDelete = getConfOrEnv(conf, "DAYS_BEFORE_DELETE");
    String maxConcurrentMR = getConfOrEnv(conf, "MAX_CONCURRENT_MR", "-1");
    String zkConnectString = getConfOrEnv(conf, "ZK_CONNECT_STRING");
    String logdir = getConfOrEnv(conf, "logdriver.logdir.name");
    boolean resetOrphanedJobs = Boolean.parseBoolean(getConfOrEnv(conf, "reset.orphaned.jobs", "true"));
    String rootDir = getConfOrEnv(conf, "service.root.dir");
    String maxTotalMR = getConfOrEnv(conf, "MAX_TOTAL_MR", "-1");

    boolean doMerge = true;
    boolean doArchive = true;
    boolean doDelete = true;

    if (zkConnectString == null) {
        LOG.error("ZK_CONNECT_STRING is not set.  Exiting.");
        return 1;
    }
    if (mergeJobPropertiesFile == null) {
        LOG.info("MERGEJOB_CONF is not set.  Not merging.");
        doMerge = false;
    }
    if (filterJobPropertiesFile == null) {
        LOG.info("FILTERJOB_CONF is not set.  Not archiving.");
        doArchive = false;
    }
    if (daysBeforeArchive == null) {
        LOG.info("DAYS_BEFORE_ARCHIVE is not set.  Not archiving.");
        doArchive = false;
    }
    if (doArchive && Integer.parseInt(daysBeforeArchive) < 0) {
        LOG.info("DAYS_BEFORE_ARCHIVE is negative.  Not archiving.");
        doArchive = false;
    }
    if (daysBeforeDelete == null) {
        LOG.info("DAYS_BEFORE_DELETE is not set.  Not deleting.");
        doDelete = false;
    }
    if (doDelete && Integer.parseInt(daysBeforeDelete) < 0) {
        LOG.info("DAYS_BEFORE_DELETE is negative.  Not deleting.");
        doDelete = false;
    }
    if (logdir == null) {
        LOG.info("LOGDRIVER_LOGDIR_NAME is not set.  Using default value of 'logs'.");
        logdir = "logs";
    }
    if (rootDir == null) {
        LOG.info("SERVICE_ROOT_DIR is not set.  Using default value of 'service'.");
        rootDir = "/service";
    }

    // We can hang if this fails. So make sure we abort if it fails.
    fs = null;
    try {
        fs = FileSystem.get(conf);
        fs.exists(new Path("/")); // Test if it works.
    } catch (IOException e) {
        LOG.error("Error getting filesystem.", e);
        return 1;
    }

    // Create the LockUtil instance
    lockUtil = new LockUtil(zkConnectString);

    // Now it's safe to create our Job Runner
    JobRunner jobRunner = new JobRunner(Integer.parseInt(maxConcurrentMR), Integer.parseInt(maxTotalMR));
    Thread jobRunnerThread = new Thread(jobRunner);
    jobRunnerThread.setName("JobRunner");
    jobRunnerThread.setDaemon(false);
    jobRunnerThread.start();

    // Figure out what date we start filters on.
    String filterCutoffDate = "";
    if (doArchive) {
        Calendar cal = Calendar.getInstance();
        cal.add(Calendar.DAY_OF_MONTH, Integer.parseInt("-" + daysBeforeArchive));
        filterCutoffDate = String.format("%04d%02d%02d%02d", cal.get(Calendar.YEAR),
                (cal.get(Calendar.MONTH) + 1), cal.get(Calendar.DAY_OF_MONTH), cal.get(Calendar.HOUR_OF_DAY));
        LOG.info("Archiving logs from before {}", filterCutoffDate);
    }
    String deleteCutoffDate = "";
    if (doDelete) {
        Calendar cal = Calendar.getInstance();
        cal.add(Calendar.DAY_OF_MONTH, Integer.parseInt("-" + daysBeforeDelete));
        deleteCutoffDate = String.format("%04d%02d%02d%02d", cal.get(Calendar.YEAR),
                (cal.get(Calendar.MONTH) + 1), cal.get(Calendar.DAY_OF_MONTH), cal.get(Calendar.HOUR_OF_DAY));
        LOG.info("Deleting logs from before {}", deleteCutoffDate);
    }

    long now = System.currentTimeMillis();

    // Various exceptions have been popping up here. So make sure I catch them
    // all.
    try {

        // Patterns to recognize hour, day and incoming directories, so that they
        // can be processed.
        Pattern datePathPattern;
        Pattern hourPathPattern;
        Pattern incomingPathPattern;
        Pattern dataPathPattern;
        Pattern archivePathPattern;
        Pattern workingPathPattern;
        if (hour != null) {
            datePathPattern = Pattern.compile(rootDir + "/" + Pattern.quote(dcNumber) + "/"
                    + Pattern.quote(service) + "/" + Pattern.quote(logdir) + "/(" + Pattern.quote(date) + ")");
            hourPathPattern = Pattern.compile(rootDir + "/" + Pattern.quote(dcNumber) + "/"
                    + Pattern.quote(service) + "/" + Pattern.quote(logdir) + "/(" + Pattern.quote(date) + ")/("
                    + Pattern.quote(hour) + ")");
            incomingPathPattern = Pattern.compile(rootDir + "/" + Pattern.quote(dcNumber) + "/"
                    + Pattern.quote(service) + "/" + Pattern.quote(logdir) + "/(" + Pattern.quote(date) + ")/("
                    + Pattern.quote(hour) + ")/([^/]+)/incoming");
            dataPathPattern = Pattern.compile(rootDir + "/" + Pattern.quote(dcNumber) + "/"
                    + Pattern.quote(service) + "/" + Pattern.quote(logdir) + "/(" + Pattern.quote(date) + ")/("
                    + Pattern.quote(hour) + ")/([^/]+)/data");
            archivePathPattern = Pattern.compile(rootDir + "/" + Pattern.quote(dcNumber) + "/"
                    + Pattern.quote(service) + "/" + Pattern.quote(logdir) + "/(" + Pattern.quote(date) + ")/("
                    + Pattern.quote(hour) + ")/([^/]+)/archive");
            workingPathPattern = Pattern.compile(rootDir + "/" + Pattern.quote(dcNumber) + "/"
                    + Pattern.quote(service) + "/" + Pattern.quote(logdir) + "/(" + Pattern.quote(date) + ")/("
                    + Pattern.quote(hour) + ")/([^/]+)/working/([^/]+)_(\\d+)");
        } else if (date != null) {
            datePathPattern = Pattern.compile(rootDir + "/" + Pattern.quote(dcNumber) + "/"
                    + Pattern.quote(service) + "/" + Pattern.quote(logdir) + "/(" + Pattern.quote(date) + ")");
            hourPathPattern = Pattern
                    .compile(rootDir + "/" + Pattern.quote(dcNumber) + "/" + Pattern.quote(service) + "/"
                            + Pattern.quote(logdir) + "/(" + Pattern.quote(date) + ")/(\\d{2})");
            incomingPathPattern = Pattern.compile(rootDir + "/" + Pattern.quote(dcNumber) + "/"
                    + Pattern.quote(service) + "/" + Pattern.quote(logdir) + "/(" + Pattern.quote(date)
                    + ")/(\\d{2})/([^/]+)/incoming");
            dataPathPattern = Pattern
                    .compile(rootDir + "/" + Pattern.quote(dcNumber) + "/" + Pattern.quote(service) + "/"
                            + Pattern.quote(logdir) + "/(" + Pattern.quote(date) + ")/(\\d{2})/([^/]+)/data");
            archivePathPattern = Pattern.compile(rootDir + "/" + Pattern.quote(dcNumber) + "/"
                    + Pattern.quote(service) + "/" + Pattern.quote(logdir) + "/(" + Pattern.quote(date)
                    + ")/(\\d{2})/([^/]+)/archive");
            workingPathPattern = Pattern.compile(rootDir + "/" + Pattern.quote(dcNumber) + "/"
                    + Pattern.quote(service) + "/" + Pattern.quote(logdir) + "/(" + Pattern.quote(date)
                    + ")/(\\d{2})/([^/]+)/working/([^/]+)_(\\d+)");
        } else {
            datePathPattern = Pattern.compile(rootDir + "/" + Pattern.quote(dcNumber) + "/"
                    + Pattern.quote(service) + "/" + Pattern.quote(logdir) + "/(\\d{8})");
            hourPathPattern = Pattern.compile(rootDir + "/" + Pattern.quote(dcNumber) + "/"
                    + Pattern.quote(service) + "/" + Pattern.quote(logdir) + "/(\\d{8})/(\\d{2})");
            incomingPathPattern = Pattern
                    .compile(rootDir + "/" + Pattern.quote(dcNumber) + "/" + Pattern.quote(service) + "/"
                            + Pattern.quote(logdir) + "/(\\d{8})/(\\d{2})/([^/]+)/incoming");
            dataPathPattern = Pattern.compile(rootDir + "/" + Pattern.quote(dcNumber) + "/"
                    + Pattern.quote(service) + "/" + Pattern.quote(logdir) + "/(\\d{8})/(\\d{2})/([^/]+)/data");
            archivePathPattern = Pattern
                    .compile(rootDir + "/" + Pattern.quote(dcNumber) + "/" + Pattern.quote(service) + "/"
                            + Pattern.quote(logdir) + "/(\\d{8})/(\\d{2})/([^/]+)/archive");
            workingPathPattern = Pattern
                    .compile(rootDir + "/" + Pattern.quote(dcNumber) + "/" + Pattern.quote(service) + "/"
                            + Pattern.quote(logdir) + "/(\\d{8})/(\\d{2})/([^/]+)/working/([^/]+)_(\\d+)");
        }

        // Do a depth-first search of the directory, processing anything that
        // looks interesting along the way
        Deque<Path> paths = new ArrayDeque<Path>();
        Path rootPath = new Path(rootDir + "/" + dcNumber + "/" + service + "/" + logdir + "/");
        paths.push(rootPath);

        while (paths.size() > 0) {
            Path p = paths.pop();
            LOG.debug("{}", p.toString());

            if (!fs.exists(p)) {
                continue;
            }

            FileStatus dirStatus = fs.getFileStatus(p);
            FileStatus[] children = fs.listStatus(p);
            boolean addChildren = true;

            boolean old = dirStatus.getModificationTime() < now - WAIT_TIME;
            LOG.debug("    Was last modified {}ms ago", now - dirStatus.getModificationTime());

            if (!old) {
                LOG.debug("    Skipping, since it's not old enough.");

            } else if ((!rootPath.equals(p)) && (children.length == 0
                    || (children.length == 1 && children[0].getPath().getName().equals(READY_MARKER)))) {
                // old and no children? Delete!
                LOG.info("    Deleting empty directory {}", p.toString());
                fs.delete(p, true);

            } else {
                Matcher matcher = datePathPattern.matcher(p.toUri().getPath());
                if (matcher.matches()) {
                    LOG.debug("Checking date directory");

                    // If this is already done, then skip it. So only process if it
                    // doesn't exist.
                    if (fs.exists(new Path(p, READY_MARKER)) == false) {
                        // Check each subdirectory. If they all have ready markers, then I
                        // guess we're ready.
                        boolean ready = true;
                        for (FileStatus c : children) {
                            if (c.isDirectory() && fs.exists(new Path(c.getPath(), READY_MARKER)) == false) {
                                ready = false;
                                break;
                            }
                        }

                        if (ready) {
                            fs.createNewFile(new Path(p, READY_MARKER));
                        }
                    }
                }

                matcher = hourPathPattern.matcher(p.toUri().getPath());
                if (matcher.matches()) {
                    LOG.debug("Checking hour directory");

                    // If this is already done, then skip it. So only process if it
                    // doesn't exist.
                    if (fs.exists(new Path(p, READY_MARKER)) == false) {
                        // Check each subdirectory. If they all have ready markers, then I
                        // guess we're ready.
                        boolean ready = true;
                        for (FileStatus c : children) {
                            if (c.isDirectory() && fs.exists(new Path(c.getPath(), READY_MARKER)) == false) {
                                ready = false;
                                break;
                            }
                        }

                        if (ready) {
                            fs.createNewFile(new Path(p, READY_MARKER));
                        }
                    }
                }

                // Check to see if we have to run a merge
                matcher = incomingPathPattern.matcher(p.toUri().getPath());
                if (matcher.matches()) {
                    LOG.debug("Checking incoming directory");
                    String matchDate = matcher.group(1);
                    String matchHour = matcher.group(2);
                    String matchComponent = matcher.group(3);

                    String timestamp = matchDate + matchHour;

                    if (doDelete && timestamp.compareTo(deleteCutoffDate) < 0) {
                        LOG.info("Deleting old directory: {}", p);
                        fs.delete(p, true);
                        addChildren = false;
                    } else if (doMerge) {

                        // old, looks right, and has children? Run it!
                        boolean hasMatchingChildren = false;
                        boolean subdirTooYoung = false;

                        for (FileStatus child : children) {
                            if (!hasMatchingChildren) {
                                FileStatus[] grandchildren = fs.listStatus(child.getPath());
                                for (FileStatus gc : grandchildren) {
                                    if (VALID_FILE.matcher(gc.getPath().getName()).matches()) {
                                        hasMatchingChildren = true;
                                        break;
                                    }
                                }
                            }
                            if (!subdirTooYoung) {
                                if (child.getModificationTime() >= now - WAIT_TIME) {
                                    subdirTooYoung = true;
                                    LOG.debug("    Subdir {} is too young.", child.getPath());
                                }
                            }
                        }

                        if (!hasMatchingChildren) {
                            LOG.debug("    No files match the expected pattern ({})", VALID_FILE.pattern());
                        }

                        if (hasMatchingChildren && !subdirTooYoung) {
                            LOG.info("    Run Merge job {} :: {} {} {} {} {}", new Object[] { p.toString(),
                                    dcNumber, service, matchDate, matchHour, matchComponent });

                            Properties jobProps = new Properties();
                            jobProps.load(new FileInputStream(mergeJobPropertiesFile));

                            jobProps.setProperty("jobType", "merge");
                            jobProps.setProperty("rootDir", rootDir);
                            jobProps.setProperty("dcNumber", dcNumber);
                            jobProps.setProperty("service", service);
                            jobProps.setProperty("date", matchDate);
                            jobProps.setProperty("hour", matchHour);
                            jobProps.setProperty("component", matchComponent);
                            jobProps.setProperty("user.name", userName);
                            jobProps.setProperty("logdir", logdir);

                            jobRunner.submit(jobProps);

                            addChildren = false;
                        }
                    }
                }

                // Check to see if we need to run a filter and archive
                matcher = dataPathPattern.matcher(p.toUri().getPath());
                if (matcher.matches()) {
                    String matchDate = matcher.group(1);
                    String matchHour = matcher.group(2);
                    String matchComponent = matcher.group(3);

                    String timestamp = matchDate + matchHour;

                    if (doDelete && timestamp.compareTo(deleteCutoffDate) < 0) {
                        LOG.info("Deleting old directory: {}", p);
                        fs.delete(p, true);
                        addChildren = false;
                    } else if (doArchive && timestamp.compareTo(filterCutoffDate) < 0) {

                        Properties jobProps = new Properties();
                        jobProps.load(new FileInputStream(filterJobPropertiesFile));

                        jobProps.setProperty("jobType", "filter");
                        jobProps.setProperty("rootDir", rootDir);
                        jobProps.setProperty("dcNumber", dcNumber);
                        jobProps.setProperty("service", service);
                        jobProps.setProperty("date", matchDate);
                        jobProps.setProperty("hour", matchHour);
                        jobProps.setProperty("component", matchComponent);
                        jobProps.setProperty("user.name", userName);
                        jobProps.setProperty("logdir", logdir);

                        // Check to see if we should just keep all or delete all here.
                        // The filter file should be here
                        String appPath = jobProps.getProperty("oozie.wf.application.path");
                        appPath = appPath.replaceFirst("\\$\\{.*?\\}", "");
                        Path filterFile = new Path(
                                appPath + "/" + conf.get("filter.definition.file", service + ".yaml"));
                        LOG.info("Filter file is {}", filterFile);
                        if (fs.exists(filterFile)) {
                            List<BoomFilterMapper.Filter> filters = BoomFilterMapper.loadFilters(matchComponent,
                                    fs.open(filterFile));

                            if (filters == null) {
                                LOG.warn(
                                        "    Got null when getting filters.  Not processing. {} :: {} {} {} {} {}",
                                        new Object[] { p.toString(), dcNumber, service, matchDate, matchHour,
                                                matchComponent });
                            } else if (filters.size() == 0) {
                                LOG.warn("    Got no filters.  Not processing. {} :: {} {} {} {} {}",
                                        new Object[] { p.toString(), dcNumber, service, matchDate, matchHour,
                                                matchComponent });
                            } else if (filters.size() == 1
                                    && filters.get(0) instanceof BoomFilterMapper.KeepAllFilter) {
                                LOG.info("    Keeping everything. {} :: {} {} {} {} {}",
                                        new Object[] { p.toString(), dcNumber, service, matchDate, matchHour,
                                                matchComponent });
                                // Move files from data to archive
                                // delete it all!
                                String destination = rootDir + "/" + dcNumber + "/" + service + "/" + logdir
                                        + "/" + matchDate + "/" + matchHour + "/" + matchComponent
                                        + "/archive/";

                                PathInfo pathInfo = new PathInfo();
                                pathInfo.setDcNumber(dcNumber);
                                pathInfo.setService(service);
                                pathInfo.setLogdir(logdir);
                                pathInfo.setDate(matchDate);
                                pathInfo.setHour(matchHour);
                                pathInfo.setComponent(matchComponent);

                                try {
                                    lockUtil.acquireWriteLock(lockUtil.getLockPath(pathInfo));
                                    fs.mkdirs(new Path(destination));
                                    for (FileStatus f : fs.listStatus(p)) {
                                        fs.rename(f.getPath(), new Path(destination));
                                    }
                                } finally {
                                    lockUtil.releaseWriteLock(lockUtil.getLockPath(pathInfo));
                                }
                            } else if (filters.size() == 1
                                    && filters.get(0) instanceof BoomFilterMapper.DropAllFilter) {
                                LOG.info("    Dropping everything. {} :: {} {} {} {} {}",
                                        new Object[] { p.toString(), dcNumber, service, matchDate, matchHour,
                                                matchComponent });

                                PathInfo pathInfo = new PathInfo();
                                pathInfo.setDcNumber(dcNumber);
                                pathInfo.setService(service);
                                pathInfo.setLogdir(logdir);
                                pathInfo.setDate(matchDate);
                                pathInfo.setHour(matchHour);
                                pathInfo.setComponent(matchComponent);

                                try {
                                    lockUtil.acquireWriteLock(lockUtil.getLockPath(pathInfo));
                                    fs.delete(p, true);
                                } finally {
                                    lockUtil.releaseWriteLock(lockUtil.getLockPath(pathInfo));
                                }

                            } else {
                                LOG.info("    Run Filter/Archive job {} :: {} {} {} {} {}",
                                        new Object[] { p.toString(), dcNumber, service, matchDate, matchHour,
                                                matchComponent });
                                jobRunner.submit(jobProps);
                            }
                        } else {
                            LOG.warn("Skipping filter job, since no filter file exists");
                        }

                        addChildren = false;
                    }
                }

                matcher = archivePathPattern.matcher(p.toUri().getPath());
                if (matcher.matches()) {
                    String matchDate = matcher.group(1);
                    String matchHour = matcher.group(2);

                    String timestamp = matchDate + matchHour;

                    if (doDelete && timestamp.compareTo(deleteCutoffDate) < 0) {
                        LOG.info("Deleting old directory: {}", p);
                        fs.delete(p, true);
                        addChildren = false;
                    }
                }

                matcher = workingPathPattern.matcher(p.toUri().getPath());
                if (matcher.matches()) {
                    LOG.info("  Matches working pattern ({})", p);
                    if (resetOrphanedJobs) {
                        String matchDate = matcher.group(1);
                        String matchHour = matcher.group(2);
                        String matchComponent = matcher.group(3);

                        // Move everything from working/xxx/incoming/ to incoming/
                        PathInfo lockPathInfo = new PathInfo(logdir, rootDir + "/" + dcNumber + "/" + service
                                + "/" + logdir + "/" + matchDate + "/" + matchHour + "/" + matchComponent);
                        lockUtil.acquireWriteLock(lockUtil.getLockPath(lockPathInfo));

                        FileStatus[] fileStatuses = fs.listStatus(new Path(p.toUri().getPath() + "/incoming/"));
                        if (fileStatuses != null) {
                            for (FileStatus fileStatus : fileStatuses) {
                                Path toPath = new Path(
                                        fileStatus.getPath().getParent().getParent().getParent().getParent(),
                                        "incoming/" + fileStatus.getPath().getName());

                                LOG.info("  Moving data from {} to {}", fileStatus.getPath(), toPath);
                                LOG.info("    mkdir {}", toPath);
                                fs.mkdirs(toPath);

                                Path fromDir = new Path(p.toUri().getPath(),
                                        "incoming/" + fileStatus.getPath().getName());
                                LOG.info("    moving from {}", fromDir);
                                FileStatus[] files = fs.listStatus(fromDir);
                                if (files == null || files.length == 0) {
                                    LOG.info("    Nothing to move from  {}", fromDir);
                                } else {
                                    for (FileStatus f : files) {
                                        LOG.info("    rename {} {}", f.getPath(),
                                                new Path(toPath, f.getPath().getName()));
                                        fs.rename(f.getPath(), new Path(toPath, f.getPath().getName()));
                                    }
                                }

                                LOG.info("    rm {}", fileStatus.getPath());
                                fs.delete(fileStatus.getPath(), true);
                            }
                            lockUtil.releaseWriteLock(lockUtil.getLockPath(lockPathInfo));

                            fs.delete(new Path(p.toUri().getPath()), true);
                        }
                    }

                    addChildren = false;
                }
            }

            // Add any children which are directories to the stack.
            if (addChildren) {
                for (int i = children.length - 1; i >= 0; i--) {
                    FileStatus child = children[i];
                    if (child.isDirectory()) {
                        paths.push(child.getPath());
                    }
                }
            }
        }

        // Since we may have deleted a bunch of directories, delete any unused
        // locks from ZooKeeper.
        {
            LOG.info("Checking for unused locks in ZooKeeper");
            String scanPath = rootDir + "/" + dcNumber + "/" + service + "/" + logdir;
            if (date != null) {
                scanPath += "/" + date;
                if (hour != null) {
                    scanPath += "/" + hour;
                }
            }

            List<LockInfo> lockInfo = lockUtil.scan(scanPath);

            for (LockInfo li : lockInfo) {
                // Check if the lock path still exists in HDFS. If it doesn't, then
                // delete it from ZooKeeper.
                String path = li.getPath();
                String hdfsPath = path.substring(LockUtil.ROOT.length());
                if (!fs.exists(new Path(hdfsPath))) {
                    ZooKeeper zk = lockUtil.getZkClient();

                    while (!path.equals(LockUtil.ROOT)) {
                        try {
                            zk.delete(path, -1);
                        } catch (KeeperException.NotEmptyException e) {
                            // That's fine. Just stop trying then.
                            break;
                        } catch (Exception e) {
                            LOG.error("Caught exception trying to delete from ZooKeeper.", e);
                            break;
                        }
                        LOG.info("Deleted from ZooKeeper: {}", path);
                        path = path.substring(0, path.lastIndexOf('/'));
                    }

                }
            }
        }

        // Now that we're done, wait for the Oozie Runner to stop, and print the
        // results.
        LOG.info("Waiting for Oozie jobs to complete.");
        jobRunner.shutdown();
        jobRunnerThread.join();
        LOG.info("Job Stats : Started={} Succeeded={} failed={} errors={}",
                new Object[] { jobRunner.getStarted(), jobRunner.getSucceeded(), jobRunner.getFailed(),
                        jobRunner.getErrors() });

        lockUtil.close();

    } catch (Exception e) {
        LOG.error("Unexpected exception caught.", e);
        return 1;
    }

    return 0;
}

From source file:brainflow.app.presentation.controls.FileObjectGroupSelector.java

private static String globToRegexPattern(final String glob) throws PatternSyntaxException {
    /* Stack to keep track of the parser mode: */
    /* "--" : Base mode (first on the stack)   */
    /* "[]" : Square brackets mode "[...]"     */
    /* "{}" : Curly braces mode "{...}"        */
    final Deque<String> parserMode = new ArrayDeque<String>();
    parserMode.push("--"); // base mode

    final int globLength = glob.length();
    int index = 0; // index in glob

    /* equivalent REGEX expression to be compiled */
    final StringBuilder t = new StringBuilder();

    while (index < globLength) {
        char c = glob.charAt(index++);

        if (c == '\\') {
            /***********************
             * (1) ESCAPE SEQUENCE *
             ***********************/

            if (index == globLength) {
                /* no characters left, so treat '\' as literal char */
                t.append(Pattern.quote("\\"));
            } else {
                /* read next character */
                c = glob.charAt(index);
                final String s = c + "";

                if (("--".equals(parserMode.peek()) && "\\[]{}?*".contains(s))
                        || ("[]".equals(parserMode.peek()) && "\\[]{}?*!-".contains(s))
                        || ("{}".equals(parserMode.peek()) && "\\[]{}?*,".contains(s))) {
                    /* escape the construct char */
                    index++;
                    t.append(Pattern.quote(s));
                } else {
                    /* treat '\' as literal char */
                    t.append(Pattern.quote("\\"));
                }
            }
        } else if (c == '*') {
            /************************
             * (2) GLOB PATTERN '*' *
             ************************/

            /* create non-capturing group to match zero or more characters */
            t.append(".*");
        } else if (c == '?') {
            /************************
             * (3) GLOB PATTERN '?' *
             ************************/

            /* create non-capturing group to match exactly one character */
            t.append('.');
        } else if (c == '[') {
            /****************************
             * (4) GLOB PATTERN "[...]" *
             ****************************/

            /* opening square bracket '[' */
            /* create non-capturing group to match exactly one character */
            /* inside the sequence */
            t.append('[');
            parserMode.push("[]");

            /* check for negation character '!' immediately after */
            /* the opening bracket '[' */
            if ((index < globLength) && (glob.charAt(index) == '!')) {
                index++;
                t.append('^');
            }
        } else if ((c == ']') && "[]".equals(parserMode.peek())) {
            /* closing square bracket ']' */
            t.append(']');
            parserMode.pop();
        } else if ((c == '-') && "[]".equals(parserMode.peek())) {
            /* character range '-' in "[...]" */
            t.append('-');
        } else if (c == '{') {
            /****************************
             * (5) GLOB PATTERN "{...}" *
             ****************************/

            /* opening curly brace '{' */
            /* create non-capturing group to match one of the */
            /* strings inside the sequence */
            t.append("(?:(?:");
            parserMode.push("{}");
        } else if ((c == '}') && "{}".equals(parserMode.peek())) {
            /* closing curly brace '}' */
            t.append("))");
            parserMode.pop();
        } else if ((c == ',') && "{}".equals(parserMode.peek())) {
            /* comma between strings in "{...}" */
            t.append(")|(?:");
        } else {
            /*************************
             * (6) LITERAL CHARACTER *
             *************************/

            /* convert literal character to a regex string */
            t.append(Pattern.quote(c + ""));
        }
    }
    /* done parsing all chars of the source pattern string */

    /* check for mismatched [...] or {...} */
    if ("[]".equals(parserMode.peek()))
        throw new PatternSyntaxException("Cannot find matching closing square bracket ] in GLOB expression",
                glob, -1);

    if ("{}".equals(parserMode.peek()))
        throw new PatternSyntaxException("Cannot find matching closing curly brace } in GLOB expression", glob,
                -1);

    return t.toString();
}

From source file:de.escalon.hypermedia.spring.hydra.PagedResourcesSerializer.java

@Override
public void serialize(PagedResources pagedResources, JsonGenerator jgen, SerializerProvider serializerProvider)
        throws IOException {

    final SerializationConfig config = serializerProvider.getConfig();
    JavaType javaType = config.constructType(pagedResources.getClass());

    JsonSerializer<Object> serializer = BeanSerializerFactory.instance.createSerializer(serializerProvider,
            javaType);

    // replicate pretty much everything from JacksonHydraSerializer
    // since we must reorganize the internals of pagedResources to get a hydra collection
    // with partial page view, we have to serialize pagedResources with an
    // unwrapping serializer
    Deque<LdContext> contextStack = (Deque<LdContext>) serializerProvider.getAttribute(KEY_LD_CONTEXT);
    if (contextStack == null) {
        contextStack = new ArrayDeque<LdContext>();
        serializerProvider.setAttribute(KEY_LD_CONTEXT, contextStack);
    }

    // TODO: filter next/previous/first/last from link list - maybe create new PagedResources without them?
    List<Link> links = pagedResources.getLinks();
    List<Link> filteredLinks = new ArrayList<Link>();
    for (Link link : links) {
        String rel = link.getRel();
        if (navigationRels.contains(rel)) {
            continue;
        } else {
            filteredLinks.add(link);
        }
    }

    PagedResources toRender = new PagedResources(pagedResources.getContent(), pagedResources.getMetadata(),
            filteredLinks);

    jgen.writeStartObject();

    serializeContext(toRender, jgen, serializerProvider, contextStack);

    jgen.writeStringField(JsonLdKeywords.AT_TYPE, "hydra:Collection");

    // serialize with PagedResourcesMixin
    serializer.unwrappingSerializer(NameTransformer.NOP).serialize(toRender, jgen, serializerProvider);

    PagedResources.PageMetadata metadata = pagedResources.getMetadata();
    jgen.writeNumberField("hydra:totalItems", metadata.getTotalElements());

    // begin hydra:view
    jgen.writeObjectFieldStart("hydra:view");
    jgen.writeStringField(JsonLdKeywords.AT_TYPE, "hydra:PartialCollectionView");
    writeRelLink(pagedResources, jgen, Link.REL_NEXT);
    writeRelLink(pagedResources, jgen, "previous");
    // must also translate prev to its synonym previous
    writeRelLink(pagedResources, jgen, Link.REL_PREVIOUS, "previous");
    writeRelLink(pagedResources, jgen, Link.REL_FIRST);
    writeRelLink(pagedResources, jgen, Link.REL_LAST);
    jgen.writeEndObject();
    // end hydra:view

    jgen.writeEndObject();

    contextStack = (Deque<LdContext>) serializerProvider.getAttribute(KEY_LD_CONTEXT);
    if (!contextStack.isEmpty()) {
        contextStack.pop();
    }

}

From source file:com.datastax.loader.CqlDelimLoad.java

public boolean run(String[] args)
        throws IOException, ParseException, InterruptedException, ExecutionException, KeyStoreException,
        NoSuchAlgorithmException, KeyManagementException, CertificateException, UnrecoverableKeyException {
    if (false == parseArgs(args)) {
        System.err.println("Bad arguments");
        System.err.println(usage());
        return false;
    }

    // Setup
    if (false == setup())
        return false;

    // open file
    Deque<File> fileList = new ArrayDeque<File>();
    File infile = null;
    File[] inFileList = null;
    boolean onefile = true;
    if (STDIN.equalsIgnoreCase(filename)) {
        infile = null;
    } else {
        infile = new File(filename);
        if (infile.isFile()) {
            // single regular file: handled by the single-file branch below
        } else {
            inFileList = infile.listFiles();
            if (inFileList.length < 1)
                throw new IOException("directory is empty");
            onefile = false;
            Arrays.sort(inFileList, new Comparator<File>() {
                public int compare(File f1, File f2) {
                    return f1.getName().compareTo(f2.getName());
                }
            });
            for (int i = 0; i < inFileList.length; i++)
                fileList.push(inFileList[i]);
        }
    }

    // Launch Threads
    ExecutorService executor;
    long total = 0;
    if (onefile) {
        // One file/stdin to process
        executor = Executors.newSingleThreadExecutor();
        Callable<Long> worker = new CqlDelimLoadTask(cqlSchema, delimiter, charsPerColumn, nullString,
                commentString, dateFormatString, localDateFormatString, boolStyle, locale, maxErrors, skipRows,
                skipCols, maxRows, badDir, infile, session, consistencyLevel, numFutures, batchSize, numRetries,
                queryTimeout, maxInsertErrors, successDir, failureDir, nullsUnset, format, keyspace, table);
        Future<Long> res = executor.submit(worker);
        total = res.get();
        executor.shutdown();
    } else {
        executor = Executors.newFixedThreadPool(numThreads);
        Set<Future<Long>> results = new HashSet<Future<Long>>();
        while (!fileList.isEmpty()) {
            File tFile = fileList.pop();
            Callable<Long> worker = new CqlDelimLoadTask(cqlSchema, delimiter, charsPerColumn, nullString,
                    commentString, dateFormatString, localDateFormatString, boolStyle, locale, maxErrors,
                    skipRows, skipCols, maxRows, badDir, tFile, session, consistencyLevel, numFutures,
                    batchSize, numRetries, queryTimeout, maxInsertErrors, successDir, failureDir, nullsUnset,
                    format, keyspace, table);
            results.add(executor.submit(worker));
        }
        executor.shutdown();
        for (Future<Long> res : results)
            total += res.get();
    }

    // Cleanup
    cleanup();
    //System.err.println("Total rows inserted: " + total);

    return true;
}

From source file:jetbrains.exodus.entitystore.FileSystemBlobVaultOld.java

@Override
public BackupStrategy getBackupStrategy() {
    return new BackupStrategy() {

        @Override
        public Iterable<FileDescriptor> listFiles() {
            return new Iterable<FileDescriptor>() {
                @Override
                public Iterator<FileDescriptor> iterator() {
                    final Deque<FileDescriptor> queue = new LinkedList<>();
                    queue.add(new FileDescriptor(location, blobsDirectory + File.separator));
                    return new Iterator<FileDescriptor>() {
                        int i = 0;
                        int n = 0;
                        File[] files;
                        FileDescriptor next;
                        String currentPrefix;

                        @Override
                        public boolean hasNext() {
                            if (next != null) {
                                return true;
                            }
                            while (i < n) {
                                final File file = files[i++];
                                final String name = file.getName();
                                if (file.isDirectory()) {
                                    queue.push(new FileDescriptor(file,
                                            currentPrefix + file.getName() + File.separator));
                                } else if (file.isFile()) {
                                    final long fileSize = file.length();
                                    if (fileSize == 0)
                                        continue;
                                    if (name.endsWith(blobExtension) || name.equalsIgnoreCase(VERSION_FILE)) {
                                        next = new FileDescriptor(file, currentPrefix, fileSize);
                                        return true;
                                    }
                                } else {
                                    // something strange with filesystem
                                    throw new EntityStoreException(
                                            "File or directory expected: " + file.toString());
                                }
                            }
                            if (queue.isEmpty()) {
                                return false;
                            }
                            final FileDescriptor fd = queue.pop();
                            files = IOUtil.listFiles(fd.getFile());
                            currentPrefix = fd.getPath();
                            i = 0;
                            n = files.length;
                            next = fd;
                            return true;
                        }

                        @Override
                        public FileDescriptor next() {
                            if (!hasNext()) {
                                throw new NoSuchElementException();
                            }
                            final FileDescriptor result = next;
                            next = null;
                            return result;
                        }

                        @Override
                        public void remove() {
                            throw new UnsupportedOperationException();
                        }
                    };
                }
            };
        }
    };
}

From source file:jetbrains.exodus.entitystore.PersistentEntityStoreImpl.java

void deregisterTransaction(@NotNull final PersistentStoreTransaction txn) {
    final Thread thread = Thread.currentThread();
    final Deque<PersistentStoreTransaction> stack = txns.get(thread);
    if (stack == null) {
        throw new EntityStoreException("Transaction was already finished");
    }
    if (txn != stack.peek()) {
        throw new EntityStoreException("Can't finish transaction: nested transaction is not finished");
    }
    stack.pop();
    if (stack.isEmpty()) {
        txns.remove(thread);
    }
}

From source file:loci.formats.in.LIFReader.java

private void populateOriginalMetadata(Element root, Deque<String> nameStack) {
    String name = root.getNodeName();
    if (root.hasAttributes() && !name.equals("Element") && !name.equals("Attachment")
            && !name.equals("LMSDataContainerHeader")) {
        nameStack.push(name);

        String suffix = root.getAttribute("Identifier");
        String value = root.getAttribute("Variant");
        if (suffix == null || suffix.trim().length() == 0) {
            suffix = root.getAttribute("Description");
        }
        StringBuffer key = new StringBuffer();
        final Iterator<String> nameStackIterator = nameStack.descendingIterator();
        while (nameStackIterator.hasNext()) {
            final String k = nameStackIterator.next();
            key.append(k);
            key.append("|");
        }
        if (suffix != null && value != null && suffix.length() > 0 && value.length() > 0
                && !suffix.equals("HighInteger") && !suffix.equals("LowInteger")) {
            addSeriesMetaList(key.toString() + suffix, value);
        } else {
            NamedNodeMap attributes = root.getAttributes();
            for (int i = 0; i < attributes.getLength(); i++) {
                Attr attr = (Attr) attributes.item(i);
                if (!attr.getName().equals("HighInteger") && !attr.getName().equals("LowInteger")) {
                    addSeriesMeta(key.toString() + attr.getName(), attr.getValue());
                }
            }
        }
    }

    NodeList children = root.getChildNodes();
    for (int i = 0; i < children.getLength(); i++) {
        Object child = children.item(i);
        if (child instanceof Element) {
            populateOriginalMetadata((Element) child, nameStack);
        }
    }

    if (root.hasAttributes() && !name.equals("Element") && !name.equals("Attachment")
            && !name.equals("LMSDataContainerHeader")) {
        nameStack.pop();
    }
}