List of usage examples for java.util.Queue.poll()
E poll();
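Queue.poll() retrieves and removes the head of the queue, returning null if the queue is empty (unlike remove(), which throws NoSuchElementException on an empty queue). Before the real-world examples below, here is a minimal standalone sketch of the drain-with-poll idiom they all rely on; the class name PollDemo is hypothetical and not taken from any of the source files.

import java.util.ArrayDeque;
import java.util.Queue;

public class PollDemo {
    public static void main(String[] args) {
        // ArrayDeque does not permit null elements, so a null return from poll()
        // unambiguously means the queue is empty.
        Queue<String> queue = new ArrayDeque<>();
        queue.add("first");
        queue.add("second");

        // poll() removes and returns the head, or null when the queue is empty,
        // so it serves as both the "take next item" call and the loop's exit test.
        String head;
        while ((head = queue.poll()) != null) {
            System.out.println(head);
        }

        // The queue is now drained; further calls keep returning null instead of throwing.
        System.out.println(queue.poll()); // prints "null"
    }
}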
From source file:org.apache.hadoop.hbase.replication.regionserver.DumpReplicationQueues.java
static DumpOptions parseOpts(Queue<String> args) {
    DumpOptions opts = new DumpOptions();
    String cmd = null;
    while ((cmd = args.poll()) != null) {
        if (cmd.equals("-h") || cmd.equals("--h") || cmd.equals("--help")) {
            // place item back onto queue so that caller knows parsing was incomplete
            args.add(cmd);
            break;
        }
        final String hdfs = "--hdfs";
        if (cmd.equals(hdfs)) {
            opts.setHdfs(true);
            continue;
        }
        final String distributed = "--distributed";
        if (cmd.equals(distributed)) {
            opts.setDistributed(true);
            continue;
        } else {
            printUsageAndExit("ERROR: Unrecognized option/command: " + cmd, -1);
        }
        // check that --distributed is present when --hdfs is in the arguments
        if (!opts.isDistributed() && opts.isHdfs()) {
            printUsageAndExit("ERROR: --hdfs option can only be used with --distributed: " + cmd, -1);
        }
    }
    return opts;
}
From source file:com.wrmsr.wava.basic.BasicLoopInfo.java
public static Set<Name> getLoopContents(Name loop, Multimap<Name, Name> inputs, Multimap<Name, Name> backEdges) {
    Set<Name> seen = new HashSet<>();
    seen.add(loop);
    Queue<Name> queue = new LinkedList<>();
    inputs.get(loop).stream()
            .filter(n -> !n.equals(loop) && backEdges.containsEntry(loop, n))
            .forEach(queue::add);
    queue.forEach(seen::add);
    while (!queue.isEmpty()) {
        Name cur = queue.poll();
        inputs.get(cur).stream()
                .filter(input -> !seen.contains(input))
                .forEach(input -> {
                    seen.add(input);
                    queue.add(input);
                });
    }
    return seen;
}
From source file:org.xwiki.xdomviz.Main.java
/**
 * <p>
 * This method performs a normalization of the tree by removing all the multi-edges (due to a node having multiple
 * parents). This happens often with SpaceBlocks, which are reused all over the XDOM. The algorithm performs a
 * breadth first visit. For each visited node, it checks how many times a child occurs in its children list. If a
 * child occurs more than once, then we create new nodes, one for each occurrence.
 * </p>
 * <p>
 * The node tree corresponding to the XDOM of "this is a test" is:
 * </p>
 * <ul>
 * <li>XDOM -> P</li>
 * <li>P -> "This"</li>
 * <li>P -> S (3 edges, each one representing a space)</li>
 * <li>P -> "is"</li>
 * <li>P -> "a"</li>
 * <li>P -> "test"</li>
 * </ul>
 * <p>
 * The normalized tree will be:
 * </p>
 * <ul>
 * <li>XDOM -> P</li>
 * <li>P -> "This"</li>
 * <li>P -> S</li>
 * <li>P -> "is"</li>
 * <li>P -> S</li>
 * <li>P -> "a"</li>
 * <li>P -> S</li>
 * <li>P -> "test"</li>
 * </ul>
 * <p>
 * In a normalized tree, each node has one and only one parent.
 * </p>
 *
 * @param root The root node of the tree.
 * @return The root node of the normalized tree.
 */
private static Node normalize(Node root)
{
    // Breadth first visit of the tree.
    Queue<Node> nodesQueue = new ArrayDeque<Node>();
    nodesQueue.add(root);
    while (!nodesQueue.isEmpty()) {
        Node node = nodesQueue.poll();

        // This map contains, for the current node, the positions at which each child occurs in the children list.
        Map<Node, List<Integer>> nodeToIndexesMap = new HashMap<Node, List<Integer>>();
        int i = 0;
        // For each child, store its position(s) in the indexes list.
        for (Node child : node.getChildren()) {
            List<Integer> indexes = nodeToIndexesMap.get(child);
            if (indexes == null) {
                indexes = new ArrayList<Integer>();
                nodeToIndexesMap.put(child, indexes);
            }
            indexes.add(i);
            i++;
        }

        for (Node child : nodeToIndexesMap.keySet()) {
            List<Integer> indexes = nodeToIndexesMap.get(child);
            // If the indexes size is > 1, then the child occurs multiple times in the children list:
            // replace each occurrence with a fresh node so that every node has a single parent.
            if (indexes.size() > 1) {
                for (Integer index : indexes) {
                    Node newNode = new Node(child.getBlock());
                    newNode.getParents().add(node);
                    newNode.getChildren().addAll(child.getChildren());
                    node.getChildren().set(index, newNode);
                }
            }
        }

        for (Node child : node.getChildren()) {
            nodesQueue.add(child);
        }
    }

    return root;
}
From source file:org.xwiki.xdomviz.Main.java
/**
 * <p>
 * This method creates an isomorphic tree using node structures instead of blocks. This is necessary because a
 * single XDOM block can be a child of multiple parents, but the getParent() method is only able to return a
 * single parent. Using this alternative representation, full parent information is correctly stored in each node.
 * </p>
 * <p>
 * The node tree representation also allows the manipulation of the tree structure, because all the attributes of a
 * node are mutable.
 * </p>
 *
 * @param xdom The XDOM to convert.
 * @return The root node of the new tree.
 */
private static Node createNodeTree(XDOM xdom)
{
    // The list of the nodes created from the visited XDOM blocks.
    List<Node> nodes = new ArrayList<Node>();

    // Breadth first visit of the XDOM.
    Queue<Block> blocksQueue = new ArrayDeque<Block>();
    blocksQueue.add(xdom.getRoot());
    while (!blocksQueue.isEmpty()) {
        Block block = blocksQueue.poll();

        // If there isn't a node corresponding to this block, create it!
        Node parentNode = findNode(nodes, block);
        if (parentNode == null) {
            parentNode = new Node(block);
            nodes.add(parentNode);
        }

        for (Block child : block.getChildren()) {
            blocksQueue.add(child);

            // If there isn't a node corresponding to this child-block, create it!
            Node childNode = findNode(nodes, child);
            if (childNode == null) {
                childNode = new Node(child);
                nodes.add(childNode);
            }

            // Link parent and child.
            parentNode.getChildren().add(childNode);
            childNode.getParents().add(parentNode);
        }
    }

    return findNode(nodes, xdom.getRoot());
}
From source file:org.janusgraph.util.system.ConfigurationFileFilter.java
public static int filter(String inputContextDirPath, String outputContextDirPath) throws IOException {

    // Read args[0] as a dirname and iterate recursively over its file contents
    File inputContextDir = new File(inputContextDirPath);
    File outputContextDir = new File(outputContextDirPath);

    log.info("Input context dir: {}", inputContextDir);
    log.info("Output context dir: {}", outputContextDir);

    Preconditions.checkArgument(inputContextDir.isDirectory(),
            "Input context dir %s is not a directory", inputContextDir);
    Preconditions.checkArgument(inputContextDir.canRead(),
            "Input context dir %s is not readable", inputContextDir);

    if (!outputContextDir.exists()) {
        outputContextDir.mkdirs(); // may fail if path exists as a file
    }

    Queue<InputRecord> dirQueue = new LinkedList<InputRecord>();
    dirQueue.add(new InputRecord(inputContextDir, File.separator));

    int parseErrors = 0;
    int visitedDirs = 0;
    int processedFiles = 0;
    InputRecord rec;

    while (null != (rec = dirQueue.poll())) {

        File curDir = rec.getDirectory();
        String contextPath = rec.getContextPath();

        Preconditions.checkState(curDir.exists());
        Preconditions.checkState(curDir.isDirectory());
        Preconditions.checkState(curDir.canRead());

        visitedDirs++;

        for (File f : curDir.listFiles()) {

            if (f.isDirectory()) {
                if (!f.canRead()) {
                    log.warn("Skipping unreadable directory {} in input basedir", f);
                    continue;
                }
                dirQueue.add(new InputRecord(f, contextPath + f.getName() + File.separator));
            } else {
                if (!f.canRead()) {
                    log.warn("Skipping unreadable file {} in input basedir", f);
                    continue;
                }
                File outputDir = new File(outputContextDir.getPath() + contextPath);
                if (!outputDir.exists()) {
                    outputDir.mkdirs();
                }
                parseErrors += processFile(f, new File(outputContextDir.getPath() + contextPath + f.getName()));
                processedFiles++;
            }
        }
    }

    String summaryTemplate = "Summary: visited {} dir(s) and processed {} file(s) with {} parse error(s).";

    if (0 == parseErrors) {
        log.info(summaryTemplate, visitedDirs, processedFiles, parseErrors);
    } else {
        log.error(summaryTemplate, visitedDirs, processedFiles, parseErrors);
    }

    return parseErrors;
}
From source file:eu.stratosphere.nephele.jobmanager.scheduler.RecoveryLogic.java
private static void findVerticesToRestart(final ExecutionVertex failedVertex,
        final Set<ExecutionVertex> verticesToBeCanceled) {

    final Queue<ExecutionVertex> verticesToTest = new ArrayDeque<ExecutionVertex>();
    final Set<ExecutionVertex> visited = new HashSet<ExecutionVertex>();
    verticesToTest.add(failedVertex);

    while (!verticesToTest.isEmpty()) {

        final ExecutionVertex vertex = verticesToTest.poll();

        // Predecessors must be either checkpoints or need to be restarted, too
        for (int j = 0; j < vertex.getNumberOfPredecessors(); j++) {
            final ExecutionVertex predecessor = vertex.getPredecessor(j);

            if (hasInstanceAssigned(predecessor)) {
                verticesToBeCanceled.add(predecessor);
            }

            if (!visited.contains(predecessor)) {
                verticesToTest.add(predecessor);
            }
        }
        visited.add(vertex);
    }
}
From source file:com.demandware.vulnapp.util.Helpers.java
/**
 * Given a root dir and a file name, search all subdirectories for the file.
 *
 * @return null if root or name is null, or if the file is not found; the found file otherwise
 */
public static File findFile(File root, String name) {
    if (root == null || name == null) {
        return null;
    }
    File foundFile = null;
    Queue<File> filesAndDirs = new LinkedList<File>();
    filesAndDirs.add(root);
    while (!filesAndDirs.isEmpty() && foundFile == null) {
        File file = filesAndDirs.poll();
        if (file.isDirectory()) {
            File[] files = file.listFiles();
            if (files != null) {
                for (File f : files) {
                    filesAndDirs.add(f);
                }
            }
        } else {
            if (file.getName().equals(name)) {
                foundFile = file;
            }
        }
    }
    return foundFile;
}
From source file:org.apache.tajo.rpc.RpcChannelFactory.java
/**
 * This function returns an EventLoopGroup by key. A fetcher client will have one or more EventLoopGroups for its
 * throughput.
 *
 * @param clientId
 * @param workerNum
 * @return
 */
public static EventLoopGroup getSharedClientEventloopGroup(ClientChannelId clientId, int workerNum) {
    Queue<EventLoopGroup> eventLoopGroupQueue;
    EventLoopGroup returnEventLoopGroup;

    synchronized (lockObjectForLoopGroup) {
        eventLoopGroupQueue = eventLoopGroupPool.get(clientId);
        if (eventLoopGroupQueue == null) {
            eventLoopGroupQueue = createClientEventloopGroups(clientId, workerNum);
        }

        returnEventLoopGroup = eventLoopGroupQueue.poll();
        if (isEventLoopGroupShuttingDown(returnEventLoopGroup)) {
            returnEventLoopGroup = createClientEventloopGroup(clientId.name(), workerNum);
        }
        eventLoopGroupQueue.add(returnEventLoopGroup);
    }

    return returnEventLoopGroup;
}
From source file:com.enderville.enderinstaller.util.InstallScript.java
/**
 * Repackages all the files in the tmp directory to the new minecraft.jar
 *
 * @param tmp The temp directory where mods were installed.
 * @param mcjar The location to save the new minecraft.jar.
 * @throws IOException
 */
public static void repackMCJar(File tmp, File mcjar) throws IOException {
    byte[] dat = new byte[4 * 1024];

    JarOutputStream jarout = new JarOutputStream(FileUtils.openOutputStream(mcjar));

    Queue<File> queue = new LinkedList<File>();
    for (File f : tmp.listFiles()) {
        queue.add(f);
    }

    while (!queue.isEmpty()) {
        File f = queue.poll();
        if (f.isDirectory()) {
            for (File child : f.listFiles()) {
                queue.add(child);
            }
        } else {
            //TODO need a better way to do this
            String name = f.getPath().substring(tmp.getPath().length() + 1);
            //TODO is this formatting really required for jars?
            name = name.replace("\\", "/");
            if (f.isDirectory() && !name.endsWith("/")) {
                name = name + "/";
            }
            JarEntry entry = new JarEntry(name);
            jarout.putNextEntry(entry);
            FileInputStream in = new FileInputStream(f);
            int len = -1;
            while ((len = in.read(dat)) > 0) {
                jarout.write(dat, 0, len);
            }
            in.close();
        }
        jarout.closeEntry();
    }
    jarout.close();
}
From source file:org.xwiki.xdomviz.Main.java
/**
 * <p>
 * This method produces the GraphViz source code corresponding to the node tree.
 * </p>
 *
 * @param root The node tree root.
 * @return A string containing the GraphViz source code to display the node tree.
 */
private static String generateGraphViz(Node root)
{
    // The rendering buffer.
    StringBuffer sb = new StringBuffer();

    // This map contains the GraphViz ids (integers) associated to the nodes in the tree.
    Map<Node, Integer> nodeToIdMap = new HashMap<Node, Integer>();

    sb.append("digraph XDOM {\n");

    // Breadth first visit of the node tree to assign simple ids to nodes.
    Queue<Node> nodesQueue = new ArrayDeque<Node>();
    nodesQueue.add(root);
    // Counter used to keep track of the assigned ids. It is incremented each time a node is encountered for the
    // first time.
    int i = 0;
    while (!nodesQueue.isEmpty()) {
        Node node = nodesQueue.poll();

        if (nodeToIdMap.get(node) == null) {
            nodeToIdMap.put(node, i);
            i++;
        }

        for (Node child : node.getChildren()) {
            if (nodeToIdMap.get(child) == null) {
                nodeToIdMap.put(child, i);
                i++;
            }

            nodesQueue.add(child);

            // Render the edge.
            sb.append(String.format("%d -> %d;\n", nodeToIdMap.get(node), nodeToIdMap.get(child)));
        }
    }

    // Render the label assignment.
    for (Node node : nodeToIdMap.keySet()) {
        sb.append(String.format("%d [label = \"%s\"];\n", nodeToIdMap.get(node), node));
    }

    sb.append("}\n");

    return sb.toString();
}