Usage examples for java.util.LinkedList#removeFirst()
Method signature: public E removeFirst() — removes and returns the first element of the list; throws NoSuchElementException if the list is empty.
From source file:org.xchain.namespaces.jsl.AbstractTemplateCommand.java
/** * Pops the current command execution state array from the stack. */// w ww . j a va 2 s.c om private final void popCommandExecutionState() { // get the stack for the current thread. LinkedList<CommandExecutionState[]> stack = commandExecutionStateStackTL.get(); // if there was not a stack, then we are in an illegal state. if (stack == null) { throw new IllegalStateException( "popCommandExecutionState() called when there was not a current stack."); } // remove the state array from the stack. stack.removeFirst(); // if the stack is now empty, clean the thread local up. if (stack.isEmpty()) { commandExecutionStateStackTL.remove(); } }
From source file:org.commonjava.maven.ext.io.PomIO.java
/**
 * Walks the POM hierarchy starting at {@code topPom}, following both
 * {@code <parent><relativePath>} references and {@code <module>} entries
 * breadth-first, and returns a lightweight {@link PomPeek} per POM found.
 *
 * <p>Parents are only followed while they stay under the top POM's directory;
 * POMs without a resolvable GAV key are treated as templates and skipped.
 * After traversal, the top-level parent and any standalone POMs (whose parent
 * is not part of the peeked set) are flagged as inheritance roots.</p>
 *
 * @param topPom the POM file to start the traversal from
 * @return the list of peeked POMs, in BFS discovery order
 * @throws ManipulationException if any file cannot be read or canonicalized
 */
private List<PomPeek> peekAtPomHierarchy(final File topPom) throws ManipulationException {
    final List<PomPeek> peeked = new ArrayList<>();
    try {
        // Work queue of POM files still to inspect; seeded with the top POM.
        final LinkedList<File> pendingPoms = new LinkedList<>();
        pendingPoms.add(topPom.getCanonicalFile());
        // Canonical path of the top POM's directory — parents outside it are ignored.
        final String topDir = topPom.getAbsoluteFile().getParentFile().getCanonicalPath();
        final Set<File> seen = new HashSet<>();
        File topLevelParent = topPom;
        while (!pendingPoms.isEmpty()) {
            final File pom = pendingPoms.removeFirst();
            seen.add(pom);
            logger.debug("PEEK: " + pom);
            final PomPeek peek = new PomPeek(pom);
            final ProjectVersionRef key = peek.getKey();
            if (key != null) {
                peeked.add(peek);
                final File dir = pom.getParentFile();
                // Follow the parent reference upward, if one is declared.
                final String relPath = peek.getParentRelativePath();
                if (relPath != null) {
                    logger.debug("Found parent relativePath: " + relPath + " in pom: " + pom);
                    File parent = new File(dir, relPath);
                    if (parent.isDirectory()) {
                        // relativePath may point at a directory; assume pom.xml inside it.
                        parent = new File(parent, "pom.xml");
                    }
                    parent = parent.getCanonicalFile();
                    // Only enqueue parents that exist, live under topDir, and were not
                    // already seen or queued — prevents cycles and escaping the project.
                    if (parent.getParentFile().getCanonicalPath().startsWith(topDir) && parent.exists()
                            && !seen.contains(parent) && !pendingPoms.contains(parent)) {
                        topLevelParent = parent;
                        logger.debug("Possible top level parent " + parent);
                        pendingPoms.add(parent);
                    } else {
                        logger.debug("Skipping reference to non-existent parent relativePath: '" + relPath
                                + "' in: " + pom);
                    }
                }
                // Follow declared modules downward.
                final Set<String> modules = peek.getModules();
                if (modules != null && !modules.isEmpty()) {
                    for (final String module : modules) {
                        logger.debug("Found module: " + module + " in pom: " + pom);
                        File modPom = new File(dir, module);
                        if (modPom.isDirectory()) {
                            // Module entries normally name a directory containing pom.xml.
                            modPom = new File(modPom, "pom.xml");
                        }
                        if (modPom.exists() && !seen.contains(modPom) && !pendingPoms.contains(modPom)) {
                            pendingPoms.addLast(modPom);
                        } else {
                            logger.debug(
                                    "Skipping reference to non-existent module: '" + module + "' in: " + pom);
                        }
                    }
                }
            } else {
                // No resolvable GAV — presumably an unprocessed template POM.
                logger.debug("Skipping " + pom + " as its a template file.");
            }
        }
        // Collect all discovered project refs and flag the top-level parent.
        final HashSet<ProjectVersionRef> projectrefs = new HashSet<>();
        for (final PomPeek p : peeked) {
            projectrefs.add(p.getKey());
            if (p.getPom().equals(topLevelParent)) {
                logger.debug("Setting top level parent to " + p.getPom() + " :: " + p.getKey());
                p.setInheritanceRoot(true);
            }
        }
        // POMs whose parent is absent from the peeked set are standalone roots.
        for (final PomPeek p : peeked) {
            if (p.getParentKey() == null || !seenThisParent(projectrefs, p.getParentKey())) {
                logger.debug("Found a standalone pom " + p.getPom() + " :: " + p.getKey());
                p.setInheritanceRoot(true);
            }
        }
    } catch (final IOException e) {
        throw new ManipulationException("Problem peeking at POMs.", e);
    }
    return peeked;
}
From source file:de.hasait.clap.CLAP.java
/**
 * Parses the given command-line arguments against the option tree.
 *
 * <p>Works in three phases: (1) breadth-first expansion of parse contexts
 * until every context either consumed all tokens or hit an invalid token;
 * (2) if no context consumed everything, report the invalid token(s) of the
 * context(s) that got furthest; (3) validate the surviving contexts and, if
 * none validates, report the error messages of the context(s) with the
 * fewest problems. Exactly one validated result must remain.</p>
 *
 * @param pArgs the raw command-line arguments
 * @return the single unambiguous parse result
 * @throws CLAPException on invalid tokens, validation failure, or ambiguity
 */
public CLAPResult parse(final String... pArgs) {
    final Set<CLAPParseContext> contextsWithInvalidToken = new HashSet<CLAPParseContext>();
    final List<CLAPParseContext> parsedContexts = new ArrayList<CLAPParseContext>();
    // Phase 1: BFS over alternative parse interpretations.
    final LinkedList<CLAPParseContext> activeContexts = new LinkedList<CLAPParseContext>();
    activeContexts.add(new CLAPParseContext(this, pArgs));
    while (!activeContexts.isEmpty()) {
        final CLAPParseContext context = activeContexts.removeFirst();
        if (context.hasMoreTokens()) {
            final CLAPParseContext[] result = _root.parse(context);
            if (result != null) {
                // Each successful parse step may fork into several follow-up contexts.
                for (final CLAPParseContext nextContext : result) {
                    activeContexts.add(nextContext);
                }
            } else {
                // Dead end: remember it for error reporting.
                contextsWithInvalidToken.add(context);
            }
        } else {
            // All tokens consumed — candidate for validation.
            parsedContexts.add(context);
        }
    }
    if (parsedContexts.isEmpty()) {
        // Phase 2: no context consumed all tokens — report the invalid token(s)
        // of the context(s) that progressed furthest into the argument list.
        int maxArgIndex = Integer.MIN_VALUE;
        final Set<String> invalidTokensOfBestContexts = new HashSet<String>();
        for (final CLAPParseContext context : contextsWithInvalidToken) {
            final int currentArgIndex = context.getCurrentArgIndex();
            if (currentArgIndex > maxArgIndex) {
                // A strictly better context invalidates previously collected tokens.
                invalidTokensOfBestContexts.clear();
            }
            if (currentArgIndex >= maxArgIndex) {
                maxArgIndex = currentArgIndex;
                invalidTokensOfBestContexts.add(context.currentArg());
            }
        }
        throw new CLAPException(
                nls(NLSKEY_CLAP_ERROR_INVALID_TOKEN_LIST, StringUtils.join(invalidTokensOfBestContexts, ", "))); //$NON-NLS-1$
    }
    // Phase 3: validate every fully-parsed context.
    final Map<CLAPParseContext, List<String>> contextErrorMessages = new HashMap<CLAPParseContext, List<String>>();
    final Set<CLAPResultImpl> results = new LinkedHashSet<CLAPResultImpl>();
    for (final CLAPParseContext context : parsedContexts) {
        final List<String> errorMessages = new ArrayList<String>();
        _root.validate(context, errorMessages);
        if (errorMessages.isEmpty()) {
            final CLAPResultImpl result = new CLAPResultImpl();
            _root.fillResult(context, result);
            results.add(result);
        } else {
            contextErrorMessages.put(context, errorMessages);
        }
    }
    if (results.isEmpty()) {
        // All contexts failed validation — report the context(s) with the
        // smallest number of error messages (the "closest" interpretations).
        int minErrorMessages = Integer.MAX_VALUE;
        final List<String> errorMessagesOfBestContexts = new ArrayList<String>();
        for (final Entry<CLAPParseContext, List<String>> entry : contextErrorMessages.entrySet()) {
            final int countErrorMessages = entry.getValue().size();
            if (countErrorMessages < minErrorMessages) {
                errorMessagesOfBestContexts.clear();
            }
            if (countErrorMessages <= minErrorMessages) {
                minErrorMessages = countErrorMessages;
                errorMessagesOfBestContexts
                        .add(StringUtils.join(entry.getValue(), nls(NLSKEY_CLAP_ERROR_ERROR_MESSAGE_SPLIT)));
            }
        }
        throw new CLAPException(nls(NLSKEY_CLAP_ERROR_VALIDATION_FAILED,
                StringUtils.join(errorMessagesOfBestContexts, nls(NLSKEY_CLAP_ERROR_ERROR_MESSAGES_SPLIT))));
    }
    if (results.size() > 1) {
        // Distinct validated interpretations remain — the input is ambiguous.
        throw new CLAPException(nls(NLSKEY_CLAP_ERROR_AMBIGUOUS_RESULT));
    }
    return results.iterator().next();
}
From source file:com.zimbra.cs.mime.Mime.java
/**
 * Flattens the MIME part tree rooted at {@code root} into a list of
 * {@link MPartInfo}, assigning IMAP-style part names along the way.
 *
 * <p>Traversal is queue-based; multipart children are pushed to the front of
 * the queue so the output preserves document order. As a special case, a
 * message consisting of a single empty multipart may have its preamble
 * promoted into a synthetic text part (gated by
 * {@code LC.mime_promote_empty_multipart}).</p>
 *
 * @param root the root MIME part
 * @param defaultCharset charset used when promoting a preamble to a text part
 * @return all parts in document order
 */
private static List<MPartInfo> listParts(MimePart root, String defaultCharset)
        throws MessagingException, IOException {
    List<MPartInfo> parts = new ArrayList<MPartInfo>();
    LinkedList<MPartInfo> queue = new LinkedList<MPartInfo>();
    queue.add(generateMPartInfo(root, null, "", 0));
    // Remembers the first empty multipart seen, for possible preamble promotion.
    MimeMultipart emptyMultipart = null;
    while (!queue.isEmpty()) {
        MPartInfo mpart = queue.removeFirst();
        MimePart mp = mpart.getMimePart();
        parts.add(mpart);
        String cts = mpart.mContentType;
        boolean isMultipart = cts.startsWith(MimeConstants.CT_MULTIPART_PREFIX);
        boolean isMessage = !isMultipart && cts.equals(MimeConstants.CT_MESSAGE_RFC822);
        if (isMultipart) {
            // IMAP part numbering is screwy: top-level multipart doesn't get a number
            String prefix = mpart.mPartName.length() > 0 ? (mpart.mPartName + '.') : "";
            if (mp instanceof MimeMessage) {
                mpart.mPartName = prefix + "TEXT";
            }
            MimeMultipart multi = getMultipartContent(mp, cts);
            if (multi != null) {
                if (multi.getCount() == 0 && LC.mime_promote_empty_multipart.booleanValue()) {
                    if (emptyMultipart == null) {
                        emptyMultipart = multi;
                    }
                    // Empty multipart/appledouble is presumed to really be an applefile.
                    if (MimeConstants.CT_MULTIPART_APPLEDOUBLE.equalsIgnoreCase(getContentType(mp))) {
                        ZimbraLog.misc.debug(
                                "appledouble with no children; assuming it is malformed and really applefile");
                        mpart.mContentType = mpart.mContentType.replace(MimeConstants.CT_MULTIPART_APPLEDOUBLE,
                                MimeConstants.CT_APPLEFILE);
                    }
                }
                // IMAP child part numbers are 1-based.
                mpart.mChildren = new ArrayList<MPartInfo>(multi.getCount());
                for (int i = 1; i <= multi.getCount(); i++) {
                    mpart.mChildren
                            .add(generateMPartInfo((MimePart) multi.getBodyPart(i - 1), mpart, prefix + i, i));
                }
                // Prepend children so traversal stays in document order.
                queue.addAll(0, mpart.mChildren);
            }
        } else if (isMessage) {
            // An attached message/rfc822 contributes a single child sharing its name.
            MimeMessage mm = getMessageContent(mp);
            if (mm != null) {
                MPartInfo child = generateMPartInfo(mm, mpart, mpart.mPartName, 0);
                queue.addFirst(child);
                mpart.mChildren = Arrays.asList(child);
            }
        } else {
            // leaf part: nothing to do at this stage
        }
    }
    // Promotion case: the whole message is one childless multipart whose
    // preamble carries the actual text content.
    if (emptyMultipart != null && parts.size() == 1) {
        String text = emptyMultipart.getPreamble();
        if (!StringUtil.isNullOrEmpty(text)) {
            ZimbraLog.misc
                    .debug("single multipart with no children. promoting the preamble into a single text part");
            parts.remove(0);
            MPartInfo mpart = new MPartInfo();
            ZMimeBodyPart mp = new ZMimeBodyPart();
            mp.setText(text, defaultCharset);
            mpart.mPart = mp;
            mpart.mContentType = mp.getContentType();
            mpart.mDisposition = "";
            mpart.mPartName = "1";
            parts.add(mpart);
        }
    }
    return parts;
}
From source file:acromusashi.stream.component.rabbitmq.CachingConnectionFactory.java
/**
 * Obtains a Rabbit {@link Channel}, preferring a cached one.
 *
 * <p>Pops the first proxy from the appropriate (transactional or
 * non-transactional) cache under the cache's own lock; when the cache is
 * empty, a new cached channel proxy is created instead.</p>
 *
 * @param transactional whether a transactional channel is required
 * @return a cached channel if available, otherwise a freshly created proxy
 */
private Channel getChannel(boolean transactional) {
    // Select the cache matching the requested transactionality.
    LinkedList<ChannelProxy> cache = transactional ? this.cachedChannelsTransactional
            : this.cachedChannelsNonTransactional;
    Channel cached = null;
    // The cache list itself is the monitor guarding its contents.
    synchronized (cache) {
        if (!cache.isEmpty()) {
            cached = cache.removeFirst();
        }
    }
    if (cached == null) {
        // Cache miss — build a new proxy bound to this cache.
        return getCachedChannelProxy(cache, transactional);
    }
    if (this.logger.isTraceEnabled()) {
        this.logger.trace("Found cached Rabbit Channel");
    }
    return cached;
}
From source file:it.cnr.icar.eric.client.admin.function.Cp.java
/**
 * Load the contents of baseDir into rootRP, using the directory layout as the
 * structure for the created registry objects.
 *
 * <p>Directories are traversed breadth-first via a work list: each directory
 * becomes a RegistryPackage, each file an ExtrinsicObject, and child
 * directories are appended to the work list until the whole tree has been
 * processed. All created objects are saved in a single bulk request at the
 * end.</p>
 *
 * @param baseDir Directory in local file system from which to load
 * @param rootRP Existing RegistryPackage to which to add (may be null)
 * @throws Exception if a directory is missing/unreadable, an entry is neither
 *         file nor directory, or the bulk save fails
 */
protected void scanDir(File baseDir, RegistryPackage rootRP) throws Exception {
    ArrayList<RegistryObject> repositoryObjects = new ArrayList<RegistryObject>();
    LinkedList<DirInfo> dirInfoList = new LinkedList<DirInfo>();
    dirInfoList.add(new DirInfo(baseDir, rootRP));
    /*
     * Loop through the list of directories (and corresponding
     * RegistryPackages and pathnames). Child directories of
     * curDir are added to the end of the list, so the list isn't
     * finished until all descendant directories have been
     * processed.
     */
    while (!dirInfoList.isEmpty()) {
        DirInfo curDirInfo = dirInfoList.removeFirst();
        File curDir = curDirInfo.getDir();
        RegistryPackage curRP = curDirInfo.getRegistryPackage();
        // Validate the directory before touching its contents.
        if (!curDir.exists()) {
            throw new AdminException(format(rb, "nonexistentLocalDir", new Object[] { curDir }));
        }
        if (!curDir.isDirectory()) {
            throw new AdminException(format(rb, "nondirectoryLocalDir", new Object[] { curDir }));
        }
        if (!curDir.canRead()) {
            throw new AdminException(format(rb, "unreadableLocalDir", new Object[] { curDir }));
        }
        File[] childFiles = curDir.listFiles();
        for (int i = 0; i < childFiles.length; i++) {
            String childName = childFiles[i].getName();
            // Honor the user-supplied include/exclude patterns.
            boolean canInclude = checkIncludesExcludes(childName);
            RegistryObject childObject;
            if (!canInclude) {
                if (verbose || debug) {
                    context.printMessage(format(rb, "notIncluding", new Object[] { childFiles[i] }));
                }
                continue;
            }
            if (childFiles[i].isFile()) {
                // Plain files become ExtrinsicObjects.
                if (verbose || debug) {
                    context.printMessage(format(rb, "including",
                            new Object[] { "ExtrinsicObject", childFiles[i], childName }));
                }
                childObject = context.getService().createExtrinsicObject(childFiles[i]);
            } else if (childFiles[i].isDirectory()) {
                // Directories become RegistryPackages and are queued for later scanning.
                if (verbose || debug) {
                    context.printMessage(format(rb, "including",
                            new Object[] { "RegistryPackage", childFiles[i], childName }));
                }
                childObject = context.getService().createRegistryPackage(childName);
                dirInfoList.addLast(new DirInfo(childFiles[i], (RegistryPackage) childObject));
            } else {
                // Neither file nor directory (e.g. broken symlink) — abort.
                childObject = null;
                throw new AdminException(format(rb, "notFileOrDir", new Object[] { childFiles[i] }));
            }
            if (curRP != null) {
                curRP.addRegistryObject(childObject);
            }
            repositoryObjects.add(childObject);
        }
    }
    if (!repositoryObjects.isEmpty()) {
        if (rootRP != null) {
            // The root package is saved along with everything created beneath it.
            repositoryObjects.add(rootRP);
        }
        BulkResponse response = ((LifeCycleManagerImpl) context.getService().getLCM())
                .saveObjects(repositoryObjects, saveObjectsSlots);
        JAXRUtility.checkBulkResponse(response);
    }
}
From source file:com.android.utils.AccessibilityNodeInfoUtils.java
/**
 * Returns the result of applying a filter using breadth-first traversal.
 *
 * <p>Node lifecycle: the root is copied via {@code obtain()}; every node that
 * is dequeued and rejected by the filter is recycled, the accepted node is
 * returned un-recycled (ownership passes to the caller), and any nodes still
 * queued when the method exits are recycled in the {@code finally} block.
 * NOTE(review): {@code visitedNodes} retains references to already-recycled
 * nodes purely for the {@code contains} duplicate check — presumably safe
 * because they are only compared, never dereferenced; confirm against the
 * compat library's equals() contract.</p>
 *
 * @param node The root node to traverse from.
 * @param filter The filter to satisfy.
 * @return The first node reached via BFS traversal that satisfies the
 *         filter, or {@code null} if none does.
 */
public static AccessibilityNodeInfoCompat searchFromBfs(AccessibilityNodeInfoCompat node, NodeFilter filter) {
    if (node == null) {
        return null;
    }
    final LinkedList<AccessibilityNodeInfoCompat> queue = new LinkedList<>();
    Set<AccessibilityNodeInfoCompat> visitedNodes = new HashSet<>();
    // Obtain a copy so the caller's node is never recycled by this method.
    queue.add(AccessibilityNodeInfoCompat.obtain(node));
    try {
        while (!queue.isEmpty()) {
            final AccessibilityNodeInfoCompat item = queue.removeFirst();
            visitedNodes.add(item);
            if (filter.accept(item)) {
                // Caller takes ownership of the matching node.
                return item;
            }
            final int childCount = item.getChildCount();
            for (int i = 0; i < childCount; i++) {
                final AccessibilityNodeInfoCompat child = item.getChild(i);
                // Skip null children and nodes already visited (cycle guard).
                if (child != null && !visitedNodes.contains(child)) {
                    queue.addLast(child);
                }
            }
            // Rejected node is no longer needed — release it.
            item.recycle();
        }
    } finally {
        // Release any nodes still pending (early return or exception path).
        while (!queue.isEmpty()) {
            queue.removeFirst().recycle();
        }
    }
    return null;
}
From source file:org.apache.hadoop.hdfs.server.datanode.TestDirectoryScannerDelta.java
/**
 * Exercises the directory scanner's delta handling by corrupting block state
 * in four ways (data file removed, data+meta removed, meta removed/genstamp
 * bumped, entry dropped from the volume map), injecting concurrent delta
 * mutations at scanner checkpoints, and asserting the volume map converges
 * to the expected state.
 *
 * @param ic whether inline checksums are enabled (changes which meta files
 *           exist and where the generation stamp lives)
 */
private void testDeltaBehaviour(boolean ic) throws Exception {
    setUp(ic);
    try {
        // Register the four scanner checkpoints we will synchronize on.
        InjectionHandler.set(new ParallelInjectionHandler(InjectionEvent.DIRECTORY_SCANNER_NOT_STARTED,
                InjectionEvent.DIRECTORY_SCANNER_AFTER_FILE_SCAN, InjectionEvent.DIRECTORY_SCANNER_AFTER_DIFF,
                InjectionEvent.DIRECTORY_SCANNER_FINISHED));
        // let's make a bunch of files here
        for (int i = 0; i < 100; i++) {
            createFile(fs, "file" + i);
        }
        FSDataset fds = (FSDataset) dn.data;
        LinkedList<DatanodeBlockInfo> blockInfos = getBlockInfos(fds, nsid);
        // now lets corrupt some files (while the scanner has not started yet)
        startParallelInjection(InjectionEvent.DIRECTORY_SCANNER_NOT_STARTED);
        // Case 1: delete only the block data file.
        LinkedList<FileAndBlockId> blocksToBeRemoved = new LinkedList<FileAndBlockId>();
        for (int i = 0; i < REMOVE_BLOCK_FILES; i++) {
            DatanodeBlockInfo bi = blockInfos.removeFirst();
            String fileName = firstLine(bi.getBlockDataFile().getFile());
            blocksToBeRemoved.add(new FileAndBlockId(bi.getBlock(), fileName));
            assertTrue(bi.getBlockDataFile().getFile().delete());
        }
        // Case 2: delete data file and (for separate-checksum mode) the meta file.
        for (int i = 0; i < REMOVE_BOTH_FILES; i++) {
            DatanodeBlockInfo bi = blockInfos.removeFirst();
            String fileName = firstLine(bi.getBlockDataFile().getFile());
            blocksToBeRemoved.add(new FileAndBlockId(bi.getBlock(), fileName));
            assertTrue(bi.getBlockDataFile().getFile().delete());
            if (!ic) {
                assertTrue(BlockWithChecksumFileWriter
                        .getMetaFile(bi.getBlockDataFile().getFile(), bi.getBlock()).delete());
            }
        }
        // Case 3: invalidate the generation stamp — delete the meta file, or for
        // inline checksums bump the stamp embedded in the data file name.
        LinkedList<FileAndBlockId> blocksToBeUpdated = new LinkedList<FileAndBlockId>();
        for (int i = 0; i < REMOVE_META_FILES; i++) {
            DatanodeBlockInfo bi = blockInfos.removeFirst();
            String fileName = firstLine(bi.getBlockDataFile().getFile());
            blocksToBeUpdated
                    .add(new FileAndBlockId(bi.getBlock(), fileName, bi.getBlock().getGenerationStamp()));
            if (!ic) {
                assertTrue(BlockWithChecksumFileWriter
                        .getMetaFile(bi.getBlockDataFile().getFile(), bi.getBlock()).delete());
            } else {
                incInlineFileGenStamp(bi.getBlockDataFile().getFile());
            }
        }
        // Case 4: drop the block from the in-memory volume map (file stays on disk).
        LinkedList<FileAndBlockId> blocksToBeAdded = new LinkedList<FileAndBlockId>();
        for (int i = 0; i < REMOVE_FROM_VOLUME_MAP; i++) {
            DatanodeBlockInfo bi = blockInfos.removeFirst();
            String fileName = firstLine(bi.getBlockDataFile().getFile());
            blocksToBeAdded.add(new FileAndBlockId(bi.getBlock(), fileName));
            fds.volumeMap.remove(nsid, bi.getBlock());
        }
        stopParallelInjection(InjectionEvent.DIRECTORY_SCANNER_NOT_STARTED);
        // Now messing up with delta a bit, at both mid-scan checkpoints.
        startParallelInjection(InjectionEvent.DIRECTORY_SCANNER_AFTER_FILE_SCAN);
        messWithDelta(blocksToBeRemoved, blocksToBeUpdated, blocksToBeAdded);
        stopParallelInjection(InjectionEvent.DIRECTORY_SCANNER_AFTER_FILE_SCAN);
        startParallelInjection(InjectionEvent.DIRECTORY_SCANNER_AFTER_DIFF);
        messWithDelta(blocksToBeRemoved, blocksToBeUpdated, blocksToBeAdded);
        stopParallelInjection(InjectionEvent.DIRECTORY_SCANNER_AFTER_DIFF);
        // Checking results once the scanner reports completion.
        startParallelInjection(InjectionEvent.DIRECTORY_SCANNER_FINISHED);
        for (FileAndBlockId f : blocksToBeAdded) {
            assertNotNull(fds.volumeMap.get(nsid, f.block));
        }
        for (FileAndBlockId f : blocksToBeRemoved) {
            assertNull(fds.volumeMap.get(nsid, f.block));
        }
        if (!ic) {
            // for inline checksums, the generation stamp is in the datafilename
            for (FileAndBlockId f : blocksToBeUpdated) {
                assertEquals(Block.GRANDFATHER_GENERATION_STAMP,
                        fds.volumeMap.get(nsid, f.block).getBlock().getGenerationStamp());
            }
        } else {
            for (FileAndBlockId f : blocksToBeUpdated) {
                assertEquals(f.originalGenStamp + 1,
                        fds.volumeMap.get(nsid, f.block).getBlock().getGenerationStamp());
            }
        }
        stopParallelInjection(InjectionEvent.DIRECTORY_SCANNER_FINISHED);
    } finally {
        // Always tear down the mini-cluster and injection handler.
        if (cluster != null) {
            cluster.shutdown();
            cluster = null;
        }
        InjectionHandler.clear();
    }
}
From source file:nl.tue.bimserver.citygml.CityGmlSerializer.java
/**
 * Builds a CityGML boundary surface for the given IFC product, aggregating
 * the geometry of the product and of every object reachable through its
 * (transitive) decomposition into one {@link MultiSurface}.
 *
 * <p>The multi-surface is attached to the city object via reflection:
 * {@code lod4MultiSurface} when that property is writable, otherwise
 * {@code lod4Geometry}. NOTE(review): reflection failures are only printed
 * via {@code printStackTrace()} and the object is returned without geometry
 * — presumably deliberate best-effort; consider logging/wrapping instead.</p>
 *
 * @param ifcProduct the IFC product to serialize
 * @param cityObject the target CityGML object to populate
 * @return the populated {@code cityObject}
 * @throws SerializerException if geometry extraction fails
 */
private <T extends AbstractCityObject> T buildBoundarySurface(IfcProduct ifcProduct, T cityObject)
        throws SerializerException {
    setName(cityObject.getName(), ifcProduct.getName());
    setGlobalId(cityObject, ifcProduct);
    MultiSurface multiSurface = gml.createMultiSurface();
    {
        // Geometry of the product itself.
        CompositeSurface compositeSurface = gml.createCompositeSurface();
        setGeometry(compositeSurface, ifcProduct);
        materialManager.assign(compositeSurface, ifcProduct);
        multiSurface.addSurfaceMember(gml.createSurfaceProperty(compositeSurface));
    }
    // Breadth-first walk over the decomposition hierarchy rooted at the product.
    LinkedList<IfcObjectDefinition> decompose = new LinkedList<IfcObjectDefinition>(
            Collections.singletonList(ifcProduct));
    while (!decompose.isEmpty()) {
        for (IfcRelDecomposes ifcRelDecomposes : decompose.removeFirst().getIsDecomposedBy()) {
            for (IfcObjectDefinition ifcObjectDef : ifcRelDecomposes.getRelatedObjects()) {
                // Each decomposed part contributes its own composite surface.
                CompositeSurface compositeSurface = gml.createCompositeSurface();
                setGeometry(compositeSurface, ifcObjectDef);
                materialManager.assign(compositeSurface, ifcObjectDef);
                multiSurface.addSurfaceMember(gml.createSurfaceProperty(compositeSurface));
                // Parts may themselves be decomposed — keep walking.
                decompose.add(ifcObjectDef);
            }
        }
    }
    MultiSurfaceProperty multiSurfaceProperty = gml.createMultiSurfaceProperty(multiSurface);
    try {
        // Not every AbstractCityObject subtype exposes lod4MultiSurface.
        if (PropertyUtils.isWriteable(cityObject, "lod4MultiSurface")) {
            PropertyUtils.setProperty(cityObject, "lod4MultiSurface", multiSurfaceProperty);
        } else {
            PropertyUtils.setProperty(cityObject, "lod4Geometry", multiSurfaceProperty);
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
    return cityObject;
}
From source file:net.sourceforge.jasa.report.HistoricalDataReport.java
/**
 * Removes the {@code n} oldest orders from the given list, keeping the
 * sorted and accepted shout collections in sync.
 *
 * @param n number of orders to remove from the head of the list
 * @param shouts the order list to drain from the front
 * @throws AuctionRuntimeException if a removed order was not present in the
 *         sorted shout collection (internal bookkeeping inconsistency)
 */
protected void removeNShouts(int n, LinkedList<Order> shouts) {
    int remaining = n;
    while (remaining > 0) {
        final Order removed = shouts.removeFirst();
        // Every order drained from the list must also exist in sortedShouts;
        // a miss indicates the two collections have diverged.
        if (!sortedShouts.remove(removed)) {
            assert !sortedShouts.contains(removed);
            throw new AuctionRuntimeException("Could not process " + removed);
        }
        acceptedShouts.remove(removed);
        remaining--;
    }
}