List of usage examples for com.google.common.collect.Sets#newLinkedHashSet
public static <E> LinkedHashSet<E> newLinkedHashSet()
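A minimal, self-contained sketch of typical usage (the set contents are illustrative); the factory method creates an empty LinkedHashSet whose iteration follows insertion order:

import com.google.common.collect.Sets;
import java.util.LinkedHashSet;

public class NewLinkedHashSetExample {
    public static void main(String[] args) {
        // Creates an empty LinkedHashSet; iteration follows insertion order.
        LinkedHashSet<String> names = Sets.newLinkedHashSet();
        names.add("alpha");
        names.add("beta");
        names.add("alpha"); // duplicate is ignored, original position kept
        System.out.println(names); // prints [alpha, beta]
    }
}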
From source file:com.facebook.buck.graph.AcyclicDepthFirstPostOrderTraversal.java
/**
 * Performs a depth-first, post-order traversal over a DAG.
 *
 * @param initialNodes The nodes from which to perform the traversal. Not allowed to contain
 *     {@code null}.
 * @throws CycleException if a cycle is found while performing the traversal.
 */
@SuppressWarnings("PMD.PrematureDeclaration")
public Iterable<T> traverse(Iterable<? extends T> initialNodes) throws CycleException {
    // This corresponds to the current chain of nodes being explored. Enforcing this invariant
    // makes this data structure useful for debugging.
    Deque<Explorable> toExplore = Lists.newLinkedList();
    for (T node : initialNodes) {
        toExplore.add(new Explorable(node));
    }

    Set<T> inProgress = Sets.newHashSet();
    LinkedHashSet<T> explored = Sets.newLinkedHashSet();

    while (!toExplore.isEmpty()) {
        Explorable explorable = toExplore.peek();
        T node = explorable.node;

        // This could happen if one of the initial nodes is a dependency of the other, for example.
        if (explored.contains(node)) {
            toExplore.removeFirst();
            continue;
        }

        inProgress.add(node);

        // Find children that need to be explored to add to the stack.
        int stackSize = toExplore.size();
        for (Iterator<T> iter = explorable.children; iter.hasNext();) {
            T child = iter.next();
            if (inProgress.contains(child)) {
                throw createCycleException(child, toExplore);
            } else if (!explored.contains(child)) {
                toExplore.addFirst(new Explorable(child));

                // Without this break statement:
                // (1) Children will be explored in reverse order instead of the specified order.
                // (2) CycleException may contain extra nodes.
                // Comment out the break statement and run the unit test to verify this for yourself.
                break;
            }
        }

        if (stackSize == toExplore.size()) {
            // Nothing was added to toExplore, so the current node can be popped off the stack and
            // marked as explored.
            toExplore.removeFirst();
            inProgress.remove(node);
            explored.add(node);
        }
    }

    Preconditions.checkState(inProgress.isEmpty(), "No more nodes should be in progress.");

    return Iterables.unmodifiableIterable(explored);
}
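For readers who want to run the pattern in isolation, here is a standalone sketch of the same iterative post-order idea over a plain adjacency map, again using Sets.newLinkedHashSet() so the result iterates in completion order. The graph representation and node names are illustrative only and are not part of Buck's API:

import com.google.common.collect.Sets;
import java.util.ArrayDeque;
import java.util.Deque;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

public class PostOrderSketch {
    // Returns the nodes of an acyclic graph reachable from root, in depth-first post-order.
    static Set<String> postOrder(Map<String, List<String>> graph, String root) {
        LinkedHashSet<String> explored = Sets.newLinkedHashSet();
        Set<String> expanded = Sets.newHashSet();
        Deque<String> stack = new ArrayDeque<>();
        stack.push(root);
        while (!stack.isEmpty()) {
            String node = stack.peek();
            if (explored.contains(node)) {
                stack.pop();
                continue;
            }
            if (expanded.add(node)) {
                // First visit: push unexplored children so they finish before this node.
                for (String child : graph.getOrDefault(node, List.of())) {
                    if (!explored.contains(child)) {
                        stack.push(child);
                    }
                }
            } else {
                // Second visit: all children are done, so this node is finished.
                stack.pop();
                explored.add(node);
            }
        }
        return explored;
    }

    public static void main(String[] args) {
        Map<String, List<String>> graph = Map.of(
                "a", List.of("b", "c"),
                "b", List.of("c"),
                "c", List.of());
        System.out.println(postOrder(graph, "a")); // prints [c, b, a]
    }
}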
From source file:org.eclipse.sirius.diagram.ui.internal.refresh.SiriusGMFSynchronizerDispatcher.java
/**
 * Get an EMF Command which updates the GMF notation model from the
 * {@link Notification} collection.
 *
 * @param domain
 *            the {@link TransactionalEditingDomain} from which the Command
 *            will be executed
 * @param notifications
 *            the {@link Notification} collection about the session model
 *            changes.
 * @return the EMF Command to update the GMF notation model
 */
public Command getGMFNotationModelSynchronizationCmd(TransactionalEditingDomain domain,
        Collection<Notification> notifications) {
    Command gmfNotationModelSynchronizationCmd = null;
    final Set<Diagram> gmfDiagramToSynchronizes = Sets.newLinkedHashSet();
    for (Notification notification : notifications) {
        Diagram gmfDiagram = getGMFDiagram(notification);
        if (gmfDiagram != null && !gmfDiagramToSynchronizes.contains(gmfDiagram)) {
            gmfDiagramToSynchronizes.add(gmfDiagram);
            CanonicalSynchronizer canonicalSynchronizer = CanonicalSynchronizerFactory.INSTANCE
                    .createCanonicalSynchronizer(gmfDiagram);
            if (gmfNotationModelSynchronizationCmd == null) {
                gmfNotationModelSynchronizationCmd = new SynchronizeGMFModelCommand(domain, canonicalSynchronizer);
            } else {
                Command synchronizeCommand = new SynchronizeGMFModelCommand(domain, canonicalSynchronizer);
                gmfNotationModelSynchronizationCmd = gmfNotationModelSynchronizationCmd.chain(synchronizeCommand);
            }
        }
    }
    return gmfNotationModelSynchronizationCmd;
}
From source file:org.jclouds.aws.s3.xml.ListBucketHandler.java
@Inject
public ListBucketHandler(DateService dateParser) {
    this.dateParser = dateParser;
    this.contents = Sets.newLinkedHashSet();
    this.commonPrefixes = Sets.newLinkedHashSet();
}
From source file:org.splevo.ui.handler.vpm.MergeVariationPointsHandler.java
@Override
public Object execute(ExecutionEvent event) throws ExecutionException {

    Shell activeShell = HandlerUtil.getActiveShell(event);
    boolean proceed = MessageDialog.openConfirm(activeShell, "Merge Variation Points",
            "There is no technical check performed if the "
                    + "selected variation points can be merged or not.\n" + "Proceed anyway?");
    if (!proceed) {
        return null;
    }

    ISelection curSelection = HandlerUtil.getCurrentSelection(event);
    if (curSelection != null && curSelection instanceof IStructuredSelection) {
        IStructuredSelection selection = (IStructuredSelection) curSelection;

        Set<VariationPoint> vpsToMerge = Sets.newLinkedHashSet();
        for (Object selectedItem : selection.toList()) {
            if (selectedItem instanceof VariationPoint) {
                vpsToMerge.add((VariationPoint) selectedItem);
            }
        }

        mergeVPs(vpsToMerge);

        Set<Resource> resourcesToSave = Sets.newLinkedHashSet();
        for (VariationPoint vp : vpsToMerge) {
            resourcesToSave.add(vp.eResource());
        }
        updateResources(resourcesToSave);
    }

    return null;
}
From source file:org.artifactory.storage.db.fs.service.ArchiveEntriesServiceImpl.java
@Override
@Nonnull
public Set<ZipEntryInfo> getArchiveEntries(String archiveSha1) {
    try {
        Set<ArchiveEntry> archiveEntries = archiveEntriesDao.loadByChecksum(archiveSha1);
        Set<ZipEntryInfo> entries = Sets.newLinkedHashSet();
        for (ArchiveEntry entry : archiveEntries) {
            entries.add(new ZipEntryImpl(entry.getPathName(), false));
        }
        return entries;
    } catch (SQLException e) {
        throw new StorageException("Failed to load archive entries for " + archiveSha1, e);
    }
}
From source file:org.richfaces.ui.validation.validator.ClientOnlyScript.java
public ClientOnlyScript(LibraryScriptFunction clientSideConverterScript,
        Collection<? extends LibraryScriptFunction> validatorScripts, String onvalid, String oninvalid) {
    super();
    this.converter = clientSideConverterScript;
    this.validators = ImmutableList.copyOf(validatorScripts);
    LinkedHashSet<ResourceKey> resources = Sets.newLinkedHashSet();
    resources.add(CSV_RESOURCE);
    if (null != converter) {
        Iterables.addAll(resources, converter.getResources());
    }
    for (LibraryScriptFunction scriptString : validators) {
        Iterables.addAll(resources, scriptString.getResources());
    }
    this.resources = ImmutableSet.copyOf(resources);
    this.onvalid = onvalid;
    this.oninvalid = oninvalid;
}
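The constructor above collects resource keys in insertion order and then freezes them into an ImmutableSet. A minimal sketch of that build-then-freeze idiom (the string keys are illustrative stand-ins for ResourceKey):

import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;
import java.util.LinkedHashSet;

public class BuildThenFreezeSketch {
    public static void main(String[] args) {
        // Collect entries in a mutable, insertion-ordered set first.
        LinkedHashSet<String> resources = Sets.newLinkedHashSet();
        resources.add("csv.js");
        resources.add("validator.js");
        resources.add("csv.js"); // duplicate is ignored, original position kept
        // ImmutableSet.copyOf preserves the iteration order of the source collection.
        ImmutableSet<String> frozen = ImmutableSet.copyOf(resources);
        System.out.println(frozen); // prints [csv.js, validator.js]
    }
}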
From source file:org.prebake.fs.GlobDispatcher.java
void dispatch(Iterable<Path> paths) {
    // Order of dispatch here is non-deterministic
    Set<GlobUnion> unions = Sets.newLinkedHashSet();
    for (Path p : paths) {
        for (Glob g : gset.matching(p)) {
            synchronized (globsContaining) {
                unions.addAll(globsContaining.get(g));
            }
        }
    }
    for (GlobUnion union : unions) {
        synchronized (listeners) {
            for (ArtifactListener<GlobUnion> unionListener : listeners.get(union)) {
                try {
                    unionListener.artifactChanged(union);
                } catch (RuntimeException ex) {
                    logger.log(Level.SEVERE, "Internal error", ex);
                }
            }
        }
    }
}
From source file:com.google.caja.ancillary.linter.LiveSet.java
/**
 * Creates a new scope for a DOM root or function constructor.
 *
 * @param scopeRoot normally, a node such that there exists a
 *     {@link LexicalScope scope} LS where {@code LS.root.node == scopeRoot}.
 */
LiveSet(ParseTreeNode scopeRoot) {
    Set<Pair<String, LexicalScope>> symbols = Sets.newLinkedHashSet();
    LexicalScope scope = ScopeAnalyzer.containingScopeForNode(scopeRoot);
    // Find the set of symbols defined by the overrideable method
    // LexicalScope.initScope that were defined because of this method, not
    // as the result of a declaration which we may encounter later.
    for (String symbolName : scope.symbols.symbolNames()) {
        SymbolTable.Symbol s = scope.symbols.getSymbol(symbolName);
        for (AncestorChain<?> decl : s.getDeclarations()) {
            if (decl.node == scopeRoot) {
                symbols.add(Pair.pair(symbolName, scope));
                break;
            }
        }
    }
    this.symbols = Collections.unmodifiableSet(symbols);
}
From source file:org.eclipse.sirius.ui.tools.internal.views.modelexplorer.DeleteActionHandler.java
private Collection<DRepresentation> getRepresentations() {
    ISelection selection = selectionProvider.getSelection();
    if (selection instanceof IStructuredSelection) {
        Collection<?> selections = ((IStructuredSelection) selection).toList();
        if (selections != null && !selections.isEmpty()) {
            Collection<DRepresentation> selectedRepresentations = Sets.newLinkedHashSet();
            Iterables.addAll(selectedRepresentations, Iterables.filter(selections, DRepresentation.class));
            Iterables.addAll(selectedRepresentations,
                    Iterables.transform(Iterables.filter(selections, RepresentationItemImpl.class),
                            RepresentationItemImpl.REPRESENTATION_ITEM_TO_REPRESENTATION));
            return selectedRepresentations;
        }
    }
    return Collections.emptyList();
}
From source file:org.jetbrains.kotlin.resolve.lazy.descriptors.LazyTypeParameterDescriptor.java
@NotNull
@Override
protected Set<JetType> resolveUpperBounds() {
    Set<JetType> upperBounds = Sets.newLinkedHashSet();

    JetTypeParameter jetTypeParameter = this.jetTypeParameter;

    JetTypeReference extendsBound = jetTypeParameter.getExtendsBound();
    if (extendsBound != null) {
        JetType boundType = c.getDescriptorResolver().resolveTypeParameterExtendsBound(this, extendsBound,
                getContainingDeclaration().getScopeForClassHeaderResolution(), c.getTrace());
        upperBounds.add(boundType);
    }

    resolveUpperBoundsFromWhereClause(upperBounds);

    if (upperBounds.isEmpty()) {
        upperBounds.add(c.getModuleDescriptor().getBuiltIns().getDefaultBound());
    }

    return upperBounds;
}