List of usage examples for com.google.common.collect.Sets.newLinkedHashSet
public static <E> LinkedHashSet<E> newLinkedHashSet()
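Before the project examples below, here is a minimal sketch of the method in isolation. It assumes only Guava on the classpath; the class name DemoNewLinkedHashSet and its contents are illustrative and not taken from any of the source files listed here. Sets.newLinkedHashSet() returns an empty, mutable LinkedHashSet whose iteration order follows insertion order.

    import com.google.common.collect.Sets;
    import java.util.LinkedHashSet;

    public class DemoNewLinkedHashSet {
        public static void main(String[] args) {
            // Creates an empty, mutable LinkedHashSet; the element type is inferred from the target.
            LinkedHashSet<String> columns = Sets.newLinkedHashSet();
            columns.add("id");
            columns.add("name");
            columns.add("id"); // duplicate, silently ignored by the set
            // Prints [id, name]: iteration order matches insertion order
            System.out.println(columns);
        }
    }

On Java 7 and later the plain constructor new LinkedHashSet<>() is equivalent; the Guava factory mainly saved repeating the type arguments before the diamond operator existed.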
From source file:com.ebay.xcelite.column.ColumnsExtractor.java
private void columnsOrdering() {
    Row rowAnnotation = type.getAnnotation(Row.class);
    if (rowAnnotation == null || rowAnnotation.colsOrder() == null || rowAnnotation.colsOrder().length == 0)
        return;
    colsOrdering = Sets.newLinkedHashSet();
    for (String column : rowAnnotation.colsOrder()) {
        colsOrdering.add(new Col(column));
    }
}
From source file:exec.validate_evaluation.streaks.EditStreakGenerationRunner.java
public EditStreakGenerationRunner(EditStreakGenerationIo io, EditStreakGenerationLogger log) {
    this.io = io;
    this.log = log;
    filters = Sets.newLinkedHashSet();
}
From source file:cc.kave.episodes.export.EventsFilter.java
private static List<Event> removeMethodDublicates(List<Event> stream) {
    List<Event> results = new LinkedList<Event>();
    List<Event> method = new LinkedList<Event>();
    Set<IMethodName> observedMethods = Sets.newLinkedHashSet();
    IMethodName currentMethod = null;
    for (Event event : stream) {
        if ((event.getKind() == EventKind.FIRST_DECLARATION) && (currentMethod != null)) {
            if (!observedMethods.contains(currentMethod)) {
                results.addAll(method);
                observedMethods.add(currentMethod);
            }
            method = new LinkedList<Event>();
            currentMethod = null;
        } else if (event.getKind() == EventKind.METHOD_DECLARATION) {
            currentMethod = event.getMethod();
        }
        method.add(event);
    }
    if ((currentMethod != null) && (!observedMethods.contains(currentMethod))) {
        results.addAll(method);
    }
    return results;
}
From source file:brooklyn.entity.rebind.transformer.impl.DeleteOrphanedLocationsTransformer.java
public BrooklynMemento transform(BrooklynMemento input) throws Exception {
    Set<String> referencedLocationIds = findReferencedLocationIds(input);
    Set<String> unreferencedLocationIds = Sets.newLinkedHashSet();
    List<String> toCheck = Lists.newLinkedList(input.getLocationIds());
    while (!toCheck.isEmpty()) {
        String locationId = toCheck.remove(0);
        List<String> locationsInHierarchy = MutableList.<String>builder()
                .add(locationId)
                .addAll(findLocationAncestors(input, locationId))
                .addAll(findLocationDescendents(input, locationId))
                .build();
        if (containsAny(referencedLocationIds, locationsInHierarchy)) {
            // keep them all
        } else {
            unreferencedLocationIds.addAll(locationsInHierarchy);
        }
        toCheck.removeAll(locationsInHierarchy);
    }

    // TODO What about brooklyn version?
    return BrooklynMementoImpl.builder()
            .applicationIds(input.getApplicationIds())
            .topLevelLocationIds(MutableSet.<String>builder()
                    .addAll(input.getTopLevelLocationIds())
                    .removeAll(unreferencedLocationIds)
                    .build())
            .entities(input.getEntityMementos())
            .locations(MutableMap.<String, LocationMemento>builder()
                    .putAll(input.getLocationMementos())
                    .removeAll(unreferencedLocationIds)
                    .build())
            .policies(input.getPolicyMementos())
            .enrichers(input.getEnricherMementos())
            .catalogItems(input.getCatalogItemMementos())
            .build();
}
From source file:org.splevo.ui.vpexplorer.handler.vpmedit.DeleteVariationPointHandler.java
/**
 * Delete the selected variation point(s) and save the model.
 *
 * {@inheritDoc}
 */
@Override
public Object execute(ExecutionEvent event) throws ExecutionException {
    Set<Resource> resourceToSave = Sets.newLinkedHashSet();
    ISelection selection = HandlerUtil.getCurrentSelection(event);
    if (selection != null && selection instanceof IStructuredSelection) {
        IStructuredSelection strucSelection = (IStructuredSelection) selection;
        if (strucSelection.toList().size() > 0) {
            Shell activeShell = HandlerUtil.getActiveShell(event);
            boolean proceed = MessageDialog.openConfirm(activeShell, "Delete Variation Point(s)",
                    "Are you sure to delete this variation point(s)?");
            if (!proceed) {
                return null;
            }
        }
        for (Object element : strucSelection.toList()) {
            if (element instanceof VariationPoint) {
                VariationPoint vp = (VariationPoint) element;
                logger.info("MANUAL VP DELETE in " + vp.getLocation().getLabel());
                Resource resource = vp.eResource();
                VariationPointGroup group = vp.getGroup();
                group.getVariationPoints().remove(vp);
                resourceToSave.add(resource);
            }
        }
    }
    for (Resource resource : resourceToSave) {
        try {
            resource.save(null);
        } catch (IOException e) {
            throw new ExecutionException("Failed to save modified resource", e);
        }
    }
    return null;
}
From source file:uk.ac.ebi.atlas.experimentimport.experimentdesign.magetab.RnaSeqExperimentMageTabParser.java
@Override
protected Set<NamedSdrfNode<ScanNode>> getAssayNodes(SDRF sdrf) {
    Set<NamedSdrfNode<ScanNode>> namedSdrfNodes = Sets.newLinkedHashSet();
    for (ScanNode scanNode : sdrf.getNodes(ScanNode.class)) {
        List<String> assayNodes = scanNode.comments.get(ENA_RUN);
        if (assayNodes == null || assayNodes.isEmpty()) {
            // We procure certain human RNA-seq experiments directly from EGA, so their SDRF will not
            // have an ENA_RUN comment. In such cases, we take the RUN_NAME comment column as the assay node.
            assayNodes = scanNode.comments.get(RUN_NAME);
        }
        namedSdrfNodes.add(new NamedSdrfNode(assayNodes.iterator().next(), scanNode));
    }
    return namedSdrfNodes;
}
From source file:cc.kave.episodes.mining.reader.EpisodeParser.java
public Map<Integer, Set<Episode>> parse(int numRepos) {
    File filePath = getFilePath(numRepos);
    Logger.log("%s", filePath.getAbsolutePath());
    List<String> lines = reader.readFile(filePath);

    Map<Integer, Set<Episode>> episodeIndexed = new HashMap<Integer, Set<Episode>>();
    Set<Episode> episodes = Sets.newLinkedHashSet();

    String[] rowValues;
    int numNodes = 0;

    for (String line : lines) {
        if (line.contains(":")) {
            rowValues = line.split(":");
            Episode episode = readEpisode(numNodes, rowValues);
            episodes.add(episode);
        } else {
            rowValues = line.split("\\s+");
            if (!episodes.isEmpty()) {
                episodeIndexed.put(numNodes, episodes);
            }
            if (Integer.parseInt(rowValues[3]) > 0) {
                String[] nodeString = rowValues[0].split("-");
                numNodes = Integer.parseInt(nodeString[0]);
                episodes = Sets.newHashSet();
            } else {
                break;
            }
        }
    }
    if (!episodeIndexed.containsKey(numNodes)) {
        episodeIndexed.put(numNodes, episodes);
    }
    return episodeIndexed;
}
From source file:org.artifactory.storage.db.fs.service.TasksServiceImpl.java
@Nonnull
private Set<RepoPath> getRepoPathTasks(String type) {
    // this method expects repo path id as the task value
    Set<RepoPath> repoPaths = Sets.newLinkedHashSet();
    try {
        Set<TaskRecord> tasks = tasksDao.load(type);
        for (TaskRecord task : tasks) {
            repoPaths.add(InternalRepoPathFactory.createRepoPath(task.getTaskContext()));
        }
    } catch (SQLException e) {
        throw new StorageException("Failed to load tasks of type '" + type + "' : " + e.getMessage(), e);
    }
    return repoPaths;
}
From source file:org.obeonetwork.dsl.uml2.core.internal.services.UIServices.java
/**
 * Get the displayed nodes in a diagram.
 *
 * @param diagram
 *            Diagram
 * @return List of displayed semantic objects.
 */
public Collection<EObject> getDisplayedNodes(DDiagram diagram) {
    final Set<EObject> result = Sets.newLinkedHashSet();
    final DDiagramQuery query = new DDiagramQuery(diagram);
    for (final DDiagramElement diagramElement : query.getAllDiagramElements()) {
        result.add(diagramElement.getTarget());
    }
    return result;
}
From source file:org.summer.dsl.xbase.scoping.featurecalls.AbstractStaticMethodsFeatureForTypeProvider.java
public Iterable<JvmFeature> getFeaturesByName(String name, JvmTypeReference declarator,
        Iterable<JvmTypeReference> hierarchy) {
    Set<JvmFeature> result = Sets.newLinkedHashSet();
    if (declarator != null) {
        collectFeatures(name, hierarchy, result);
    } else {
        collectFeatures(name, null, result);
    }
    return result;
}