List of usage examples for com.google.common.collect.Sets.newLinkedHashSet
public static <E> LinkedHashSet<E> newLinkedHashSet()
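Before the project examples, a minimal sketch of the call itself (assuming Guava is on the classpath; the class name and element values are illustrative only): it creates an empty, mutable LinkedHashSet whose iteration order follows insertion order.

import com.google.common.collect.Sets;
import java.util.LinkedHashSet;

public class NewLinkedHashSetExample {
    public static void main(String[] args) {
        // Element type is inferred from the target of the assignment.
        LinkedHashSet<String> names = Sets.newLinkedHashSet();
        names.add("first");
        names.add("second");
        names.add("first"); // duplicate, silently ignored
        // Prints [first, second]: insertion order is preserved.
        System.out.println(names);
    }
}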
From source file:org.eclipse.sirius.ui.properties.internal.expressions.VSMNavigation.java
/**
 * Returns the domain class of a given {@link GroupDescription}, defaulting
 * to the union of all the possible domain classes from pages which
 * reference that group if no value was set explicitly.
 *
 * @param group
 *            a {@link GroupDescription}.
 * @return the names of potential domain classes for that group.
 */
public static Collection<String> getGroupDomainClass(GroupDescription group) {
    if (group != null && !Util.isBlank(group.getDomainClass())) {
        return Collections.singleton(group.getDomainClass());
    } else {
        Collection<String> result = Sets.newLinkedHashSet();
        for (PageDescription page : VSMNavigation.findReferencingPages(group)) {
            result.addAll(getPageDomainClass(page));
        }
        return result;
    }
}
From source file:org.apache.brooklyn.core.entity.internal.ConfigMapViewWithStringKeys.java
@Override
public Set<String> keySet() {
    LinkedHashSet<String> result = Sets.newLinkedHashSet();
    Set<Map.Entry<ConfigKey<?>, Object>> set = target.getAllConfig().entrySet();
    for (final Map.Entry<ConfigKey<?>, Object> entry : set) {
        result.add(entry.getKey().getName());
    }
    return result;
}
From source file:org.jetbrains.jet.lang.resolve.scopes.WriteThroughScope.java
@Override
@NotNull
public Collection<FunctionDescriptor> getFunctions(@NotNull Name name) {
    checkMayRead();

    Set<FunctionDescriptor> result = Sets.newLinkedHashSet();

    result.addAll(writableWorker.getFunctions(name));
    result.addAll(getWorkerScope().getFunctions(name));
    result.addAll(super.getFunctions(name)); // Imports

    return result;
}
From source file:de.matzefratze123.simpletrading.TradeFactory.java
public TradeFactory(SimpleTrading plugin, MessageConfiguration messageConfig, TradeConfiguration config,
        Economy econ, ItemControlManager controlManager) {
    this.plugin = plugin;
    this.config = config;
    this.trades = Sets.newLinkedHashSet();
    this.messageConfig = messageConfig;
    this.econ = econ;
    this.controlManager = controlManager;

    plugin.getServer().getPluginManager().registerEvents(this, plugin);
}
From source file:org.grouplens.lenskit.eval.traintest.ExperimentSuite.java
/**
 * Get the set of all algorithmInfo attribute names.
 *
 * @return The set of all algorithmInfo attribute names.
 */
public Set<String> getAlgorithmAttributes() {
    Set<String> attrs = Sets.newLinkedHashSet();
    for (Attributed algo : getAllAlgorithms()) {
        attrs.addAll(algo.getAttributes().keySet());
    }
    return attrs;
}
From source file:org.apache.druid.indexer.path.StaticPathSpec.java
public static void addToMultipleInputs(HadoopDruidIndexerConfig config, Job job, Set<String> paths,
        Class<? extends InputFormat> inputFormatClass) {
    if (paths == null || paths.isEmpty()) {
        return;
    }

    Class<? extends InputFormat> inputFormatClassToUse = inputFormatClass;
    if (inputFormatClassToUse == null) {
        if (config.isCombineText()) {
            inputFormatClassToUse = CombineTextInputFormat.class;
        } else {
            inputFormatClassToUse = TextInputFormat.class;
        }
    }

    // Due to https://issues.apache.org/jira/browse/MAPREDUCE-5061 we can't directly do
    // MultipleInputs.addInputPath(job, path, inputFormatClassToUse)
    // but have to handle hadoop glob path ourselves correctly
    // This change and HadoopGlobPathSplitter.java can be removed once the hadoop issue is fixed
    Set<String> pathStrings = Sets.newLinkedHashSet();
    for (String path : paths) {
        Iterables.addAll(pathStrings, HadoopGlobPathSplitter.splitGlob(path));
    }

    if (!pathStrings.isEmpty()) {
        addInputPath(job, pathStrings, inputFormatClassToUse);
    }
}
From source file:org.eclipse.emf.compare.match.eobject.internal.ReflectiveWeightProvider.java
/**
 * Create the weight provider.
 */
public ReflectiveWeightProvider() {
    weights = Maps.newHashMap();
    toBeIgnored = Sets.newLinkedHashSet();
}
From source file:org.lanternpowered.server.world.gen.debug.DebugGenerationPopulator.java
public DebugGenerationPopulator(GameRegistry registry) {
    checkNotNull(registry, "registry");
    final Set<BlockState> blockStates = Sets.newLinkedHashSet();
    for (BlockType blockType : registry.getAllOf(BlockType.class)) {
        blockStates.addAll(blockType.getAllBlockStates().stream()
                .filter(state -> !((LanternBlockState) state).isExtended())
                .collect(Collectors.toList()));
    }
    this.blockStateCache = blockStates.toArray(new BlockState[blockStates.size()]);
    this.size = (int) Math.ceil(Math.sqrt((double) this.blockStateCache.length));
}
From source file:com.github.fge.jsonschema.load.RefResolver.java
@Override
public SchemaTree rawProcess(final ProcessingReport report, final SchemaTree input) throws ProcessingException {
    /*
     * The set of refs we see during ref resolution, necessary to detect ref
     * loops. We make it linked since we want the ref path reported in the
     * order where refs have been encountered.
     */
    final Set<JsonRef> refs = Sets.newLinkedHashSet();

    SchemaTree tree = input;

    JsonPointer ptr;
    JsonRef ref;
    JsonNode node;

    while (true) {
        /*
         * See if the current node is a JSON Reference.
         */
        node = tree.getNode();
        /*
         * If it isn't, we are done
         */
        ref = nodeAsRef(node);
        if (ref == null)
            break;
        /*
         * Resolve the reference against the current tree.
         */
        ref = tree.resolve(ref);
        /*
         * If we have seen this ref already, this is a ref loop.
         */
        if (!refs.add(ref))
            throw new ProcessingException(
                    new ProcessingMessage().setMessage(BUNDLE.getMessage("refProcessing.refLoop"))
                            .put("schema", tree).putArgument("ref", ref).put("path", refs));
        /*
         * Check whether ref is resolvable within the current tree. If not,
         * fetch the new tree.
         *
         * This may fail, in which case we exit here since SchemaLoader's
         * .get() throws a ProcessingException if it fails.
         */
        if (!tree.containsRef(ref))
            tree = loader.get(ref.getLocator());
        /*
         * Get the appropriate pointer into the tree. If none, this means
         * a dangling reference.
         */
        ptr = tree.matchingPointer(ref);
        if (ptr == null)
            throw new ProcessingException(
                    new ProcessingMessage().setMessage(BUNDLE.getMessage("refProcessing.danglingRef"))
                            .put("schema", tree).putArgument("ref", ref));
        tree = tree.setPointer(ptr);
    }

    return tree;
}
From source file:org.terasology.cities.generator.LotGeneratorRandom.java
/**
 * @param city the city
 * @param si describes the blocked area for a sector
 * @return a set of lots for that city within the city radius
 */
public Set<SimpleLot> generate(City city, AreaInfo si) {
    Random rand = new FastRandom(Objects.hash(seed, city));

    Vector2i center = city.getPos();

    Set<SimpleLot> lots = Sets.newLinkedHashSet(); // the order is important for deterministic generation

    double maxLotDiam = maxSize * Math.sqrt(2);
    double minRad = 5 + maxSize * 0.5;
    double maxRad = (city.getDiameter() - maxLotDiam) * 0.5;

    if (minRad >= maxRad) {
        return lots; // which is empty
    }

    for (int i = 0; i < maxTries && lots.size() < maxLots; i++) {
        double ang = rand.nextDouble(0, Math.PI * 2.0);
        double rad = rand.nextDouble(minRad, maxRad);
        double desSizeX = rand.nextDouble(minSize, maxSize);
        double desSizeZ = rand.nextDouble(minSize, maxSize);

        double x = center.x + rad * Math.cos(ang);
        double z = center.y + rad * Math.sin(ang);

        Point2d pos = new Point2d(x, z);
        Vector2d maxSpace = getMaxSpace(pos, lots);

        int sizeX = (int) Math.min(desSizeX, maxSpace.x);
        int sizeZ = (int) Math.min(desSizeZ, maxSpace.y);

        // check if enough space is available
        if (sizeX < minSize || sizeZ < minSize) {
            continue;
        }

        Rectangle shape = new Rectangle((int) (pos.x - sizeX * 0.5), (int) (pos.y - sizeZ * 0.5), sizeX, sizeZ);

        // check if lot intersects with blocked area
        if (si.isBlocked(shape)) {
            continue;
        }

        si.addBlockedArea(shape);

        // all tests passed -> create and add
        SimpleLot lot = new SimpleLot(shape);
        lots.add(lot);
    }

    logger.debug("Generated {} lots for city {}", lots.size(), city);

    return lots;
}