List of usage examples for com.google.common.collect ImmutableSet size
int size();
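Before the project-level examples below, here is a minimal, self-contained sketch (not taken from any of the listed projects) showing what ImmutableSet#size() returns; the element values are made up for illustration. Duplicates passed to the factory methods are collapsed, so size() reports the number of distinct elements.

import com.google.common.collect.ImmutableSet;

public class ImmutableSetSizeExample {
    public static void main(String[] args) {
        // Duplicate elements are collapsed, so size() counts distinct elements only.
        ImmutableSet<String> roles = ImmutableSet.of("admin", "user", "user");
        System.out.println(roles.size()); // prints 2

        // size() on a copied array likewise reflects the de-duplicated element count.
        ImmutableSet<Integer> ports = ImmutableSet.copyOf(new Integer[] { 8080, 8081, 8080 });
        System.out.println(ports.size()); // prints 2
    }
}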
From source file:io.mesosphere.mesos.frameworks.cassandra.scheduler.CassandraCluster.java
@NotNull
static List<String> hasResources(@NotNull final Protos.Offer offer, @NotNull final TaskResources taskResources,
        @NotNull final Map<String, Long> portMapping, @NotNull final String mesosRole) {
    final List<String> errors = newArrayList();
    final ListMultimap<String, Protos.Resource> availableResources = resourcesForRoleAndOffer(mesosRole, offer);

    final double availableCpus = maxResourceValueDouble(availableResources.get("cpus")).or(0d);
    final long availableMem = maxResourceValueLong(availableResources.get("mem")).or(0L);
    final long availableDisk = maxResourceValueLong(availableResources.get("disk")).or(0L);

    if (availableCpus < taskResources.getCpuCores()) {
        errors.add(String.format("Not enough cpu resources for role %s. Required %s only %s available",
                mesosRole, String.valueOf(taskResources.getCpuCores()), String.valueOf(availableCpus)));
    }
    if (availableMem < taskResources.getMemMb()) {
        errors.add(String.format("Not enough mem resources for role %s. Required %d only %d available",
                mesosRole, taskResources.getMemMb(), availableMem));
    }
    if (availableDisk < taskResources.getDiskMb()) {
        errors.add(String.format("Not enough disk resources for role %s. Required %d only %d available",
                mesosRole, taskResources.getDiskMb(), availableDisk));
    }

    ImmutableSet<Long> ports = from(availableResources.get("ports")).transformAndConcat(resourceToPortSet())
            .toSet();
    for (final Map.Entry<String, Long> entry : portMapping.entrySet()) {
        final String key = entry.getKey();
        final Long value = entry.getValue();
        if (!ports.contains(value)) {
            errors.add(String.format("Unavailable port %d(%s) for role %s. %d other ports available",
                    value, key, mesosRole, ports.size()));
        }
    }

    return errors;
}
From source file:dagger.internal.codegen.DependencyRequestFormatter.java
/**
 * Returns a string of the form "{@code @BindsOptionalOf SomeKey is declared at Module.method()}",
 * where {@code Module.method()} is the declaration. If there is more than one such declaration,
 * one is chosen arbitrarily, and ", among others" is appended.
 */
private String formatSyntheticOptionalBindingDependency(
        ImmutableSet<OptionalBindingDeclaration> optionalBindingDeclarations) {
    OptionalBindingDeclaration optionalBindingDeclaration = optionalBindingDeclarations.iterator().next();
    StringBuilder builder = new StringBuilder();
    builder.append(INDENT)
            .append("@BindsOptionalOf ")
            .append(formatKey(optionalBindingDeclaration.key()))
            .append(" is declared at\n")
            .append(DOUBLE_INDENT);

    appendEnclosingTypeAndMemberName(optionalBindingDeclaration.bindingElement().get(), builder);
    builder.append("()");

    if (optionalBindingDeclarations.size() > 1) {
        builder.append(", among others");
    }

    return builder.toString();
}
From source file:org.locationtech.geogig.model.impl.LegacyTreeBuilder.java
private Map<Integer, RevTree> getBucketTrees(ImmutableSet<Integer> changedBucketIndexes) {

    Map<Integer, RevTree> bucketTrees = new HashMap<>();

    List<Integer> missing = new ArrayList<>(changedBucketIndexes.size());

    for (Integer bucketIndex : changedBucketIndexes) {
        Bucket bucket = bucketTreesByBucket.get(bucketIndex);
        RevTree cached = bucket == null ? RevTree.EMPTY : pendingWritesCache.get(bucket.getObjectId());
        if (cached == null) {
            missing.add(bucketIndex);
        } else {
            bucketTrees.put(bucketIndex, cached);
        }
    }
    if (!missing.isEmpty()) {
        Map<ObjectId, Integer> ids = Maps.uniqueIndex(missing, new Function<Integer, ObjectId>() {
            @Override
            public ObjectId apply(Integer index) {
                return bucketTreesByBucket.get(index).getObjectId();
            }
        });
        Iterator<RevObject> all = obStore.getAll(ids.keySet());
        while (all.hasNext()) {
            RevObject next = all.next();
            bucketTrees.put(ids.get(next.getId()), (RevTree) next);
        }
    }
    return bucketTrees;
}
From source file:com.facebook.buck.cli.Main.java
private void loadListenersFromBuckConfig(ImmutableList.Builder<BuckEventListener> eventListeners,
        ProjectFilesystem projectFilesystem, BuckConfig config) {
    final ImmutableSet<String> paths = config.getListenerJars();
    if (paths.isEmpty()) {
        return;
    }

    URL[] urlsArray = new URL[paths.size()];
    try {
        int i = 0;
        for (String path : paths) {
            String urlString = "file://" + projectFilesystem.getAbsolutifier().apply(Paths.get(path));
            urlsArray[i] = new URL(urlString);
            i++;
        }
    } catch (MalformedURLException e) {
        throw new HumanReadableException(e.getMessage());
    }

    // This ClassLoader is disconnected to allow searching the JARs (and just the JARs) for classes.
    ClassLoader isolatedClassLoader = URLClassLoader.newInstance(urlsArray, null);

    ImmutableSet<ClassPath.ClassInfo> classInfos;
    try {
        ClassPath classPath = ClassPath.from(isolatedClassLoader);
        classInfos = classPath.getTopLevelClasses();
    } catch (IOException e) {
        throw new HumanReadableException(e.getMessage());
    }

    // This ClassLoader will actually work, because it is joined to the parent ClassLoader.
    URLClassLoader workingClassLoader = URLClassLoader.newInstance(urlsArray);

    for (ClassPath.ClassInfo classInfo : classInfos) {
        String className = classInfo.getName();
        try {
            Class<?> aClass = Class.forName(className, true, workingClassLoader);
            if (BuckEventListener.class.isAssignableFrom(aClass)) {
                BuckEventListener listener = aClass.asSubclass(BuckEventListener.class).newInstance();
                eventListeners.add(listener);
            }
        } catch (ReflectiveOperationException e) {
            throw new HumanReadableException("Error loading event listener class '%s': %s: %s", className,
                    e.getClass(), e.getMessage());
        }
    }
}
From source file:com.b2international.snowowl.snomed.api.impl.traceability.SnomedTraceabilityChangeProcessor.java
@Override
public void afterCommit() {
    final Timer traceabilityTimer = MetricsThreadLocal.get().timer("traceability");
    try {
        traceabilityTimer.start();
        if (commitChangeSet != null) {
            boolean isLightWeight = false;
            if (!entry.getChanges().isEmpty()) {
                if (!isDeltaImportInProgress(branchPath.getPath())
                        && !isClassificationInProgress(branchPath.getPath())) {
                    final ImmutableSet<String> conceptIds = ImmutableSet.copyOf(entry.getChanges().keySet());

                    final SnomedConcepts concepts = SnomedRequests.prepareSearchConcept()
                            .setLimit(conceptIds.size())
                            .filterByIds(conceptIds)
                            .setExpand("descriptions(expand(inactivationProperties())),relationships(expand(destination()))")
                            .build(SnomedDatastoreActivator.REPOSITORY_UUID, branchPath.getPath())
                            .execute(getBus())
                            .getSync();

                    final Set<SnomedBrowserConcept> convertedConcepts = newHashSet();
                    for (SnomedConcept concept : concepts) {
                        SnomedBrowserConcept convertedConcept = new SnomedBrowserConcept();

                        convertedConcept.setActive(concept.isActive());
                        convertedConcept.setConceptId(concept.getId());
                        convertedConcept.setDefinitionStatus(concept.getDefinitionStatus());
                        convertedConcept.setDescriptions(convertDescriptions(concept.getDescriptions()));
                        convertedConcept.setEffectiveTime(concept.getEffectiveTime());
                        convertedConcept.setModuleId(concept.getModuleId());
                        convertedConcept.setRelationships(convertRelationships(concept.getRelationships()));
                        convertedConcept.setFsn(concept.getId());
                        convertedConcept.setInactivationIndicator(concept.getInactivationIndicator());
                        convertedConcept.setAssociationTargets(concept.getAssociationTargets());

                        convertedConcepts.add(convertedConcept);
                    }

                    Collection<? extends ISnomedBrowserConcept> resultsConcepts;
                    if (!convertedConcepts.isEmpty()) {
                        // Lookup and expand axioms
                        resultsConcepts = getAxiomService().expandAxioms(convertedConcepts,
                                branchPath.getPath(), getLocales());
                    } else {
                        resultsConcepts = convertedConcepts;
                    }

                    for (ISnomedBrowserConcept result : resultsConcepts) {
                        // PT and SYN labels are not populated
                        entry.setConcept(result.getId(), (SnomedBrowserConcept) result);
                    }
                } else {
                    isLightWeight = true;
                }
            }

            TRACE_LOGGER.info(objectWriter.writeValueAsString(entry));
            SYS_LOGGER.info("Logged {} {}traceability entries on branch {}", entry.getChanges().size(),
                    isLightWeight ? "lightweight " : "", branchPath.getPath());
        }
    } catch (IOException e) {
        throw SnowowlRuntimeException.wrap(e);
    } finally {
        traceabilityTimer.stop();
    }
}
From source file:no.ssb.vtl.script.operations.FoldOperation.java
/**
 * Create indices used by the fold operation.
 *
 * @param structure the new structure.
 */
private void computeIndices(DataStructure structure) {
    ImmutableSet<String> originalColumns = ImmutableSet.copyOf(getChild().getDataStructure().keySet());
    ImmutableSet<String> columns = ImmutableSet.copyOf(structure.keySet());

    // Indices of the common columns.
    copyIndices = Sets.intersection(columns, originalColumns).stream()
            .mapToInt(originalColumns.asList()::indexOf).toArray();

    elementIndices = Sets.intersection(elements, originalColumns).stream()
            .mapToInt(originalColumns.asList()::indexOf).toArray();
    elementNames = elements.asList().toArray(new String[] {});

    measureIndex = columns.asList().indexOf(measure);
    dimensionIndex = columns.asList().indexOf(dimension);

    size = columns.size();
}
From source file:com.palantir.util.MutuallyExclusiveSetLock.java
private ImmutableSet<T> validateLockInput(Iterable<T> lockObjects) {
    if (lockObjects == null) {
        throw new IllegalArgumentException("lockObjects is null");
    }
    if (threadSet.contains(Thread.currentThread())) {
        throw new IllegalStateException("You must not synchronize twice in the same thread");
    }

    ImmutableSet<T> hashSet = ImmutableSet.copyOf(lockObjects);
    if (comparator == null) {
        for (T t : hashSet) {
            if (!(t instanceof Comparable)) {
                throw new IllegalArgumentException(
                        "you must either specify a comparator or pass in comparable objects");
            }
        }
    }

    // verify that the compareTo and equals are consistent in that we are always locking on all objects
    SortedSet<T> treeSet = new TreeSet<T>(comparator);
    treeSet.addAll(hashSet);
    if (treeSet.size() != hashSet.size()) {
        throw new IllegalArgumentException("The number of elements using .equals and compareTo differ. "
                + "This means that compareTo and equals are not consistent "
                + "which will cause some objects to not be locked");
    }
    return hashSet;
}
From source file:org.fcrepo.transform.sparql.JQLQueryVisitor.java
/**
 * Get the columns for the JCR query
 *
 * @return the columns of the JCR query, as an array
 */
private Column[] getColumns() {
    final ImmutableSet<Column> build = this.columns.build();
    return build.toArray(new Column[build.size()]);
}
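As a side note (not part of the Fedora source above), the size()-based toArray idiom shown there works with any ImmutableSet; passing a zero-length array is a commonly used alternative. A minimal, self-contained sketch with made-up String elements:

import com.google.common.collect.ImmutableSet;

public class ToArrayIdiom {
    public static void main(String[] args) {
        ImmutableSet<String> columns = ImmutableSet.of("title", "creator", "date");
        // Pre-size the destination array from ImmutableSet.size(), as in the example above.
        String[] presized = columns.toArray(new String[columns.size()]);
        // Equivalent result with a zero-length array as the type hint.
        String[] zeroLength = columns.toArray(new String[0]);
        System.out.println(presized.length + " " + zeroLength.length); // prints "3 3"
    }
}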
From source file:org.caleydo.view.crossword.api.ui.CrosswordMultiElement.java
/**
 * @param crosswordElement
 * @param dimensionSubTablePerspectives
 */
private void split(CrosswordElement base, boolean inDim) {
    TablePerspective table = base.getTablePerspective();
    final EDimension type = EDimension.get(inDim);
    final GraphVertex baseVertex = toVertex(base);

    final GroupList groups = (inDim ? table.getDimensionPerspective() : table.getRecordPerspective())
            .getVirtualArray().getGroupList();
    assert groups.size() > 1;

    List<CrosswordElement> children = toElements(base, inDim, baseVertex, table);

    // combine the elements that should be ignored
    final ImmutableSet<CrosswordElement> ignore = ImmutableSet.<CrosswordElement>builder().addAll(children)
            .add(base).build();

    int total = inDim ? table.getNrDimensions() : table.getNrRecords();

    List<GraphVertex> vertices = new ArrayList<>(ignore.size());
    for (int i = 0; i < children.size(); ++i) {
        CrosswordElement child = children.get(i);
        Group group = groups.get(i);
        GraphVertex vertex = addImpl(child);
        vertices.add(vertex);
        createBands(vertex, ignore);

        int startIndex = group.getStartIndex();
        float offset = startIndex / (float) total;
        addEdge(baseVertex, vertex, PARENT_CHILD, connect(type, ConnectorStrategies.createParent(offset)),
                connect(type)); // add parent edge

        for (int j = 0; j < i; ++j) {
            GraphVertex child2 = vertices.get(j);
            addEdge(vertex, child2, SIBLING, connect(type.opposite()), connect(type.opposite())); // add sibling edge
        }
    }

    // update metadata flags
    TablePerspectiveMetaData metaData = base.getMetaData();
    (inDim ? metaData.getDimension() : metaData.getRecord()).setSplitted();
}
From source file:com.facebook.buck.core.build.engine.manifest.Manifest.java
/** Adds a new output file to the manifest. */
public void addEntry(FileHashCache fileHashCache, RuleKey key, SourcePathResolver resolver,
        ImmutableSet<SourcePath> universe, ImmutableSet<SourcePath> inputs) throws IOException {

    // Construct the input sub-paths that we care about.
    ImmutableSet<String> inputPaths = RichStream.from(inputs).map(sourcePathToManifestHeaderFunction(resolver))
            .toImmutableSet();

    // Create a multimap from paths we care about to SourcePaths that map to them.
    ImmutableListMultimap<String, SourcePath> sortedUniverse = index(universe,
            sourcePathToManifestHeaderFunction(resolver), inputPaths::contains);

    // Record the Entry.
    int index = 0;
    int[] hashIndices = new int[inputs.size()];
    for (String relativePath : inputPaths) {
        ImmutableList<SourcePath> paths = sortedUniverse.get(relativePath);
        Preconditions.checkState(!paths.isEmpty());
        hashIndices[index++] = addHash(relativePath, hashSourcePathGroup(fileHashCache, resolver, paths));
    }
    entries.add(new Pair<>(key, hashIndices));
}