Example usage for com.google.common.collect Sets newHashSetWithExpectedSize

List of usage examples for com.google.common.collect Sets newHashSetWithExpectedSize

Introduction

In this page you can find the example usage for com.google.common.collect Sets newHashSetWithExpectedSize.

Prototype

public static <E> HashSet<E> newHashSetWithExpectedSize(int expectedSize) 

Source Link

Document

Creates a HashSet instance, with a high enough initial table size that it should hold expectedSize elements without resizing.

Usage

From source file: com.android.build.gradle.internal.dsl.NdkConfigDsl.java

@NonNull
public NdkConfigDsl setLdLibs(Collection<String> libs) {
    // NOTE(review): despite the method name, this mutates abiFilters rather than an
    // ldLibs field — looks like a copy/paste slip; confirm against the class's fields
    // before changing, as callers may rely on the current behavior.
    if (libs == null) {
        abiFilters = null;
        return this;
    }
    if (abiFilters != null) {
        abiFilters.clear();
    } else {
        abiFilters = Sets.newHashSetWithExpectedSize(libs.size());
    }
    abiFilters.addAll(libs);
    return this;
}

From source file: com.google.auto.factory.processor.Parameter.java

/**
 * Builds the set of {@code Parameter}s for a parameter list, pairing each variable
 * with its corresponding type. Parameter names must be unique; the input lists must
 * have equal length.
 */
static ImmutableSet<Parameter> forParameterList(List<? extends VariableElement> variables,
        List<? extends TypeMirror> variableTypes) {
    checkArgument(variables.size() == variableTypes.size());
    int count = variables.size();
    Set<String> seenNames = Sets.newHashSetWithExpectedSize(count);
    ImmutableSet.Builder<Parameter> parameterBuilder = ImmutableSet.builder();
    for (int index = 0; index < count; index++) {
        Parameter current = forVariableElement(variables.get(index), variableTypes.get(index));
        // Set.add returns false on duplicates; duplicate names are a programming error.
        checkArgument(seenNames.add(current.name));
        parameterBuilder.add(current);
    }
    ImmutableSet<Parameter> built = parameterBuilder.build();
    checkArgument(count == built.size());
    return built;
}

From source file: org.graylog.plugins.usagestatistics.collectors.ElasticsearchCollector.java

/**
 * Collects version, host, and JVM information for every known Elasticsearch node.
 * Nodes for which either the node info or the node stats lookup is missing are
 * skipped with a warning.
 *
 * @return info beans for all nodes whose data could be fully resolved
 */
public Set<ElasticsearchNodeInfo> getNodeInfos() {
    final Map<String, NodeInfo> nodeInfos = fetchNodeInfos();
    final Map<String, NodeStats> nodeStats = fetchNodeStats();

    final Set<ElasticsearchNodeInfo> elasticsearchNodeInfos = Sets.newHashSetWithExpectedSize(nodeInfos.size());
    // Iterate over entries instead of keySet()+get() to avoid a redundant lookup per node.
    for (Map.Entry<String, NodeInfo> nodeEntry : nodeInfos.entrySet()) {
        final String node = nodeEntry.getKey();
        final NodeInfo info = nodeEntry.getValue();
        final NodeStats stats = nodeStats.get(node);

        if (info == null || stats == null) {
            LOG.warn("Couldn't retrieve all required information from Elasticsearch node {}, skipping.", node);
            continue;
        }

        // TODO remove these as soon as the backend service treats HostInfo as optional
        // the host info details aren't available in Elasticsearch 2.x anymore, but we still report the empty
        // bean because the backend service still expects some data (even if it is empty)
        final MacAddress macAddress = MacAddress.EMPTY;
        final HostInfo.Cpu cpu = null;
        final HostInfo.Memory memory = null;
        final HostInfo.Memory swap = null;
        final HostInfo hostInfo = HostInfo.create(macAddress, cpu, memory, swap);

        // Collect garbage collector names when JVM stats are available for this node.
        final List<String> garbageCollectors;
        if (stats.getJvm() != null) {
            garbageCollectors = Lists.newArrayList();
            for (JvmStats.GarbageCollector gc : stats.getJvm().getGc()) {
                garbageCollectors.add(gc.getName());
            }
        } else {
            garbageCollectors = Collections.emptyList();
        }

        final JvmInfo jvmInfo;
        if (info.getJvm() != null) {
            final JvmInfo.Memory jvmMemory = JvmInfo.Memory.create(info.getJvm().getMem().getHeapInit().bytes(),
                    info.getJvm().getMem().getHeapMax().bytes(),
                    info.getJvm().getMem().getNonHeapInit().bytes(),
                    info.getJvm().getMem().getNonHeapMax().bytes(),
                    info.getJvm().getMem().getDirectMemoryMax().bytes());
            final JvmInfo.Os jvmOs = JvmInfo.Os.create(info.getJvm().getSystemProperties().get("os.name"),
                    info.getJvm().getSystemProperties().get("os.version"),
                    info.getJvm().getSystemProperties().get("os.arch"));
            jvmInfo = JvmInfo.create(info.getJvm().version(), info.getJvm().getVmName(),
                    info.getJvm().getVmVersion(), info.getJvm().getVmVendor(), jvmOs, jvmMemory,
                    garbageCollectors);
        } else {
            jvmInfo = null;
        }

        final ElasticsearchNodeInfo elasticsearchNodeInfo = ElasticsearchNodeInfo
                .create(info.getVersion().toString(), hostInfo, jvmInfo);

        elasticsearchNodeInfos.add(elasticsearchNodeInfo);
    }

    return elasticsearchNodeInfos;
}

From source file: com.edmunds.etm.runtime.impl.ApplicationRepository.java

/**
 * Returns the currently active version of every application series.
 * Series without an active version are omitted.
 */
public synchronized Set<Application> getActiveApplications() {
    final Set<Application> active = Sets.newHashSetWithExpectedSize(seriesByName.size());
    for (ApplicationSeries series : seriesByName.values()) {
        final Application version = series.getActiveVersion();
        if (version == null) {
            continue;
        }
        active.add(version);
    }
    return active;
}

From source file: com.android.build.gradle.internal.dependency.VariantDependencies.java

/**
 * Creates the {@link VariantDependencies} for a variant: gathers the compile, apk,
 * and annotation-processor configurations from the given providers (and optional
 * parent variant), creates the internal Gradle configurations for the variant, and,
 * when publishing, the published/mapping/classes/metadata/manifest configurations.
 *
 * NOTE(review): the exact creation order of the Gradle configurations below is kept
 * as-is; Gradle's maybeCreate/extendsFrom wiring is order-sensitive in places.
 *
 * @param project            the Gradle project the configurations are created in
 * @param errorReporter      receiver for dependency errors, passed to the checker
 * @param variantName        name used to derive all configuration names
 * @param publishVariant     whether to create the publishing configurations
 * @param variantType        drives the apk-vs-publish configuration naming
 * @param testedVariantType  the tested variant type, if any, for the checker
 * @param parentVariant      optional parent whose configurations are inherited
 * @param providers          per-sourceset configuration providers (null entries skipped)
 * @return the assembled variant dependency container
 */
public static VariantDependencies compute(@NonNull Project project, @NonNull ErrorReporter errorReporter,
        @NonNull String variantName, boolean publishVariant, @NonNull VariantType variantType,
        @Nullable VariantType testedVariantType, @Nullable VariantDependencies parentVariant,
        @NonNull ConfigurationProvider... providers) {
    // compile needs room for both the compile and optional provided configuration per provider.
    Set<Configuration> compileConfigs = Sets.newHashSetWithExpectedSize(providers.length * 2);
    Set<Configuration> apkConfigs = Sets.newHashSetWithExpectedSize(providers.length);
    Set<Configuration> annotationConfigs = Sets.newHashSetWithExpectedSize(providers.length);

    for (ConfigurationProvider provider : providers) {
        if (provider != null) {
            compileConfigs.add(provider.getCompileConfiguration());
            if (provider.getProvidedConfiguration() != null) {
                compileConfigs.add(provider.getProvidedConfiguration());
            }

            // compile configuration also contributes to the apk/package graph.
            apkConfigs.add(provider.getCompileConfiguration());
            apkConfigs.add(provider.getPackageConfiguration());
            annotationConfigs.add(provider.getAnnotationProcessorConfiguration());
        }
    }

    // Inherit the parent variant's configurations, if any.
    if (parentVariant != null) {
        compileConfigs.add(parentVariant.getCompileConfiguration());
        apkConfigs.add(parentVariant.getPackageConfiguration());
        annotationConfigs.add(parentVariant.getAnnotationProcessorConfiguration());
    }

    Configuration compile = project.getConfigurations().maybeCreate("_" + variantName + "Compile");
    compile.setVisible(false);
    compile.setDescription("## Internal use, do not manually configure ##");
    compile.setExtendsFrom(compileConfigs);

    Configuration annotationProcessor = project.getConfigurations()
            .maybeCreate("_" + variantName + "AnnotationProcessor");
    annotationProcessor.setVisible(false);
    annotationProcessor.setDescription("## Internal use, do not manually configure ##");
    annotationProcessor.setExtendsFrom(annotationConfigs);

    // Libraries publish, applications package an apk.
    Configuration apk = project.getConfigurations().maybeCreate(
            variantType == VariantType.LIBRARY ? "_" + variantName + "Publish" : "_" + variantName + "Apk");

    apk.setVisible(false);
    apk.setDescription("## Internal use, do not manually configure ##");
    apk.setExtendsFrom(apkConfigs);

    Configuration publish = null;
    Configuration mapping = null;
    Configuration classes = null;
    Configuration metadata = null;
    Configuration manifest = null;
    if (publishVariant) {
        publish = project.getConfigurations().maybeCreate(variantName);
        publish.setDescription("Published Configuration for Variant " + variantName);
        // if the variant is not a library, then the publishing configuration should
        // not extend from the apkConfigs. It's mostly there to access the artifact from
        // another project but it shouldn't bring any dependencies with it.
        if (variantType == VariantType.LIBRARY) {
            publish.setExtendsFrom(apkConfigs);
        }

        // create configuration for -metadata.
        metadata = project.getConfigurations().create(variantName + CONFIGURATION_METADATA);
        metadata.setDescription("Published APKs metadata for Variant " + variantName);

        // create configuration for -mapping and -classes.
        mapping = project.getConfigurations().maybeCreate(variantName + CONFIGURATION_MAPPING);
        mapping.setDescription("Published mapping configuration for Variant " + variantName);

        classes = project.getConfigurations().maybeCreate(variantName + CONFIGURATION_CLASSES);
        classes.setDescription("Published classes configuration for Variant " + variantName);

        // create configuration for -manifest
        manifest = project.getConfigurations().maybeCreate(variantName + CONFIGURATION_MANIFEST);
        manifest.setDescription("Published manifest configuration for Variant " + variantName);

        // because we need the transitive dependencies for the classes, extend the compile config.
        classes.setExtendsFrom(compileConfigs);
    }

    DependencyChecker checker = new DependencyChecker(project.getName(), variantName, errorReporter,
            variantType, testedVariantType);

    return new VariantDependencies(variantName, checker, compile, apk, publish, annotationProcessor, mapping,
            classes, metadata, manifest);
}

From source file: com.nesscomputing.concurrent.NessExecutors.java

/**
 * Invoke all of the given callables.  If they all succeed, returns a list of the futures.  All will be
 * {@link Future#isDone()}.  If any fails, returns the list of Futures that succeeded before the failure, and
 * the final future that caused the computation to fail.  The remaining futures will be cancelled.
 * If the calling thread is interrupted, it will make a best-effort attempt to cancel running tasks.
 *
 * @param service the executor the tasks are submitted to
 * @param tasks   the callables to run
 * @return an unmodifiable list of completed futures, in completion order
 * @throws InterruptedException if the calling thread was interrupted while waiting
 */
public static <T> List<Future<T>> invokeAllExplosively(ExecutorService service,
        Collection<? extends Callable<T>> tasks) throws InterruptedException {
    final ExecutorCompletionService<T> completionService = new ExecutorCompletionService<>(service);
    // JDK collections replace the former Guava builders; the returned list is wrapped
    // unmodifiable to preserve the immutability of the previous ImmutableList result.
    final List<Future<T>> results = new ArrayList<>(tasks.size());
    final Set<Future<T>> inFlight = new HashSet<>(Math.max(16, tasks.size() * 2));

    boolean interrupted = false;

    for (Callable<T> task : tasks) {
        inFlight.add(completionService.submit(task));
    }

    // Drain completions in finish order; stop early on interruption or the first failure.
    while (!inFlight.isEmpty()) {
        final Future<T> future;
        try {
            future = completionService.take();
        } catch (InterruptedException e) {
            interrupted = true;
            break;
        }

        inFlight.remove(future);
        // Added before get() so a failing future is included as the final element.
        results.add(future);

        try {
            future.get();
        } catch (InterruptedException e) {
            interrupted = true;
            break;
        } catch (ExecutionException e) {
            // Failure is reported via the returned future itself; stop collecting.
            break;
        }
    }

    // Best-effort cancellation of anything still outstanding.
    for (final Future<T> future : inFlight) {
        future.cancel(true);
    }

    if (interrupted) {
        throw new InterruptedException();
    }

    return Collections.unmodifiableList(results);
}

From source file: uk.ac.ebi.mdk.hsql.KEGGReactionLoader.java

/**
 * Loads the KEGG Reaction flat file into the HSQLDB reaction schema: inserts one
 * row per reaction (accession + first EC number), one row per distinct compound,
 * and the reactant/product relations with their stoichiometric coefficients.
 * Parsing can be aborted early via isCancelled(). The connection is committed and
 * closed in all cases; SQL errors during load are rethrown as IOException.
 */
@Override
public void update() throws IOException {
    ResourceFileLocation location = getLocation("KEGG Reaction");
    HSQLDBLocation connection = connection();
    try {
        Hsqldb.createReactionSchema(connection.getConnection());
        DSLContext create = DSL.using(connection.getConnection(), HSQLDB);

        // Tracks compounds already queued for insert, so each accession is inserted once.
        Set<String> compoundIds = Sets.newHashSetWithExpectedSize(10000);

        InsertValuesStep2<?, String, String> reactionInsert = create.insertInto(REACTION, REACTION.ACCESSION,
                REACTION.EC);
        InsertValuesStep1<?, String> compoundInsert = create.insertInto(COMPOUND, COMPOUND.ACCESSION);

        // Each entry: [coefficient, compoundId, reactionAccession] (third slot filled below).
        List<String[]> reactants = new ArrayList<String[]>(10000);
        List<String[]> products = new ArrayList<String[]>(10000);

        KEGGReactionParser parser = new KEGGReactionParser(location.open(), KEGGField.ENTRY, KEGGField.EQUATION,
                KEGGField.ENZYME);
        Map<KEGGField, StringBuilder> entry;
        while ((entry = parser.readNext()) != null) {

            if (isCancelled())
                break;

            String equation = entry.get(KEGGField.EQUATION).toString();
            String ec = entry.containsKey(KEGGField.ENZYME) ? entry.get(KEGGField.ENZYME).toString().trim()
                    : "";
            // KEGG equations are written "left <=> right".
            String[] sides = equation.split("<=>");

            String[][] left = getParticipants(sides[0]);
            String[][] right = getParticipants(sides[1]);

            Matcher matcher = ACCESSION.matcher(entry.get(KEGGField.ENTRY).toString());

            // Keep only the first EC number when several are listed.
            if (!ec.isEmpty())
                ec = ec.split("\\s+")[0].trim();

            if (matcher.find()) {
                String accession = matcher.group(1);
                reactionInsert.values(accession, ec);

                for (String[] participant : left) {
                    String cid = participant[1];
                    // add() is true only for first sighting of this compound.
                    if (compoundIds.add(cid))
                        compoundInsert.values(cid);
                    // widen to 3 slots to carry the owning reaction accession.
                    participant = Arrays.copyOf(participant, 3);
                    participant[2] = accession;
                    reactants.add(participant);
                }
                for (String[] participant : right) {
                    String cid = participant[1];
                    if (compoundIds.add(cid))
                        compoundInsert.values(cid);
                    participant = Arrays.copyOf(participant, 3);
                    participant[2] = accession;
                    products.add(participant);
                }

            }

        }

        // do the inserts
        fireProgressUpdate("inserting reactions and compounds");
        reactionInsert.execute();
        compoundInsert.execute();

        fireProgressUpdate("inserting reaction relations");

        // Relations resolve compound/reaction ids via sub-selects on the accessions
        // inserted above, so these must run after the batch inserts.
        for (int i = 0, end = reactants.size() - 1; i <= end; i++) {

            String[] participant = reactants.get(i);
            double coef = Double.parseDouble(participant[0]);
            String cid = participant[1];
            String acc = participant[2];
            create.insertInto(REACTANT).set(REACTANT.COEFFICIENT, coef)
                    .set(REACTANT.COMPOUND_ID,
                            create.select(COMPOUND.ID).from(COMPOUND).where(COMPOUND.ACCESSION.eq(cid)))
                    .set(REACTANT.REACTION_ID,
                            create.select(REACTION.ID).from(REACTION).where(REACTION.ACCESSION.eq(acc)))
                    .execute();
        }

        for (int i = 0, end = products.size() - 1; i <= end; i++) {

            String[] participant = products.get(i);
            double coef = Double.parseDouble(participant[0]);
            String cid = participant[1];
            String acc = participant[2];
            create.insertInto(PRODUCT).set(PRODUCT.COEFFICIENT, coef)
                    .set(PRODUCT.COMPOUND_ID,
                            create.select(COMPOUND.ID).from(COMPOUND).where(COMPOUND.ACCESSION.eq(cid)))
                    .set(PRODUCT.REACTION_ID,
                            create.select(REACTION.ID).from(REACTION).where(REACTION.ACCESSION.eq(acc)))
                    .execute();
        }

    } catch (SQLException e) {
        throw new IOException(e);
    } finally {
        location.close();
        try {
            connection.commit();
        } catch (SQLException e) {
            System.err.println(e.getMessage());
        } finally {
            try {
                connection.close();
            } catch (SQLException e) {
                // NOTE(review): close failure silently swallowed after the commit
                // attempt — presumably intentional best-effort cleanup; confirm.
            }
        }
    }
}

From source file: com.romeikat.datamessie.core.base.service.DownloadService.java

/**
 * Resolves the documents (with their downloads) for the given URLs of a source,
 * de-duplicating documents that are reachable via more than one URL.
 *
 * @param ssc      session contract used for the per-URL lookups
 * @param sourceId id of the source the URLs belong to
 * @param urls     the URLs to resolve; URLs without a document are skipped
 * @return one entry per distinct document, in the order first encountered
 */
public List<DocumentWithDownloads> getDocumentsWithDownloads(final SharedSessionContract ssc,
        final long sourceId, final Set<String> urls) {
    final List<DocumentWithDownloads> documentsWithDownloads = Lists.newArrayListWithExpectedSize(urls.size());

    final Set<Long> processedDocumentIds = Sets.newHashSetWithExpectedSize(urls.size());
    for (final String url : urls) {
        final DocumentWithDownloads documentWithDownloads = getDocumentWithDownloads(ssc, sourceId, url);
        if (documentWithDownloads == null) {
            continue;
        }

        // Set.add returns false for an already-seen id, replacing the former
        // contains()-then-add() pair with a single membership operation.
        final long documentId = documentWithDownloads.getDocumentId();
        if (!processedDocumentIds.add(documentId)) {
            continue;
        }

        documentsWithDownloads.add(documentWithDownloads);
    }

    return documentsWithDownloads;
}

From source file: org.agatom.springatom.cmp.wizards.data.WizardDescriptor.java

/**
 * Lazily initializes and returns the backing set of step descriptors.
 */
private Set<WizardStepDescriptor> getContent() {
    if (this.descriptorSet != null) {
        return this.descriptorSet;
    }
    this.descriptorSet = Sets.newHashSetWithExpectedSize(3);
    return this.descriptorSet;
}

From source file: com.android.tools.idea.rendering.ModuleSetResourceRepository.java

/**
 * Computes the resource repositories for the given facet: resources of every
 * dependency module, of every .aar library not already represented by a module,
 * and finally the module's own resources.
 *
 * @param facet the facet to compute repositories for
 * @return repositories with dependency/library resources first, own resources last
 */
private static List<ProjectResources> computeRepositories(@NotNull final AndroidFacet facet) {
    // List of module facets the given module depends on
    List<AndroidFacet> facets = AndroidUtils.getAllAndroidDependencies(facet.getModule(), true);

    // Android libraries (.aar libraries) the module, or any of the modules it depends on
    List<AndroidLibrary> libraries = Lists.newArrayList();
    addAndroidLibraries(libraries, facet);
    for (AndroidFacet f : facets) {
        addAndroidLibraries(libraries, f);
    }

    boolean includeLibraries = false;
    ProjectResources main = get(facet.getModule(), includeLibraries);

    if (facets.isEmpty() && libraries.isEmpty()) {
        return Collections.singletonList(main);
    }

    // Sized for the dependency modules plus the .aar folders plus this module itself.
    List<ProjectResources> resources = Lists.newArrayListWithExpectedSize(facets.size() + libraries.size() + 1);

    // 'libraries' is created above and can never be null; the previous null check was
    // always true. Guard on emptiness instead so the name-matching work is skipped
    // when there are no libraries at all.
    if (!libraries.isEmpty()) {
        // Pull out the unique directories, in case multiple modules point to the same .aar folder;
        // sized by the library count, since that bounds how many folders can be added.
        Set<File> files = Sets.newHashSetWithExpectedSize(libraries.size());

        Set<String> moduleNames = Sets.newHashSet();
        for (AndroidFacet f : facets) {
            moduleNames.add(f.getModule().getName());
        }
        for (AndroidLibrary library : libraries) {
            // We should only add .aar dependencies if they aren't already provided as modules.
            // For now, the way we associate them with each other is via the library name;
            // in the future the model will provide this for us

            String libraryName = null;
            String projectName = library.getProject();
            if (projectName != null && !projectName.isEmpty()) {
                libraryName = projectName.substring(projectName.lastIndexOf(':') + 1);
            } else {
                // Pre 0.5 support: remove soon
                File folder = library.getFolder();
                String name = folder.getName();
                if (name.endsWith(DOT_AAR)) {
                    libraryName = name.substring(0, name.length() - DOT_AAR.length());
                }
            }
            if (libraryName != null && !moduleNames.contains(libraryName)) {
                File resFolder = library.getResFolder();
                if (resFolder.exists()) {
                    files.add(resFolder);

                    // Don't add it again!
                    moduleNames.add(libraryName);
                }
            }
        }

        for (File resFolder : files) {
            resources.add(FileProjectResourceRepository.get(resFolder));
        }
    }

    for (AndroidFacet f : facets) {
        ProjectResources r = get(f.getModule(), includeLibraries);
        resources.add(r);
    }

    resources.add(main);

    return resources;
}