Example usage for com.google.common.collect Sets newHashSetWithExpectedSize

Introduction

This page collects usage examples for com.google.common.collect.Sets#newHashSetWithExpectedSize, drawn from several open-source projects.

Prototype

public static <E> HashSet<E> newHashSetWithExpectedSize(int expectedSize) 

Document

Creates a HashSet instance, with a high enough initial table size that it should hold expectedSize elements without resizing.
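
A minimal, self-contained sketch of the pattern the examples below share: size the set from a known element count so that adding those elements should not force an internal resize. The class name and sample data here are hypothetical, not taken from any of the cited projects.

import com.google.common.collect.Sets;

import java.util.Arrays;
import java.util.List;
import java.util.Set;

public class NewHashSetWithExpectedSizeSketch {
    public static void main(String[] args) {
        // Input whose size is known before the set is built.
        List<String> currencies = Arrays.asList("USD", "EUR", "GBP", "USD");
        // Pre-size the set so that adding currencies.size() elements should
        // not trigger a rehash; duplicates are still collapsed as usual.
        Set<String> unique = Sets.newHashSetWithExpectedSize(currencies.size());
        unique.addAll(currencies);
        System.out.println(unique); // e.g. [EUR, USD, GBP] (iteration order is unspecified)
    }
}

Note that expectedSize is a sizing hint, not a capacity limit: the returned HashSet still grows if more elements are added.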

Usage

From source file: com.opengamma.engine.fudgemsg.CompiledViewCalculationConfigurationFudgeBuilder.java

@SuppressWarnings("unchecked")
protected Map<ValueSpecification, Set<ValueRequirement>> decodeTerminalOutputSpecifications(
        final FudgeDeserializer deserializer, final FudgeMsg msg) {
    final FudgeMsg submsg = msg.getMessage(TERMINAL_OUTPUT_SPECIFICATIONS_FIELD);
    if (submsg == null) {
        return Collections.emptyMap();
    }
    final Map<ValueSpecification, Set<ValueRequirement>> result = Maps
            .newHashMapWithExpectedSize(submsg.getNumFields() / 2);
    // Key and value fields may arrive in any order; an unmatched key or value
    // is buffered in 'overflow' until its counterpart appears.
    LinkedList<Object> overflow = null;
    ValueSpecification key = null;
    Set<ValueRequirement> value = null;
    for (final FudgeField field : submsg) {
        if (MAP_KEY.equals(field.getOrdinal())) {
            final ValueSpecification fieldValue = deserializer.fieldValueToObject(ValueSpecification.class,
                    field);
            if (key == null) {
                if (value == null) {
                    key = fieldValue;
                } else {
                    result.put(fieldValue, value);
                    if (overflow != null) {
                        value = overflow.isEmpty() ? null : (Set<ValueRequirement>) overflow.removeFirst();
                    } else {
                        value = null;
                    }
                }
            } else {
                if (overflow == null) {
                    overflow = new LinkedList<Object>();
                }
                overflow.add(fieldValue);
            }
        } else if (MAP_VALUE.equals(field.getOrdinal())) {
            final FudgeMsg submsg2 = (FudgeMsg) field.getValue();
            final Set<ValueRequirement> fieldValue = Sets.newHashSetWithExpectedSize(submsg2.getNumFields());
            for (final FudgeField field2 : submsg2) {
                fieldValue.add(deserializer.fieldValueToObject(ValueRequirement.class, field2));
            }
            if (value == null) {
                if (key == null) {
                    value = fieldValue;
                } else {
                    result.put(key, fieldValue);
                    if (overflow != null) {
                        key = overflow.isEmpty() ? null : (ValueSpecification) overflow.removeFirst();
                    } else {
                        key = null;
                    }
                }
            } else {
                if (overflow == null) {
                    overflow = new LinkedList<Object>();
                }
                overflow.add(fieldValue);
            }
        }
    }
    return result;
}

From source file: org.n52.svalbard.decode.DeleteObservationV20Decoder.java

private Set<TemporalFilter> parseTemporalFilters(final DeleteObservationType.TemporalFilter[] temporalFilters)
        throws DecodingException {
    final Set<TemporalFilter> sosTemporalFilters = Sets.newHashSetWithExpectedSize(temporalFilters.length);
    for (final DeleteObservationType.TemporalFilter temporalFilter : temporalFilters) {
        final Object filter = decodeXmlElement(temporalFilter.getTemporalOps());
        if (filter instanceof TemporalFilter) {
            sosTemporalFilters.add((TemporalFilter) filter);
        }
    }
    return sosTemporalFilters;
}

From source file: org.apache.cassandra.io.sstable.SSTable.java

public static Set<Component> discoverComponentsFor(Descriptor desc) {
    Set<Component.Type> knownTypes = Sets.difference(Component.TYPES,
            Collections.singleton(Component.Type.CUSTOM));
    Set<Component> components = Sets.newHashSetWithExpectedSize(knownTypes.size());
    for (Component.Type componentType : knownTypes) {
        Component component = new Component(componentType);
        if (new File(desc.filenameFor(component)).exists())
            components.add(component);
    }
    return components;
}

From source file: org.gradle.internal.locking.DependencyLockingArtifactVisitor.java

private static Set<UnresolvedDependency> createLockingFailures(
        Map<ModuleIdentifier, ModuleComponentIdentifier> modulesToBeLocked,
        Set<ModuleComponentIdentifier> extraModules) {
    Set<UnresolvedDependency> completedFailures = Sets
            .newHashSetWithExpectedSize(modulesToBeLocked.values().size() + extraModules.size());
    for (ModuleComponentIdentifier presentInLock : modulesToBeLocked.values()) {
        completedFailures.add(new DefaultUnresolvedDependency(
                DefaultModuleVersionSelector.newSelector(presentInLock.getModuleIdentifier(),
                        presentInLock.getVersion()),
                new LockOutOfDateException("Did not resolve '" + presentInLock.getDisplayName()
                        + "' which is part of the dependency lock state")));
    }
    for (ModuleComponentIdentifier extraModule : extraModules) {
        completedFailures.add(new DefaultUnresolvedDependency(
                DefaultModuleVersionSelector.newSelector(extraModule.getModuleIdentifier(),
                        extraModule.getVersion()),
                new LockOutOfDateException("Resolved '" + extraModule.getDisplayName()
                        + "' which is not part of the dependency lock state")));
    }
    return completedFailures;
}

From source file: com.opengamma.financial.analytics.model.equity.option.EquityOptionBlackVegaMatrixFunction.java

@Override
public Set<ValueSpecification> getResults(final FunctionCompilationContext context,
        final ComputationTarget target, final Map<ValueSpecification, ValueRequirement> inputs) {
    final Set<ValueSpecification> results = super.getResults(context, target, inputs);
    final FinancialSecurity security = (FinancialSecurity) target.getSecurity();
    final String bbgTicker = EquitySecurityUtils.getIndexOrEquityNameFromUnderlying(security);
    final Set<ValueSpecification> resultsWithExtraProperties = Sets.newHashSetWithExpectedSize(results.size());
    for (final ValueSpecification spec : results) {
        final String name = spec.getValueName();
        final ComputationTargetSpecification targetSpec = spec.getTargetSpecification();
        final ValueProperties properties = spec.getProperties().copy()
                .with(InstrumentTypeProperties.PROPERTY_SURFACE_INSTRUMENT_TYPE,
                        InstrumentTypeProperties.EQUITY_OPTION)
                .with(ValuePropertyNames.UNDERLYING_TICKER, bbgTicker).get();
        resultsWithExtraProperties.add(new ValueSpecification(name, targetSpec, properties));
    }
    return resultsWithExtraProperties;
}

From source file: com.google.gerrit.server.query.change.InternalChangeQuery.java

private Iterable<ChangeData> byCommitsOnBranchNotMergedFromDatabase(Repository repo, final ReviewDb db,
        final Branch.NameKey branch, Collection<String> hashes) throws OrmException, IOException {
    Set<Change.Id> changeIds = Sets.newHashSetWithExpectedSize(hashes.size());
    // Tracks the ref prefix of the change just added so that further
    // patch-set refs of the same change can be skipped without re-parsing.
    String lastPrefix = null;
    for (Ref ref : repo.getRefDatabase().getRefs(RefNames.REFS_CHANGES).values()) {
        String r = ref.getName();
        if ((lastPrefix != null && r.startsWith(lastPrefix)) || !hashes.contains(ref.getObjectId().name())) {
            continue;
        }
        Change.Id id = Change.Id.fromRef(r);
        if (id == null) {
            continue;
        }
        if (changeIds.add(id)) {
            lastPrefix = r.substring(0, r.lastIndexOf('/'));
        }
    }

    List<ChangeNotes> notes = notesFactory.create(db, branch.getParentKey(), changeIds, cn -> {
        Change c = cn.getChange();
        return c.getDest().equals(branch) && c.getStatus() != Change.Status.MERGED;
    });
    return Lists.transform(notes, n -> changeDataFactory.create(db, n));
}

From source file: guru.qas.martini.DefaultMixologist.java

private Step getGherkinStep(Background background, ScenarioDefinition definition, PickleStep step) {
    List<Step> backgroundSteps = null == background ? ImmutableList.of() : background.getSteps();
    List<Step> definitionSteps = definition.getSteps();
    Iterable<Step> steps = Iterables.concat(backgroundSteps, definitionSteps);
    List<PickleLocation> locations = step.getLocations();
    Set<Integer> lines = Sets.newHashSetWithExpectedSize(locations.size());
    for (PickleLocation location : locations) {
        int line = location.getLine();
        lines.add(line);
    }

    Step gherkinStep = null;
    for (Iterator<Step> i = steps.iterator(); gherkinStep == null && i.hasNext();) {
        Step candidate = i.next();
        Location location = candidate.getLocation();
        int line = location.getLine();
        gherkinStep = lines.contains(line) ? candidate : null;
    }

    checkState(null != gherkinStep, "unable to locate Step %s in ScenarioDefinition %s", step, definition);
    return gherkinStep;
}

From source file: org.apache.phoenix.execute.PhoenixTxnIndexMutationGenerator.java

public List<Mutation> getIndexUpdates(final PTable table, PTable index, List<Mutation> dataMutations)
        throws IOException, SQLException {

    if (dataMutations.isEmpty()) {
        return new ArrayList<Mutation>();
    }

    Map<String, byte[]> updateAttributes = dataMutations.get(0).getAttributesMap();
    boolean replyWrite = (BaseScannerRegionObserver.ReplayWrite
            .fromBytes(updateAttributes.get(BaseScannerRegionObserver.REPLAY_WRITES)) != null);
    byte[] txRollbackAttribute = updateAttributes.get(PhoenixTransactionContext.TX_ROLLBACK_ATTRIBUTE_KEY);

    IndexMaintainer maintainer = index.getIndexMaintainer(table, connection);

    boolean isRollback = txRollbackAttribute != null;
    boolean isImmutable = index.isImmutableRows();
    ResultScanner currentScanner = null;
    HTableInterface txTable = null;
    // Collect up all mutations in batch
    Map<ImmutableBytesPtr, MultiMutation> mutations = new HashMap<ImmutableBytesPtr, MultiMutation>();
    Map<ImmutableBytesPtr, MultiMutation> findPriorValueMutations;
    if (isImmutable && !isRollback) {
        findPriorValueMutations = new HashMap<ImmutableBytesPtr, MultiMutation>();
    } else {
        findPriorValueMutations = mutations;
    }
    // Collect the set of mutable ColumnReferences so that we can first
    // run a scan to get the current state. We'll need this to delete
    // the existing index rows.
    int estimatedSize = 10;
    Set<ColumnReference> mutableColumns = Sets.newHashSetWithExpectedSize(estimatedSize);
    // For transactional tables, we use an index maintainer
    // to aid in rollback if there's a KeyValue column in the index. The alternative would be
    // to hold on to all uncommitted index row keys (even ones already sent to HBase) on the
    // client side.
    Set<ColumnReference> allColumns = maintainer.getAllColumns();
    mutableColumns.addAll(allColumns);

    for (final Mutation m : dataMutations) {
        // add the mutation to the batch set
        ImmutableBytesPtr row = new ImmutableBytesPtr(m.getRow());
        // if we have no non PK columns, no need to find the prior values

        boolean requiresPriorRowState = !isImmutable
                || (maintainer.isRowDeleted(m) && !maintainer.getIndexedColumns().isEmpty());
        if (mutations != findPriorValueMutations && requiresPriorRowState) {
            addMutation(findPriorValueMutations, row, m);
        }
        addMutation(mutations, row, m);
    }

    List<Mutation> indexUpdates = new ArrayList<Mutation>(mutations.size() * 2);
    try {
        // Track if we have row keys with Delete mutations (or Puts that are
        // Tephra's Delete marker). If there are none, we don't need to do the scan for
        // prior versions, if there are, we do. Since rollbacks always have delete mutations,
        // this logic will work there too.
        if (!findPriorValueMutations.isEmpty()) {
            List<KeyRange> keys = Lists.newArrayListWithExpectedSize(mutations.size());
            for (ImmutableBytesPtr ptr : findPriorValueMutations.keySet()) {
                keys.add(PVarbinary.INSTANCE.getKeyRange(ptr.copyBytesIfNecessary()));
            }
            Scan scan = new Scan();
            // Project all mutable columns
            for (ColumnReference ref : mutableColumns) {
                scan.addColumn(ref.getFamily(), ref.getQualifier());
            }
            /*
             * Indexes inherit the storage scheme of the data table which means all the indexes have the same
             * storage scheme and empty key value qualifier. Note that this assumption would be broken if we start
             * supporting new indexes over existing data tables to have a different storage scheme than the data
             * table.
             */
            byte[] emptyKeyValueQualifier = maintainer.getEmptyKeyValueQualifier();

            // Project empty key value column
            scan.addColumn(maintainer.getDataEmptyKeyValueCF(), emptyKeyValueQualifier);
            ScanRanges scanRanges = ScanRanges.create(SchemaUtil.VAR_BINARY_SCHEMA,
                    Collections.singletonList(keys), ScanUtil.SINGLE_COLUMN_SLOT_SPAN,
                    KeyRange.EVERYTHING_RANGE, null, true, -1);
            scanRanges.initializeScan(scan);
            txTable = connection.getQueryServices().getTable(table.getPhysicalName().getBytes());
            // For rollback, we need to see all versions, including
            // the last committed version as there may be multiple
            // checkpointed versions.
            SkipScanFilter filter = scanRanges.getSkipScanFilter();
            if (isRollback) {
                filter = new SkipScanFilter(filter, true);
                phoenixTransactionContext.setVisibilityLevel(PhoenixVisibilityLevel.SNAPSHOT_ALL);
            }
            scan.setFilter(filter);
            currentScanner = txTable.getScanner(scan);
        }
        if (isRollback) {
            processRollback(maintainer, txRollbackAttribute, currentScanner, mutableColumns, indexUpdates,
                    mutations, replyWrite, table);
        } else {
            processMutation(maintainer, txRollbackAttribute, currentScanner, mutableColumns, indexUpdates,
                    mutations, findPriorValueMutations, replyWrite, table);
        }
    } finally {
        if (txTable != null)
            txTable.close();
    }

    return indexUpdates;
}

From source file: com.android.builder.internal.compiler.DependencyGraph.java

/**
 * Parses the given dependency file and stores the file paths
 *
 * @param dependencyFile the dependency file
 */
private void parseDependencyFile(File dependencyFile) {
    // first check if the dependency file is here.
    if (!dependencyFile.isFile()) {
        mMissingDepFile = true;
        return;
    }

    // get the modification time of the dep file as we may need it later
    mDepFileLastModified = dependencyFile.lastModified();

    // Read in our dependency file
    List<String> content = readFile(dependencyFile);
    if (content == null) {
        mLogger.error(null, "ERROR: Couldn't read " + dependencyFile.getAbsolutePath());
        return;
    }

    // The format is something like:
    // output1 output2 [...]: dep1 dep2 [...]
    // move it back to a single line first
    StringBuilder sb = new StringBuilder();
    for (String line : content) {
        line = line.trim();
        if (line.endsWith("\\")) {
            line = line.substring(0, line.length() - 1);
        }
        sb.append(line);
    }

    // split the left and right part
    String[] files = sb.toString().split(":");

    // get the target files:
    String[] targets = files[0].trim().split(" ");

    String[] prereqs = {};
    // Check to make sure our dependency file is okay
    if (files.length < 2) {
        mLogger.warning("Warning! Dependency file does not list any prerequisites after ':' ");
    } else {
        // and the prerequisite files:
        prereqs = files[1].trim().split(" ");
    }

    mTargets = Sets.newHashSetWithExpectedSize(targets.length);
    for (String path : targets) {
        if (path.length() > 0) {
            mTargets.add(new File(path));
        }
    }

    mPrereqs = Sets.newHashSetWithExpectedSize(prereqs.length);
    for (String path : prereqs) {
        if (path.length() > 0) {
            if (DEBUG) {
                mLogger.info("PREREQ: " + path);
            }
            File f = new File(path);
            if (mFirstPrereq == null) {
                mFirstPrereq = f;
            }
            mPrereqs.add(f);
        }
    }
}

From source file: com.opengamma.financial.analytics.model.equity.futures.EquityFutureYieldCurveNodeSensitivityFunction.java

@Override
public Set<ValueRequirement> getRequirements(FunctionCompilationContext context, ComputationTarget target,
        ValueRequirement desiredValue) {
    final Trade trade = target.getTrade();
    final EquityFutureSecurity security = (EquityFutureSecurity) trade.getSecurity();
    final Set<ValueRequirement> requirements = Sets.newHashSetWithExpectedSize(6);
    requirements.add(getSpotAssetRequirement(security));
    requirements.add(getDiscountCurveRequirement(security));
    requirements.add(getMarketPriceRequirement(security));
    requirements.add(getCurveSpecRequirement(security.getCurrency()));
    ValueRequirement requirement = getMarketValueRequirement(context, security);
    if (requirement == null) {
        return null;
    }
    requirements.add(requirement);
    requirement = getDividendYieldRequirement(context, security);
    if (requirement == null) {
        return null;
    }
    requirements.add(requirement);
    return requirements;
}