Example usage for com.google.common.collect Sets newHashSetWithExpectedSize

List of usage examples for com.google.common.collect Sets newHashSetWithExpectedSize

Introduction

On this page you can find example usage of com.google.common.collect Sets newHashSetWithExpectedSize.

Prototype

public static <E> HashSet<E> newHashSetWithExpectedSize(int expectedSize) 

Source Link

Document

Creates a HashSet instance, with a high enough initial table size that it should hold expectedSize elements without resizing.

Usage

From source file:com.opengamma.financial.analytics.model.irfutureoption.InterestRateFutureOptionBlackYieldCurveNodeSensitivitiesFunctionDeprecated.java

/**
 * Gathers the inputs needed to compute yield curve node sensitivities for an
 * interest rate future option: the forward and funding curve requirements, the
 * curve specification, Jacobian/coupon-sensitivity data (depending on the curve
 * calculation method), the volatility surface and any conversion time series.
 * Returns {@code null} when the desired value's constraints are incomplete or
 * inconsistent, signalling to the engine that this function cannot satisfy the
 * request.
 */
@Override
public Set<ValueRequirement> getRequirements(final FunctionCompilationContext context,
        final ComputationTarget target, final ValueRequirement desiredValue) {
    final ValueProperties constraints = desiredValue.getConstraints();
    final Set<String> curveNames = constraints.getValues(ValuePropertyNames.CURVE);
    if (curveNames == null || curveNames.size() != 1) {
        s_logger.error("Did not specify a curve name for requirement {}", desiredValue);
        return null;
    }
    // Exactly one forward curve, funding curve, surface and calculation method must be constrained.
    final Set<String> forwardCurveNames = constraints.getValues(YieldCurveFunction.PROPERTY_FORWARD_CURVE);
    if (forwardCurveNames == null || forwardCurveNames.size() != 1) {
        return null;
    }
    final Set<String> fundingCurveNames = constraints.getValues(YieldCurveFunction.PROPERTY_FUNDING_CURVE);
    if (fundingCurveNames == null || fundingCurveNames.size() != 1) {
        return null;
    }
    final Set<String> surfaceNames = constraints.getValues(ValuePropertyNames.SURFACE);
    if (surfaceNames == null || surfaceNames.size() != 1) {
        return null;
    }
    final Set<String> curveCalculationMethods = constraints
            .getValues(ValuePropertyNames.CURVE_CALCULATION_METHOD);
    if (curveCalculationMethods == null || curveCalculationMethods.size() != 1) {
        return null;
    }
    final String forwardCurveName = forwardCurveNames.iterator().next();
    final String fundingCurveName = fundingCurveNames.iterator().next();
    final String curveName = curveNames.iterator().next();
    // The requested curve must be one of the two curves this instrument is sensitive to.
    if (!(curveName.equals(forwardCurveName) || curveName.equals(fundingCurveName))) {
        s_logger.error(
                "Did not specify a curve to which this instrument is sensitive; asked for {}, {} and {} are allowed",
                new String[] { curveName, forwardCurveName, fundingCurveName });
        return null;
    }
    final String surfaceName = surfaceNames.iterator().next() + "_" + getFutureOptionPrefix(target);
    final String curveCalculationMethod = curveCalculationMethods.iterator().next();
    final Set<ValueRequirement> requirements = Sets.newHashSetWithExpectedSize(4);
    final Currency currency = FinancialSecurityUtils.getCurrency(target.getTrade().getSecurity());
    requirements.add(YieldCurveFunction.getCurveRequirement(currency, forwardCurveName, forwardCurveName,
            fundingCurveName, curveCalculationMethod));
    requirements.add(YieldCurveFunction.getCurveRequirement(currency, fundingCurveName, forwardCurveName,
            fundingCurveName, curveCalculationMethod));
    requirements.add(getCurveSpecRequirement(target, curveName));
    // Jacobian (and, for present-value calculations, coupon sensitivity) inputs are
    // only needed when the curves were not produced by simple interpolation.
    if (!curveCalculationMethod.equals(InterpolatedDataProperties.CALCULATION_METHOD_NAME)) {
        requirements.add(
                getJacobianRequirement(target, forwardCurveName, fundingCurveName, curveCalculationMethod));
        if (curveCalculationMethod.equals(MarketInstrumentImpliedYieldCurveFunction.PRESENT_VALUE_STRING)) {
            requirements.add(getCouponSensitivityRequirement(target, forwardCurveName, fundingCurveName));
        }
    }
    requirements.add(getVolatilityRequirement(surfaceName, currency));
    // Time series needed to convert the trade into an analytics derivative.
    final Set<ValueRequirement> tsRequirements = _dataConverter.getConversionTimeSeriesRequirements(
            target.getTrade().getSecurity(), _converter.convert(target.getTrade()),
            new String[] { fundingCurveName, forwardCurveName });
    if (tsRequirements == null) {
        return null;
    }
    requirements.addAll(tsRequirements);
    return requirements;
}

From source file:com.opengamma.financial.analytics.MissingInputsFunction.java

/**
 * Strips the aggregation property from each requested output and delegates the
 * additional-requirements query to the underlying compiled function.
 */
@Override
public Set<ValueRequirement> getAdditionalRequirements(final FunctionCompilationContext context,
        final ComputationTarget target, final Set<ValueSpecification> inputs,
        final Set<ValueSpecification> outputs) {
    final Set<ValueSpecification> strippedOutputs = Sets.newHashSetWithExpectedSize(outputs.size());
    for (final ValueSpecification spec : outputs) {
        // Remove any aggregation constraint so the underlying function sees the plain output.
        final ValueProperties stripped = spec.getProperties().withoutAny(ValuePropertyNames.AGGREGATION);
        strippedOutputs.add(
                new ValueSpecification(spec.getValueName(), spec.getTargetSpecification(), stripped));
    }
    return getUnderlyingCompiled().getAdditionalRequirements(context, target, inputs, strippedOutputs);
}

From source file:org.sosy_lab.cpachecker.util.blocking.BlockedCFAReducer.java

/**
 * Computes the nodes of the given CFA that should be abstraction-nodes.
 *
 * <p>Inlines and summarizes the program starting from the main function, then
 * unwraps every active node of the reduced program into its underlying
 * {@link CFANode}.
 */
@Override
public ImmutableSet<CFANode> computeAbstractionNodes(final CFA pCfa) {
    assert (pCfa != null);
    assert (this.inliningStack.size() == 0);
    assert (this.functionCallSeq == 0);

    this.functionCallSeq = 0;
    final ReducedFunction reduced = inlineAndSummarize(pCfa.getMainFunction(), pCfa);

    // Optionally dump the reduced CFA for debugging purposes.
    if (reducedCfaFile != null) {
        final Map<ReducedNode, Map<ReducedNode, Set<ReducedEdge>>> inlinedCfa = reduced.getInlinedCfa();
        try (Writer w = Files.openOutputFile(reducedCfaFile)) {
            printInlinedCfa(inlinedCfa, w);
        } catch (IOException e) {
            logger.logUserException(Level.WARNING, e, "Could not write the reduced CFA to file");
        }
    }

    // Unwrap each active reduced node back to its original CFA node.
    final Set<ReducedNode> activeNodes = reduced.getAllActiveNodes();
    final Set<CFANode> unwrapped = Sets.newHashSetWithExpectedSize(activeNodes.size());
    for (final ReducedNode node : activeNodes) {
        unwrapped.add(node.getWrapped());
    }

    return ImmutableSet.copyOf(unwrapped);
}

From source file:de.learnlib.datastructure.pta.pta.RedBlueMerge.java

/**
 * Returns a read-only automaton view of the merged PTA. For red states, the
 * modifications recorded during merging (successor, state-property and
 * transition-property overrides) take precedence; anything not overridden
 * falls through to the unmodified PTA.
 *
 * @throws IllegalStateException if {@code merge} has not been called yet
 */
public UniversalDeterministicAutomaton<S, Integer, ?, SP, TP> toMergedAutomaton() {
    if (!this.merged) {
        throw new IllegalStateException("#merge has not been called yet");
    }

    return new UniversalDeterministicAutomaton<S, Integer, Pair<S, Integer>, SP, TP>() {

        // Lazily computed and cached set of reachable states; see getStates().
        private Set<S> states;

        @Override
        public S getSuccessor(Pair<S, Integer> transition) {
            final S source = transition.getFirst();
            final Integer input = transition.getSecond();

            // Prefer the recorded successor override for red states, if present.
            if (source.isRed() && succMod[source.id] != null) {
                return succMod[source.id].get(input);
            }

            return pta.getSuccessor(source, input);
        }

        @Override
        public SP getStateProperty(S state) {
            // Prefer the recorded state-property override for red states, if present.
            if (state.isRed() && propMod.get(state.id) != null) {
                return propMod.get(state.id);
            }

            return state.getStateProperty();
        }

        @Override
        public TP getTransitionProperty(Pair<S, Integer> transition) {
            final S source = transition.getFirst();
            final Integer input = transition.getSecond();

            // Prefer the recorded transition-property override for red states, if present.
            if (source.isRed() && transPropMod[source.id] != null) {
                return transPropMod[source.id].get(input);
            }

            return source.transProperties.get(input);
        }

        @Override
        public Pair<S, Integer> getTransition(S state, Integer input) {
            // Transitions are represented symbolically as (source state, input symbol).
            return new Pair<>(state, input);
        }

        @Override
        public Collection<S> getStates() {

            if (states != null) {
                return states;
            }

            // Breadth-first discovery of all states reachable from the initial state,
            // honoring the merged successor overrides via getSuccessor().
            states = Sets.newHashSetWithExpectedSize(pta.size());
            final Queue<S> discoverQueue = new ArrayDeque<>();

            discoverQueue.add(getInitialState());

            S iter;

            while ((iter = discoverQueue.poll()) != null) {
                states.add(iter);

                for (int i = 0; i < alphabetSize; i++) {
                    final S succ = getSuccessor(iter, i);

                    if (succ != null && !states.contains(succ)) {
                        discoverQueue.add(succ);
                    }
                }
            }

            return states;
        }

        @Override
        public S getInitialState() {
            return pta.getInitialState();
        }
    };
}

From source file:org.summer.dsl.xbase.typesystem.util.AbstractTypeReferencePairWalker.java

/**
 * Creates a walker bound to the given type reference owner and initializes the
 * traverser for each kind of type reference.
 *
 * @param owner the type reference owner stored for use during traversal
 */
protected AbstractTypeReferencePairWalker(ITypeReferenceOwner owner) {
    this.owner = owner;
    // Small expected size: the recursion depth while walking type references is typically shallow.
    this.recursionGuard = Sets.newHashSetWithExpectedSize(3);
    // NOTE(review): the factory methods below may be overridden by subclasses and could
    // observe partially-initialized state; initialization order is preserved deliberately.
    parameterizedTypeReferenceTraverser = createParameterizedTypeReferenceTraverser();
    wildcardTypeReferenceTraverser = createWildcardTypeReferenceTraverser();
    arrayTypeReferenceTraverser = createArrayTypeReferenceTraverser();
    compoundTypeReferenceTraverser = createCompoundTypeReferenceTraverser();
    unboundTypeReferenceTraverser = createUnboundTypeReferenceTraverser();
}

From source file:org.apache.jackrabbit.oak.plugins.document.RevisionVector.java

/**
 * Verifies that no two of the given revisions share a cluster id.
 *
 * @param revisions the revisions to check
 * @throws IllegalArgumentException if two revisions carry the same cluster id
 */
private static void checkUniqueClusterIds(Revision[] revisions) throws IllegalArgumentException {
    // Zero or one revision is trivially free of duplicates.
    if (revisions.length < 2) {
        return;
    }
    final Set<Integer> seen = Sets.newHashSetWithExpectedSize(revisions.length);
    for (Revision revision : revisions) {
        final int clusterId = revision.getClusterId();
        // Set#add returns false when the id was already present.
        if (!seen.add(clusterId)) {
            throw new IllegalArgumentException("Multiple revisions with clusterId " + clusterId);
        }
    }
}

From source file:com.opengamma.financial.analytics.model.fixedincome.InterestRateInstrumentCurveSpecificFunction.java

/**
 * Builds the input requirements for a curve-specific interest rate instrument
 * result: the yield curve requirements from the named curve calculation
 * configuration (each tagged with the requested curve as an optional property)
 * plus the time series needed to convert the security definition into an
 * analytics derivative. Returns {@code null} when the constraints cannot be
 * satisfied, signalling that this function does not apply.
 */
@Override
public Set<ValueRequirement> getRequirements(final FunctionCompilationContext context,
        final ComputationTarget target, final ValueRequirement desiredValue) {
    final ValueProperties constraints = desiredValue.getConstraints();
    Set<String> requestedCurveNames = constraints.getValues(ValuePropertyNames.CURVE);
    final boolean permissive = OpenGammaCompilationContext.isPermissive(context);
    if (!permissive && ((requestedCurveNames == null) || requestedCurveNames.isEmpty())) {
        s_logger.debug("Must specify a curve name");
        return null;
    }
    final Set<String> curveCalculationConfigNames = constraints
            .getValues(ValuePropertyNames.CURVE_CALCULATION_CONFIG);
    if (curveCalculationConfigNames == null || curveCalculationConfigNames.size() != 1) {
        s_logger.debug("Must specify a curve calculation config");
        return null;
    }
    final String curveCalculationConfigName = curveCalculationConfigNames.iterator().next();
    final ConfigSource configSource = OpenGammaCompilationContext.getConfigSource(context);
    final ConfigDBCurveCalculationConfigSource curveCalculationConfigSource = new ConfigDBCurveCalculationConfigSource(
            configSource);
    final MultiCurveCalculationConfig curveCalculationConfig = curveCalculationConfigSource
            .getConfig(curveCalculationConfigName);
    if (curveCalculationConfig == null) {
        s_logger.debug("Could not find curve calculation configuration named " + curveCalculationConfigName);
        return null;
    }
    final FinancialSecurity security = (FinancialSecurity) target.getSecurity();
    final Currency currency = FinancialSecurityUtils.getCurrency(security);
    if (!ComputationTargetSpecification.of(currency).equals(curveCalculationConfig.getTarget())) {
        s_logger.error("Security currency and curve calculation config id were not equal; have {} and {}",
                currency, curveCalculationConfig.getTarget());
        return null;
    }
    final String[] availableCurveNames = curveCalculationConfig.getYieldCurveNames();
    // With no explicit request, fall back to every curve in the configuration;
    // otherwise restrict to the curves that are both requested and available.
    if ((requestedCurveNames == null) || requestedCurveNames.isEmpty()) {
        requestedCurveNames = Sets.newHashSet(availableCurveNames);
    } else {
        final Set<String> intersection = YieldCurveFunctionUtils.intersection(requestedCurveNames,
                availableCurveNames);
        if (intersection.isEmpty()) {
            s_logger.debug(
                    "None of the requested curves {} are available in curve calculation configuration called {}",
                    requestedCurveNames, curveCalculationConfigName);
            return null;
        }
        requestedCurveNames = intersection;
    }
    final String[] applicableCurveNames = FixedIncomeInstrumentCurveExposureHelper
            .getCurveNamesForSecurity(security, availableCurveNames);
    final Set<String> curveNames = YieldCurveFunctionUtils.intersection(requestedCurveNames,
            applicableCurveNames);
    if (curveNames.isEmpty()) {
        // FIX: log the curves that were actually requested; curveNames is provably empty
        // on this branch, so logging it (as the original code did) conveyed nothing.
        s_logger.debug("{} {} security is not sensitive to the curves {}",
                new Object[] { currency, security.getClass(), requestedCurveNames });
        return null;
    }
    if (!permissive && (curveNames.size() != 1)) {
        s_logger.debug("Must specify single curve name constraint, got {}", curveNames);
        return null;
    }
    final String curve = curveNames.iterator().next();
    final Set<ValueRequirement> curveRequirements = YieldCurveFunctionUtils
            .getCurveRequirements(curveCalculationConfig, curveCalculationConfigSource);
    final Set<ValueRequirement> requirements = Sets.newHashSetWithExpectedSize(curveRequirements.size());
    for (final ValueRequirement curveRequirement : curveRequirements) {
        // Tag each curve requirement with the requested curve as an optional property.
        final ValueProperties.Builder properties = curveRequirement.getConstraints().copy();
        properties.with(PROPERTY_REQUESTED_CURVE, curve).withOptional(PROPERTY_REQUESTED_CURVE);
        requirements.add(new ValueRequirement(curveRequirement.getValueName(),
                curveRequirement.getTargetReference(), properties.get()));
    }
    try {
        final Set<ValueRequirement> timeSeriesRequirements = InterestRateInstrumentFunction
                .getDerivativeTimeSeriesRequirements(security, security.accept(_visitor), _definitionConverter);
        if (timeSeriesRequirements == null) {
            return null;
        }
        requirements.addAll(timeSeriesRequirements);
        return requirements;
    } catch (final Exception e) {
        // FIX: log the exception itself rather than only e.getMessage(), which drops the
        // exception type and stack trace and may be null for some exception types.
        s_logger.error("Failed to get time series requirements for " + security, e);
        return null;
    }
}

From source file:org.moe.designer.rendering.LayoutlibCallback.java

/**
 * Searches for cycles in the {@code <include>} tag graph of the layout files
 * we've been asked to provide parsers for.
 *
 * @return true if a cyclical include chain was found (and, when a logger is
 *         available, reported to the user)
 */
private boolean findCycles() {
    // Map layout names to files (and back) and record, per layout, which layouts it includes.
    // NOTE(review): fileToLayout is populated but never read in this method — confirm whether
    // it can be removed or is intended for future use.
    Map<File, String> fileToLayout = Maps.newHashMap();
    Map<String, File> layoutToFile = Maps.newHashMap();
    Multimap<String, String> includeMap = ArrayListMultimap.create();
    for (File file : myParserFiles) {
        String layoutName = LintUtils.getLayoutName(file);
        layoutToFile.put(layoutName, file);
        fileToLayout.put(file, layoutName);
        try {
            String xml = Files.toString(file, Charsets.UTF_8);
            Document document = XmlUtils.parseDocumentSilently(xml, true);
            if (document != null) {
                // Collect every <include layout="@layout/..."/> reference in this file.
                NodeList includeNodeList = document.getElementsByTagName(VIEW_INCLUDE);
                for (int i = 0, n = includeNodeList.getLength(); i < n; i++) {
                    Element include = (Element) includeNodeList.item(i);
                    String included = include.getAttribute(ATTR_LAYOUT);
                    if (included.startsWith(LAYOUT_RESOURCE_PREFIX)) {
                        String resource = included.substring(LAYOUT_RESOURCE_PREFIX.length());
                        includeMap.put(layoutName, resource);
                    }
                }
            }
        } catch (IOException e) {
            LOG.warn("Could not check file " + file + " for cyclic dependencies", e);
        }
    }

    // We now have a DAG over the include dependencies in the layouts
    // Do a DFS to detect cycles

    // Perform DFS on the include graph and look for a cycle; if we find one, produce
    // a chain of includes on the way back to show to the user
    if (includeMap.size() > 0) {
        for (String from : includeMap.keySet()) {
            Set<String> visiting = Sets.newHashSetWithExpectedSize(includeMap.size());
            List<String> chain = dfs(from, visiting, includeMap);
            if (chain != null) {
                if (myLogger != null) {
                    // Build a human-readable description of the cycle and report it.
                    RenderProblem.Html problem = RenderProblem.create(WARNING);
                    HtmlBuilder builder = problem.getHtmlBuilder();
                    builder.add("Found cyclical <include> chain: ");
                    boolean first = true;
                    // The chain comes back in reverse order; flip it for display.
                    Collections.reverse(chain);
                    for (String layout : chain) {
                        if (first) {
                            first = false;
                        } else {
                            builder.add(" includes ");
                        }
                        // Link to the layout file when it can be resolved to a URL.
                        File file = layoutToFile.get(layout);
                        if (file != null) {
                            try {
                                String url = SdkUtils.fileToUrlString(file);
                                builder.addLink(layout, url);
                            } catch (MalformedURLException e) {
                                builder.add(layout);
                            }
                        } else {
                            builder.add(layout);
                        }
                    }

                    myLogger.addMessage(problem);
                }
                return true;
            }
        }
    }

    return false;
}

From source file:com.android.tools.lint.checks.ApiLookup.java

/**
 * Serializes the given API metadata to a binary database file. See the
 * {@link #readData(LintClient,File,File)} for documentation on the data format.
 *
 * @param file the output file; overwritten if it already exists
 * @param info the parsed API metadata to serialize
 * @throws IOException if the database cannot be written
 */
private static void writeDatabase(File file, Api info) throws IOException {
    /*
     * 1. A file header, which is the exact contents of {@link FILE_HEADER} encoded
     *     as ASCII characters. The purpose of the header is to identify what the file
     *     is for, for anyone attempting to open the file.
     * 2. A file version number. If the binary file does not match the reader's expected
     *     version, it can ignore it (and regenerate the cache from XML).
     */
    Map<String, ApiClass> classMap = info.getClasses();
    // Write the class table

    List<String> classes = new ArrayList<String>(classMap.size());
    Map<ApiClass, List<String>> memberMap = Maps.newHashMapWithExpectedSize(classMap.size());
    int memberCount = 0;
    Set<String> javaPackageSet = Sets.newHashSetWithExpectedSize(70);
    for (Map.Entry<String, ApiClass> entry : classMap.entrySet()) {
        String className = entry.getKey();
        ApiClass apiClass = entry.getValue();

        if (className.startsWith("java/") //$NON-NLS-1$
                || className.startsWith("javax/")) { //$NON-NLS-1$
            String pkg = apiClass.getPackage();
            javaPackageSet.add(pkg);
        }

        if (!isRelevantOwner(className)) {
            System.out.println("Warning: The isRelevantOwner method does not pass " + className);
        }

        Set<String> allMethods = apiClass.getAllMethods(info);
        Set<String> allFields = apiClass.getAllFields(info);

        // Strip out all members that have been supported since version 1.
        // This makes the database *much* leaner (down from about 4M to about
        // 1.7M), and this just fills the table with entries that ultimately
        // don't help the API checker since it just needs to know if something
        // requires a version *higher* than the minimum. If in the future the
        // database needs to answer queries about whether a method is public
        // or not, then we'd need to put this data back in.
        List<String> members = new ArrayList<String>(allMethods.size() + allFields.size());
        for (String member : allMethods) {

            Integer since = apiClass.getMethod(member, info);
            if (since == null) {
                assert false : className + ':' + member;
                since = 1;
            }
            if (since != 1) {
                members.add(member);
            }
        }

        // Fields are filtered the same way as methods above: only keep members
        // introduced after API level 1.
        for (String member : allFields) {
            Integer since = apiClass.getField(member, info);
            if (since == null) {
                assert false : className + ':' + member;
                since = 1;
            }
            if (since != 1) {
                members.add(member);
            }
        }

        // Only include classes that have one or more members requiring version 2 or higher:
        if (!members.isEmpty()) {
            classes.add(className);
            memberMap.put(apiClass, members);
            memberCount += members.size();
        }
    }
    Collections.sort(classes);

    List<String> javaPackages = Lists.newArrayList(javaPackageSet);
    Collections.sort(javaPackages);
    int javaPackageCount = javaPackages.size();

    int entryCount = classMap.size() + memberCount;
    int capacity = entryCount * BYTES_PER_ENTRY;
    ByteBuffer buffer = ByteBuffer.allocate(capacity);
    buffer.order(ByteOrder.BIG_ENDIAN);
    //  1. A file header, which is the exact contents of {@link FILE_HEADER} encoded
    //      as ASCII characters. The purpose of the header is to identify what the file
    //      is for, for anyone attempting to open the file.

    buffer.put(FILE_HEADER.getBytes(Charsets.US_ASCII));

    //  2. A file version number. If the binary file does not match the reader's expected
    //      version, it can ignore it (and regenerate the cache from XML).
    buffer.put((byte) BINARY_FORMAT_VERSION);

    //  3. The number of classes [1 int]
    buffer.putInt(classes.size());

    //  4. The number of members (across all classes) [1 int].
    buffer.putInt(memberCount);

    //  5. The number of Java packages [1 int].
    buffer.putInt(javaPackageCount);

    //  6. The Java package table. There are javaPackage.size() entries, where each entry
    //     consists of a string length, as a byte, followed by the bytes in the package.
    //     There is no terminating 0.
    for (String pkg : javaPackages) {
        byte[] bytes = pkg.getBytes(Charsets.UTF_8);
        assert bytes.length < 255 : pkg;
        buffer.put((byte) bytes.length);
        buffer.put(bytes);
    }

    //  7. Class offset table (one integer per class, pointing to the byte offset in the
    //       file (relative to the beginning of the file) where each class begins.
    //       The classes are always sorted alphabetically by fully qualified name.
    int classOffsetTable = buffer.position();

    // Reserve enough room for the offset table here: we will backfill it with pointers
    // as we're writing out the data structures below
    for (int i = 0, n = classes.size(); i < n; i++) {
        buffer.putInt(0);
    }

    //  8. Member offset table (one integer per member, pointing to the byte offset in the
    //       file (relative to the beginning of the file) where each member entry begins.
    //       The members are always sorted alphabetically.
    int methodOffsetTable = buffer.position();
    for (int i = 0, n = memberCount; i < n; i++) {
        buffer.putInt(0);
    }

    int nextEntry = buffer.position();
    int nextOffset = classOffsetTable;

    // 9. Class entry table. Each class entry consists of the fully qualified class name,
    //      in JVM format (using / instead of . in package names and $ for inner classes),
    //      followed by the byte 0 as a terminator, followed by the API version as a byte.
    for (String clz : classes) {
        buffer.position(nextOffset);
        buffer.putInt(nextEntry);
        nextOffset = buffer.position();
        buffer.position(nextEntry);
        buffer.put(clz.getBytes(Charsets.UTF_8));
        buffer.put((byte) 0);

        ApiClass apiClass = classMap.get(clz);
        assert apiClass != null : clz;
        int since = apiClass.getSince();
        assert since == UnsignedBytes.toInt((byte) since) : since; // make sure it fits
        buffer.put((byte) since);

        nextEntry = buffer.position();
    }

    //  10. Member entry table. Each member entry consists of the class number (as a short),
    //       followed by the JVM method/field signature, encoded as UTF-8, followed by a 0 byte
    //       signature terminator, followed by the API level as a byte.
    assert nextOffset == methodOffsetTable;

    for (int classNumber = 0, n = classes.size(); classNumber < n; classNumber++) {
        String clz = classes.get(classNumber);
        ApiClass apiClass = classMap.get(clz);
        assert apiClass != null : clz;
        List<String> members = memberMap.get(apiClass);
        Collections.sort(members);

        for (String member : members) {
            buffer.position(nextOffset);
            buffer.putInt(nextEntry);
            nextOffset = buffer.position();
            buffer.position(nextEntry);

            Integer since;
            if (member.indexOf('(') != -1) {
                since = apiClass.getMethod(member, info);
            } else {
                since = apiClass.getField(member, info);
            }
            if (since == null) {
                assert false : clz + ':' + member;
                since = 1;
            }

            assert classNumber == (short) classNumber;
            buffer.putShort((short) classNumber);
            byte[] signature = member.getBytes(Charsets.UTF_8);
            for (int i = 0; i < signature.length; i++) {
                // Make sure all signatures are really just simple ASCII
                byte b = signature[i];
                assert b == (b & 0x7f) : member;
                buffer.put(b);
                // Skip types on methods
                if (b == (byte) ')') {
                    break;
                }
            }
            buffer.put((byte) 0);
            int api = since;
            assert api == UnsignedBytes.toInt((byte) api);
            //assert api >= 1 && api < 0xFF; // max that fits in a byte
            buffer.put((byte) api);
            nextEntry = buffer.position();
        }
    }

    int size = buffer.position();
    assert size <= buffer.limit();
    buffer.mark();

    if (WRITE_STATS) {
        System.out.println("Wrote " + classes.size() + " classes and " + memberCount + " member entries");
        System.out.print("Actual binary size: " + size + " bytes");
        System.out.println(String.format(" (%.1fM)", size / (1024 * 1024.f)));

        System.out.println("Allocated size: " + (entryCount * BYTES_PER_ENTRY) + " bytes");
        System.out.println("Required bytes per entry: " + (size / entryCount) + " bytes");
    }

    // Now dump this out as a file
    // There's probably an API to do this more efficiently; TODO: Look into this.
    byte[] b = new byte[size];
    buffer.rewind();
    buffer.get(b);
    if (file.exists()) {
        file.delete();
    }
    // FIX: close the stream in a finally block so it is released even when write()
    // throws; the original code leaked the FileOutputStream on a failed write.
    FileOutputStream output = Files.newOutputStreamSupplier(file).getOutput();
    try {
        output.write(b);
    } finally {
        output.close();
    }
}

From source file:com.opengamma.financial.analytics.MissingInputsFunction.java

/**
 * Re-labels the underlying function's results with the "missing" aggregation
 * style, and — when no inputs were missing — additionally publishes each result
 * under the "full" aggregation style.
 */
private Set<ComputedValue> createExecuteResults(final FunctionInputs inputs,
        final Set<ComputedValue> underlyingResults) {
    if (underlyingResults == null) {
        return Collections.emptySet();
    }
    final boolean allInputsPresent = inputs.getMissingValues().isEmpty();
    final Set<ComputedValue> results = Sets.newHashSetWithExpectedSize(underlyingResults.size());
    for (final ComputedValue value : underlyingResults) {
        final ValueSpecification spec = value.getSpecification();
        final ValueProperties.Builder builder = spec.getProperties().copy();
        // Always publish under the "missing" aggregation style.
        builder.withoutAny(ValuePropertyNames.AGGREGATION).with(ValuePropertyNames.AGGREGATION,
                getAggregationStyleMissing());
        results.add(new ComputedValue(new ValueSpecification(spec.getValueName(),
                spec.getTargetSpecification(), builder.get()), value.getValue()));
        // When nothing was missing, also publish under the "full" aggregation style.
        if (allInputsPresent) {
            builder.withoutAny(ValuePropertyNames.AGGREGATION).with(ValuePropertyNames.AGGREGATION,
                    getAggregationStyleFull());
            results.add(new ComputedValue(new ValueSpecification(spec.getValueName(),
                    spec.getTargetSpecification(), builder.get()), value.getValue()));
        }
    }
    return results;
}