List of usage examples for com.google.common.collect.Sets#newHashSetWithExpectedSize
public static <E> HashSet<E> newHashSetWithExpectedSize(int expectedSize)
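Before the examples, a minimal self-contained sketch of the call (the collection and its contents are invented for illustration). Unlike new HashSet<>(n), which treats n as a raw initial table capacity, this factory sizes the backing table so that expectedSize elements can be inserted without the set having to resize:

import com.google.common.collect.Sets;

import java.util.Arrays;
import java.util.HashSet;
import java.util.List;

public class NewHashSetWithExpectedSizeDemo {
    public static void main(String[] args) {
        List<String> tags = Arrays.asList("alpha", "beta", "alpha", "gamma");

        // Size the set for the source collection up front so copying the
        // elements in cannot trigger a rehash; duplicates collapse as usual.
        HashSet<String> unique = Sets.newHashSetWithExpectedSize(tags.size());
        unique.addAll(tags);

        System.out.println(unique.size()); // prints 3
    }
}

Most of the examples below follow the same pattern: the expected size is taken from a source collection whose elements are about to be copied or transformed into the new set.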
From source file: com.android.tools.lint.checks.AndroidTvDetector.java

@Override
public void beforeCheckFile(@NonNull Context context) {
    mHasLeanbackLauncherActivity = false;
    mHasLeanbackSupport = false;
    mHasApplicationBanner = false;
    mLeanbackActivitiesWithoutBanners = 0;
    mUnsupportedHardwareImpliedPermissions = Lists.newArrayListWithExpectedSize(2);
    mUnsupportedTvUsesFeatures = Sets.newHashSetWithExpectedSize(2);
    mAllUnsupportedTvUsesFeatures = Sets.newHashSetWithExpectedSize(2);

    // Check gradle dependency
    Project mainProject = context.getMainProject();
    mHasLeanbackDependency = (mainProject.isGradleProject()
            && Boolean.TRUE.equals(mainProject.dependsOn(LEANBACK_LIB_ARTIFACT)));
}
From source file: com.android.build.gradle.tasks.ResourceUsageAnalyzer.java

/**
 * Remove resources (already identified by {@link #shrink(Path)}).
 *
 * <p>This task will copy all remaining used resources over from the full resource directory to a
 * new reduced resource directory and remove unused values from all value XML files.
 *
 * @param destination directory to copy resources into; if null, delete resources in place
 * @throws IOException
 * @throws ParserConfigurationException
 * @throws SAXException
 */
private void removeUnused(Path destination) throws IOException, ParserConfigurationException, SAXException {
    assert unused != null; // should always call analyze() first

    int resourceCount = unused.size() * 4; // *4: account for some resource folder repetition
    Set<File> skip = Sets.newHashSetWithExpectedSize(resourceCount);
    Set<File> rewrite = Sets.newHashSetWithExpectedSize(resourceCount);
    Set<Resource> deleted = Sets.newHashSetWithExpectedSize(resourceCount);
    for (Resource resource : unused) {
        if (resource.declarations != null) {
            for (File file : resource.declarations) {
                String folder = file.getParentFile().getName();
                ResourceFolderType folderType = ResourceFolderType.getFolderType(folder);
                if (folderType != null && folderType != ResourceFolderType.VALUES) {
                    List<ResourceType> types = FolderTypeRelationship.getRelatedResourceTypes(folderType);
                    ResourceType type = types.get(0);
                    assert type != ResourceType.ID : folderType;
                    Resource fileResource = model.getResource(type, LintUtils.getBaseName(file.getName()));
                    // Only delete the file if there is no owning resource or this is the owning resource of
                    // the file, i.e. not an id declared within it, because id declarations are not
                    // considered uses and would otherwise cause deletion of the file.
                    if (fileResource == null || fileResource.equals(resource)) {
                        logger.fine("Deleted unused file " + file + " for resource " + resource);
                        assert skip != null;
                        skip.add(file);
                        deleted.add(resource);
                    }
                } else {
                    // Can't delete values immediately; there can be many resources
                    // in this file, so we have to process them all
                    rewrite.add(file);
                }
            }
        } else {
            // Not declared anywhere; mark as deleted. Covers the case of inline resources.
            // https://developer.android.com/guide/topics/resources/complex-xml-resources.html
            deleted.add(resource);
        }
    }

    // Special case the base values.xml folder
    File values = new File(mergedResourceDir.toFile(), FD_RES_VALUES + File.separatorChar + "values.xml");
    if (values.exists()) {
        rewrite.add(values);
    }

    Map<File, String> rewritten = Maps.newHashMapWithExpectedSize(rewrite.size());
    rewriteXml(rewrite, rewritten, deleted);

    // TODO(apell): The graph traversal does not mark IDs as reachable or not, so they cannot be
    // accurately removed from public.xml, but the definitions may be deleted if they occur in
    // other files. IDs should be added to values.xml so that there are no declarations in
    // public.xml without definitions.
    File publicXml = new File(mergedResourceDir.toFile(), FD_RES_VALUES + File.separatorChar + "public.xml");
    createStubIds(values, rewritten, publicXml);

    trimPublicResources(publicXml, deleted, rewritten);

    filteredCopy(mergedResourceDir.toFile(), destination, skip, rewritten);
}
From source file: org.n52.sos.encode.AbstractSensorMLEncoder.java

/**
 * Convert SOS sosOfferings to a set with key == identifier and value == name.
 *
 * @param offerings SOS sosOfferings
 * @return Set with identifier, name.
 */
protected Set<SweText> convertOfferingsToSet(final Set<SosOffering> offerings) {
    final Set<SweText> offeringSet = Sets.newHashSetWithExpectedSize(offerings.size());
    for (final SosOffering offering : offerings) {
        SweText sweText = new SweText();
        sweText.setValue(offering.getIdentifier());
        for (CodeType name : offering.getName()) {
            sweText.addName(name);
        }
        if (offering.isSetDescription()) {
            sweText.setDescription(offering.getDescription());
        }
        offeringSet.add(sweText);
    }
    return offeringSet;
}
From source file: cosmos.store.PersistedStores.java

public static Store deserialize(Connector connector, Value value) throws InvalidProtocolBufferException {
    checkNotNull(connector);
    checkNotNull(value);

    StoreProtobuf.Store store = StoreProtobuf.Store.parseFrom(value.get());

    Set<Index> columnsToIndex;

    // Using the IndexSpec, determine what information to read from the message
    // to appropriately create the columnsToIndex Set
    switch (store.getIndexSpec()) {
    case ASCENDING_IDENTITY: {
        columnsToIndex = AscendingIndexIdentitySet.create();
        break;
    }
    case DESCENDING_IDENTITY: {
        columnsToIndex = DescendingIndexIdentitySet.create();
        break;
    }
    case IDENTITY: {
        columnsToIndex = IdentitySet.<Index>create();
        break;
    }
    case OTHER: {
        // If we don't have an IdentitySet of some kind, assume it's a "regular"
        // concretely-backed Set
        List<StoreProtobuf.Index> serializedIndexes = store.getIndexesList();
        columnsToIndex = Sets.newHashSetWithExpectedSize(serializedIndexes.size());
        for (StoreProtobuf.Index i : serializedIndexes) {
            Column column = Column.create(i.getColumn());
            String typeClassName = i.getType();

            Order order;
            switch (i.getOrder()) {
            case ASCENDING: {
                order = Order.ASCENDING;
                break;
            }
            case DESCENDING: {
                order = Order.DESCENDING;
                break;
            }
            default: {
                throw new RuntimeException("Found unknown order: " + i.getOrder());
            }
            }

            // Load the class, hitting a cache when we can
            Class<?> typeClass;
            try {
                typeClass = CLASS_CACHE.get(typeClassName);
            } catch (ExecutionException e) {
                throw new RuntimeException(e);
            }

            columnsToIndex.add(Index.define(column, order, typeClass));
        }
        break;
    }
    default: {
        throw new RuntimeException("Unable to process unknown Index specification: " + store.getIndexSpec());
    }
    }

    Authorizations auths = new Authorizations(store.getAuths().getBytes());

    return Store.create(connector, auths, store.getUniqueId(), columnsToIndex, store.getLockOnUpdates(),
            store.getDataTable(), store.getMetadataTable());
}
From source file: org.apache.phoenix.compile.DeleteCompiler.java

private Set<PTable> getNonDisabledImmutableIndexes(TableRef tableRef) {
    PTable table = tableRef.getTable();
    if (table.isImmutableRows() && !table.getIndexes().isEmpty()) {
        Set<PTable> nonDisabledIndexes = Sets.newHashSetWithExpectedSize(table.getIndexes().size());
        for (PTable index : table.getIndexes()) {
            if (index.getIndexState() != PIndexState.DISABLE) {
                nonDisabledIndexes.add(index);
            }
        }
        return nonDisabledIndexes;
    }
    return Collections.emptySet();
}
From source file: com.attribyte.essem.model.StoredGraph.java

/**
 * Cleans the list of tags.
 * - Trims
 * - Removes duplicates
 * - Removes tags that are invalid identifiers.
 * @param tags The collection of tags.
 * @return The clean set of tags.
 */
private Set<String> cleanTags(final Collection<String> tags) {
    if (tags == null || tags.size() == 0) {
        return ImmutableSet.of();
    } else {
        Set<String> cleanedTags = Sets.newHashSetWithExpectedSize(tags.size());
        for (String tag : tags) {
            tag = tag.trim();
            cleanedTags.add(toValidIdentifier(tag));
        }
        return cleanedTags;
    }
}
From source file: com.cloudera.director.aws.ec2.ebs.EBSAllocator.java

/**
 * Waits for the volumes in a list of {@code InstanceEbsVolumes} to reach an available state.
 * Returns an updated list of {@code InstanceEbsVolumes} with the volumes that became
 * available marked as AVAILABLE and volumes that failed or timed out marked as FAILED.
 *
 * @param createdInstanceVolumes list of instances with their created EBS volumes
 * @return updated list of instance EBS volumes
 */
public List<InstanceEbsVolumes> waitUntilVolumesAvailable(List<InstanceEbsVolumes> createdInstanceVolumes)
        throws InterruptedException {

    Set<String> volumesToCheck = getAllVolumeIdsWithStatus(createdInstanceVolumes,
            InstanceEbsVolumes.Status.CREATED);

    int numRequestedVolumes = volumesToCheck.size();
    Set<String> volumesAvailable = Sets.newHashSetWithExpectedSize(numRequestedVolumes);

    if (numRequestedVolumes > 0) {
        LOG.info("Waiting for a maximum of {} seconds for volumes to become available",
                availableTimeoutSeconds);

        Stopwatch watch = Stopwatch.createStarted();
        while (watch.elapsed(TimeUnit.SECONDS) < availableTimeoutSeconds) {
            DescribeVolumesRequest volumeRequest = new DescribeVolumesRequest().withVolumeIds(volumesToCheck);

            try {
                List<Volume> volumes = client.describeVolumes(volumeRequest).getVolumes();

                for (Volume volume : volumes) {
                    String id = volume.getVolumeId();
                    VolumeState state = VolumeState.fromValue(volume.getState());

                    switch (state) {
                    case Creating:
                        break;
                    case Available:
                        volumesToCheck.remove(id);
                        volumesAvailable.add(id);
                        break;
                    case Error:
                        // TODO log why the volume failed which may need a separate api call
                        volumesToCheck.remove(id);
                        break;
                    default:
                        String err = String.format("A requested volume went into an unexpected state %s "
                                + "while waiting for volume to become available", state);
                        throw new IllegalStateException(err);
                    }
                }

                if (volumesToCheck.isEmpty()) {
                    break;
                }
            } catch (AmazonServiceException ex) {
                // ignore exception when volume isn't found, newly created volumes may not be found right away
                if (ex.getErrorCode().equals("InvalidVolume.NotFound")) {
                    LOG.info("Requested volume(s) not yet found");
                } else {
                    throw AWSExceptions.propagate(ex);
                }
            }

            LOG.info("Waiting on {} out of {} volumes to reach a final state, next check in {} seconds",
                    volumesToCheck.size(), numRequestedVolumes, WAIT_UNTIL_AVAILABLE_INTERVAL_SECONDS);
            TimeUnit.SECONDS.sleep(WAIT_UNTIL_AVAILABLE_INTERVAL_SECONDS);
        }

        if (volumesToCheck.size() > 0) {
            LOG.error("Timed out while waiting for volumes to be created, {} out of {} volumes became available",
                    volumesAvailable.size(), numRequestedVolumes);
        }
    } else {
        LOG.info("Skipping wait for availability because no EBS volumes were created");
    }

    // Update the status of each volume to AVAILABLE or FAILED based on the result
    List<InstanceEbsVolumes> updated = Lists.newArrayList();
    for (InstanceEbsVolumes instanceEbsVolumes : createdInstanceVolumes) {
        Map<String, InstanceEbsVolumes.Status> updatedVolumes = Maps.newHashMap();
        for (String volumeId : instanceEbsVolumes.getVolumeStatuses().keySet()) {
            InstanceEbsVolumes.Status updatedStatus = volumesAvailable.contains(volumeId)
                    ? InstanceEbsVolumes.Status.AVAILABLE
                    : InstanceEbsVolumes.Status.FAILED;
            updatedVolumes.put(volumeId, updatedStatus);
        }
        updated.add(new InstanceEbsVolumes(instanceEbsVolumes.getVirtualInstanceId(),
                instanceEbsVolumes.getEc2InstanceId(), updatedVolumes));
    }

    return updated;
}
From source file: com.opengamma.financial.analytics.model.credit.ISDACurveFunction.java

@Override
public CompiledFunctionDefinition compile(final FunctionCompilationContext compilationContext,
        final InstantProvider atInstantProvider) {
    final ZonedDateTime atInstant = ZonedDateTime.ofInstant(atInstantProvider, TimeZone.UTC);
    final HolidaySource holidaySource = OpenGammaCompilationContext.getHolidaySource(compilationContext);
    final RegionSource regionSource = OpenGammaCompilationContext.getRegionSource(compilationContext);
    final ConventionBundleSource conventionSource = OpenGammaCompilationContext
            .getConventionBundleSource(compilationContext);
    final SecuritySource securitySource = OpenGammaCompilationContext.getSecuritySource(compilationContext);
    final HistoricalTimeSeriesResolver timeSeriesResolver = OpenGammaCompilationContext
            .getHistoricalTimeSeriesResolver(compilationContext);
    final InterestRateInstrumentTradeOrSecurityConverter securityConverter =
            new InterestRateInstrumentTradeOrSecurityConverter(holidaySource, conventionSource, regionSource,
                    securitySource, true);
    final FixedIncomeConverterDataProvider definitionConverter = new FixedIncomeConverterDataProvider(
            conventionSource, timeSeriesResolver);
    return new AbstractInvokingCompiledFunction(atInstant.withTime(0, 0),
            atInstant.plusDays(1).withTime(0, 0).minusNanos(1000000)) {

        @SuppressWarnings("synthetic-access")
        @Override
        public Set<ComputedValue> execute(final FunctionExecutionContext executionContext,
                final FunctionInputs inputs, final ComputationTarget target,
                final Set<ValueRequirement> desiredValues) throws AsynchronousExecution {
            final Clock snapshotClock = executionContext.getValuationClock();
            final ZonedDateTime now = snapshotClock.zonedDateTime();
            final HistoricalTimeSeriesBundle timeSeries = (HistoricalTimeSeriesBundle) inputs
                    .getValue(ValueRequirementNames.YIELD_CURVE_INSTRUMENT_CONVERSION_HISTORICAL_TIME_SERIES);
            final ValueRequirement desiredValue = desiredValues.iterator().next();
            final String curveName = desiredValue.getConstraint(ValuePropertyNames.CURVE);
            final String curveCalculationConfigName = desiredValue
                    .getConstraint(ValuePropertyNames.CURVE_CALCULATION_CONFIG);
            final String offsetString = desiredValue.getConstraint(ISDAFunctionConstants.ISDA_CURVE_OFFSET);
            final int offset = Integer.parseInt(offsetString);
            final Object specificationObject = inputs.getValue(ValueRequirementNames.YIELD_CURVE_SPEC);
            if (specificationObject == null) {
                throw new OpenGammaRuntimeException("Could not get interpolated yield curve specification");
            }
            final Object dataObject = inputs.getValue(ValueRequirementNames.YIELD_CURVE_MARKET_DATA);
            if (dataObject == null) {
                throw new OpenGammaRuntimeException("Could not get yield curve data");
            }
            final InterpolatedYieldCurveSpecificationWithSecurities specification =
                    (InterpolatedYieldCurveSpecificationWithSecurities) specificationObject;
            final SnapshotDataBundle data = (SnapshotDataBundle) dataObject;
            final Map<ExternalId, Double> marketData = YieldCurveFunctionHelper.buildMarketDataMap(data);
            final ConfigSource configSource = OpenGammaExecutionContext.getConfigSource(executionContext);
            final MultiCurveCalculationConfig curveCalculationConfig = new ConfigDBCurveCalculationConfigSource(
                    configSource).getConfig(curveCalculationConfigName);
            final int n = marketData.size();
            final double[] times = new double[n];
            final double[] yields = new double[n];
            int i = 0;
            for (final FixedIncomeStripWithSecurity strip : specification.getStrips()) {
                final String securityType = strip.getSecurity().getSecurityType();
                if (!(securityType.equals(CashSecurity.SECURITY_TYPE)
                        || securityType.equals(SwapSecurity.SECURITY_TYPE))) {
                    throw new OpenGammaRuntimeException("ISDA curves should only use Libor and swap rates");
                }
                final Double marketValue = marketData.get(strip.getSecurityIdentifier());
                if (marketValue == null) {
                    throw new OpenGammaRuntimeException("Could not get market data for " + strip);
                }
                final FinancialSecurity financialSecurity = (FinancialSecurity) strip.getSecurity();
                final String[] curveNamesForSecurity = curveCalculationConfig
                        .getCurveExposureForInstrument(curveName, strip.getInstrumentType());
                final InstrumentDefinition<?> definition = securityConverter.visit(financialSecurity);
                final InstrumentDerivative derivative = definitionConverter.convert(financialSecurity,
                        definition, now, curveNamesForSecurity, timeSeries);
                if (derivative == null) {
                    throw new OpenGammaRuntimeException("Had a null InterestRateDefinition for " + strip);
                }
                times[i] = LAST_DATE_CALCULATOR.visit(derivative);
                yields[i++] = marketValue;
            }
            final ISDACurve curve = new ISDACurve(curveCalculationConfigName, times, yields, offset);
            final ValueProperties properties = createValueProperties().with(ValuePropertyNames.CURVE, curveName)
                    .with(ValuePropertyNames.CURVE_CALCULATION_CONFIG, curveCalculationConfigName)
                    .with(ISDAFunctionConstants.ISDA_CURVE_OFFSET, offsetString)
                    .with(ValuePropertyNames.CURVE_CALCULATION_METHOD, ISDAFunctionConstants.ISDA_METHOD_NAME)
                    .get();
            final ValueSpecification spec = new ValueSpecification(ValueRequirementNames.YIELD_CURVE,
                    target.toSpecification(), properties);
            return Collections.singleton(new ComputedValue(spec, curve));
        }

        @Override
        public ComputationTargetType getTargetType() {
            return ComputationTargetType.PRIMITIVE;
        }

        @Override
        public boolean canApplyTo(final FunctionCompilationContext context, final ComputationTarget target) {
            if (target.getType() != ComputationTargetType.PRIMITIVE) {
                return false;
            }
            return Currency.OBJECT_SCHEME.equals(target.getUniqueId().getScheme());
        }

        @Override
        public Set<ValueSpecification> getResults(final FunctionCompilationContext context,
                final ComputationTarget target) {
            @SuppressWarnings("synthetic-access")
            final ValueProperties properties = createValueProperties().withAny(ValuePropertyNames.CURVE)
                    .withAny(ValuePropertyNames.CURVE_CALCULATION_CONFIG)
                    .withAny(ISDAFunctionConstants.ISDA_CURVE_OFFSET)
                    .with(ValuePropertyNames.CURVE_CALCULATION_METHOD, ISDAFunctionConstants.ISDA_METHOD_NAME)
                    .get();
            return Collections.singleton(new ValueSpecification(ValueRequirementNames.YIELD_CURVE,
                    target.toSpecification(), properties));
        }

        @Override
        public Set<ValueRequirement> getRequirements(final FunctionCompilationContext context,
                final ComputationTarget target, final ValueRequirement desiredValue) {
            final ValueProperties constraints = desiredValue.getConstraints();
            final Set<String> curveNames = constraints.getValues(ValuePropertyNames.CURVE);
            if (curveNames == null || curveNames.size() != 1) {
                return null;
            }
            final Set<String> curveCalculationConfigNames = constraints
                    .getValues(ValuePropertyNames.CURVE_CALCULATION_CONFIG);
            if (curveCalculationConfigNames == null || curveCalculationConfigNames.size() != 1) {
                return null;
            }
            final String curveName = Iterables.getOnlyElement(curveNames);
            final String curveCalculationConfigName = Iterables.getOnlyElement(curveCalculationConfigNames);
            final ValueProperties tsProperties = ValueProperties.builder()
                    .with(ValuePropertyNames.CURVE_CALCULATION_CONFIG, curveCalculationConfigName).get();
            final ValueProperties curveProperties = ValueProperties.builder()
                    .with(ValuePropertyNames.CURVE, curveName)
                    .with(ValuePropertyNames.CURVE_CALCULATION_CONFIG, curveCalculationConfigName).get();
            final Set<ValueRequirement> requirements = Sets.newHashSetWithExpectedSize(3);
            final ComputationTargetSpecification targetSpec = target.toSpecification();
            requirements.add(new ValueRequirement(ValueRequirementNames.YIELD_CURVE_MARKET_DATA, targetSpec,
                    curveProperties));
            requirements.add(new ValueRequirement(ValueRequirementNames.YIELD_CURVE_SPEC, targetSpec,
                    curveProperties));
            requirements.add(new ValueRequirement(
                    ValueRequirementNames.YIELD_CURVE_INSTRUMENT_CONVERSION_HISTORICAL_TIME_SERIES, targetSpec,
                    tsProperties));
            return requirements;
        }
    };
}
From source file: com.opengamma.financial.analytics.model.credit.isdanew.ISDACompliantCDSFunction.java

@Override
public Set<ComputedValue> execute(final FunctionExecutionContext executionContext, final FunctionInputs inputs,
        final ComputationTarget target, final Set<ValueRequirement> desiredValues) throws AsynchronousExecution {
    final ZonedDateTime now = ZonedDateTime.now(executionContext.getValuationClock());
    final ValueRequirement requirement = desiredValues.iterator().next();
    final ValueProperties properties = requirement.getConstraints().copy().get();

    final LegacyVanillaCDSSecurity security = (LegacyVanillaCDSSecurity) target.getSecurity();
    //LegacyVanillaCreditDefaultSwapDefinition cds = _converter.visitLegacyVanillaCDSSecurity(security);
    final ValueRequirement desiredValue = desiredValues.iterator().next(); // all same constraints
    final String quoteConventionString = desiredValue.getConstraint(ISDAFunctionConstants.CDS_QUOTE_CONVENTION);
    final StandardCDSQuotingConvention quoteConvention = StandardCDSQuotingConvention
            .parse(quoteConventionString);

    final CdsRecoveryRateIdentifier recoveryRateIdentifier = security
            .accept(new CreditSecurityToRecoveryRateVisitor(executionContext.getSecuritySource()));
    Object recoveryRateObject = inputs.getValue(new ValueRequirement("PX_LAST",
            ComputationTargetType.PRIMITIVE, recoveryRateIdentifier.getExternalId()));
    if (recoveryRateObject == null) {
        throw new OpenGammaRuntimeException("Could not get recovery rate");
        //s_logger.warn("Could not get recovery rate, defaulting to 0.4: " + recoveryRateIdentifier);
        //recoveryRateObject = 0.4;
    }
    final double recoveryRate = (Double) recoveryRateObject;

    // get the ISDA curve
    final Object isdaObject = inputs.getValue(ValueRequirementNames.YIELD_CURVE);
    if (isdaObject == null) {
        throw new OpenGammaRuntimeException("Couldn't get ISDA curve");
    }
    final ISDACompliantYieldCurve yieldCurve = (ISDACompliantYieldCurve) isdaObject;

    // spreads
    NodalTenorDoubleCurve spreadObject = (NodalTenorDoubleCurve) inputs
            .getValue(ValueRequirementNames.BUCKETED_SPREADS);
    if (spreadObject == null) {
        throw new OpenGammaRuntimeException("Unable to get spreads");
    }
    final double[] spreads = ArrayUtils.toPrimitive(spreadObject.getYData());
    //final String pillarString = IMMDateGenerator.isIMMDate(security.getMaturityDate()) ? requirement.getConstraint(ISDAFunctionConstants.ISDA_BUCKET_TENORS) : ISDACompliantCreditCurveFunction.NON_IMM_PILLAR_TENORS;
    final ZonedDateTime[] bucketDates = SpreadCurveFunctions.getPillarDates(now, spreadObject.getXData());
    final CDSQuoteConvention[] quotes = SpreadCurveFunctions.getQuotes(security.getMaturityDate(), spreads,
            security.getParSpread(), quoteConvention, false);

    // pillar spreads
    NodalTenorDoubleCurve pillarObject = (NodalTenorDoubleCurve) inputs
            .getValue(ValueRequirementNames.PILLAR_SPREADS);
    if (pillarObject == null) {
        throw new OpenGammaRuntimeException("Unable to get pillars");
    }

    // CDS analytics for credit curve (possible performance improvement if earlier result obtained)
    //final LegacyVanillaCreditDefaultSwapDefinition curveCDS = cds.withStartDate(now);
    //security.setStartDate(now); // needed for curve instruments
    final CDSAnalytic[] bucketCDSs = new CDSAnalytic[bucketDates.length];
    for (int i = 0; i < bucketCDSs.length; i++) {
        //security.setMaturityDate(bucketDates[i]);
        final CDSAnalyticVisitor visitor = new CDSAnalyticVisitor(now.toLocalDate(), _holidaySource,
                _regionSource, security.getStartDate().toLocalDate(), bucketDates[i].toLocalDate(),
                recoveryRate);
        bucketCDSs[i] = security.accept(visitor);
    }

    final ZonedDateTime[] pillarDates = SpreadCurveFunctions.getPillarDates(now, pillarObject.getXData());
    final CDSAnalytic[] pillarCDSs = new CDSAnalytic[pillarDates.length];
    for (int i = 0; i < pillarCDSs.length; i++) {
        //security.setMaturityDate(bucketDates[i]);
        final CDSAnalyticVisitor visitor = new CDSAnalyticVisitor(now.toLocalDate(), _holidaySource,
                _regionSource, security.getStartDate().toLocalDate(), pillarDates[i].toLocalDate(),
                recoveryRate);
        pillarCDSs[i] = security.accept(visitor);
    }

    final ISDACompliantCreditCurve creditCurve = (ISDACompliantCreditCurve) inputs
            .getValue(ValueRequirementNames.HAZARD_RATE_CURVE);
    if (creditCurve == null) {
        throw new OpenGammaRuntimeException("Couldn't get credit curve");
    }

    //final CDSAnalytic analytic = CDSAnalyticConverter.create(cds, now.toLocalDate());
    final CDSAnalyticVisitor visitor = new CDSAnalyticVisitor(now.toLocalDate(), _holidaySource, _regionSource,
            recoveryRate);
    final CDSAnalytic analytic = security.accept(visitor);
    final BuySellProtection buySellProtection = security.isBuy() ? BuySellProtection.BUY
            : BuySellProtection.SELL;

    final Double cdsQuoteDouble = (Double) inputs.getValue(MarketDataRequirementNames.MARKET_VALUE);
    if (cdsQuoteDouble == null) {
        throw new OpenGammaRuntimeException("Couldn't get spread for " + security);
    }
    final CDSQuoteConvention quote = SpreadCurveFunctions.getQuotes(security.getMaturityDate(),
            new double[] { cdsQuoteDouble }, security.getParSpread(), quoteConvention, true)[0];

    final double notional = security.getNotional().getAmount();
    final double coupon = security.getParSpread() * ONE_BPS;
    final PointsUpFront puf = getPointsUpfront(quote, buySellProtection, yieldCurve, analytic, creditCurve);
    final double accruedPremium = analytic.getAccruedPremium(coupon) * notional;
    final int accruedDays = analytic.getAccuredDays();
    final double quotedSpread = getQuotedSpread(quote, puf, buySellProtection, yieldCurve, analytic)
            .getQuotedSpread();
    final double upfrontAmount = getUpfrontAmount(analytic, puf, notional, buySellProtection);
    final double cleanPV = puf.getPointsUpFront() * notional;
    final double cleanPrice = getCleanPrice(puf);
    final TenorLabelledMatrix1D bucketedCS01 = getBucketedCS01(analytic, bucketCDSs, spreadObject.getXData(),
            quote, notional, yieldCurve, creditCurve);
    final double parallelCS01 = getParallelCS01(quote, analytic, yieldCurve, notional, pillarCDSs,
            ArrayUtils.toPrimitive(pillarObject.getYData()));

    final Set<ComputedValue> results = Sets.newHashSetWithExpectedSize(_valueRequirements.length);
    results.add(new ComputedValue(new ValueSpecification(ValueRequirementNames.ACCRUED_PREMIUM,
            target.toSpecification(), properties), accruedPremium));
    results.add(new ComputedValue(new ValueSpecification(ValueRequirementNames.ACCRUED_DAYS,
            target.toSpecification(), properties), accruedDays));
    results.add(new ComputedValue(new ValueSpecification(ValueRequirementNames.QUOTED_SPREAD,
            target.toSpecification(), properties), quotedSpread / ONE_BPS));
    results.add(new ComputedValue(new ValueSpecification(ValueRequirementNames.UPFRONT_AMOUNT,
            target.toSpecification(), properties), upfrontAmount));
    results.add(new ComputedValue(new ValueSpecification(ValueRequirementNames.DIRTY_PRESENT_VALUE,
            target.toSpecification(), properties), upfrontAmount));
    results.add(new ComputedValue(new ValueSpecification(ValueRequirementNames.CLEAN_PRESENT_VALUE,
            target.toSpecification(), properties), cleanPV));
    results.add(new ComputedValue(new ValueSpecification(ValueRequirementNames.PRINCIPAL,
            target.toSpecification(), properties), cleanPV));
    results.add(new ComputedValue(new ValueSpecification(ValueRequirementNames.CLEAN_PRICE,
            target.toSpecification(), properties), cleanPrice));
    results.add(new ComputedValue(new ValueSpecification(ValueRequirementNames.BUCKETED_CS01,
            target.toSpecification(), properties), bucketedCS01));
    results.add(new ComputedValue(new ValueSpecification(ValueRequirementNames.PARALLEL_CS01,
            target.toSpecification(), properties), parallelCS01));
    results.add(new ComputedValue(new ValueSpecification(ValueRequirementNames.POINTS_UPFRONT,
            target.toSpecification(), properties), puf.getPointsUpFront()));
    return results;
}
From source file: com.datatorrent.lib.appdata.query.serde.DataQueryDimensionalDeserializer.java

@Unstable
private static Map<String, Set<Object>> deserializeToMap(FieldsDescriptor fieldsDescriptor, JSONObject dpou) {
    Map<String, Set<Object>> keyToValues = Maps.newHashMap();

    for (String key : fieldsDescriptor.getFields().getFields()) {
        if (!dpou.has(key)) {
            throw new IllegalArgumentException("The given key " + key + " is not contained in the given JSON");
        }

        Set<Object> keyValues;
        Object keyValue;

        try {
            keyValue = dpou.get(key);
        } catch (JSONException ex) {
            throw new IllegalStateException("This should never happen", ex);
        }

        if (keyValue instanceof JSONArray) {
            JSONArray ja = (JSONArray) keyValue;
            keyValues = Sets.newHashSetWithExpectedSize(ja.length());

            Type type = fieldsDescriptor.getType(key);

            for (int index = 0; index < ja.length(); index++) {
                keyValues.add(getFieldFromJSON(type, ja, index));
            }
        } else if (keyValue instanceof JSONObject) {
            throw new UnsupportedOperationException("Cannot extract objects from JSONObjects");
        } else {
            keyValues = Sets.newHashSetWithExpectedSize(1);
            keyValues.add(getFieldFromJSON(fieldsDescriptor, key, dpou));
        }

        keyToValues.put(key, keyValues);
    }

    return keyToValues;
}