List of usage examples for java.util.LinkedHashMap.get()
public V get(Object key)

Returns the value to which the specified key is mapped, or null if this map contains no mapping for the key.
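Before the project examples below, a minimal self-contained sketch of the method itself (class and key names are illustrative): get returns the mapped value, or null when the key is absent, and in the default insertion-ordered mode it does not disturb iteration order.

import java.util.LinkedHashMap;
import java.util.Map;

public class GetDemo {
    public static void main(String[] args) {
        Map<String, Integer> ages = new LinkedHashMap<>();
        ages.put("alice", 34);
        ages.put("bob", 27);

        System.out.println(ages.get("alice")); // 34
        System.out.println(ages.get("carol")); // null: no mapping for the key
    }
}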
From source file:com.indeed.imhotep.web.ImhotepMetadataCache.java
@Scheduled(fixedRate = 60000)
public void updateDatasets() {
    Map<String, DatasetInfo> datasetToShardList = imhotepClient.getDatasetToShardList();
    List<String> datasetNames = new ArrayList<String>(datasetToShardList.keySet());
    Collections.sort(datasetNames);
    if (datasetNames.size() == 0) { // if we get no data, just keep what we already have
        log.warn("Imhotep returns no datasets");
        return;
    }
    // First make empty DatasetMetadata instances
    final LinkedHashMap<String, DatasetMetadata> newDatasets = Maps.newLinkedHashMap();
    for (String datasetName : datasetNames) {
        final DatasetMetadata datasetMetadata = new DatasetMetadata(datasetName);
        newDatasets.put(datasetName, datasetMetadata);
    }
    // Now pre-fill the metadata with fields from Imhotep
    for (DatasetInfo datasetInfo : datasetToShardList.values()) {
        List<String> dsIntFields = Lists.newArrayList(datasetInfo.getIntFields());
        List<String> dsStringFields = Lists.newArrayList(datasetInfo.getStringFields());
        removeDisabledFields(dsIntFields);
        removeDisabledFields(dsStringFields);
        Collections.sort(dsIntFields);
        Collections.sort(dsStringFields);
        final String datasetName = datasetInfo.getDataset();
        final DatasetMetadata datasetMetadata = newDatasets.get(datasetName);
        final LinkedHashMap<String, FieldMetadata> fieldMetadatas = datasetMetadata.getFields();
        for (String intField : dsIntFields) {
            fieldMetadatas.put(intField, new FieldMetadata(intField, FieldType.Integer));
        }
        for (String stringField : dsStringFields) {
            fieldMetadatas.put(stringField, new FieldMetadata(stringField, FieldType.String));
        }
    }
    // now load the metadata from files
    loadMetadataFromFiles(newDatasets);
    for (final DatasetMetadata datasetMetadata : newDatasets.values()) {
        addStandardAliases(datasetMetadata);
        datasetMetadata.finishLoading();
    }
    // new metadata instance is ready for use
    datasets = newDatasets;
}
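The example above builds the map in insertion order and reads entries back with get. Worth noting alongside it: when a LinkedHashMap is constructed with accessOrder = true, get itself reorders the map, which is the basis of the classic LRU-cache idiom. A minimal sketch, not from the source above (class name and capacity are illustrative):

import java.util.LinkedHashMap;
import java.util.Map;

// Minimal LRU cache: accessOrder = true makes get() move the accessed
// entry to the end, so the eldest entry is the least recently used.
class LruCache<K, V> extends LinkedHashMap<K, V> {
    private final int capacity;

    LruCache(int capacity) {
        super(16, 0.75f, true); // third constructor argument enables access order
        this.capacity = capacity;
    }

    @Override
    protected boolean removeEldestEntry(Map.Entry<K, V> eldest) {
        return size() > capacity; // evict once over capacity
    }
}

With such a map, cache.get(key) both retrieves the value and refreshes the entry's recency.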
From source file:com.amalto.workbench.utils.XSDAnnotationsStructure.java
public LinkedHashMap<String, String> getDescriptions() {
    LinkedHashMap<String, String> descriptions = new LinkedHashMap<String, String>();
    LinkedHashMap<String, String> appInfos = getAppInfos("X_Description_.*"); //$NON-NLS-1$
    Set<String> keys = appInfos.keySet();
    for (Iterator<String> iter = keys.iterator(); iter.hasNext();) {
        String key = iter.next();
        descriptions.put(key.substring(14).toLowerCase(), appInfos.get(key));
    }
    return descriptions;
}
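Iterating keySet and then calling get for every key performs two lookups per entry. A hedged rewrite of the loop above using entrySet, the single-pass idiom (same behavior assumed):

// Single-pass variant: entrySet() avoids the extra get() per key
for (Map.Entry<String, String> entry : appInfos.entrySet()) {
    descriptions.put(entry.getKey().substring(14).toLowerCase(), entry.getValue());
}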
From source file:com.ikanow.aleph2.search_service.elasticsearch.utils.ElasticsearchIndexUtils.java
/** Creates a list of JsonNodes containing the mapping for fields that will _enable_ or _disable_ field data depending on whether fielddata_info is present
 * (note this can convert a property to a dynamic template, but never the other way round)
 * @param instream
 * @param f
 * @param field_lookups
 * @param fielddata_info 3-tuple containing not_analyzed, analyzed, and override
 * @param mapper
 * @return
 */
protected static Stream<Tuple2<Either<String, Tuple2<String, String>>, JsonNode>> createFieldLookups(
        final Stream<String> instream,
        final Function<String, Either<String, Tuple2<String, String>>> f,
        final LinkedHashMap<Either<String, Tuple2<String, String>>, JsonNode> field_lookups,
        final Optional<Tuple3<JsonNode, JsonNode, Boolean>> fielddata_info,
        final SearchIndexSchemaDefaultBean search_index_schema_override,
        final Map<Either<String, Tuple2<String, String>>, String> type_override,
        final ObjectMapper mapper,
        final String index_type) {
    return instream.<Tuple2<Either<String, Tuple2<String, String>>, JsonNode>>map(Lambdas.wrap_u(fn -> {
        final Either<String, Tuple2<String, String>> either_tmp = f.apply(fn);
        // add type if present
        final Optional<String> maybe_type = Optional.ofNullable(type_override.get(either_tmp));
        final Either<String, Tuple2<String, String>> either = maybe_type
                .<Either<String, Tuple2<String, String>>>map(type -> {
                    return either_tmp.<Either<String, Tuple2<String, String>>>either(
                            s -> Either.left(s),
                            t2 -> Either.right(Tuples._2T(t2._1(), type)));
                }).orElse(either_tmp);
        final ObjectNode mutable_field_metadata = (ObjectNode) Optional.ofNullable(field_lookups.get(either))
                .map(j -> j.deepCopy())
                .orElse(either.either(
                        Lambdas.wrap_fj_u(__ -> mapper.readTree(BACKUP_FIELD_MAPPING_PROPERTIES)),
                        Lambdas.wrap_fj_u(__ -> mapper.readTree(BACKUP_FIELD_MAPPING_TEMPLATES))));
        // (note that these 2 mappings don't have "type"s - therefore they will result in
        // default_templates not properties - you need the type to generate a property)
        final ObjectNode mutable_field_mapping_tmp = either.isLeft()
                ? mutable_field_metadata
                : (ObjectNode) mutable_field_metadata.get("mapping");
        // (override with type if set)
        maybe_type.ifPresent(type -> mutable_field_mapping_tmp.put("type", type));
        final boolean has_type = mutable_field_mapping_tmp.has("type");
        final Tuple2<ObjectNode, Either<String, Tuple2<String, String>>> toplevel_eithermod = Lambdas.get(() -> {
            if (either.isLeft() && !has_type) {
                final ObjectNode top_level = (ObjectNode) mapper.createObjectNode().set("mapping",
                        mutable_field_metadata);
                return Tuples._2T(top_level, Either.<String, Tuple2<String, String>>right(Tuples._2T(fn, "*")));
            } else { // right[dynamic] *OR* (left[properties] and has-type)
                return Tuples._2T(mutable_field_metadata, either);
            }
        });
        final ObjectNode mutable_field_mapping = toplevel_eithermod._2().isLeft()
                ? toplevel_eithermod._1()
                : (ObjectNode) toplevel_eithermod._1().get("mapping");
        // Special case: if we're columnar and we're merging with tokenized and non-dual
        // then convert to untokenized instead
        if (fielddata_info.filter(t3 -> t3._3()).isPresent()
                && mutable_field_mapping.equals(mapper.convertValue(
                        search_index_schema_override.tokenized_string_field(), JsonNode.class))) {
            mutable_field_mapping.removeAll();
            mutable_field_mapping.setAll((ObjectNode) mapper.convertValue(
                    search_index_schema_override.untokenized_string_field(), ObjectNode.class));
        }
        if (toplevel_eithermod._2().isRight()) {
            if (!toplevel_eithermod._1().has(PATH_MATCH_NAME) && !toplevel_eithermod._1().has(RAW_MATCH_NAME)) {
                toplevel_eithermod._1().put(PATH_MATCH_NAME, toplevel_eithermod._2().right().value()._1());
                if (!toplevel_eithermod._1().has(TYPE_MATCH_NAME))
                    toplevel_eithermod._1().put(TYPE_MATCH_NAME, toplevel_eithermod._2().right().value()._2());
            }
            if (!has_type) {
                if (toplevel_eithermod._2().right().value()._2().equals("*")) { // type is mandatory
                    mutable_field_mapping.put("type", "{dynamic_type}");
                } else {
                    mutable_field_mapping.put("type", toplevel_eithermod._2().right().value()._2());
                }
            }
        }
        handleMappingFields(mutable_field_mapping, fielddata_info, mapper, index_type);
        setMapping(mutable_field_mapping, fielddata_info, mapper, index_type);
        return Tuples._2T(toplevel_eithermod._2(), toplevel_eithermod._1());
    }));
}
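A small idiom from this example worth isolating: wrapping the result of LinkedHashMap.get in Optional.ofNullable to supply a fallback when the key is absent, instead of an explicit null check. A minimal self-contained sketch (names and values are illustrative):

import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Optional;

public class OptionalGetDemo {
    public static void main(String[] args) {
        Map<String, String> lookups = new LinkedHashMap<>();
        lookups.put("title", "Dr");

        // get() yields null for a missing key; Optional.ofNullable turns that into a default
        String title = Optional.ofNullable(lookups.get("title")).orElse("none");   // "Dr"
        String suffix = Optional.ofNullable(lookups.get("suffix")).orElse("none"); // "none"
        System.out.println(title + " / " + suffix);
    }
}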
From source file:gate.util.reporting.PRTimeReporter.java
/**
 * Calculates the sub totals at each level.
 *
 * @param reportContainer
 *          An Object of type LinkedHashMap<String, Object> containing the
 *          processing elements (with time in milliseconds) in hierarchical
 *          structure.
 *
 * @return An Object containing modified hierarchical structure of processing
 *         elements with totals and "All others" embedded in it.
 */
@SuppressWarnings("unchecked")
@Override
public Object calculate(Object reportContainer) {
    LinkedHashMap<String, Object> globalStore = (LinkedHashMap<String, Object>) reportContainer;
    Iterator<String> iter = globalStore.keySet().iterator();
    int total = 0;
    while (iter.hasNext()) {
        String key = iter.next();
        total = getTotal((LinkedHashMap<String, Object>) (globalStore.get(key)));
        globalTotal.put(key, Integer.toString(total));
    }
    return globalStore;
}
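The report container here mixes nested LinkedHashMaps and String leaf values under a common Object type, so every get is followed by an instanceof check and a cast. A self-contained sketch of that traversal pattern (class name, keys, and values are made up for illustration):

import java.util.LinkedHashMap;

public class NestedReportDemo {
    // Sum all String-encoded leaf values in a nested LinkedHashMap<String, Object>
    @SuppressWarnings("unchecked")
    static int sum(LinkedHashMap<String, Object> node) {
        int total = 0;
        for (String key : node.keySet()) {
            Object value = node.get(key); // value is either a sub-map or a String
            if (value instanceof LinkedHashMap) {
                total += sum((LinkedHashMap<String, Object>) value);
            } else {
                total += Integer.parseInt((String) value);
            }
        }
        return total;
    }

    public static void main(String[] args) {
        LinkedHashMap<String, Object> leaf = new LinkedHashMap<>();
        leaf.put("tokeniser", "120");
        leaf.put("tagger", "380");
        LinkedHashMap<String, Object> root = new LinkedHashMap<>();
        root.put("pipeline", leaf);
        root.put("init", "55");
        System.out.println(sum(root)); // 555
    }
}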
From source file:com.dbmojo.QueryExecutor.java
/** Add a batch update to either a single statement, or to the correct
 * passed prepared statement.
 */
private void addBatchUpdate(Connection conn, boolean prepared, String query, String[] values,
        Statement bstmt, LinkedHashMap<String, PreparedStatement> bpstmts) throws Exception {
    // If this is NOT a prepared statement then add the query to a raw SQL batch
    if (!prepared) {
        if (DebugLog.enabled) {
            DebugLog.add(this, "Adding update '" + query + "' to statement batch");
        }
        bstmt.addBatch(query);
    } else {
        // If this IS a prepared statement then check for its existence
        // in the pstmts hash. If it doesn't exist then create a new
        // pstmt for the query and add it to the hash.
        PreparedStatement pstmt = null;
        if (bpstmts.containsKey(query)) {
            if (DebugLog.enabled) {
                DebugLog.add(this, "Retrieving pstmt batch for query '" + query + "'");
            }
            pstmt = bpstmts.get(query);
        } else {
            if (DebugLog.enabled) {
                DebugLog.add(this, "Starting pstmt batch for query '" + query + "'");
            }
            pstmt = conn.prepareStatement(query);
        }
        if (DebugLog.enabled) {
            DebugLog.add(this, "Setting vals on pstmt batch for query '" + query + "'");
        }
        setPreparedStatementValues(pstmt, values);
        // Add THIS set of values to the batch for this specific
        // prepared statement. Later on all prepared statement batches
        // will be executed sequentially
        if (DebugLog.enabled) {
            DebugLog.add(this, "Adding to pstmt batch for query '" + query + "'");
        }
        pstmt.addBatch();
        bpstmts.put(query, pstmt);
    }
}
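The containsKey/get/put sequence above is the classic get-or-create cache pattern. On Java 8+, Map.computeIfAbsent collapses it into a single lookup. This is not from the DBMojo source; it is a sketch of how the core of the else branch could read, with the checked SQLException from prepareStatement wrapped because the lambda cannot throw it:

// get-or-create in one lookup; wrap the checked SQLException for the lambda
PreparedStatement pstmt = bpstmts.computeIfAbsent(query, q -> {
    try {
        return conn.prepareStatement(q);
    } catch (SQLException e) {
        throw new RuntimeException(e);
    }
});
setPreparedStatementValues(pstmt, values);
pstmt.addBatch();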
From source file:gate.util.reporting.PRTimeReporter.java
/**
 * Calculates the total of the time taken by processing element at each leaf
 * level. Also calculates the difference between the actual time taken by the
 * resources and system noted time.
 *
 * @param reportContainer
 *          An Object of type LinkedHashMap<String, Object> containing the
 *          processing elements (with time in milliseconds) in hierarchical
 *          structure.
 *
 * @return An integer containing the sub total.
 */
@SuppressWarnings("unchecked")
private int getTotal(LinkedHashMap<String, Object> reportContainer) {
    int total = 0;
    int diff = 0;
    int systotal = 0;
    int subLevelTotal = 0;
    Iterator<String> i = reportContainer.keySet().iterator();
    while (i.hasNext()) {
        Object key = i.next();
        if (reportContainer.get(key) instanceof LinkedHashMap) {
            subLevelTotal = getTotal((LinkedHashMap<String, Object>) (reportContainer.get(key)));
            total = total + subLevelTotal;
        } else {
            if (!key.equals("systotal")) {
                total = total + Integer.parseInt((String) (reportContainer.get(key)));
            }
        }
    }
    if (reportContainer.get("systotal") != null) {
        systotal = Integer.parseInt((String) (reportContainer.get("systotal")));
    }
    diff = systotal - total;
    reportContainer.put("total", Integer.toString(total));
    reportContainer.put("All others", Integer.toString(diff));
    total += diff;
    return total;
}
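The null check around the "systotal" lookup can be folded into the lookup itself on Java 8+ with Map.getOrDefault. A hedged one-line alternative for the block above (same semantics assumed, since a missing key previously left systotal at 0):

// Java 8+: fold the null check into the lookup with a default value
int systotal = Integer.parseInt((String) reportContainer.getOrDefault("systotal", "0"));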
From source file:com.espertech.esper.regression.pattern.TestCronParameter.java
private void checkResults(String eventId) {
    log.debug(".checkResults Checking results for event " + eventId);
    String expressionText = patternStmt.getText();
    LinkedHashMap<String, LinkedList<EventDescriptor>> allExpectedResults = testCase.getExpectedResults();
    EventBean[] receivedResults = listener.getLastNewData();
    // If nothing at all was expected for this event, make sure nothing was received
    if (!(allExpectedResults.containsKey(eventId))) {
        if ((receivedResults != null) && (receivedResults.length > 0)) {
            log.debug(".checkResults Incorrect result for expression : " + expressionText);
            log.debug(".checkResults Expected no results for event " + eventId + ", but received "
                    + receivedResults.length + " events");
            log.debug(".checkResults Received, have " + receivedResults.length + " entries");
            printList(receivedResults);
            TestCase.assertFalse(true);
        }
    }
    LinkedList<EventDescriptor> expectedResults = allExpectedResults.get(eventId);
    // Compare the result lists, not caring about the order of the elements
    if (!(compareLists(receivedResults, expectedResults))) {
        log.debug(".checkResults Incorrect result for expression : " + expressionText);
        log.debug(".checkResults Expected size=" + expectedResults.size() + " received size="
                + (receivedResults == null ? 0 : receivedResults.length));
        log.debug(".checkResults Expected, have " + expectedResults.size() + " entries");
        printList(expectedResults);
        log.debug(".checkResults Received, have " + (receivedResults == null ? 0 : receivedResults.length)
                + " entries");
        printList(receivedResults);
        TestCase.assertFalse(true);
    }
}
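The containsKey check followed by a later get means two lookups for the same key. Where the map is known never to store null values, a single get can serve both purposes; a hedged sketch of that variant (assuming allExpectedResults never maps a key to null):

// Single lookup: when no key maps to null, get() doubles as containsKey()
LinkedList<EventDescriptor> expected = allExpectedResults.get(eventId);
if (expected == null) {
    // nothing was expected for this event; verify nothing was received
} else {
    // compare expected against received
}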
From source file:com.textocat.textokit.eval.GoldStandardBasedEvaluation.java
private void evaluate(CAS goldCas, CAS sysCas) {
    FSIterator<AnnotationFS> goldAnnoIter = annotationExtractor.extract(goldCas);
    Set<AnnotationFS> goldProcessed = new HashSet<AnnotationFS>();
    // system annotations that exactly match a gold one
    Set<AnnotationFS> sysMatched = newHashSet();
    // matches
    LinkedHashMap<AnnotationFS, MatchInfo> matchesMap = newLinkedHashMap();
    while (goldAnnoIter.hasNext()) {
        AnnotationFS goldAnno = goldAnnoIter.next();
        if (goldProcessed.contains(goldAnno)) {
            continue;
        }
        MatchInfo mi = new MatchInfo();
        matchesMap.put(goldAnno, mi);
        Set<AnnotationFS> candidates = newLinkedHashSet(matchingStrategy.searchCandidates(goldAnno));
        candidates.removeAll(sysMatched);
        AnnotationFS exactSys = matchingStrategy.searchExactMatch(goldAnno, candidates);
        if (exactSys != null) {
            // sanity check
            assert candidates.contains(exactSys);
            mi.exact = exactSys;
            sysMatched.add(exactSys);
        }
        mi.partialSet.addAll(candidates);
        goldProcessed.add(goldAnno);
    }
    // filter partials that match a next gold
    for (MatchInfo mi : matchesMap.values()) {
        mi.partialSet.removeAll(sysMatched);
    }
    // report for each gold anno
    for (AnnotationFS goldAnno : matchesMap.keySet()) {
        // assert order declared in EvaluationListener javadoc
        MatchInfo mi = matchesMap.get(goldAnno);
        boolean matchedExactly = mi.exact != null;
        if (matchedExactly) {
            evalCtx.reportExactMatch(goldAnno, mi.exact);
        }
        for (AnnotationFS partialSys : mi.partialSet) {
            evalCtx.reportPartialMatch(goldAnno, partialSys);
        }
        if (!matchedExactly) {
            evalCtx.reportMissing(goldAnno);
        }
    }
    // report spurious (false positives)
    FSIterator<AnnotationFS> sysAnnoIter = annotationExtractor.extract(sysCas);
    while (sysAnnoIter.hasNext()) {
        AnnotationFS sysAnno = sysAnnoIter.next();
        if (!sysMatched.contains(sysAnno)) {
            evalCtx.reportSpurious(sysAnno);
        }
    }
}
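LinkedHashMap matters here precisely because the reporting loop iterates matchesMap.keySet() and relies on encounter order: keys come back in the order the gold annotations were first put. A minimal self-contained demonstration of that guarantee (keys and values are illustrative):

import java.util.LinkedHashMap;
import java.util.Map;

public class OrderDemo {
    public static void main(String[] args) {
        Map<String, Integer> m = new LinkedHashMap<>();
        m.put("third-seen-key", 3);
        m.put("alpha", 1);
        m.put("zulu", 2);
        // Iteration follows insertion order, not key order or hash order
        for (String k : m.keySet()) {
            System.out.println(k + " -> " + m.get(k)); // third-seen-key, alpha, zulu
        }
    }
}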
From source file:edumsg.edumsg_android_app.ProfileActivity.java
private void getTimeline() {
    final ObjectMapper mapper = new ObjectMapper();
    Map<String, String> jsonParams = new HashMap<>();
    jsonParams.put("queue", "USER");
    jsonParams.put("method", "get_favorites");
    jsonParams.put("session_id", sessionId);
    JSONObject jsonRequest = new JSONObject(jsonParams);
    JsonObjectRequest jsonObjectRequest3 = new JsonObjectRequest(Request.Method.POST, requestUrl, jsonRequest,
        new Response.Listener<JSONObject>() {
            @Override
            public void onResponse(JSONObject response) {
                try {
                    Map<String, Object> responseMap = mapper.readValue(response.toString(),
                        new TypeReference<HashMap<String, Object>>() {});
                    if (responseMap.get("code").equals("200")) {
                        favorites = (ArrayList) responseMap.get("favorites");
                        Map<String, String> jsonParams = new HashMap<>();
                        jsonParams.put("queue", "USER");
                        jsonParams.put("method", "following");
                        jsonParams.put("session_id", sessionId);
                        JSONObject jsonRequest = new JSONObject(jsonParams);
                        JsonObjectRequest jsonObjectRequest2 = new JsonObjectRequest(Request.Method.POST,
                            requestUrl, jsonRequest, new Response.Listener<JSONObject>() {
                                @Override
                                public void onResponse(JSONObject response) {
                                    try {
                                        Map<String, Object> responseMap = mapper.readValue(response.toString(),
                                            new TypeReference<HashMap<String, Object>>() {});
                                        if (responseMap.get("code").equals("200")) {
                                            followings = (ArrayList) responseMap.get("following");
                                            if (!owner)
                                                checkIfFollowed(followings);
                                            Map<String, String> jsonParams = new HashMap<>();
                                            jsonParams.put("queue", "USER");
                                            if (owner) {
                                                jsonParams.put("method", "user_tweets");
                                                jsonParams.put("session_id", sessionId);
                                            } else {
                                                jsonParams.put("method", "user_tweets2");
                                                jsonParams.put("username", profileUsername);
                                            }
                                            JSONObject jsonRequest = new JSONObject(jsonParams);
                                            JsonObjectRequest jsonObjectRequest4 = new JsonObjectRequest(
                                                Request.Method.POST, requestUrl, jsonRequest,
                                                new Response.Listener<JSONObject>() {
                                                    @Override
                                                    public void onResponse(JSONObject response) {
                                                        try {
                                                            Map<String, Object> responseMap = mapper.readValue(
                                                                response.toString(),
                                                                new TypeReference<HashMap<String, Object>>() {});
                                                            if (responseMap.get("code").equals("200")) {
                                                                if (!swipeRefreshLayout.isRefreshing())
                                                                    loading.success();
                                                                ArrayList tweetsArray = (ArrayList) responseMap.get("tweets");
                                                                final Iterator iterator = tweetsArray.iterator();
                                                                while (iterator.hasNext()) {
                                                                    final Map<String, Object> tweetJsonObj = mapper.readValue(
                                                                        mapper.writeValueAsString(iterator.next()),
                                                                        new TypeReference<HashMap<String, Object>>() {});
                                                                    final int tweetId = (int) tweetJsonObj.get("id");
                                                                    final LinkedHashMap creatorMap = (LinkedHashMap) tweetJsonObj.get("creator");
                                                                    final int creatorId = (int) creatorMap.get("id");
                                                                    String tweetText = (String) tweetJsonObj.get("tweet_text");
                                                                    String avatarUrl = (String) creatorMap.get("avatar_url");
                                                                    User creator = new User();
                                                                    creator.setId(creatorId);
                                                                    creator.setName((String) creatorMap.get("name"));
                                                                    creator.setUsername((String) creatorMap.get("username"));
                                                                    creator.setAvatar_url(avatarUrl);
                                                                    final Tweet tweetObject = new Tweet(tweetId, creator, tweetText);
                                                                    if (avatarUrl != null && !avatarUrl.equals("")) {
                                                                        tweetObject.setImgUrl(avatarUrl);
                                                                    }
                                                                    Iterator favIter = favorites.iterator();
                                                                    while (favIter.hasNext()) {
                                                                        Map<String, Object> tweetJsonObj2 = mapper.readValue(
                                                                            mapper.writeValueAsString(favIter.next()),
                                                                            new TypeReference<HashMap<String, Object>>() {});
                                                                        if (tweetId == (int) tweetJsonObj2.get("id")) {
                                                                            tweetObject.setIsFavorited(true);
                                                                            break;
                                                                        }
                                                                    }
                                                                    tweetObjects.add(tweetObject);
                                                                }
                                                                if (swipeRefreshLayout.isRefreshing()) {
                                                                    rvAdapter.notifyDataSetChanged();
                                                                    swipeRefreshLayout.setRefreshing(false);
                                                                } else {
                                                                    rvAdapter.notifyItemRangeInserted(0, tweetObjects.size());
                                                                }
                                                            }
                                                        } catch (Exception e) {
                                                            if (loading != null)
                                                                loading.error();
                                                            else
                                                                swipeRefreshLayout.setRefreshing(false);
                                                            e.printStackTrace();
                                                        }
                                                    }
                                                }, new Response.ErrorListener() {
                                                    @Override
                                                    public void onErrorResponse(VolleyError error) {
                                                        if (loading != null)
                                                            loading.error();
                                                        else
                                                            swipeRefreshLayout.setRefreshing(false);
                                                        error.printStackTrace();
                                                    }
                                                }) {
                                                @Override
                                                public Map<String, String> getHeaders() throws AuthFailureError {
                                                    HashMap<String, String> headers = new HashMap<String, String>();
                                                    headers.put("Content-Type", "application/json; charset=utf-8");
                                                    //headers.put("User-agent", System.getProperty("http.agent"));
                                                    return headers;
                                                }
                                            };
                                            jsonObjectRequest4.setTag(TAG);
                                            jsonObjectRequest4.setRetryPolicy(new DefaultRetryPolicy(10000,
                                                DefaultRetryPolicy.DEFAULT_MAX_RETRIES,
                                                DefaultRetryPolicy.DEFAULT_BACKOFF_MULT));
                                            getVolleyRequestQueue().add(jsonObjectRequest4);
                                        }
                                    } catch (Exception e) {
                                        if (loading != null)
                                            loading.error();
                                        else
                                            swipeRefreshLayout.setRefreshing(false);
                                        e.printStackTrace();
                                    }
                                }
                            }, new Response.ErrorListener() {
                                @Override
                                public void onErrorResponse(VolleyError error) {
                                    if (loading != null)
                                        loading.error();
                                    else
                                        swipeRefreshLayout.setRefreshing(false);
                                }
                            }) {
                            @Override
                            public Map<String, String> getHeaders() throws AuthFailureError {
                                HashMap<String, String> headers = new HashMap<String, String>();
                                headers.put("Content-Type", "application/json; charset=utf-8");
                                //headers.put("User-agent", System.getProperty("http.agent"));
                                return headers;
                            }
                        };
                        jsonObjectRequest2.setTag(TAG);
                        jsonObjectRequest2.setRetryPolicy(new DefaultRetryPolicy(10000,
                            DefaultRetryPolicy.DEFAULT_MAX_RETRIES, DefaultRetryPolicy.DEFAULT_BACKOFF_MULT));
                        getVolleyRequestQueue().add(jsonObjectRequest2);
                    }
                } catch (Exception e) {
                    if (loading != null)
                        loading.error();
                    else
                        swipeRefreshLayout.setRefreshing(false);
                    e.printStackTrace();
                }
            }
        }, new Response.ErrorListener() {
            @Override
            public void onErrorResponse(VolleyError error) {
                if (loading != null)
                    loading.error();
                else
                    swipeRefreshLayout.setRefreshing(false);
                error.printStackTrace();
            }
        }) {
        @Override
        public Map<String, String> getHeaders() throws AuthFailureError {
            HashMap<String, String> headers = new HashMap<String, String>();
            headers.put("Content-Type", "application/json; charset=utf-8");
            //headers.put("User-agent", System.getProperty("http.agent"));
            return headers;
        }
    };
    jsonObjectRequest3.setTag(TAG);
    jsonObjectRequest3.setRetryPolicy(new DefaultRetryPolicy(10000, DefaultRetryPolicy.DEFAULT_MAX_RETRIES,
        DefaultRetryPolicy.DEFAULT_BACKOFF_MULT));
    getVolleyRequestQueue().add(jsonObjectRequest3);
}
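The raw casts above, such as (LinkedHashMap) tweetJsonObj.get("creator"), work because Jackson deserializes nested JSON objects into LinkedHashMap whenever the declared target type is Object or Map. A minimal self-contained illustration of that behavior (the JSON payload is made up):

import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.util.Map;

public class JacksonMapDemo {
    public static void main(String[] args) throws Exception {
        String json = "{\"id\":1,\"creator\":{\"id\":7,\"username\":\"ann\"}}";
        ObjectMapper mapper = new ObjectMapper();
        Map<String, Object> tweet = mapper.readValue(json, new TypeReference<Map<String, Object>>() {});
        // Nested JSON objects come back as LinkedHashMap, preserving field order
        Map<String, Object> creator = (Map<String, Object>) tweet.get("creator");
        System.out.println(creator.getClass());        // class java.util.LinkedHashMap
        System.out.println(creator.get("username"));   // ann
    }
}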
From source file:com.opengamma.analytics.financial.provider.sensitivity.inflation.ParameterSensitivityInflationUnderlyingMatrixCalculator.java
/**
 * Computes the sensitivity with respect to the parameters from the point sensitivities to the continuously compounded rate.
 * @param sensitivity The point sensitivity.
 * @param inflation The inflation provider. Not null.
 * @param curvesSet The set of curves for which the sensitivity will be computed. Not null.
 * @return The sensitivity (as a ParameterSensitivity). ??The order of the sensitivity is by curve as provided by the curvesSet??
 */
@Override
public DoubleMatrix1D pointToParameterSensitivity(final InflationSensitivity sensitivity,
        final InflationProviderInterface inflation, final Set<String> curvesSet) {
    // TODO: The first part depends only on the multicurves and curvesSet, not the sensitivity.
    // Should it be refactored and done only once?
    final Set<String> curveNamesSet = inflation.getAllNames(); // curvesSet;
    final int nbCurve = curveNamesSet.size();
    final String[] curveNamesArray = new String[nbCurve];
    int loopname = 0;
    final LinkedHashMap<String, Integer> curveNum = new LinkedHashMap<>();
    for (final String name : curveNamesSet) { // loop over all curves (by name)
        curveNamesArray[loopname] = name;
        curveNum.put(name, loopname++);
    }
    // Implementation note: nbNewParameters - number of new parameters in the curve,
    // parameters not from an underlying curve which is another curve of the bundle.
    final int[] nbNewParameters = new int[nbCurve];
    // Implementation note: indexOther - the index of the underlying curves, if any.
    final int[][] indexOther = new int[nbCurve][];
    loopname = 0;
    for (final String name : curveNamesSet) { // loop over all curves (by name)
        nbNewParameters[loopname] = inflation.getNumberOfParameters(name);
        loopname++;
    }
    loopname = 0;
    for (final String name : curveNamesSet) { // loop over all curves (by name)
        final List<String> underlyingCurveNames = inflation.getUnderlyingCurvesNames(name);
        final IntArrayList indexOtherList = new IntArrayList();
        for (final String u : underlyingCurveNames) {
            final Integer i = curveNum.get(u);
            if (i != null) {
                indexOtherList.add(i);
                // Only one level: a curve used as an underlying can not have an underlying itself.
                nbNewParameters[loopname] -= nbNewParameters[i];
            }
        }
        indexOther[loopname] = indexOtherList.toIntArray();
        loopname++;
    }
    final int nbSensiCurve = curvesSet.size();
    // for (final String name : curveNamesSet) { // loop over all curves (by name)
    //   if (curvesSet.contains(name)) {
    //     nbSensiCurve++;
    //   }
    // }
    final int[] nbNewParamSensiCurve = new int[nbSensiCurve]; // Implementation note: nbNewParamSensiCurve
    final int[][] indexOtherSensiCurve = new int[nbSensiCurve][]; // Implementation note: indexOtherSensiCurve
    // Implementation note: startCleanParameter - for each curve for which the sensitivity should be
    // computed, the index in the total sensitivity vector at which that curve starts.
    final int[] startCleanParameter = new int[nbSensiCurve];
    // Implementation note: startDirtyParameter - for each curve for which the sensitivity should be
    // computed, the indexes of the underlying curves.
    final int[][] startDirtyParameter = new int[nbSensiCurve][];
    int nbSensitivityCurve = 0;
    int nbCleanParameters = 0;
    int currentDirtyStart = 0;
    for (final String name : curvesSet) { // loop over the sensitivity curves (by name)
        // if (curvesSet.contains(name)) {
        final int num = curveNum.get(name);
        final IntArrayList startDirtyParameterList = new IntArrayList();
        final List<String> underlyingCurveNames = inflation.getUnderlyingCurvesNames(name);
        for (final String u : underlyingCurveNames) {
            final Integer i = curveNum.get(u);
            if (i != null) {
                startDirtyParameterList.add(currentDirtyStart);
                currentDirtyStart += nbNewParameters[i];
            }
        }
        startDirtyParameterList.add(currentDirtyStart);
        currentDirtyStart += nbNewParameters[num];
        startDirtyParameter[nbSensitivityCurve] = startDirtyParameterList.toIntArray();
        nbNewParamSensiCurve[nbSensitivityCurve] = nbNewParameters[num];
        indexOtherSensiCurve[nbSensitivityCurve] = indexOther[num];
        startCleanParameter[nbSensitivityCurve] = nbCleanParameters;
        nbCleanParameters += nbNewParamSensiCurve[nbSensitivityCurve];
        nbSensitivityCurve++;
        // }
    }
    // Implementation note: Compute the "dirty" sensitivity, i.e. the sensitivity where the
    // underlying curves are not taken into account.
    double[] sensiDirty = new double[0];
    final Map<String, List<DoublesPair>> sensitivityPriceIndex = sensitivity.getPriceCurveSensitivities();
    for (final String name : curvesSet) { // loop over the sensitivity curves (by name)
        // if (curvesSet.contains(name)) {
        final int nbParam = inflation.getNumberOfParameters(name);
        final double[] s1Name = new double[nbParam];
        final double[] sDsc1Name = inflation.parameterInflationSensitivity(name, sensitivityPriceIndex.get(name));
        // if ((sDsc1Name != null) && (sFwd1Name == null)) {
        //   s1Name = sDsc1Name;
        // }
        // if ((sDsc1Name == null) && (sFwd1Name != null)) {
        //   s1Name = sFwd1Name;
        // }
        // if ((sDsc1Name != null) && (sFwd1Name != null)) {
        for (int loopp = 0; loopp < nbParam; loopp++) {
            s1Name[loopp] = sDsc1Name[loopp];
        }
        // }
        sensiDirty = ArrayUtils.addAll(sensiDirty, s1Name);
        // }
    }
    // Implementation note: "clean" the sensitivity, i.e. add the underlying curve parts.
    final double[] sensiClean = new double[nbCleanParameters];
    for (int loopcurve = 0; loopcurve < nbSensiCurve; loopcurve++) {
        for (int loopo = 0; loopo < indexOtherSensiCurve[loopcurve].length; loopo++) {
            if (curvesSet.contains(curveNamesArray[indexOtherSensiCurve[loopcurve][loopo]])) {
                for (int loops = 0; loops < nbNewParamSensiCurve[indexOtherSensiCurve[loopcurve][loopo]]; loops++) {
                    sensiClean[startCleanParameter[indexOtherSensiCurve[loopcurve][loopo]] + loops] +=
                            sensiDirty[startDirtyParameter[loopcurve][loopo] + loops];
                }
            }
        }
        for (int loops = 0; loops < nbNewParamSensiCurve[loopcurve]; loops++) {
            sensiClean[startCleanParameter[loopcurve] + loops] +=
                    sensiDirty[startDirtyParameter[loopcurve][indexOtherSensiCurve[loopcurve].length] + loops];
        }
    }
    return new DoubleMatrix1D(sensiClean);
}
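One detail in this example deserves a call-out: curveNum.get(u) returns a boxed Integer that is null for unknown names, and the code checks for null before using it. Assigning the result straight to an int would auto-unbox and throw a NullPointerException. A minimal self-contained illustration (map contents are made up):

import java.util.LinkedHashMap;
import java.util.Map;

public class UnboxDemo {
    public static void main(String[] args) {
        Map<String, Integer> curveNum = new LinkedHashMap<>();
        curveNum.put("EUR-DSC", 0);

        Integer i = curveNum.get("EUR-FWD"); // null: key absent
        if (i != null) {                     // guard before unboxing
            int index = i;
            System.out.println(index);
        }
        // int bad = curveNum.get("EUR-FWD"); // would throw NullPointerException
    }
}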