List of usage examples for java.util LinkedHashMap get
public V get(Object key)
From source file:jp.or.openid.eiwg.scim.servlet.Users.java
/**
 * Handles POST /scim/Users: creates a new SCIM user from the JSON request body.
 * Rejects the request when a resource id is present in the path (creation is
 * collection-level only) or when authentication fails.
 *
 * @param request the HTTP request; body is the SCIM user resource as JSON (UTF-8)
 * @param response the HTTP response; on success carries 201, a Location header and the created resource
 * @throws ServletException
 * @throws IOException
 */
protected void doPost(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {
    // Servlet context shared with the Operation helper
    ServletContext context = getServletContext();

    // Authenticate the caller before doing anything else
    Operation op = new Operation();
    boolean result = op.Authentication(context, request);
    if (!result) {
        // Authentication failed - relay the error recorded by the operation
        errorResponse(response, op.getErrorCode(), op.getErrorType(), op.getErrorMessage());
    } else {
        // Extract the (optional) target id from the path and the requested attributes
        String targetId = request.getPathInfo();
        String attributes = request.getParameter("attributes");
        if (targetId != null && !targetId.isEmpty()) {
            // Strip the leading '/' from the path info
            targetId = targetId.substring(1);
        }
        if (targetId == null || targetId.isEmpty()) {
            // Collection-level POST: read the JSON body
            request.setCharacterEncoding("UTF-8");
            String body = IOUtils.toString(request.getReader());

            // Create the user; null result means the operation recorded an error
            LinkedHashMap<String, Object> resultObject = op.createUserInfo(context, request, attributes, body);
            if (resultObject != null) {
                // Serialize the result map to JSON
                ObjectMapper mapper = new ObjectMapper();
                StringWriter writer = new StringWriter();
                mapper.writeValue(writer, resultObject);

                // Build the Location header URL for the created resource.
                // NOTE(review): omits the port only for 80/443 regardless of scheme - confirm intent.
                String location = request.getScheme() + "://" + request.getServerName();
                int serverPort = request.getServerPort();
                if (serverPort != 80 && serverPort != 443) {
                    location += ":" + Integer.toString(serverPort);
                }
                location += request.getContextPath();
                location += "/scim/Users/";
                if (resultObject.get("id") != null) {
                    location += resultObject.get("id").toString();
                }

                // 201 Created with the resource representation
                response.setStatus(HttpServletResponse.SC_CREATED);
                response.setContentType("application/scim+json;charset=UTF-8");
                response.setHeader("Location", location);
                PrintWriter out = response.getWriter();
                out.println(writer.toString());
            } else {
                // Creation failed - relay the error recorded by the operation
                errorResponse(response, op.getErrorCode(), op.getErrorType(), op.getErrorMessage());
            }
        } else {
            // POST against a specific resource id is not supported
            errorResponse(response, HttpServletResponse.SC_BAD_REQUEST, null,
                    MessageConstants.ERROR_NOT_SUPPORT_OPERATION);
        }
    }
}
From source file:com.opengamma.analytics.financial.curve.sensitivity.ParameterUnderlyingSensitivityCalculator.java
/**
 * Computes the sensitivity with respect to the parameters from the point sensitivities to the
 * continuously compounded rate. The sensitivity computed is only to the curves not in the
 * fixedCurves set. When a curve depends on another underlying curve and the underlying curve is a
 * fixed curve, its sensitivity is not reported.
 *
 * @param sensitivity The point sensitivity.
 * @param fixedCurves The fixed curves names (for which the parameter sensitivity are not computed
 *        even if they are necessary for the instrument pricing). The curve in the list may or may
 *        not be in the bundle. Not null.
 * @param bundle The curve bundle with all the curves with respect to which the sensitivity should
 *        be computed. Not null.
 * @return The sensitivity (as a DoubleMatrix1D).
 */
@Override
public DoubleMatrix1D pointToParameterSensitivity(final InterestRateCurveSensitivity sensitivity,
        final Set<String> fixedCurves, final YieldCurveBundle bundle) {
    // Index every curve in the bundle by name so curves can be cross-referenced by position.
    Set<String> curveNamesSet = bundle.getAllNames();
    int nbCurve = curveNamesSet.size();
    String[] curveNamesArray = new String[nbCurve];
    int loopname = 0;
    LinkedHashMap<String, Integer> curveNum = new LinkedHashMap<String, Integer>();
    for (final String name : curveNamesSet) { // loop over all curves (by name)
        curveNamesArray[loopname] = name;
        curveNum.put(name, loopname++);
    }
    // nbNewParameters - number of new parameters in the curve, parameters not from an underlying
    // curve which is another curve of the bundle.
    int[] nbNewParameters = new int[nbCurve];
    // indexOther - the index of the underlying curves, if any.
    int[][] indexOther = new int[nbCurve][];
    loopname = 0;
    for (final String name : curveNamesSet) { // loop over all curves (by name)
        final YieldAndDiscountCurve curve = bundle.getCurve(name);
        List<String> underlyingCurveNames = curve.getUnderlyingCurvesNames();
        nbNewParameters[loopname] = curve.getNumberOfParameters();
        List<Integer> indexOtherList = new ArrayList<Integer>();
        for (String u : underlyingCurveNames) {
            Integer i = curveNum.get(u);
            if (i != null) {
                // Underlying curve is in the bundle: its parameters are not "new" for this curve.
                indexOtherList.add(i);
                nbNewParameters[loopname] -= nbNewParameters[i];
            }
        }
        indexOther[loopname] = ArrayUtils.toPrimitive(indexOtherList.toArray(new Integer[0]));
        loopname++;
    }
    // Count the curves for which a sensitivity is actually reported (not in fixedCurves).
    int nbSensiCurve = 0;
    for (final String name : bundle.getAllNames()) { // loop over all curves (by name)
        if (!fixedCurves.contains(name)) {
            nbSensiCurve++;
        }
    }
    // nbNewParamSensiCurve - new-parameter counts restricted to the sensitivity curves.
    int[] nbNewParamSensiCurve = new int[nbSensiCurve];
    // indexOtherSensiCurve - underlying-curve indexes restricted to the sensitivity curves.
    int[][] indexOtherSensiCurve = new int[nbSensiCurve][];
    // startCleanParameter - for each curve for which the sensitivity should be computed, the index
    // in the total sensitivity vector at which that curve start.
    int[] startCleanParameter = new int[nbSensiCurve];
    // startDirtyParameter - for each curve for which the sensitivity should be computed, the
    // indexes of the underlying curves.
    int[][] startDirtyParameter = new int[nbSensiCurve][];
    int nbSensitivityCurve = 0;
    int nbCleanParameters = 0;
    int currentDirtyStart = 0;
    for (final String name : curveNamesSet) { // loop over all curves (by name)
        if (!fixedCurves.contains(name)) {
            int num = curveNum.get(name);
            final YieldAndDiscountCurve curve = bundle.getCurve(name);
            List<Integer> startDirtyParameterList = new ArrayList<Integer>();
            List<String> underlyingCurveNames = curve.getUnderlyingCurvesNames();
            // Reserve one "dirty" segment per in-bundle underlying curve, then one for the
            // curve's own new parameters.
            for (String u : underlyingCurveNames) {
                Integer i = curveNum.get(u);
                if (i != null) {
                    startDirtyParameterList.add(currentDirtyStart);
                    currentDirtyStart += nbNewParameters[i];
                }
            }
            startDirtyParameterList.add(currentDirtyStart);
            currentDirtyStart += nbNewParameters[nbSensitivityCurve];
            startDirtyParameter[nbSensitivityCurve] = ArrayUtils
                    .toPrimitive(startDirtyParameterList.toArray(new Integer[0]));
            nbNewParamSensiCurve[nbSensitivityCurve] = nbNewParameters[num];
            indexOtherSensiCurve[nbSensitivityCurve] = indexOther[num];
            startCleanParameter[nbSensitivityCurve] = nbCleanParameters;
            nbCleanParameters += nbNewParamSensiCurve[nbSensitivityCurve];
            nbSensitivityCurve++;
        }
    }
    // Collect the raw ("dirty") per-curve sensitivities, underlying segments included.
    final List<Double> sensiDirtyList = new ArrayList<Double>();
    for (final String name : curveNamesSet) { // loop over all curves (by name)
        if (!fixedCurves.contains(name)) {
            final YieldAndDiscountCurve curve = bundle.getCurve(name);
            List<Double> oneCurveSensitivity = pointToParameterSensitivity(
                    sensitivity.getSensitivities().get(name), curve);
            sensiDirtyList.addAll(oneCurveSensitivity);
        }
    }
    double[] sensiDirty = ArrayUtils.toPrimitive(sensiDirtyList.toArray(new Double[0]));
    // Fold the dirty segments back onto the owning curves' "clean" parameter slots.
    double[] sensiClean = new double[nbCleanParameters];
    for (int loopcurve = 0; loopcurve < nbSensiCurve; loopcurve++) {
        // Underlying-curve segments are credited to the underlying curve (unless it is fixed).
        for (int loopo = 0; loopo < indexOtherSensiCurve[loopcurve].length; loopo++) {
            if (!fixedCurves.contains(curveNamesArray[indexOtherSensiCurve[loopcurve][loopo]])) {
                for (int loops = 0; loops < nbNewParamSensiCurve[indexOtherSensiCurve[loopcurve][loopo]]; loops++) {
                    sensiClean[startCleanParameter[indexOtherSensiCurve[loopcurve][loopo]]
                            + loops] += sensiDirty[startDirtyParameter[loopcurve][loopo] + loops];
                }
            }
        }
        // The final segment holds the curve's own new parameters.
        for (int loops = 0; loops < nbNewParamSensiCurve[loopcurve]; loops++) {
            sensiClean[startCleanParameter[loopcurve]
                    + loops] += sensiDirty[startDirtyParameter[loopcurve][indexOtherSensiCurve[loopcurve].length]
                            + loops];
        }
    }
    return new DoubleMatrix1D(sensiClean);
}
From source file:com.alibaba.wasp.plan.parser.druid.DruidDDLParser.java
/** * Process Create Table Statement and generate Execute Plan * //from ww w. ja va 2 s . com */ private void getCreateTablePlan(ParseContext context, WaspSqlCreateTableStatement waspSqlCreateTableStatement, MetaEventOperation metaEventOperation) throws IOException { /** * example String sql3 = "CREATE TABLE User {Required Int64 user_id; * Required String name; Optional String phone;} primary key(user_id),ENTITY * GROUP ROOT, Entity Group Key(user_id);" ; String sql4 = "CREATE TABLE * Photo { Required Int64 user_id columnfamily cf comment 'aaa'; Required * Int32 photo_id comment 'child primary key'; Required Int64 time; Required * String full_url; Optional String thumbnail_url; Repeated String tag; } * primary key(user_id, photo_id) IN TABLE user,ENTITY GROUP KEY(user_id) * references User;"; */ // Table Info SQLExprTableSource tableSource = waspSqlCreateTableStatement.getTableSource(); String tableName = parseFromClause(tableSource); // Check Table Name is legal. metaEventOperation.isLegalTableName(tableName); // Check if the table exists boolean tableNotExit = metaEventOperation.checkTableNotExists(tableName, true); if (!tableNotExit) { if (waspSqlCreateTableStatement.isIfNotExiists()) { context.setPlan(new NotingTodoPlan()); LOG.debug("table " + tableName + " exits , isIfNotExiists is true, ignore"); return; } else { throw new TableExistsException(tableName + " is already exists!"); } } // Table category. WaspSqlCreateTableStatement.TableCategory category = waspSqlCreateTableStatement.getCategory(); FTable.TableType tableType = FTable.TableType.CHILD; if (category != null && category == WaspSqlCreateTableStatement.TableCategory.ROOT) { tableType = FTable.TableType.ROOT; } // Primary Key. List<SQLExpr> primaryKeysSQLExpr = waspSqlCreateTableStatement.getPrimaryKeys(); // table columns. 
List<SQLTableElement> tableElementList = waspSqlCreateTableStatement.getTableElementList(); // columns info LinkedHashMap<String, Field> columns = new LinkedHashMap<String, Field>(); for (SQLTableElement element : tableElementList) { Field field = parse(element); columns.put(field.getName(), field); } // Check if columns are legal. metaEventOperation.areLegalTableColumns(null, columns.values()); checkFamilyLegal(columns.values(), metaEventOperation); // Primary keys check will be done in this following method LinkedHashMap<String, Field> primaryKeys = parse(primaryKeysSQLExpr, columns); long createTime = System.currentTimeMillis(); long lastAccessTime = createTime; String owner = "me"; FTable table = new FTable(null, tableName, tableType, owner, createTime, lastAccessTime, columns, primaryKeys, primaryKeys.entrySet().iterator().next().getValue()); SQLExpr entityGroupKeySQLExpr = waspSqlCreateTableStatement.getEntityGroupKey(); Field entityGroupKey = primaryKeys.get(parseName(entityGroupKeySQLExpr)); if (entityGroupKey == null) { throw new UnsupportedException(entityGroupKeySQLExpr + " is ForeignKey, but don't in primaryKeys."); } table.setEntityGroupKey(entityGroupKey); if (tableType == FTable.TableType.CHILD) { String parentName = parseFromClause(waspSqlCreateTableStatement.getInTableName()); table.setParentName(parentName); if (!parentName.equals(parseFromClause(waspSqlCreateTableStatement.getReferenceTable()))) { throw new UnsupportedException(" in table " + waspSqlCreateTableStatement.getInTableName() + " != references table " + waspSqlCreateTableStatement.getReferenceTable()); } // Check parent's EGK equals child's EGK. 
TableSchemaCacheReader reader = TableSchemaCacheReader.getInstance(configuration); FTable parentTable = reader.getSchema(parentName); if (parentTable == null) { parentTable = TableSchemaCacheReader.getService(reader.getConf()).getTable(tableName); } if (parentTable == null) { throw new TableNotFoundException("Not found parent table:" + parentName); } if (!parentTable.getEntityGroupKey().getName().equals(table.getEntityGroupKey().getName())) { throw new UnsupportedException( "Parent" + parentName + "'s egk doesn't equals Child" + tableName + "'s egk."); } // Check child's PKS contains parent's PKS. for (Field parentPrimaryKey : parentTable.getPrimaryKeys().values()) { boolean found = table.getPrimaryKeys().containsKey(parentPrimaryKey.getName()); if (!found) { throw new UnsupportedException("Child's pks must contains parent's pks."); } } } SQLPartitioningClause partitioning = waspSqlCreateTableStatement.getPartitioning(); byte[][] splitKeys = null; if (partitioning != null) { if (table.isRootTable()) { if (partitioning instanceof WaspSqlPartitionByKey) { WaspSqlPartitionByKey partitionKey = (WaspSqlPartitionByKey) partitioning; byte[] start = convert(null, partitionKey.getStart()); byte[] end = convert(null, partitionKey.getEnd()); int partitionCount = convertToInt(partitionKey.getPartitionCount()); splitKeys = Bytes.split(start, end, partitionCount - 3); } else { throw new UnsupportedException("Unsupported SQLPartitioningClause " + partitioning); } } else { throw new UnsupportedException("Partition by only supported for Root Table"); } } CreateTablePlan createTable = new CreateTablePlan(table, splitKeys); context.setPlan(createTable); LOG.debug("CreateTablePlan " + createTable.toString()); }
From source file:aldenjava.opticalmapping.data.mappingresult.OptMapResultNode.java
/**
 * Checks whether the sub-reference information of this result is valid against the
 * reference map.
 *
 * @param optrefmap map of reference name to reference data
 * @return {@code true} if the mapped reference is present in the map and its data validates;
 *         {@code false} when the reference is absent
 */
public boolean isSubRefInfoValid(LinkedHashMap<String, DataNode> optrefmap) {
    // Unknown reference name: nothing to validate against.
    if (!optrefmap.containsKey(mappedRegion.ref)) {
        return false;
    }
    // Delegate to the DataNode-based overload for the actual check.
    return isSubRefInfoValid(optrefmap.get(mappedRegion.ref));
}
From source file:com.opengamma.analytics.financial.curve.sensitivity.ParameterUnderlyingSensitivityBlockCalculator.java
/**
 * Computes the parameter sensitivity per curve (as a block) from the point sensitivities, for the
 * given currency. Only curves not in the fixedCurves set are reported. Sensitivity to an
 * underlying curve is credited to that underlying curve's block (unless the underlying is fixed).
 *
 * @param ccy the currency of the sensitivity
 * @param sensitivity the point sensitivity
 * @param fixedCurves curve names for which no parameter sensitivity is computed; may or may not
 *        be in the bundle
 * @param bundle the curve bundle with all curves against which sensitivity is computed
 * @return the sensitivity as one DoubleMatrix1D block per (curve name, currency) pair
 */
public ParameterSensitivity pointToParameterSensitivity(final Currency ccy,
        final InterestRateCurveSensitivity sensitivity, final Set<String> fixedCurves,
        final YieldCurveBundle bundle) {
    // Index every curve in the bundle by name so curves can be cross-referenced by position.
    Set<String> curveNamesSet = bundle.getAllNames();
    int nbCurve = curveNamesSet.size();
    String[] curveNamesArray = new String[nbCurve];
    int loopname = 0;
    LinkedHashMap<String, Integer> curveNum = new LinkedHashMap<String, Integer>();
    for (final String name : curveNamesSet) { // loop over all curves (by name)
        curveNamesArray[loopname] = name;
        curveNum.put(name, loopname++);
    }
    // nbNewParameters - number of new parameters in the curve, parameters not from an underlying
    // curve which is another curve of the bundle.
    int[] nbNewParameters = new int[nbCurve];
    // indexOther - the index of the underlying curves, if any.
    int[][] indexOther = new int[nbCurve][];
    loopname = 0;
    for (final String name : curveNamesSet) { // loop over all curves (by name)
        final YieldAndDiscountCurve curve = bundle.getCurve(name);
        List<String> underlyingCurveNames = curve.getUnderlyingCurvesNames();
        nbNewParameters[loopname] = curve.getNumberOfParameters();
        List<Integer> indexOtherList = new ArrayList<Integer>();
        for (String u : underlyingCurveNames) {
            Integer i = curveNum.get(u);
            if (i != null) {
                // Underlying curve is in the bundle: its parameters are not "new" for this curve.
                indexOtherList.add(i);
                nbNewParameters[loopname] -= nbNewParameters[i];
            }
        }
        indexOther[loopname] = ArrayUtils.toPrimitive(indexOtherList.toArray(new Integer[0]));
        loopname++;
    }
    // Count the curves for which a sensitivity is actually reported (not in fixedCurves).
    loopname = 0;
    for (final String name : bundle.getAllNames()) { // loop over all curves (by name)
        if (!fixedCurves.contains(name)) {
            loopname++;
        }
    }
    int nbSensitivityCurve = loopname;
    // nbNewParamSensiCurve - new-parameter counts restricted to the sensitivity curves.
    int[] nbNewParamSensiCurve = new int[nbSensitivityCurve];
    // indexOtherSensiCurve - underlying-curve indexes restricted to the sensitivity curves.
    int[][] indexOtherSensiCurve = new int[nbSensitivityCurve][];
    // startDirtyParameter - for each curve for which the sensitivity should be computed, the
    // indexes of the underlying curves. (Unlike the flat-vector variant of this calculator, no
    // "clean start" offsets are needed: results are stored per curve, not in one vector.)
    int[][] startDirtyParameter = new int[nbSensitivityCurve][];
    int nbCleanParameters = 0;
    int currentDirtyStart = 0;
    loopname = 0;
    for (final String name : curveNamesSet) { // loop over all curves (by name)
        if (!fixedCurves.contains(name)) {
            int num = curveNum.get(name);
            final YieldAndDiscountCurve curve = bundle.getCurve(name);
            List<Integer> startDirtyParameterList = new ArrayList<Integer>();
            List<String> underlyingCurveNames = curve.getUnderlyingCurvesNames();
            // Reserve one "dirty" segment per in-bundle underlying curve, then one for the
            // curve's own new parameters.
            for (String u : underlyingCurveNames) {
                Integer i = curveNum.get(u);
                if (i != null) {
                    startDirtyParameterList.add(currentDirtyStart);
                    currentDirtyStart += nbNewParameters[i];
                }
            }
            startDirtyParameterList.add(currentDirtyStart);
            currentDirtyStart += nbNewParameters[loopname];
            startDirtyParameter[loopname] = ArrayUtils
                    .toPrimitive(startDirtyParameterList.toArray(new Integer[0]));
            nbNewParamSensiCurve[loopname] = nbNewParameters[num];
            indexOtherSensiCurve[loopname] = indexOther[num];
            nbCleanParameters += nbNewParamSensiCurve[loopname];
            loopname++;
        }
    }
    // Collect the raw ("dirty") per-curve sensitivities, underlying segments included.
    final List<Double> sensiDirtyList = new ArrayList<Double>();
    for (final String name : curveNamesSet) { // loop over all curves (by name)
        if (!fixedCurves.contains(name)) {
            final YieldAndDiscountCurve curve = bundle.getCurve(name);
            Double[] oneCurveSensitivity = pointToParameterSensitivity(sensitivity.getSensitivities().get(name),
                    curve);
            sensiDirtyList.addAll(Arrays.asList(oneCurveSensitivity));
        }
    }
    double[] sensiDirty = ArrayUtils.toPrimitive(sensiDirtyList.toArray(new Double[0]));
    // One clean block per sensitivity curve, sized by its own (new) parameter count.
    double[][] sensiClean = new double[nbSensitivityCurve][];
    for (int loopcurve = 0; loopcurve < nbSensitivityCurve; loopcurve++) {
        sensiClean[loopcurve] = new double[nbNewParamSensiCurve[loopcurve]];
    }
    for (int loopcurve = 0; loopcurve < nbSensitivityCurve; loopcurve++) {
        // Underlying-curve segments are credited to the underlying curve (unless it is fixed).
        for (int loopo = 0; loopo < indexOtherSensiCurve[loopcurve].length; loopo++) {
            if (!fixedCurves.contains(curveNamesArray[indexOtherSensiCurve[loopcurve][loopo]])) {
                for (int loops = 0; loops < nbNewParamSensiCurve[indexOtherSensiCurve[loopcurve][loopo]]; loops++) {
                    sensiClean[indexOtherSensiCurve[loopcurve][loopo]][loops] += sensiDirty[startDirtyParameter[loopcurve][loopo]
                            + loops];
                }
            }
        }
        // The final segment holds the curve's own new parameters.
        for (int loops = 0; loops < nbNewParamSensiCurve[loopcurve]; loops++) {
            sensiClean[loopcurve][loops] += sensiDirty[startDirtyParameter[loopcurve][indexOtherSensiCurve[loopcurve].length]
                    + loops];
        }
    }
    // Package each curve's block keyed by (curve name, currency), preserving curve order.
    final LinkedHashMap<Pair<String, Currency>, DoubleMatrix1D> result = new LinkedHashMap<Pair<String, Currency>, DoubleMatrix1D>();
    for (int loopcurve = 0; loopcurve < nbSensitivityCurve; loopcurve++) {
        result.put(new ObjectsPair<String, Currency>(curveNamesArray[loopcurve], ccy),
                new DoubleMatrix1D(sensiClean[loopcurve]));
    }
    return new ParameterSensitivity(result);
}
From source file:jp.or.openid.eiwg.scim.servlet.Users.java
/**
 * Handles PUT /scim/Users/{id}: replaces an existing SCIM user with the JSON request body.
 * Rejects the request when no resource id is present in the path or when authentication fails.
 *
 * @param request the HTTP request; body is the SCIM user resource as JSON (UTF-8)
 * @param response the HTTP response; on success carries 200, a Location header and the updated resource
 * @throws ServletException
 * @throws IOException
 */
protected void doPut(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {
    // Servlet context shared with the Operation helper
    ServletContext context = getServletContext();

    // Authenticate the caller before doing anything else
    Operation op = new Operation();
    boolean result = op.Authentication(context, request);
    if (!result) {
        // Authentication failed - relay the error recorded by the operation
        errorResponse(response, op.getErrorCode(), op.getErrorType(), op.getErrorMessage());
    } else {
        // Extract the target id from the path and the requested attributes
        String targetId = request.getPathInfo();
        String attributes = request.getParameter("attributes");
        if (targetId != null && !targetId.isEmpty()) {
            // Strip the leading '/' from the path info
            targetId = targetId.substring(1);
        }
        if (targetId != null && !targetId.isEmpty()) {
            // Resource-level PUT: read the JSON body
            request.setCharacterEncoding("UTF-8");
            String body = IOUtils.toString(request.getReader());

            // Update the user; null result means the operation recorded an error
            LinkedHashMap<String, Object> resultObject = op.updateUserInfo(context, request, targetId,
                    attributes, body);
            if (resultObject != null) {
                // Serialize the result map to JSON
                ObjectMapper mapper = new ObjectMapper();
                StringWriter writer = new StringWriter();
                mapper.writeValue(writer, resultObject);

                // Build the Location header URL for the updated resource.
                // NOTE(review): omits the port only for 80/443 regardless of scheme - confirm intent.
                String location = request.getScheme() + "://" + request.getServerName();
                int serverPort = request.getServerPort();
                if (serverPort != 80 && serverPort != 443) {
                    location += ":" + Integer.toString(serverPort);
                }
                location += request.getContextPath();
                location += "/scim/Users/";
                if (resultObject.get("id") != null) {
                    location += resultObject.get("id").toString();
                }

                // 200 OK with the resource representation
                response.setStatus(HttpServletResponse.SC_OK);
                response.setContentType("application/scim+json;charset=UTF-8");
                response.setHeader("Location", location);
                PrintWriter out = response.getWriter();
                out.println(writer.toString());
            } else {
                // Update failed - relay the error recorded by the operation
                errorResponse(response, op.getErrorCode(), op.getErrorType(), op.getErrorMessage());
            }
        } else {
            // PUT without a resource id is not supported
            errorResponse(response, HttpServletResponse.SC_BAD_REQUEST, null,
                    MessageConstants.ERROR_NOT_SUPPORT_OPERATION);
        }
    }
}
From source file:gate.util.reporting.DocTimeReporter.java
/** * Computes the sub totals at each processing level. * * @param reportContainer/* w w w. j a v a 2s . c o m*/ * An Object of type LinkedHashMap<String, Object> containing the * processing elements (with time in milliseconds) in hierarchical * structure. * @return An Object containing the LinkedHashMap with the element values * totaled. */ @SuppressWarnings("unchecked") private LinkedHashMap<String, String> doTotal(LinkedHashMap<String, Object> reportContainer) { LinkedHashMap<String, Object> myHash = reportContainer; Iterator<String> i = myHash.keySet().iterator(); while (i.hasNext()) { String key = i.next(); if (myHash.get(key) instanceof LinkedHashMap) { docContainer = doTotal((LinkedHashMap<String, Object>) (myHash.get(key))); } else { if (docContainer.get(key) == null) { docContainer.put(key, (String) myHash.get(key)); } else { // Do total if value already exists int val = Integer.parseInt(docContainer.get(key)) + Integer.parseInt((String) myHash.get(key)); docContainer.put(key, Integer.toString(val)); } } } return docContainer; }
From source file:com.espertech.esper.epl.spec.PatternStreamSpecRaw.java
private static MatchEventSpec analyzeMatchEvent(EvalFactoryNode relativeNode) { LinkedHashMap<String, Pair<EventType, String>> taggedEventTypes = new LinkedHashMap<String, Pair<EventType, String>>(); LinkedHashMap<String, Pair<EventType, String>> arrayEventTypes = new LinkedHashMap<String, Pair<EventType, String>>(); // Determine all the filter nodes used in the pattern EvalNodeAnalysisResult evalNodeAnalysisResult = EvalNodeUtil.recursiveAnalyzeChildNodes(relativeNode); // collect all filters underneath for (EvalFilterFactoryNode filterNode : evalNodeAnalysisResult.getFilterNodes()) { String optionalTag = filterNode.getEventAsName(); if (optionalTag != null) { taggedEventTypes.put(optionalTag, new Pair<EventType, String>(filterNode.getFilterSpec().getFilterForEventType(), filterNode.getFilterSpec().getFilterForEventTypeName())); }/*from ww w . j av a 2s .c om*/ } // collect those filters under a repeat since they are arrays Set<String> arrayTags = new HashSet<String>(); for (EvalMatchUntilFactoryNode matchUntilNode : evalNodeAnalysisResult.getRepeatNodes()) { EvalNodeAnalysisResult matchUntilAnalysisResult = EvalNodeUtil .recursiveAnalyzeChildNodes(matchUntilNode.getChildNodes().get(0)); for (EvalFilterFactoryNode filterNode : matchUntilAnalysisResult.getFilterNodes()) { String optionalTag = filterNode.getEventAsName(); if (optionalTag != null) { arrayTags.add(optionalTag); } } } // for each array tag change collection for (String arrayTag : arrayTags) { if (taggedEventTypes.get(arrayTag) != null) { arrayEventTypes.put(arrayTag, taggedEventTypes.get(arrayTag)); taggedEventTypes.remove(arrayTag); } } return new MatchEventSpec(taggedEventTypes, arrayEventTypes); }
From source file:org.peerfact.impl.service.aggregation.skyeye.visualization.SkyNetVisualization.java
public void updateDisplayedMetrics(long time, LinkedHashMap<String, MetricsAggregate> simulatorMetrics, LinkedHashMap<String, MetricsAggregate> rootMetrics/* * , double * nodeCounter */) { if (activated && displayedMetrics.size() > 0) { Iterator<String> nameIter = simulatorMetrics.keySet().iterator(); String name = null;//from w ww . j ava2 s .c o m while (nameIter.hasNext()) { name = nameIter.next(); if (displayedMetrics.containsKey(name)) { DeviationSet[] values = null; if (rootMetrics.size() > 0) { DeviationSet[] temp = { new DeviationSet(simulatorMetrics.get(name).getAverage()), new DeviationSet(rootMetrics.get(name).getAverage(), rootMetrics.get(name).getStandardDeviation()), new DeviationSet(rootMetrics.get(name).getMinimum()), new DeviationSet(rootMetrics.get(name).getMaximum()) }; values = temp; } else { DeviationSet[] temp = { new DeviationSet(simulatorMetrics.get(name).getAverage()), new DeviationSet(0), new DeviationSet(0), new DeviationSet(0), }; values = temp; } String[] metricNames = { "Real " + name, "Measured " + name, "Min_Measured" + name, "Max_Measured" + name }; MetricsPlot temp = displayedMetrics.remove(name); temp.updatePlot(name, new DataSet(VisualizationType.Metric, time / 1000, values, metricNames)); displayedMetrics.put(name, temp); updatePlotInWindow(time, name); } } validate(); repaint(); } }
From source file:com.amalto.workbench.utils.XSDAnnotationsStructure.java
/**
 * Returns the workflow app-info entries, sorted by key.
 *
 * <p>Entries whose value is null or blank are skipped.
 *
 * @return a TreeMap of workflow key to value (never null, possibly empty)
 */
public TreeMap<String, String> getWorkflows() {
    TreeMap<String, String> targetSystems = new TreeMap<String, String>();
    LinkedHashMap<String, String> appInfos = getAppInfos(ICoreConstants.X_Workflow);
    // Enhanced-for with a single get() per key; the original iterated keySet() with an
    // explicit Iterator and looked each value up twice.
    for (String key : appInfos.keySet()) {
        String value = appInfos.get(key);
        if (value == null || value.trim().length() == 0) {
            // Skip blank entries.
            continue;
        }
        targetSystems.put(key, value);
    }
    return targetSystems;
}