List of usage examples for java.util SortedMap keySet
Set<K> keySet();
From source file:com.aurel.track.report.dashboard.StatusOverTimeGraph.java
/** * Computes the hierarchical data for new issues /*from w ww . j a v a2s. c o m*/ * @return */ public static SortedMap<Integer, SortedMap<Integer, Map<Integer, Integer>>> calculateNewWorkItems( List<TWorkItemBean> workItemBeans, Date dateFrom, Date dateTo, int selectedTimeInterval) { SortedMap<Integer, SortedMap<Integer, Map<Integer, Integer>>> yearToPeriodToProjectIDToWorkItemNumbersMap = new TreeMap<Integer, SortedMap<Integer, Map<Integer, Integer>>>(); if (workItemBeans == null || workItemBeans.isEmpty()) { LOGGER.debug("No workItems in datasource"); return yearToPeriodToProjectIDToWorkItemNumbersMap; } SortedMap<Integer, SortedMap<Integer, List<TWorkItemBean>>> periodNewWorkItems = getNewWorkItemsMap( workItemBeans, selectedTimeInterval, dateFrom, dateTo); List entityList = new ArrayList(); //for new WorkItems we have a single entity (a single graphic), hardcoded with Integer(0) //Integer hardCodedentityID = Integer.valueOf(0); entityList.add(ENTITY_PLACEHOLDER); Iterator<Integer> yearIterator = periodNewWorkItems.keySet().iterator(); while (yearIterator.hasNext()) { Integer year = (Integer) yearIterator.next(); SortedMap<Integer, List<TWorkItemBean>> intervalToStatusChangeBeans = periodNewWorkItems.get(year); Iterator<Integer> periodIterator = intervalToStatusChangeBeans.keySet().iterator(); while (periodIterator.hasNext()) { Integer period = (Integer) periodIterator.next(); List<TWorkItemBean> workItemBeansForInterval = intervalToStatusChangeBeans.get(period); if (workItemBeansForInterval != null) { Iterator<TWorkItemBean> workItemBeansIterator = workItemBeansForInterval.iterator(); while (workItemBeansIterator.hasNext()) { TWorkItemBean workItemBean = (TWorkItemBean) workItemBeansIterator.next(); if (workItemBean != null) { setCount(yearToPeriodToProjectIDToWorkItemNumbersMap, year, period, ENTITY_PLACEHOLDER, 1); } } } } } addZerosForEmptyIntervals(dateFrom, dateTo, selectedTimeInterval, yearToPeriodToProjectIDToWorkItemNumbersMap, entityList); 
//addTimeSeries(timeSeriesCollection, yearToPeriodToProjectIDToWorkItemNumbersMap, null, selectedTimeInterval, accumulated); return yearToPeriodToProjectIDToWorkItemNumbersMap; }
From source file:rrlFramework.RRLExperiment.java
/**
 * Compiles the per-run temporary performance files together into a single file,
 * detailing the average, min and max performances across all runs.
 *
 * For each run the raw performance keyframes are resampled at regular episode
 * intervals (PERFORMANCE_EPISODE_GAP) using linear interpolation, then the
 * per-interval statistics over all runs are written out.
 *
 * @param performanceFile the base performance file; per-run temp files are
 *                        TEMP_FOLDER/&lt;performanceFile&gt;&lt;i&gt;
 * @param runEnd          the number of runs (exclusive upper run index)
 * @param experimentStart experiment start timestamp
 *                        (NOTE(review): parameter is currently unused)
 * @return the average run time over all runs, or 0 if a temp file could not be read
 * @throws Exception on I/O failure
 */
private long combineTempFiles(File performanceFile, int runEnd, long experimentStart) throws Exception {
    // performances[run] = list of resampled performance keyframes for that run.
    List<List<Float[]>> performances = new ArrayList<List<Float[]>>();
    // Track the runs with the lowest/highest final mean performance.
    float min = Float.MAX_VALUE;
    int minRun = -1;
    float max = -Float.MAX_VALUE;
    int maxRun = -1;
    double[] episodeLengths = new double[runEnd];
    // NOTE(review): numSlots is written to the output below but never populated
    // in this method — its average will always be 0; confirm intent.
    double[] numSlots = new double[runEnd];
    long averageRunTime = 0;
    File combinedPerfFile = performanceFile;
    if (Config.getInstance().getGeneratorFile() != null) {
        // Greedy-generator experiments write to a separate "<file>greedy" output
        // and use a coarser sampling gap.
        combinedPerfFile = new File(performanceFile.getAbsolutePath() + "greedy");
        ProgramArgument.PERFORMANCE_EPISODE_GAP
                .setDoubleValue(ProgramArgument.PERFORMANCE_TESTING_SIZE.intValue()
                        * ProgramArgument.POLICY_REPEATS.intValue());
    }
    if (!combinedPerfFile.exists())
        combinedPerfFile.createNewFile();
    // For every performance file
    for (int i = 0; i < runEnd; i++) {
        File tempPerf = new File(Config.TEMP_FOLDER + "/" + performanceFile + i);
        if (!Performance.readRawPerformanceFile(tempPerf, true)) {
            System.err.println("Error reading performance file.");
            return 0;
        }
        List<Float[]> thisRunPerformances = new ArrayList<Float[]>();
        performances.add(thisRunPerformances);
        // Run through the performances and place them in the matrix
        SortedMap<Integer, Float[]> runPerformances = Performance.getPerformanceArray();
        averageRunTime += Performance.getRunTime();
        // Walk the recorded keyframes (sorted by episode number) in parallel with
        // the regular sampling grid.
        Iterator<Integer> iter = runPerformances.keySet().iterator();
        Integer current = iter.next();
        Integer previous = null;
        int currentKeyframeEpisode = ProgramArgument.PERFORMANCE_EPISODE_GAP.intValue();
        // Run through the performances, using linear interpolation to
        // get estimates of the performance at a given interval.
        do {
            // If the current segment is further along than the current
            // value, advance to the next value.
            while (currentKeyframeEpisode > current) {
                previous = current;
                if (iter.hasNext())
                    current = iter.next();
                else
                    break;
            }
            // If the keyframe isn't up to the first episode, just use
            // the current value
            Float[] episodePerformance = runPerformances.get(current);
            if (previous == null) {
                // Add to the previous value.
                thisRunPerformances.add(episodePerformance);
            } else {
                // Interpolate from the previous value to the current one.
                Float[] interpolatedPerformance = new Float[episodePerformance.length];
                // NOTE(review): this is an Integer reference comparison; it only
                // works because 'previous' is always assigned from 'current' above,
                // making them the same object when equal. equals() would be safer.
                if (previous == current) {
                    interpolatedPerformance = episodePerformance;
                } else {
                    Float[] prevPerformance = runPerformances.get(previous);
                    for (int j = 0; j < episodePerformance.length; j++) {
                        Float currPerf = episodePerformance[j];
                        Float prevPerf = prevPerformance[j];
                        // Adjust for null elites: fall back to the MEAN column so the
                        // interpolation still has two endpoints.
                        if (j == PerformanceDetails.ELITEMAX.ordinal()
                                || j == PerformanceDetails.ELITEMEAN.ordinal()) {
                            if (currPerf == null)
                                currPerf = episodePerformance[PerformanceDetails.MEAN.ordinal()];
                            if (prevPerf == null)
                                prevPerf = prevPerformance[PerformanceDetails.MEAN.ordinal()];
                        }
                        if (currPerf == null || prevPerf == null)
                            interpolatedPerformance[j] = null;
                        else
                            // Standard linear interpolation between the two keyframes
                            // at the sampling episode.
                            interpolatedPerformance[j] = (currPerf - prevPerf)
                                    * (1f * (currentKeyframeEpisode - previous) / (current - previous))
                                    + prevPerf;
                    }
                }
                // Add to the performances
                thisRunPerformances.add(interpolatedPerformance);
            }
            // To the next increment
            currentKeyframeEpisode += ProgramArgument.PERFORMANCE_EPISODE_GAP.intValue();
        } while (currentKeyframeEpisode <= runPerformances.lastKey());
        // Always include the final recorded keyframe.
        Float[] lastPerf = runPerformances.get(runPerformances.lastKey());
        thisRunPerformances.add(lastPerf);
        System.out.println(runPerformances.get(runPerformances.lastKey())[PerformanceDetails.MEAN.ordinal()]);
        // Find min or max runs
        float runVal = runPerformances.get(runPerformances.lastKey())[PerformanceDetails.MEAN.ordinal()];
        if (runVal < min) {
            min = runVal;
            minRun = i;
        }
        if (runVal > max) {
            max = runVal;
            maxRun = i;
        }
        episodeLengths[i] = runPerformances.lastKey();
    }
    // Calculate the average and print out the stats
    // NOTE(review): writer/buf are not in try-with-resources; an exception while
    // writing leaks the file handle. Consider try (FileWriter ...; BufferedWriter ...).
    FileWriter writer = new FileWriter(combinedPerfFile);
    BufferedWriter buf = new BufferedWriter(writer);
    Config.writeFileHeader(buf, Config.getInstance().getGoal());
    buf.write(
            "Episode\tAverage\tSD\tMin\tMax\tElite-Average\tElite-SD\tNumSlots\tSlots-SD\tNumRules\tRules-SD\n");
    boolean moreEpisodes = true;
    int index = 0;
    Mean mean = new Mean();
    StandardDeviation sd = new StandardDeviation();
    while (moreEpisodes) {
        moreEpisodes = false;
        // Compile the array of performances for the given index:
        // performanceArray[detail][run]. Runs shorter than 'index' repeat their
        // final value (clamped by thisIndex).
        double[][] performanceArray = new double[PerformanceDetails.values().length][performances.size()];
        double maxVal = 0;
        double minVal = 0;
        for (int run = 0; run < performances.size(); run++) {
            List<Float[]> runPerformanceList = performances.get(run);
            int thisIndex = Math.min(index, runPerformanceList.size() - 1);
            if (index < runPerformanceList.size() - 1)
                moreEpisodes = true;
            Float[] performanceDetails = runPerformanceList.get(thisIndex);
            for (int j = 0; j < performanceDetails.length; j++) {
                if (performanceDetails[j] != null)
                    performanceArray[j][run] = performanceDetails[j];
            }
            // Max and min
            if (run == minRun)
                minVal = performanceArray[PerformanceDetails.MEAN.ordinal()][run];
            if (run == maxRun)
                maxVal = performanceArray[PerformanceDetails.MEAN.ordinal()][run];
        }
        // Find the statistics
        int episodeNum = (index + 1) * ProgramArgument.PERFORMANCE_EPISODE_GAP.intValue();
        buf.write(episodeNum + "\t" + mean.evaluate(performanceArray[PerformanceDetails.MEAN.ordinal()]) + "\t"
                + sd.evaluate(performanceArray[PerformanceDetails.MEAN.ordinal()]) + "\t" + minVal + "\t"
                + maxVal + "\t" + mean.evaluate(performanceArray[PerformanceDetails.ELITEMEAN.ordinal()]) + "\t"
                + sd.evaluate(performanceArray[PerformanceDetails.ELITEMEAN.ordinal()]) + "\t"
                + mean.evaluate(performanceArray[PerformanceDetails.NUMSLOTS.ordinal()]) + "\t"
                + sd.evaluate(performanceArray[PerformanceDetails.NUMSLOTS.ordinal()]) + "\t"
                + mean.evaluate(performanceArray[PerformanceDetails.NUMRULES.ordinal()]) + "\t"
                + sd.evaluate(performanceArray[PerformanceDetails.NUMRULES.ordinal()]) + "\n");
        index++;
    }
    averageRunTime /= runEnd;
    buf.write("Average Run Time: " + toTimeFormat(averageRunTime) + "\n");
    // Write the average episode length
    buf.write("\nAverage episode length: " + mean.evaluate(episodeLengths) + " +- " + sd.evaluate(episodeLengths)
            + "\n");
    buf.write("\nAverage num slots: " + mean.evaluate(numSlots) + " +- " + sd.evaluate(numSlots) + "\n");
    buf.close();
    writer.close();
    return averageRunTime;
}
From source file:org.alfresco.extension.bulkimport.source.fs.DirectoryAnalyser.java
private final NavigableSet<FilesystemBulkImportItemVersion> constructImportItemVersions( final SortedMap<BigDecimal, Pair<File, File>> itemVersions) throws InterruptedException { // PRECONDITIONS if (itemVersions == null) throw new IllegalArgumentException("itemVersions cannot be null."); if (itemVersions.size() <= 0) throw new IllegalArgumentException("itemVersions cannot be empty."); // Body//from w w w. j ava2 s . c om final NavigableSet<FilesystemBulkImportItemVersion> result = new TreeSet<>(); for (final BigDecimal versionNumber : itemVersions.keySet()) { if (importStatus.isStopping() || Thread.currentThread().isInterrupted()) throw new InterruptedException( Thread.currentThread().getName() + " was interrupted. Terminating early."); final Pair<File, File> contentAndMetadataFiles = itemVersions.get(versionNumber); final FilesystemBulkImportItemVersion version = new FilesystemBulkImportItemVersion(serviceRegistry, configuredContentStore, metadataLoader, versionNumber, contentAndMetadataFiles.getFirst(), contentAndMetadataFiles.getSecond()); result.add(version); } return (result); }
From source file:playground.sergioo.facilitiesGenerator2012.WorkFacilitiesGeneration.java
/**
 * Builds the FittingCapacities input object (weights, quantities, proportions and
 * per-area maxima) together with the work-activity clusters it is based on.
 *
 * Loads transit stops and building types from the auxiliary database, clusters the
 * work activities, and fills the matrices indexed by (cluster, stop), (area, stop)
 * and (area, cluster).
 *
 * @param mPAreas     master-plan areas, sorted by facility id (iteration order is
 *                    relied on for the matrix row indices)
 * @param dataMPAreas additional data per master-plan area (type, max area, mode share)
 * @return the FittingCapacities object plus the clusters used to build it
 */
private static Tuple<FittingCapacities, List<CentroidCluster<PointPerson>>> getFittingCapacitiesObject(
        SortedMap<Id<ActivityFacility>, ActivityFacility> mPAreas,
        Map<Id<ActivityFacility>, MPAreaData> dataMPAreas) throws BadStopException, IOException,
        InstantiationException, IllegalAccessException, ClassNotFoundException, SQLException,
        NoConnectionException {
    System.out.println("Process starts with " + SIZE + " clusters and " + NUM_ITERATIONS + " iterations.");
    List<CentroidCluster<PointPerson>> clusters = clusterWorkActivities(getWorkActivityTimes());
    System.out.println("Clustering done!");
    // Load the transit stops keyed by stop id; column 3 is x, column 2 is y
    // (NOTE(review): assumed from the CoordImpl(lon, lat)-style call — confirm schema).
    DataBaseAdmin dataBaseAux = new DataBaseAdmin(new File("./data/facilities/DataBaseAuxiliar.properties"));
    SortedMap<String, Coord> stopsBase = new TreeMap<String, Coord>();
    ResultSet stopsResult = dataBaseAux.executeQuery("SELECT * FROM stops");
    while (stopsResult.next())
        stopsBase.put(stopsResult.getString(1),
                new CoordImpl(stopsResult.getDouble(3), stopsResult.getDouble(2)));
    stopsResult.close();
    System.out.println("Stops done!");
    Map<Id<TransitStopFacility>, Double> stops = new HashMap<Id<TransitStopFacility>, Double>();
    Map<String, Double> quantitiesMap = calculateStopClustersQuantities(clusters, stops);
    // quantities[c][s] = quantity for cluster c at stop s; missing entries become 0.
    // The inner stop iterator is re-created per row so columns always follow the
    // sorted stop order.
    MatrixND<Double> quantities = new Matrix2DImpl(new int[] { clusters.size(), stopsBase.size() });
    for (int c = 0; c < quantities.getDimension(0); c++) {
        Iterator<String> stopsI = stopsBase.keySet().iterator();
        for (int s = 0; s < quantities.getDimension(1); s++) {
            Double quantity = quantitiesMap.get(stopsI.next() + SEPARATOR + c);
            if (quantity == null)
                quantity = 0.0;
            quantities.setElement(new int[] { c, s }, quantity);
        }
    }
    System.out.println("Quantities done!");
    // Worker area (sqm per worker) by building type.
    Map<String, Double> workerAreas = new HashMap<String, Double>();
    ResultSet typesResult = dataBaseAux.executeQuery("SELECT * FROM building_types");
    while (typesResult.next())
        workerAreas.put(typesResult.getString(1), typesResult.getDouble(2));
    typesResult.close();
    Tuple<Map<Tuple<Id<TransitStopFacility>, Id<ActivityFacility>>, Tuple<Boolean, Double>>, Network> weightsMap =
            calculateAreaStopWeights(stopsBase, stops, workerAreas, mPAreas, dataMPAreas);
    // Show the weights window and busy-wait until the user closes it.
    // NOTE(review): polling loop swallows InterruptedException without
    // re-interrupting the thread.
    WeigthsNetworkWindow weigthsNetworkWindow = new WeigthsNetworkWindow("Weights",
            new NetworkPainter(weightsMap.getSecond()), weightsMap.getFirst(), mPAreas, stopsBase);
    weigthsNetworkWindow.setVisible(true);
    while (!weigthsNetworkWindow.isReadyToExit())
        try {
            Thread.sleep(100);
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    System.out.println("Facilities done!");
    // weights[f][s] = (stop s, facility f) weight; missing entries become 0.
    Matrix2DImpl weights = new Matrix2DImpl(new int[] { mPAreas.size(), stopsBase.size() });
    Iterator<Id<ActivityFacility>> mPAreaI = mPAreas.keySet().iterator();
    for (int f = 0; f < weights.getDimension(0); f++) {
        Id<ActivityFacility> facilityId = mPAreaI.next();
        Iterator<String> stopsI = stopsBase.keySet().iterator();
        for (int s = 0; s < weights.getDimension(1); s++) {
            Double weight = weightsMap.getFirst()
                    .get(new Tuple<Id<TransitStopFacility>, Id<ActivityFacility>>(
                            Id.create(stopsI.next(), TransitStopFacility.class), facilityId))
                    .getSecond();
            if (weight == null)
                weight = 0.0;
            weights.setElement(f, s, weight);
        }
    }
    System.out.println("Weights done!");
    // proportions[f][c] = share of cluster c for facility f's building type.
    Matrix2DImpl proportions = new Matrix2DImpl(new int[] { mPAreas.size(), clusters.size() });
    Map<String, List<Double>> proportionsMap = calculateTypeBuildingOptionWeights(clusters);
    mPAreaI = mPAreas.keySet().iterator();
    for (int f = 0; f < proportions.getDimension(0); f++) {
        Id<ActivityFacility> facilityId = mPAreaI.next();
        for (int c = 0; c < proportions.getDimension(1); c++)
            proportions.setElement(f, c, proportionsMap.get(dataMPAreas.get(facilityId).getType()).get(c));
    }
    System.out.println("Proportions done!");
    // maxs[f] = capacity ceiling per area: max floor area / area-per-worker,
    // scaled by the mode share; default 60.0 before being overwritten.
    MatrixND<Double> maxs = new Matrix1DImpl(new int[] { mPAreas.size() }, 60.0);
    dataBaseAux.close();
    mPAreaI = mPAreas.keySet().iterator();
    for (int f = 0; f < maxs.getDimension(0); f++) {
        MPAreaData dataMPArea = dataMPAreas.get(mPAreaI.next());
        double max = (dataMPArea.getMaxArea() / workerAreas.get(dataMPArea.getType()))
                * dataMPArea.getModeShare();
        maxs.setElement(new int[] { f }, max);
    }
    System.out.println("Max areas done!");
    return new Tuple<FittingCapacities, List<CentroidCluster<PointPerson>>>(
            new FittingCapacities(new int[] { mPAreas.size(), clusters.size(), stopsBase.size() }, weights,
                    quantities, proportions, maxs),
            clusters);
}
From source file:com.aurel.track.report.dashboard.StatusOverTimeGraph.java
/**
 * Serializes the time-series counts into a JSON array string, one object per
 * (year, period) with a "date" field plus one field per entity/status label.
 *
 * SortedMap at first and second level (year and period), because the accumulated
 * values must be computed in chronological order.
 *
 * @param yearToPeriodToEntityIDToWorkItemNumbersMap year -> period -> entityID -> count
 * @param entityMap      entityID -> ILabelBean used to resolve status labels; when
 *                       null, everything is reported under the "opened" label
 * @param selectedTimeInterval the time bucketing used to format the "date" field
 * @param accumulated    when true, counts are summed up over time per entity
 * @param stateNameOrder the status labels in the order they should appear in each object
 * @return a JSON array string, or "[{}]" when there is no data
 */
private static String generateAxesValues(
        SortedMap<Integer, SortedMap<Integer, Map<Integer, Integer>>> yearToPeriodToEntityIDToWorkItemNumbersMap,
        Map entityMap, int selectedTimeInterval, boolean accumulated, Set<String> stateNameOrder) {
    // timeSeriesMap is used only as a "seen this entity before" marker (null check
    // below); the stored string values are never read back meaningfully.
    Map timeSeriesMap = new HashMap();
    // accumulatedMap carries the running per-entity total for accumulated mode.
    Map accumulatedMap = new HashMap();
    StringBuilder sb = new StringBuilder();
    sb.append("[");
    Map<String, String> valuesMap;
    Iterator yearIterator = yearToPeriodToEntityIDToWorkItemNumbersMap.keySet().iterator();
    while (yearIterator.hasNext()) {
        Integer year = (Integer) yearIterator.next();
        SortedMap<Integer, Map<Integer, Integer>> intervalToStatusChangeBeans =
                yearToPeriodToEntityIDToWorkItemNumbersMap.get(year);
        Iterator periodIterator = intervalToStatusChangeBeans.keySet().iterator();
        while (periodIterator.hasNext()) {
            Integer period = (Integer) periodIterator.next();
            Map<Integer, Integer> entityIDToWorkItemNumbersMap = intervalToStatusChangeBeans.get(period);
            if (entityIDToWorkItemNumbersMap != null) {
                Iterator entityIDIterator = entityIDToWorkItemNumbersMap.keySet().iterator();
                // Open the JSON object for this (year, period) and emit its date.
                sb.append("{");
                valuesMap = new HashMap<String, String>();
                JSONUtility.appendStringValue(sb, "date",
                        createRegularTimePeriodForJSON(period.intValue(), year.intValue(), selectedTimeInterval));
                while (entityIDIterator.hasNext()) {
                    Integer entityID = (Integer) entityIDIterator.next();
                    Integer numberOfStates = entityIDToWorkItemNumbersMap.get(entityID);
                    if (numberOfStates != null) {
                        String sb1 = (String) timeSeriesMap.get(entityID);
                        Integer accumuletedValueForEntity = (Integer) accumulatedMap.get(entityID);
                        // NOTE(review): 'label' is computed here but never used afterwards.
                        String label = "";
                        if (sb1 == null) {
                            // First time we see this entity: register it and start its
                            // accumulator at 0.
                            ILabelBean iLabelBean = null;
                            if (entityMap != null) {
                                iLabelBean = (ILabelBean) entityMap.get(entityID);
                            }
                            if (iLabelBean != null) {
                                label = iLabelBean.getLabel();
                            }
                            String axeValue = new String();
                            timeSeriesMap.put(entityID, axeValue);
                            accumulatedMap.put(entityID, Integer.valueOf(0));
                            accumuletedValueForEntity = (Integer) accumulatedMap.get(entityID);
                        }
                        Integer timeSeriesValue;
                        if (accumulated) {
                            // Running total per entity, relying on the sorted year/period order.
                            accumulatedMap.put(entityID, Integer
                                    .valueOf(accumuletedValueForEntity.intValue() + numberOfStates.intValue()));
                            timeSeriesValue = (Integer) accumulatedMap.get(entityID);
                        } else {
                            timeSeriesValue = numberOfStates;
                        }
                        // Resolve the status label for this entity; entities without a
                        // label fall back to "opened".
                        String status = "";
                        ILabelBean iLabelBeanJSON = null;
                        if (entityMap != null) {
                            iLabelBeanJSON = (ILabelBean) entityMap.get(entityID);
                        }
                        if (iLabelBeanJSON != null) {
                            status = iLabelBeanJSON.getLabel();
                        }
                        if (status.equals("")) {
                            status = "opened";
                        }
                        // JSONUtility.appendStringValue(sb, status, timeSeriesValue.toString(), true);
                        // sb.append(",");
                        valuesMap.put(status, timeSeriesValue.toString());
                        timeSeriesMap.put(entityID, sb.toString());
                    }
                }
                // Emit the collected values in the requested state order so every
                // object has a consistent field ordering.
                if (entityMap != null) {
                    for (String stateName : stateNameOrder) {
                        if (valuesMap.get(stateName) != null) {
                            JSONUtility.appendStringValue(sb, stateName, valuesMap.get(stateName), true);
                            sb.append(",");
                        }
                    }
                } else {
                    if (valuesMap.get("opened") != null) {
                        JSONUtility.appendStringValue(sb, "opened", valuesMap.get("opened"), true);
                        sb.append(",");
                    }
                }
                // Drop the trailing comma left by the last appended field.
                sb.deleteCharAt(sb.length() - 1);
                sb.append("},");
            }
        }
    }
    // Drop the trailing comma after the last object (or the "[" when nothing was
    // emitted, in which case the "[{}]" fallback below applies).
    sb.deleteCharAt(sb.length() - 1);
    if (sb.length() != 0) {
        sb.append("]");
    } else {
        sb.append("[{}]");
    }
    return sb.toString();
}
From source file:org.mule.devkit.doclet.Doclava.java
public static Data makePackageHDF() { Data data = makeHDF();/*from w w w. ja v a 2s . c om*/ ClassInfo[] classes = Converter.rootClasses(); SortedMap<String, PackageInfo> sorted = new TreeMap<String, PackageInfo>(); for (ClassInfo cl : classes) { PackageInfo pkg = cl.containingPackage(); String name; if (pkg == null) { name = ""; } else { name = pkg.name(); } sorted.put(name, pkg); } int i = 0; for (String s : sorted.keySet()) { PackageInfo pkg = sorted.get(s); if (pkg.isHidden()) { continue; } Boolean allHidden = true; int pass = 0; ClassInfo[] classesToCheck = null; while (pass < 5) { switch (pass) { case 0: classesToCheck = pkg.ordinaryClasses(); break; case 1: classesToCheck = pkg.enums(); break; case 2: classesToCheck = pkg.errors(); break; case 3: classesToCheck = pkg.exceptions(); break; case 4: classesToCheck = pkg.getInterfaces(); break; default: System.err.println("Error reading package: " + pkg.name()); break; } for (ClassInfo cl : classesToCheck) { if (!cl.isHidden()) { allHidden = false; break; } } if (!allHidden) { break; } pass++; } if (allHidden) { continue; } data.setValue("reference", "1"); data.setValue("reference.apilevels", sinceTagger.hasVersions() ? "1" : "0"); data.setValue("docs.packages." + i + ".name", s); data.setValue("docs.packages." + i + ".link", pkg.htmlPage()); data.setValue("docs.packages." + i + ".since.key", SinceTagger.keyForName(pkg.getSince())); data.setValue("docs.packages." + i + ".since.name", pkg.getSince()); TagInfo.makeHDF(data, "docs.packages." + i + ".shortDescr", pkg.firstSentenceTags()); i++; } sinceTagger.writeVersionNames(data); return data; }
From source file:com.aurel.track.fieldType.bulkSetters.CompositeSelectBulkSetter.java
/**
 * Builds the JSON configuration for the js control(s) of a composite
 * (multi-part) select list: one JSON object per composite part, with the
 * control name/itemId, the part's datasource and the selected value.
 *
 * (Javadoc corrected: the previous comment documented parameters — labelMap,
 * disabled, locale — that do not exist on this method.)
 *
 * @param baseName   the base name of the control (important by submit); each
 *                   part gets a merged key appended
 * @param baseItemID the base itemId of the control; each part gets a merged key
 * @param value      the current value to render: Map of listID to
 *                   SortedMap of parameterCode to Integer[] (selected option IDs)
 * @param dataSource the datasource: Map of listID to SortedMap of parameterCode
 *                   to List of ILabelBean
 * @param listID     the composite list whose parts are rendered
 * @return a JSON array string with one object per composite part (empty string
 *         when the field type or datasource is missing)
 */
public String getJsonValuesForList(String baseName, String baseItemID, Object value, Object dataSource,
        Integer listID) {
    Integer fieldID = getFieldID();
    StringBuilder stringBuilder = new StringBuilder();
    CustomCompositeBaseRT compositeBaseRT = (CustomCompositeBaseRT) FieldTypeManager.getFieldTypeRT(fieldID);
    if (compositeBaseRT != null) {
        Map<Integer, SortedMap<Integer, List<ILabelBean>>> dataSourceMap =
                (Map<Integer, SortedMap<Integer, List<ILabelBean>>>) dataSource;
        if (dataSourceMap != null) {
            Map<Integer, SortedMap<Integer, Integer[]>> valueMap =
                    (Map<Integer, SortedMap<Integer, Integer[]>>) value;
            stringBuilder.append("[");
            SortedMap<Integer, List<ILabelBean>> compositeListDataSource = dataSourceMap.get(listID);
            if (compositeListDataSource != null) {
                SortedMap<Integer, Integer[]> compositeListValue = null;
                if (valueMap != null) {
                    compositeListValue = valueMap.get(listID);
                }
                // Iterate entries (not keySet()+get()) so each part costs a single
                // lookup; the explicit iterator is kept to decide on the trailing comma.
                for (Iterator<Map.Entry<Integer, List<ILabelBean>>> itrPart =
                        compositeListDataSource.entrySet().iterator(); itrPart.hasNext();) {
                    Map.Entry<Integer, List<ILabelBean>> partEntry = itrPart.next();
                    Integer parameterCode = partEntry.getKey();
                    List<ILabelBean> partDataSource = partEntry.getValue();
                    stringBuilder.append("{");
                    JSONUtility.appendStringValue(stringBuilder, JSONUtility.JSON_FIELDS.NAME,
                            getNameWithMergedKey(baseName, fieldID, listID, parameterCode));
                    JSONUtility.appendStringValue(stringBuilder, JSONUtility.JSON_FIELDS.ITEMID,
                            getItemIdWithMergedKey(baseItemID, fieldID, listID, parameterCode));
                    JSONUtility.appendILabelBeanList(stringBuilder, JSONUtility.JSON_FIELDS.DATA_SOURCE,
                            partDataSource);
                    // Selected value: the first saved value for this part, falling back
                    // to the first datasource entry when nothing was saved.
                    Integer listValue = null;
                    if (compositeListValue != null) {
                        Integer[] listValues = compositeListValue.get(parameterCode);
                        if (listValues != null && listValues.length > 0) {
                            listValue = listValues[0];
                        }
                    }
                    if (listValue == null) {
                        if (partDataSource != null && !partDataSource.isEmpty()) {
                            listValue = partDataSource.get(0).getObjectID();
                        }
                    }
                    if (listValue != null) {
                        JSONUtility.appendIntegerValue(stringBuilder, JSONUtility.JSON_FIELDS.VALUE, listValue,
                                true);
                    }
                    stringBuilder.append("}");
                    if (itrPart.hasNext()) {
                        stringBuilder.append(",");
                    }
                }
            }
            stringBuilder.append("]");
        }
    }
    return stringBuilder.toString();
}
From source file:org.kuali.kra.budget.external.budget.impl.BudgetAdjustmentClientBase.java
/** * This method sets the personnel salary accounting line. * @param accountingLines /*w w w . j a va2 s.c om*/ * @return * @throws Exception */ protected boolean setPersonnelSalaryAccountingLines(AwardBudgetDocument awardBudgetDocument, Map<String, ScaleTwoDecimal> accountingLines) throws Exception { Budget currentBudget = awardBudgetDocument.getBudget(); AwardBudgetExt previousBudget = getPrevBudget(awardBudgetDocument); boolean complete = true; SortedMap<String, ScaleTwoDecimal> netCost = getBudgetAdjustmentServiceHelper() .getPersonnelSalaryCost(currentBudget, previousBudget); for (String name : netCost.keySet()) { String financialObjectCode = getFinancialObjectCode(name); if (ObjectUtils.isNull(financialObjectCode)) { complete &= false; } else { if (!accountingLines.containsKey(financialObjectCode)) { accountingLines.put(financialObjectCode, netCost.get(name)); } else { accountingLines.put(financialObjectCode, accountingLines.get(financialObjectCode).add(netCost.get(name))); } } } return complete; }
From source file:viper.api.time.TimeEncodedList.java
/** * {@inheritDoc}/* ww w . j av a 2 s .com*/ */ public Comparable firstAfter(Comparable c) { SortedMap tail = values.tailMap(c); if (!tail.isEmpty()) { Iterator iter = tail.keySet().iterator(); Comparable a = (Comparable) iter.next(); if (a.compareTo(c) == 0) { if (iter.hasNext()) { a = (Comparable) iter.next(); } else { return null; } } return c; } return null; }
From source file:com.aurel.track.fieldType.runtime.matchers.converter.CompositSelectMatcherConverter.java
/** * Convert the object value to xml string for save * @param value/*www . ja va 2s. com*/ * @param matcherRelation * @return */ @Override public String toXMLString(Object value, Integer matcherRelation) { if (value == null || matcherRelation == null) { return null; } switch (matcherRelation.intValue()) { case MatchRelations.EQUAL: case MatchRelations.NOT_EQUAL: case MatchRelations.PARTIAL_MATCH: case MatchRelations.PARTIAL_NOTMATCH: SortedMap<Integer, Integer[]> actualValuesMap = null; try { actualValuesMap = (SortedMap<Integer, Integer[]>) value; } catch (Exception e) { LOGGER.warn("Converting the " + value + " to SortedMap<Integer, Integer[]> for display string failed with " + e.getMessage()); LOGGER.debug(ExceptionUtils.getStackTrace(e)); } if (actualValuesMap != null) { StringBuffer stringBuffer = new StringBuffer(); Iterator<Integer> iterator = actualValuesMap.keySet().iterator(); while (iterator.hasNext()) { Integer partNo = iterator.next(); Integer[] partValueArr = null; try { partValueArr = actualValuesMap.get(partNo); } catch (Exception e) { LOGGER.warn("Converting the part " + partNo + " to Integer[] for XML string string failed with " + e.getMessage()); LOGGER.debug(ExceptionUtils.getStackTrace(e)); } String partValue = ""; if (partValueArr != null && partValueArr.length > 0) { //partValue is probably an integer array //if there is a possibility that the composite contains also other //datatypes for example date which should be formatted then //we would need to extend the API with further method parameters partValue = partValueArr[0].toString(); } stringBuffer.append(partValue); if (iterator.hasNext()) { stringBuffer.append(PART_SPLITTER_STRING); } } return stringBuffer.toString().trim(); } } return null; }