List of usage examples for java.util.SortedMap.get
V get(Object key);
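Before the project examples below, a minimal, self-contained sketch of the contract: get returns the value mapped to an equal key (equality as defined by the map's ordering), or null if there is no mapping. The class and data here are illustrative, not taken from any project below.

import java.util.SortedMap;
import java.util.TreeMap;

public class SortedMapGetDemo {
    public static void main(String[] args) {
        // TreeMap is the standard SortedMap implementation; with no
        // comparator it orders keys by their natural ordering.
        SortedMap<String, Integer> wordCounts = new TreeMap<String, Integer>();
        wordCounts.put("apple", 3);
        wordCounts.put("banana", 5);

        System.out.println(wordCounts.get("apple"));  // 3
        System.out.println(wordCounts.get("cherry")); // null: no mapping

        // Note: if the map uses a custom Comparator, get() locates keys
        // with that comparator, not with equals().
    }
}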
From source file:lisong_mechlab.view.graphs.DamageGraph.java
private TableXYDataset getSeries() {
    final Collection<Modifier> modifiers = loadout.getModifiers();
    // Order weapons by descending maximum range; break ties with natural order.
    SortedMap<Weapon, List<Pair<Double, Double>>> data = new TreeMap<Weapon, List<Pair<Double, Double>>>(
            new Comparator<Weapon>() {
                @Override
                public int compare(Weapon aO1, Weapon aO2) {
                    int comp = Double.compare(aO2.getRangeMax(modifiers), aO1.getRangeMax(modifiers));
                    if (comp == 0)
                        return aO1.compareTo(aO2);
                    return comp;
                }
            });

    Double[] ranges = WeaponRanges.getRanges(loadout);
    for (double range : ranges) {
        Set<Entry<Weapon, Double>> damageDistribution = maxSustainedDPS.getWeaponRatios(range).entrySet();
        for (Map.Entry<Weapon, Double> entry : damageDistribution) {
            final Weapon weapon = entry.getKey();
            final double ratio = entry.getValue();
            final double dps = weapon.getStat("d/s", modifiers);
            final double rangeEff = weapon.getRangeEffectivity(range, modifiers);

            if (!data.containsKey(weapon)) {
                data.put(weapon, new ArrayList<Pair<Double, Double>>());
            }
            data.get(weapon).add(new Pair<Double, Double>(range, dps * ratio * rangeEff));
        }
    }

    DefaultTableXYDataset dataset = new DefaultTableXYDataset();
    for (Map.Entry<Weapon, List<Pair<Double, Double>>> entry : data.entrySet()) {
        XYSeries series = new XYSeries(entry.getKey().getName(), true, false);
        for (Pair<Double, Double> pair : entry.getValue()) {
            series.add(pair.first, pair.second);
        }
        dataset.addSeries(series);
    }
    return dataset;
}
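The core SortedMap pattern here is get-or-create grouping under a custom ordering: guard with containsKey, put an empty list, then get is guaranteed non-null. A distilled, self-contained sketch, with the Weapon domain replaced by plain strings (all names illustrative):

import java.util.*;

public class GroupingDemo {
    public static void main(String[] args) {
        // Keys ordered by descending length, ties broken by natural order,
        // mirroring the descending-range comparator above.
        SortedMap<String, List<Integer>> groups = new TreeMap<String, List<Integer>>(
                new Comparator<String>() {
                    public int compare(String a, String b) {
                        int comp = Integer.compare(b.length(), a.length());
                        return comp == 0 ? a.compareTo(b) : comp;
                    }
                });

        for (String word : new String[] { "laser", "ac20", "laser", "srm" }) {
            if (!groups.containsKey(word)) {     // get-or-create guard
                groups.put(word, new ArrayList<Integer>());
            }
            groups.get(word).add(word.length()); // get() after the guard is non-null
        }
        System.out.println(groups); // {laser=[5, 5], ac20=[4], srm=[3]}
    }
}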
From source file:com.aurel.track.fieldType.runtime.custom.select.CustomSelectSimpleRT.java
/**
 * Loads the datasource for the mass operation.
 * Used mainly by select fields to get all the possible options for a field
 * (system or custom select). It also sets a value if not yet selected.
 * The value can be a List for simple selects or a Map of lists for composite selects.
 * @param massOperationContext
 * @param massOperationValue
 * @param parameterCode
 * @param personBean
 * @param locale
 */
@Override
public void loadBulkOperationDataSource(MassOperationContext massOperationContext,
        MassOperationValue massOperationValue, Integer parameterCode, TPersonBean personBean, Locale locale) {
    //TODO filter by configOptionRoleDAO, combine the entries for each project
    //dataSource = configOptionRoleDAO.loadOptionsByConfigForRoles(configID, person, project);
    //if not restricted load all options for the list
    //if (dataSource == null) {
    //no role restrictions, load all options
    Integer fieldID = massOperationValue.getFieldID();
    Map<Integer, Map<Integer, String>> fieldIDToListIDToLabels = massOperationContext
            .getFieldIDToListIDToLabels();
    if (fieldIDToListIDToLabels != null) {
        Map<Integer, String> listIDToLabelMap = fieldIDToListIDToLabels.get(fieldID);
        if (listIDToLabelMap != null) {
            //initialize the value, datasource and label maps
            Map<Integer, Object> dataSourceMap = new HashMap<Integer, Object>();
            massOperationValue.setPossibleValues(dataSourceMap);
            Map<Integer, Object> valueMap = (Map<Integer, Object>) massOperationValue.getValue();
            if (valueMap == null) {
                //no submitted value (at first rendering it is null anyway)
                valueMap = new HashMap<Integer, Object>();
                massOperationValue.setValue(valueMap);
            }
            Map<Integer, String> valueLabelMap = new HashMap<Integer, String>();
            massOperationValue.setValueLabelMap(valueLabelMap);
            for (Integer listID : listIDToLabelMap.keySet()) {
                valueLabelMap.put(listID, listIDToLabelMap.get(listID));
                List<IBeanID> optionList = LocalizeUtil
                        .localizeDropDownList(OptionBL.loadDataSourceByList(listID), locale);
                Object actualValue = valueMap.get(listID);
                if (parameterCode == null) {
                    //not a composite select
                    dataSourceMap.put(listID, optionList);
                    //for multiple selects there is no need to preselect a value
                    Integer[] actualIntArrValue = (Integer[]) actualValue;
                    valueMap.put(listID, getBulkSelectValue(massOperationContext, fieldID, parameterCode,
                            actualIntArrValue, optionList));
                } else {
                    //first part of a composite select
                    Map<Integer, List<IBeanID>> listDatasourceMap = new TreeMap<Integer, List<IBeanID>>();
                    dataSourceMap.put(listID, listDatasourceMap);
                    listDatasourceMap.put(parameterCode, optionList);
                    SortedMap<Integer, Integer[]> actualValueMap = (SortedMap<Integer, Integer[]>) valueMap
                            .get(listID);
                    Integer[] actualIntArrValue = null;
                    if (actualValueMap == null) {
                        //no submitted value (at first rendering it is null anyway)
                        actualValueMap = new TreeMap<Integer, Integer[]>();
                        valueMap.put(listID, actualValueMap);
                    } else {
                        //submitted value: get it to see whether it is still valid
                        actualIntArrValue = actualValueMap.get(parameterCode);
                    }
                    actualValueMap.put(parameterCode, getBulkSelectValue(massOperationContext, fieldID,
                            parameterCode, actualIntArrValue, optionList));
                }
            }
        }
    }
}
From source file:lisong_mechlab.view.graphs.SustainedDpsGraph.java
private TableXYDataset getSeries() {
    final Collection<Modifier> modifiers = loadout.getModifiers();
    // Order weapons by descending maximum range; break ties with natural order.
    SortedMap<Weapon, List<Pair<Double, Double>>> data = new TreeMap<Weapon, List<Pair<Double, Double>>>(
            new Comparator<Weapon>() {
                @Override
                public int compare(Weapon aO1, Weapon aO2) {
                    int comp = Double.compare(aO2.getRangeMax(modifiers), aO1.getRangeMax(modifiers));
                    if (comp == 0)
                        return aO1.compareTo(aO2);
                    return comp;
                }
            });

    Double[] ranges = WeaponRanges.getRanges(loadout);
    for (double range : ranges) {
        Set<Entry<Weapon, Double>> damageDistribution = maxSustainedDPS.getWeaponRatios(range).entrySet();
        for (Map.Entry<Weapon, Double> entry : damageDistribution) {
            final Weapon weapon = entry.getKey();
            final double ratio = entry.getValue();
            final double dps = weapon.getStat("d/s", modifiers);
            final double rangeEff = weapon.getRangeEffectivity(range, modifiers);

            if (!data.containsKey(weapon)) {
                data.put(weapon, new ArrayList<Pair<Double, Double>>());
            }
            data.get(weapon).add(new Pair<Double, Double>(range, dps * ratio * rangeEff));
        }
    }

    List<Weapon> orderedWeapons = new ArrayList<>();
    DefaultTableXYDataset dataset = new DefaultTableXYDataset();
    for (Map.Entry<Weapon, List<Pair<Double, Double>>> entry : data.entrySet()) {
        XYSeries series = new XYSeries(entry.getKey().getName(), true, false);
        for (Pair<Double, Double> pair : entry.getValue()) {
            series.add(pair.first, pair.second);
        }
        dataset.addSeries(series);
        orderedWeapons.add(entry.getKey());
    }
    Collections.reverse(orderedWeapons);
    colours.updateColoursToMatch(orderedWeapons);
    return dataset;
}
From source file:eu.annocultor.converter.Analyser.java
private void computeAndExportStatistics(SortedMap<String, Map<String, ValueCount>> statistics, File tmpDir)
        throws SAXException {
    Graph trg = new RdfGraph(null, task.getEnvironment(), "analyse", "", "");
    Namespaces namespaces = new Namespaces();

    /*
     * Header
     */
    try {
        // top how many
        trg.add(new Triple(Namespaces.ANNOCULTOR_REPORT + "Summary",
                new Property(Namespaces.ANNOCULTOR_REPORT + "topCount"),
                new LiteralValue(MAX_VALUES + ""), null));
    } catch (Exception e) {
        throw new SAXException(e);
    }

    /*
     * Here we find top ten and form an RDF report
     */
    for (String propertyName : statistics.keySet()) {
        StringBuffer message = new StringBuffer(propertyName + " has ");
        Map<String, ValueCount> values = statistics.get(propertyName);

        // find top ten
        int totalRecords = 0;
        List<ValueCount> topTen = new LinkedList<ValueCount>();
        for (String value : values.keySet()) {
            ValueCount vc = values.get(value);
            topTen.add(vc);
            totalRecords += vc.count;
        }
        Collections.sort(topTen);

        // print
        String propertyUrl = Namespaces.ANNOCULTOR_REPORT + "__"
                + propertyName.replace('@', 'a').replaceAll(";", "/");
        int totalValues = values.size();
        message.append(totalValues + " values: ");
        int i = 0;
        boolean allUnique = false;
        try {
            for (Iterator<ValueCount> it = topTen.iterator(); it.hasNext() && i < MAX_VALUES;) {
                ValueCount count = it.next();
                if (i == 0) {
                    allUnique = (count.count == 1);
                    message.append(allUnique ? " ALL UNIQUE \n" : "\n");
                    // RDF report on tag
                    trg.add(new Triple(propertyUrl, Concepts.REPORTER.REPORT_NAME,
                            new LiteralValue(propertyName), null));
                    trg.add(new Triple(propertyUrl, Concepts.REPORTER.REPORT_LABEL, new LiteralValue(
                            Path.formatPath(new Path(propertyName.replace("*", "/")), namespaces)), null));
                    trg.add(new Triple(propertyUrl, Concepts.REPORTER.REPORT_TOTAL_VALUES,
                            new LiteralValue("" + totalValues), null));
                    trg.add(new Triple(propertyUrl, Concepts.REPORTER.REPORT_ALL_UNIQUE,
                            new LiteralValue("" + allUnique), null));
                }
                message.append(count.value
                        + (allUnique ? "" : (" (" + count.count + ", " + count.percent(totalRecords) + "%)"))
                        + " \n");
                // RDF report on topTen
                trg.add(new Triple(propertyUrl, Concepts.REPORTER.REPORT_VALUE,
                        new LiteralValue(String.format("%07d", i) + "," + count.count + ","
                                + count.percent(totalRecords) + "," + count.value),
                        null));
                i++;
            }
        } catch (Exception e) {
            throw new SAXException(e);
        }
    }

    try {
        trg.endRdf();
        System.out.println("Statistic saved to " + trg.getFinalFile(1).getCanonicalPath());
        // transform results
        Helper.xsl(trg.getFinalFile(1),
                new File(trg.getFinalFile(1).getCanonicalPath().replaceFirst("\\.rdf", ".html")),
                this.getClass().getResourceAsStream("/AnalyserReportRDF2HTML.xsl"));
    } catch (Exception e) {
        System.out.println(e.getMessage());
        throw new SAXException(e);
    }
}
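This example, like several below, iterates keySet() and calls get() for each key. On a tree-backed map that costs an extra O(log n) lookup per iteration; entrySet() walks the same sorted order without the lookups. A small sketch of the two equivalent loops (names illustrative):

import java.util.Map;
import java.util.SortedMap;
import java.util.TreeMap;

public class EntrySetVsGet {
    public static void main(String[] args) {
        SortedMap<String, Integer> counts = new TreeMap<String, Integer>();
        counts.put("a", 1);
        counts.put("b", 2);

        // keySet() + get(): one tree lookup per iteration.
        for (String key : counts.keySet()) {
            System.out.println(key + " -> " + counts.get(key));
        }

        // entrySet(): same sorted order, no extra lookups.
        for (Map.Entry<String, Integer> e : counts.entrySet()) {
            System.out.println(e.getKey() + " -> " + e.getValue());
        }
    }
}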
From source file:com.cyberway.issue.crawler.admin.StatisticsTracker.java
protected void writeHostsReportTo(final PrintWriter writer) {
    // TODO: use CrawlHosts for all stats; only perform sorting on
    // manageable number of hosts
    SortedMap hd = getReverseSortedHostsDistribution();
    // header
    writer.print("[#urls] [#bytes] [host] [#robots] [#remaining]\n");
    for (Iterator i = hd.keySet().iterator(); i.hasNext();) {
        // Key is 'host'.
        String key = (String) i.next();
        CrawlHost host = controller.getServerCache().getHostFor(key);
        LongWrapper val = (LongWrapper) hd.get(key);
        writeReportLine(writer, ((val == null) ? "-" : val.longValue), getBytesPerHost(key), key,
                host.getSubstats().getRobotsDenials(), host.getSubstats().getRemaining());
    }
    // StatisticsTracker doesn't know of zero-completion hosts;
    // so supplement report with those entries from host cache
    Closure logZeros = new Closure() {
        public void execute(Object obj) {
            CrawlHost host = (CrawlHost) obj;
            if (host.getSubstats().getRecordedFinishes() == 0) {
                writeReportLine(writer, host.getSubstats().getRecordedFinishes(),
                        host.getSubstats().getTotalBytes(), host.getHostName(),
                        host.getSubstats().getRobotsDenials(), host.getSubstats().getRemaining());
            }
        }
    };
    controller.getServerCache().forAllHostsDo(logZeros);
}
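This pre-generics Heritrix code declares the map as a raw SortedMap, so get() returns Object and every value needs a cast. A minimal before/after sketch (data illustrative):

import java.util.SortedMap;
import java.util.TreeMap;

public class RawVsGeneric {
    public static void main(String[] args) {
        SortedMap raw = new TreeMap();              // raw type, as in the example above
        raw.put("host", Long.valueOf(42L));
        Long hits = (Long) raw.get("host");         // cast required, unchecked

        SortedMap<String, Long> typed = new TreeMap<String, Long>();
        typed.put("host", 42L);
        Long typedHits = typed.get("host");         // no cast needed
        System.out.println(hits + " " + typedHits);
    }
}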
From source file:com.googlecode.fascinator.common.StorageDataUtil.java
/**
 * Get the values for all keys starting with baseKey from the sourceMap.
 *
 * @param sourceMap Map container
 * @param baseKey   field to search
 * @return map of values based on baseKey
 */
public Map<String, Object> getList(Map<String, Object> sourceMap, String baseKey) {
    SortedMap<String, Object> valueMap = new TreeMap<String, Object>();
    Map<String, Object> data;
    if (baseKey == null) {
        log.error("NULL baseKey provided!");
        return valueMap;
    }
    if (!baseKey.endsWith(".")) {
        baseKey = baseKey + ".";
    }
    if (sourceMap == null) {
        log.error("NULL sourceMap provided!");
        return valueMap;
    }
    for (String key : sourceMap.keySet()) {
        if (key.startsWith(baseKey)) {
            String value = sourceMap.get(key).toString();
            String field = baseKey;
            if (key.length() >= baseKey.length()) {
                field = key.substring(baseKey.length(), key.length());
            }
            String index = field;
            if (field.indexOf(".") > 0) {
                index = field.substring(0, field.indexOf("."));
            }
            if (valueMap.containsKey(index)) {
                data = (Map<String, Object>) valueMap.get(index);
            } else {
                data = new LinkedHashMap<String, Object>();
                valueMap.put(index, data);
            }
            if (value.length() == 1) {
                value = String.valueOf(value.charAt(0));
            }
            data.put(field.substring(field.indexOf(".") + 1, field.length()), value);
        }
    }
    return valueMap;
}
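The loop above visits every key and filters with startsWith. If the source map were itself a SortedMap under natural String ordering, subMap() could confine the scan to the keys sharing the prefix. A sketch under that assumption (names illustrative):

import java.util.SortedMap;
import java.util.TreeMap;

public class PrefixScan {
    public static void main(String[] args) {
        SortedMap<String, Object> source = new TreeMap<String, Object>();
        source.put("author.0.name", "Ada");
        source.put("author.1.name", "Alan");
        source.put("title", "Notes");

        String baseKey = "author.";
        // All keys k with baseKey <= k < baseKey + '\uffff'. The high
        // sentinel character sorts after any realistic key suffix.
        SortedMap<String, Object> matches = source.subMap(baseKey, baseKey + '\uffff');
        for (String key : matches.keySet()) {
            System.out.println(key + " = " + matches.get(key));
        }
        // prints the two author.* entries only
    }
}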
From source file:org.solmix.datax.mybatis.MybatisDataService.java
protected DSResponse executeInConcurrency(SortedMap<String, DataSource> dsMap, DSRequest req,
        final OperationType type) {
    DSResponse res = new DSResponseImpl(req, Status.STATUS_SUCCESS);
    List<RoutingRequest> requests = new ArrayList<RoutingRequest>();
    boolean usedTransaction = false;
    if (req.getDSCall() != null && this.canJoinTransaction(req)) {
        usedTransaction = true;
    }
    for (String key : dsMap.keySet()) {
        DataSourceInfo dsi = dataSourceService.getDataSourceInfo(key);
        ExecutorService es = dsi.getExecutorService();
        if (es == null) {
            // create the executor lazily; synchronize on the info holder,
            // not on the possibly-null executor itself
            synchronized (dsi) {
                es = dsi.getExecutorService();
                if (es == null) {
                    es = createExecutorServiceIfNS(dsi);
                }
            }
        }
        RoutingRequest rrequest = new RoutingRequest();
        rrequest.setExecutor(es);
        rrequest.setRequest(req);
        rrequest.setDataSource(dsMap.get(key));
        requests.add(rrequest);
    }
    if (CollectionUtils.isEmpty(requests)) {
        return res;
    }
    List<SqlSessionDepository> des = makeupSessionInConcurrency(requests, usedTransaction);
    // concurrent
    final CountDownLatch latch = new CountDownLatch(des.size());
    List<Future<Object>> futures = new ArrayList<Future<Object>>();
    try {
        for (SqlSessionDepository routing : des) {
            RoutingRequest request = routing.getRequest();
            final DSRequest actualReq = request.getRequest();
            final SqlSession session = routing.getSqlSession();
            futures.add(request.getExecutor().submit(new Callable<Object>() {

                @Override
                public Object call() throws Exception {
                    try {
                        return executeWithSqlSession(session, actualReq, type);
                    } finally {
                        latch.countDown();
                    }
                }
            }));
        }
        try {
            latch.await();
        } catch (InterruptedException e) {
            throw new ConcurrencyRequestException(
                    "interrupted when processing data access request in concurrency", e);
        }
    } finally {
        if (!usedTransaction) {
            for (SqlSessionDepository routing : des) {
                SqlSession session = routing.getSqlSession();
                session.close();
            }
        }
    }
    prepareResult(futures, type, req, res);
    return res;
}
From source file:org.jahia.services.templates.TemplatePackageRegistry.java
public void registerPackageVersion(JahiaTemplatesPackage pack) {
    if (!packagesWithVersionById.containsKey(pack.getId())) {
        packagesWithVersionById.put(pack.getId(), new TreeMap<ModuleVersion, JahiaTemplatesPackage>());
    }
    SortedMap<ModuleVersion, JahiaTemplatesPackage> map = packagesWithVersionById.get(pack.getId());
    if (!packagesWithVersionByName.containsKey(pack.getName())) {
        packagesWithVersionByName.put(pack.getName(), map);
    }
    JahiaTemplatesPackage jahiaTemplatesPackage = map.get(pack.getVersion());
    if (jahiaTemplatesPackage == null || jahiaTemplatesPackage.getClass().equals(pack.getClass())
            || !(pack.getClass().equals(JahiaTemplatesPackage.class))) {
        map.put(pack.getVersion(), pack);
    }
}
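The registry above relies on get(version) returning null to mean "this exact version is not yet registered". A distilled sketch of the nested-SortedMap idiom, with plain strings standing in for the Jahia types (all names illustrative):

import java.util.HashMap;
import java.util.Map;
import java.util.SortedMap;
import java.util.TreeMap;

public class VersionRegistry {
    // module id -> (version -> artifact); TreeMap keeps versions sorted.
    // Real code would use a Comparable version type; plain strings sort
    // lexicographically, which breaks past "9".
    private final Map<String, SortedMap<String, String>> byId = new HashMap<String, SortedMap<String, String>>();

    public void register(String id, String version, String artifact) {
        SortedMap<String, String> versions = byId.get(id);
        if (versions == null) {
            versions = new TreeMap<String, String>();
            byId.put(id, versions);
        }
        // get() returning null means this exact version is new
        if (versions.get(version) == null) {
            versions.put(version, artifact);
        }
    }

    public String latest(String id) {
        SortedMap<String, String> versions = byId.get(id);
        return versions == null || versions.isEmpty() ? null : versions.get(versions.lastKey());
    }

    public static void main(String[] args) {
        VersionRegistry r = new VersionRegistry();
        r.register("templates", "1.0", "templates-1.0.jar");
        r.register("templates", "2.0", "templates-2.0.jar");
        System.out.println(r.latest("templates")); // templates-2.0.jar
    }
}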
From source file:org.codehaus.mojo.license.LicenseMap.java
protected SortedProperties loadUnsafeMapping(SortedMap<String, MavenProject> artifactCache, String encoding,
        File missingFile) throws IOException, ProjectBuildingException {
    SortedSet<MavenProject> unsafeDependencies = getUnsafeDependencies();
    SortedProperties unsafeMappings = new SortedProperties(encoding);
    // there are some unsafe dependencies
    if (missingFile.exists()) {
        getLog().info("Load missing file " + missingFile);
        // load the missing file
        unsafeMappings.load(missingFile);
    }
    // get from the missing file all unknown dependencies
    List<String> unknownDependenciesId = new ArrayList<String>();
    // migrate unsafe mapping (before version 3.0 we did not have the type of
    // dependency in the missing file; now we must deal with it, so check it)
    List<String> migrateId = new ArrayList<String>();
    // SortedMap<String, MavenProject> artifactCache = AbstractAddThirdPartyMojo.getArtifactCache();
    for (Object o : unsafeMappings.keySet()) {
        String id = (String) o;
        MavenProject project = artifactCache.get(id);
        if (project == null) {
            // try with the --jar type
            project = artifactCache.get(id + "--jar");
            if (project == null) {
                // now we are sure this is an unknown dependency
                unknownDependenciesId.add(id);
            } else {
                // this dependency must be migrated
                migrateId.add(id);
            }
        }
    }
    if (!unknownDependenciesId.isEmpty()) {
        // there are some unknown dependencies in the missing file, remove them
        for (String id : unknownDependenciesId) {
            getLog().warn(
                    "dependency [" + id + "] does not exist in project, remove it from the missing file.");
            unsafeMappings.remove(id);
        }
        unknownDependenciesId.clear();
    }
    if (!migrateId.isEmpty()) {
        // there are some dependencies to migrate in the missing file
        for (String id : migrateId) {
            String newId = id + "--jar";
            getLog().info("Migrate " + id + " to " + newId + " in the missing file.");
            Object value = unsafeMappings.get(id);
            unsafeMappings.remove(id);
            unsafeMappings.put(newId, value);
        }
        migrateId.clear();
    }
    // push back loaded dependencies
    for (Object o : unsafeMappings.keySet()) {
        String id = (String) o;
        MavenProject project = artifactCache.get(id);
        if (project == null) {
            getLog().warn("dependency [" + id + "] does not exist in project.");
            continue;
        }
        String license = (String) unsafeMappings.get(id);
        if (StringUtils.isEmpty(license)) {
            // an empty license means not filled in, skip it
            continue;
        }
        // add license in map
        License l = new License();
        l.setName(license.trim());
        l.setUrl(license.trim());
        // add license
        addLicense(project, Arrays.asList(l));
        // remove unknown license
        unsafeDependencies.remove(project);
    }
    if (unsafeDependencies.isEmpty()) {
        // no more unknown licenses in map
        remove(getUnknownLicenseMessage());
    } else {
        // add a "no value" license for missing dependencies
        for (MavenProject project : unsafeDependencies) {
            String id = ArtifactHelper.getArtifactId(project.getArtifact());
            unsafeMappings.setProperty(id, "");
        }
    }
    return unsafeMappings;
}
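One detail worth isolating is the lookup-with-fallback-key: try the plain id, then the id with the "--jar" type suffix. A minimal sketch with hypothetical names:

import java.util.SortedMap;
import java.util.TreeMap;

public class FallbackLookup {
    static String resolve(SortedMap<String, String> cache, String id) {
        String hit = cache.get(id);            // primary key
        if (hit == null) {
            hit = cache.get(id + "--jar");     // legacy key without the type suffix
        }
        return hit;
    }

    public static void main(String[] args) {
        SortedMap<String, String> cache = new TreeMap<String, String>();
        cache.put("org.foo:bar--jar", "Apache-2.0");
        System.out.println(resolve(cache, "org.foo:bar")); // Apache-2.0
    }
}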
From source file:org.apache.hadoop.mapred.gridmix.test.system.GridmixJobVerification.java
/**
 * Verify the job submission order between the jobs in replay mode.
 * @param origSubmissionTime - sorted map of original jobs' submission times.
 * @param simuSubmissionTime - sorted map of simulated jobs' submission times.
 */
public void verifyJobSumissionTime(SortedMap<Long, String> origSubmissionTime,
        SortedMap<Long, String> simuSubmissionTime) {
    Assert.assertEquals("Simulated job's submission time count does "
            + "not match the original job's submission time count.",
            origSubmissionTime.size(), simuSubmissionTime.size());
    // Walk both maps in ascending submission-time order, looking each
    // job id up by its Long key.
    Iterator<Long> origTimes = origSubmissionTime.keySet().iterator();
    Iterator<Long> simuTimes = simuSubmissionTime.keySet().iterator();
    while (origTimes.hasNext() && simuTimes.hasNext()) {
        String origJobID = origSubmissionTime.get(origTimes.next());
        String simuJobID = simuSubmissionTime.get(simuTimes.next());
        Assert.assertEquals("Simulated jobs have not been submitted in the same "
                + "order as original jobs were submitted in REPLAY mode.",
                origJobID, simuJobID);
    }
}
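get takes Object rather than the key type K, so a wrongly typed key compiles without complaint; the loop above must pass the Long keys themselves rather than an int loop index. A minimal sketch of what goes wrong otherwise (data illustrative):

import java.util.SortedMap;
import java.util.TreeMap;

public class GetKeyTypePitfall {
    public static void main(String[] args) {
        SortedMap<Long, String> jobsByTime = new TreeMap<Long, String>();
        jobsByTime.put(0L, "job_0");
        jobsByTime.put(1L, "job_1");

        int index = 0;
        System.out.println(jobsByTime.get((long) index)); // "job_0": key types match

        try {
            // get(Object) accepts any type, so this compiles, but the int
            // autoboxes to Integer and TreeMap's natural-ordering comparison
            // throws ClassCastException (a HashMap would just return null).
            jobsByTime.get(index);
        } catch (ClassCastException expected) {
            System.out.println("Integer key cannot be compared to Long keys");
        }
    }
}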