List of usage examples for the java.util.SortedMap#firstKey() method
K firstKey();
From source file:hudson.model.Job.java
/**
 * Returns the last build.
 *
 * @return the newest build, or {@code null} if this job has no builds yet
 * @see LazyBuildMixIn#getLastBuild
 */
@Exported
@QuickSilver
public RunT getLastBuild() {
    // NOTE(review): relies on _getRuns() ordering — firstKey() is taken as the
    // latest build, so the map presumably sorts newest build number first; confirm
    // against the _getRuns() contract.
    final SortedMap<Integer, ? extends RunT> builds = _getRuns();
    return builds.isEmpty() ? null : builds.get(builds.firstKey());
}
From source file:okuyama.imdst.util.DataDispatcher.java
/**
 * Adds a new node (plus optional "sub" and "third" replica nodes) to the
 * consistent-hash circle.
 * <p>
 * Snapshots the current circle into {@code oldCircle}, then for each virtual
 * node of the new node computes which hash range is taken over from which
 * existing node. The new node is inserted into {@code nodeCircle},
 * {@code keyNodeMap} and {@code allNodeMap}, and a map describing the required
 * data migration is returned.
 * <p>
 * Returned map keys: {@code "tomain"}/{@code "tosub"}/{@code "tothird"} hold
 * the new node full names; {@code "main"}/{@code "sub"}/{@code "third"} hold
 * maps of existing-node identifier to range strings of the form
 * {@code "start-end"} joined with {@code "_"} for multiple ranges.
 *
 * @param keyNodeFullName      main node full name, e.g. "192.168.1.3:5555"
 * @param subKeyNodeFullName   sub (replica) node full name, may be null or ""
 * @param thirdKeyNodeFullName third replica node full name, may be null or ""
 * @return the migration-description map, or {@code null} if a previous
 *         topology change is still pending ({@code oldCircle} is non-null)
 */
public static HashMap addNode4ConsistentHash(String keyNodeFullName, String subKeyNodeFullName,
        String thirdKeyNodeFullName) {
    // Only one topology change at a time: a non-null oldCircle means a
    // previous addNode is still being applied.
    if (oldCircle != null)
        return null;
    HashMap retMap = new HashMap(2);
    HashMap convertMap = new HashMap();
    HashMap subConvertMap = new HashMap();
    HashMap thirdConvertMap = new HashMap();
    ArrayList keyNodeList = new ArrayList();
    ArrayList subKeyNodeList = new ArrayList();
    ArrayList thirdKeyNodeList = new ArrayList();
    oldCircle = new TreeMap();
    // Snapshot the current circle into oldCircle so lookups during the
    // migration can still resolve against the pre-change topology.
    Set set = nodeCircle.keySet();
    Iterator iterator = set.iterator();
    while (iterator.hasNext()) {
        Integer key = (Integer) iterator.next();
        String nodeFullName = (String) nodeCircle.get(key);
        oldCircle.put(key, nodeFullName);
    }
    // For every virtual node of the new node, work out the hash range
    // [targetHashStart, targetHashEnd] it will own and which existing node
    // currently owns that range.
    convertMap = new HashMap();
    for (int i = 0; i < virtualNodeSize; i++) {
        int targetHash = sha1Hash4Int(keyNodeFullName + "_" + i);
        int targetHashStart = 0;
        int targetHashEnd = targetHash;
        String nodeName = null;
        SortedMap headMap = nodeCircle.headMap(targetHash);
        SortedMap tailMap = nodeCircle.tailMap(targetHash);
        if (headMap.isEmpty()) {
            // No virtual node below targetHash: the range starts just past the
            // highest point of the circle (wrap-around), and the owner is the
            // node at the lowest point.
            int hash = ((Integer) nodeCircle.lastKey()).intValue();
            targetHashStart = hash + 1;
            nodeName = (String) nodeCircle.get(nodeCircle.firstKey());
        } else {
            // Range starts just past the nearest virtual node below targetHash.
            int hash = ((Integer) headMap.lastKey()).intValue();
            targetHashStart = hash + 1;
            if (tailMap.isEmpty()) {
                // Nothing at or above targetHash: wrap around to the first node.
                nodeName = (String) nodeCircle.get(nodeCircle.firstKey());
            } else {
                nodeName = (String) nodeCircle.get(tailMap.firstKey());
            }
        }
        // Accumulate per-node range strings; multiple ranges for the same node
        // are joined with "_", e.g. "6756-9876_12345-987654".
        if (convertMap.containsKey(nodeName)) {
            String work = (String) convertMap.get(nodeName);
            convertMap.put(nodeName, work + "_" + targetHashStart + "-" + targetHashEnd);
            // Mirror the same range onto the node's sub replica, if any.
            String[] subDataNodeInfo = (String[]) keyNodeMap.get(nodeName + "_sub");
            if (subDataNodeInfo != null) {
                subConvertMap.put(subDataNodeInfo[2], work + "_" + targetHashStart + "-" + targetHashEnd);
            }
            // ... and onto the third replica, if any.
            String[] thirdDataNodeInfo = (String[]) keyNodeMap.get(nodeName + "_third");
            if (thirdDataNodeInfo != null) {
                thirdConvertMap.put(thirdDataNodeInfo[2], work + "_" + targetHashStart + "-" + targetHashEnd);
            }
        } else {
            convertMap.put(nodeName, targetHashStart + "-" + targetHashEnd);
            String[] subDataNodeInfo = (String[]) keyNodeMap.get(nodeName + "_sub");
            if (subDataNodeInfo != null) {
                subConvertMap.put(subDataNodeInfo[2], targetHashStart + "-" + targetHashEnd);
            }
            String[] thirdDataNodeInfo = (String[]) keyNodeMap.get(nodeName + "_third");
            if (thirdDataNodeInfo != null) {
                thirdConvertMap.put(thirdDataNodeInfo[2], targetHashStart + "-" + targetHashEnd);
            }
        }
    }
    // Assemble the migration-description result map.
    retMap.put("tomain", keyNodeFullName);
    retMap.put("tosub", subKeyNodeFullName);
    retMap.put("tothird", thirdKeyNodeFullName);
    retMap.put("main", convertMap);
    retMap.put("sub", subConvertMap);
    retMap.put("third", thirdConvertMap);
    // Rebuild the flat node-detail table. Row layout:
    // [0]=main name, [1]=main port, [2]=main full name,
    // [3]=sub name,  [4]=sub port,  [5]=sub full name,
    // [6]=third name,[7]=third port,[8]=third full name.
    String[][] allNodeDetailList = (String[][]) keyNodeMap.get("list");
    // NOTE(review): allNodeDetailList.length is the ROW count (9), not the node
    // count (allNodeDetailList[0].length) — so the new table is always 9x10 and
    // the new node is always written at column index 9. Looks wrong for tables
    // with a different number of nodes; confirm against the rest of the class.
    String[][] newAllNodeDetailList = new String[9][allNodeDetailList.length + 1];
    keyNodeList = (ArrayList) allNodeMap.get("main");
    // Copy every existing column into the new, one-column-larger table.
    for (int allNodeDetailListIdx = 0; allNodeDetailListIdx < allNodeDetailList[0].length; allNodeDetailListIdx++) {
        newAllNodeDetailList[0][allNodeDetailListIdx] = allNodeDetailList[0][allNodeDetailListIdx];
        newAllNodeDetailList[1][allNodeDetailListIdx] = allNodeDetailList[1][allNodeDetailListIdx];
        newAllNodeDetailList[2][allNodeDetailListIdx] = allNodeDetailList[2][allNodeDetailListIdx];
        newAllNodeDetailList[3][allNodeDetailListIdx] = allNodeDetailList[3][allNodeDetailListIdx];
        newAllNodeDetailList[4][allNodeDetailListIdx] = allNodeDetailList[4][allNodeDetailListIdx];
        newAllNodeDetailList[5][allNodeDetailListIdx] = allNodeDetailList[5][allNodeDetailListIdx];
        newAllNodeDetailList[6][allNodeDetailListIdx] = allNodeDetailList[6][allNodeDetailListIdx];
        newAllNodeDetailList[7][allNodeDetailListIdx] = allNodeDetailList[7][allNodeDetailListIdx];
        newAllNodeDetailList[8][allNodeDetailListIdx] = allNodeDetailList[8][allNodeDetailListIdx];
    }
    String keyNode = keyNodeFullName;
    String[] keyNodeDt = keyNode.split(":");
    keyNodeList.add(keyNode);
    // Append the new main node's host/port/full-name column.
    newAllNodeDetailList[2][allNodeDetailList.length] = keyNode;
    newAllNodeDetailList[0][allNodeDetailList.length] = keyNodeDt[0];
    newAllNodeDetailList[1][allNodeDetailList.length] = keyNodeDt[1];
    String[] mainNodeDt = { keyNodeDt[0], keyNodeDt[1], keyNode };
    // Register the new node in keyNodeMap.
    keyNodeMap.put(keyNode, mainNodeDt);
    // Insert the new node's virtual nodes into the consistent-hash circle.
    for (int i = 0; i < virtualNodeSize; i++) {
        nodeCircle.put(new Integer(sha1Hash4Int(keyNode + "_" + i)), keyNode);
    }
    synchronized (syncObj) {
        allNodeMap.put("main", keyNodeList);
    }
    // Optional sub (replica) node registration.
    if (subKeyNodeFullName != null && !subKeyNodeFullName.equals("")) {
        String subKeyNode = subKeyNodeFullName;
        String[] subKeyNodeDt = subKeyNode.split(":");
        // NOTE(review): Map.put returns the PREVIOUS value — this swaps the
        // (empty) local list in and continues with the previously stored list;
        // NPEs if no "sub" entry existed. Contrast with the "main" branch,
        // which uses get(). Confirm this get-and-replace is intentional.
        subKeyNodeList = (ArrayList) allNodeMap.put("sub", subKeyNodeList);
        subKeyNodeList.add(subKeyNode);
        newAllNodeDetailList[5][allNodeDetailList.length] = subKeyNode;
        newAllNodeDetailList[3][allNodeDetailList.length] = subKeyNodeDt[0];
        newAllNodeDetailList[4][allNodeDetailList.length] = subKeyNodeDt[1];
        String[] subNodeDt = { subKeyNodeDt[0], subKeyNodeDt[1], subKeyNode };
        keyNodeMap.put(newAllNodeDetailList[2][allNodeDetailList.length] + "_sub", subNodeDt);
        synchronized (syncObj) {
            allNodeMap.put("sub", subKeyNodeList);
        }
    }
    // Optional third replica node registration (same pattern as sub).
    if (thirdKeyNodeFullName != null && !thirdKeyNodeFullName.equals("")) {
        String thirdKeyNode = thirdKeyNodeFullName;
        String[] thirdKeyNodeDt = thirdKeyNode.split(":");
        // NOTE(review): same put()-returns-previous pattern as the sub branch.
        thirdKeyNodeList = (ArrayList) allNodeMap.put("third", thirdKeyNodeList);
        thirdKeyNodeList.add(thirdKeyNode);
        newAllNodeDetailList[8][allNodeDetailList.length] = thirdKeyNode;
        newAllNodeDetailList[6][allNodeDetailList.length] = thirdKeyNodeDt[0];
        newAllNodeDetailList[7][allNodeDetailList.length] = thirdKeyNodeDt[1];
        String[] thirdNodeDt = { thirdKeyNodeDt[0], thirdKeyNodeDt[1], thirdKeyNode };
        keyNodeMap.put(newAllNodeDetailList[2][allNodeDetailList.length] + "_third", thirdNodeDt);
        synchronized (syncObj) {
            allNodeMap.put("third", thirdKeyNodeList);
        }
    }
    // Publish the rebuilt detail table.
    keyNodeMap.put("list", newAllNodeDetailList);
    return retMap;
}
From source file:cerrla.LocalCrossEntropyDistribution.java
/**
 * Generates a policy from the current distribution.
 *
 * @param existingSubGoals
 *            A collection of all existing sub-goals in the parent policy
 *            this policy is to be put into.
 * @return A newly generated policy from the current distribution.
 */
public ModularPolicy generatePolicy(Collection<ModularPolicy> existingSubGoals) {
    // If testing greedy policies (a generator file was supplied), step through
    // the pre-recorded greedy-policy map instead of sampling.
    if (Config.getInstance().getGeneratorFile() != null) {
        // Advance to the next greedy policy once the current one has been
        // tested enough times (or none has been selected yet).
        if (bestPolicy_ == null || testEpisode_ >= ProgramArgument.TEST_ITERATIONS.intValue()) {
            SortedMap<Integer, RelationalPolicy> greedyPolicies = policyGenerator_.getGreedyPolicyMap();
            // View of all greedy policies recorded after the current episode.
            // NOTE(review): tailMap never returns null, so the "nextKey == null"
            // check below is dead; only isEmpty() can trigger that branch.
            SortedMap<Integer, RelationalPolicy> nextKey = greedyPolicies.tailMap(currentEpisode_ + 1);
            if (ProgramArgument.TESTING.booleanValue()) {
                // Pure testing mode: always use the final greedy policy.
                currentEpisode_ = greedyPolicies.lastKey();
                bestPolicy_ = new ModularPolicy(greedyPolicies.get(currentEpisode_), this);
                testEpisode_ = 0;
            } else if (nextKey == null || nextKey.isEmpty()) {
                // End of testing. Exit.
                bestPolicyEpisode_ = ProgramArgument.TEST_ITERATIONS.intValue();
            } else {
                // Next policy and next episode.
                currentEpisode_ = nextKey.firstKey();
                bestPolicy_ = new ModularPolicy(greedyPolicies.get(currentEpisode_), this);
                testEpisode_ = 0;
            }
        }
        bestPolicy_.clearPolicyRewards();
        return bestPolicy_;
    }
    // Frozen distribution in best-policy state: keep returning the best policy.
    if (frozen_ && state_ == AlgorithmState.BEST_POLICY) {
        bestPolicy_.clearPolicyRewards();
        return bestPolicy_;
    }
    // Initialise undertested
    if (undertestedPolicies_ == null)
        undertestedPolicies_ = new LinkedList<ModularPolicy>();
    // If there remains an undertested policy not already in the parent
    // policy, use that
    for (Iterator<ModularPolicy> iter = undertestedPolicies_.iterator(); iter.hasNext();) {
        ModularPolicy undertested = iter.next();
        if (undertested.shouldRegenerate() || !isValidSample(undertested, false))
            // If the element is fully tested, remove it.
            iter.remove();
        else if (!existingSubGoals.contains(undertested)) {
            // If the parent policy doesn't already contain the undertested
            // policy, return it.
            undertested.clearChildren();
            return undertested;
        }
    }
    // Otherwise generate a new policy and queue it for under-testing.
    RelationalPolicy newPol = policyGenerator_.generatePolicy(true, false);
    ModularPolicy newModPol = null;
    if (newPol instanceof ModularPolicy)
        newModPol = new ModularPolicy((ModularPolicy) newPol);
    else
        newModPol = new ModularPolicy(newPol, this);
    undertestedPolicies_.add(newModPol);
    return newModPol;
}
From source file:io.fabric8.maven.plugin.mojo.internal.ImportMojo.java
/**
 * Scans {@code ~/.ssh} for SSH key pairs (a private key file plus its
 * matching {@code .pub} file) and imports one into the given secret data.
 * <p>
 * If exactly one pair is found it is imported automatically; if several are
 * found the user is prompted to choose; if none are found warnings are
 * logged and nothing is imported.
 *
 * @param secretData target map the selected key pair is imported into
 * @param host       the git host, used only in the prompt text
 * @throws MojoExecutionException propagated from {@code importSshKeys}
 */
private void chooseSshKeyPairs(Map<String, String> secretData, String host) throws MojoExecutionException {
    String homeDir = System.getProperty("user.home", ".");
    File sshDir = new File(homeDir, ".ssh");
    // Maps private key file name -> public key file name, sorted by name.
    SortedMap<String, String> keyPairs = new TreeMap<>();
    if (sshDir.isDirectory() && sshDir.exists()) {
        File[] files = sshDir.listFiles();
        if (files != null) {
            for (File file : files) {
                String publicName = file.getName();
                if (file.isFile() && publicName.endsWith(".pub")) {
                    String privateName = Strings.stripSuffix(publicName, ".pub");
                    // Only count it as a pair if the private key also exists.
                    if (new File(sshDir, privateName).isFile()) {
                        keyPairs.put(privateName, publicName);
                    }
                }
            }
        }
    }
    if (keyPairs.isEmpty()) {
        log.warn("No SSH key pairs could be found in %s to choose from!", sshDir);
        log.warn("You may want to clone the git repository over https:// instead to avoid ssh key pairs?");
    } else {
        // BUGFIX: was "keyPairs.size() == 0", which is unreachable inside this
        // else branch (isEmpty() already returned false) — the intent is to
        // auto-select when there is exactly ONE key pair, skipping the prompt.
        if (keyPairs.size() == 1) {
            String privateName = keyPairs.firstKey();
            importSshKeys(secretData, sshDir, privateName, keyPairs.get(privateName));
        } else {
            List<String> privateKeys = new ArrayList<>(keyPairs.keySet());
            String privateKey = null;
            try {
                privateKey = prompter.prompt(
                        "Which public / private key pair do you wish to use for SSH authentication with host: "
                                + host,
                        privateKeys);
            } catch (PrompterException e) {
                log.warn("Failed to get user input: %s", e);
            }
            if (Strings.isNotBlank(privateKey)) {
                String publicKey = keyPairs.get(privateKey);
                if (Strings.isNullOrBlank(publicKey)) {
                    log.warn("Invalid answer: %s when available values are: %s", privateKey, privateKeys);
                } else {
                    importSshKeys(secretData, sshDir, privateKey, publicKey);
                }
            }
        }
    }
}
From source file:com.alibaba.cobar.client.CobarSqlMapClientTemplate.java
/**
 * Executes a delete statement, routing it to the partitioned data source(s)
 * when partitioning is enabled, otherwise delegating to the plain template.
 * Long-running statements are logged when SQL profiling is enabled.
 *
 * @param statementName   iBATIS statement id
 * @param parameterObject statement parameter (used for routing too)
 * @return total number of rows deleted across all routed data sources
 */
@Override
public int delete(final String statementName, final Object parameterObject) throws DataAccessException {
    auditSqlIfNecessary(statementName, parameterObject);
    long startTimestamp = System.currentTimeMillis();
    try {
        if (isPartitioningBehaviorEnabled()) {
            // Ask the router which shard(s) this statement/parameter maps to.
            SortedMap<String, DataSource> dsMap = lookupDataSourcesByRouter(statementName, parameterObject);
            if (!MapUtils.isEmpty(dsMap)) {
                SqlMapClientCallback action = new SqlMapClientCallback() {
                    public Object doInSqlMapClient(SqlMapExecutor executor) throws SQLException {
                        return executor.delete(statementName, parameterObject);
                    }
                };
                if (dsMap.size() == 1) {
                    // Single shard: run directly on it.
                    DataSource dataSource = dsMap.get(dsMap.firstKey());
                    return (Integer) executeWith(dataSource, action);
                } else {
                    // Multiple shards: run concurrently and sum the row counts.
                    List<Object> results = executeInConcurrency(action, dsMap);
                    Integer rowAffacted = 0;
                    for (Object item : results) {
                        rowAffacted += (Integer) item;
                    }
                    return rowAffacted;
                }
            }
        } // end if for partitioning status checking
        // No routing applies: fall back to the default single data source.
        return super.delete(statementName, parameterObject);
    } finally {
        // Profile wall-clock time regardless of success or failure.
        if (isProfileLongTimeRunningSql()) {
            long interval = System.currentTimeMillis() - startTimestamp;
            if (interval > getLongTimeRunningSqlIntervalThreshold()) {
                logger.warn(
                        "SQL Statement [{}] with parameter object [{}] ran out of the normal time range, it consumed [{}] milliseconds.",
                        new Object[] { statementName, parameterObject, interval });
            }
        }
    }
}
From source file:org.omnaest.utils.table.TableTest.java
/**
 * Verifies that a table index built from an arbitrary {@link KeyExtractor}
 * is a live view: it tracks row removals, element updates and table clears,
 * is cached (the same instance is returned for the same extractor), and its
 * headMap/tailMap views behave as expected.
 */
@Test
public void testIndexOfArbitraryKeyExtractor() {
    Table<String> table = this.filledTable(100, 5);
    // Key = integer prefix (before ':') of each row's second column.
    KeyExtractor<Integer, RowDataReader<String>> keyExtractor = new KeyExtractor<Integer, RowDataReader<String>>() {
        private static final long serialVersionUID = -4201644938610833630L;

        @Override
        public Integer extractKey(RowDataReader<String> rowDataReader) {
            String[] elements = rowDataReader.getElements();
            String[] tokens = elements[1].split(":");
            return Integer.valueOf(tokens[0]);
        }
    };
    SortedMap<Integer, Set<Row<String>>> sortedMap = table.index().of(keyExtractor);
    {
        // One key per row of the freshly filled table.
        assertNotNull(sortedMap);
        assertEquals(table.rowSize(), sortedMap.size());
        assertTrue(sortedMap.containsKey(0));
    }
    table.removeRow(0);
    {
        // Index reflects the removal and subsequent element update.
        assertFalse(sortedMap.containsKey(0));
        assertTrue(sortedMap.containsKey(1));
        assertFalse(sortedMap.containsKey(101));
        table.setElement(0, 1, "101:88");
        assertTrue(sortedMap.containsKey(101));
        Set<Row<String>> rowSet = sortedMap.get(101);
        assertEquals(1, rowSet.size());
    }
    {
        // The index is cached: same extractor yields the same instance.
        assertSame(sortedMap, table.index().of(keyExtractor));
    }
    table.setRowElements(1, "0:0", "200:0");
    {
        assertTrue(sortedMap.containsKey(200));
    }
    {
        // Keys >= 90 plus the two out-of-range keys added above (101, 200).
        SortedMap<Integer, Set<Row<String>>> tailMap = sortedMap.tailMap(90);
        assertEquals(100 - 90 + 2, tailMap.size());
        assertEquals(90, tailMap.firstKey().intValue());
        assertEquals(200, tailMap.lastKey().intValue());
    }
    {
        // Keys < 10, minus the two low keys consumed by earlier mutations.
        SortedMap<Integer, Set<Row<String>>> headMap = sortedMap.headMap(10);
        assertEquals(9 - 2, headMap.size());
        assertEquals(3, headMap.firstKey().intValue());
        assertEquals(9, headMap.lastKey().intValue());
    }
    {
        // Clearing the table empties the live index view.
        table.clear();
        assertTrue(sortedMap.isEmpty());
    }
}
From source file:org.mitre.ccv.canopy.CcvCanopyCluster.java
/**
 * Sets canopy thresholds t1 and t2 using a MaxLike-style profile of the
 * pairwise distance distribution: distances are histogrammed, peaks are
 * located with {@code TSpectrum}, and the two lowest peaks become t1/t2.
 *
 * Issues/Pitfalls:
 * <ol>
 * <ul>t2 might be too small and nothing is removed from the list
 * <ul>t1 might be too large and everything is added to a canopy
 * </ol>
 *
 * @return {@code double[2]} of {t1, t2} (t1 &gt;= t2)
 * @throws Exception if no peaks are found in the distance distribution
 * @todo figure out how to select threshold1 (not too big, not too small)
 */
public double[] autoThreshold() throws Exception {
    LOG.info("autoThreshold: Generating distance distribution");
    //SortedMap<Double, Integer> sortMap = new TreeMap<Double, Integer>(new ReverseDoubleComparator());
    // distance -> occurrence count, sorted ascending by distance
    SortedMap<Double, Integer> sortMap = new TreeMap<Double, Integer>();
    // generate all the pairwise distances
    final int size = completeMatrix.getMatrix().getColumnDimension();
    for (int i = 0; i < size; ++i) {
        for (int j = i + 1; j < size; ++j) {
            // only calculate one triangle not full!
            Double d = this.cheapMetric.distance(i, j);
            //set.add(this.cheapMetric.distance(i, j));
            if (sortMap.containsKey(d)) {
                sortMap.put(d, sortMap.get(d) + 1);
            } else {
                sortMap.put(d, 1);
            }
        }
    }
    /**
     * To plot the distribution with gnuplot:
     *   set nokey; set xlabel "Pairwise distance"; set ylabel "Number of samples"
     *   plot "output.txt" using 1:2
     */
    /* Debug iteration kept from the original (printing is commented out). */
    for (Iterator<Entry<Double, Integer>> i = sortMap.entrySet().iterator(); i.hasNext();) {
        Entry<Double, Integer> entry = i.next();
        //System.out.printf("%f\t%d\n", entry.getKey(), entry.getValue());
    }
    /* */
    /**
     * How many bins per samples do we want?
     * Using the two end cases at lower and upper bounds.
     */
    // Histogram spanning [min distance, max distance] with 2 bins per sample.
    TH1D hist = new TH1D(completeMatrix.getMatrix().getColumnDimension() * 2, sortMap.firstKey(),
            sortMap.lastKey());
    LOG.info(String.format("autoThreshold: Packing into histogram with %d bins (%f, %f)", hist.getBins().length,
            hist.getLower(), hist.getUpper()));
    hist.pack(sortMap);
    int[] bins = hist.getBins();
    if (LOG.isDebugEnabled()) {
        if (hist.getNumberOverflows() != 0) {
            LOG.debug(
                    String.format("autoThreshold: Have %d overflows in histogram!", hist.getNumberOverflows()));
        }
        if (hist.getNumberUnderflows() != 0) {
            LOG.debug(String.format("autoThreshold: Have %d underflows in histogram!",
                    hist.getNumberUnderflows()));
        }
    }
    // print out histogram bins
    for (int i = 0; i < bins.length; i++) {
        //System.out.printf("%f\t%d\n", hist.getBinCenter(i), hist.getBinContent(i));
    }
    TSpectrum spectrum = new TSpectrum(); // use default values (sigma = 1, threshold = 0.5
    int numFound = spectrum.search(hist);
    LOG.info(String.format("autoThreshold: Found %d peaks", numFound));
    if (numFound == 0) {
        LOG.fatal("autoThreshold: No peaks found in data!");
        throw new Exception();
    }
    double xpeaks[] = spectrum.getPostionX();
    double[] rtn = new double[2]; // t1, t2
    if (numFound == 1) {
        // Single peak: take the bin just below it as t1 and the midpoint
        // between the histogram lower bound and t1 as t2.
        int bin = hist.findBin(xpeaks[0]);
        // is this in the top or bottom half?
        // @todo: must be better way than this hack
        if (bin > 0) {
            bin--;
        }
        rtn[0] = hist.getBinCenter(bin); // threshold1 is only peak
        rtn[1] = (hist.getLower() + rtn[0]) / 2;
        return rtn;
    }
    // more than one peak
    /**
     * Several possible options considered here (kept from the original):
     * select t1 first then find a good t2, or vice versa, making sure there
     * are enough samples below t2 and above t1. An earlier variant simply
     * ordered the first two peaks:
     *   if (xpeaks[0] > xpeaks[1]) { rtn[0] = xpeaks[0]; rtn[1] = xpeaks[1]; }
     *   else { rtn[0] = xpeaks[1]; rtn[1] = xpeaks[0]; }
     */
    // find the peak with the smallest position; this will be the basis for t2
    double minPeakX = hist.getUpper();
    int minPeakI = -1;
    for (int i = 0; i < numFound; i++) {
        final double x = xpeaks[i];
        if (x < minPeakX) {
            minPeakX = x;
            minPeakI = i;
        }
    }
    //System.err.printf("minPeakX=%f (%d)\n", minPeakX, minPeakI);
    // find next peak above the smallest
    // should try using something about the average and standard deviation
    // of the distribution of entries in picking this
    double min2PeakX = hist.getUpper();
    int min2PeakI = -1;
    for (int i = 0; i < numFound; i++) {
        final double x = xpeaks[i];
        if (i != minPeakI && x < min2PeakX) {
            // should check that it isn't equal or within sigma
            min2PeakX = x;
            min2PeakI = i;
        }
    }
    //System.err.printf("min2PeakX=%f (%d)\n", min2PeakX, min2PeakI);
    /**
     * An earlier variant adjusted the thresholds by one bin away from each
     * peak (t1 one bin below the second peak, t2 one bin above the first)
     * and warned when the peaks fell in adjacent bins; it is retained in
     * history but superseded by the direct assignment below.
     */
    // t1 = second-lowest peak, t2 = lowest peak.
    rtn[0] = min2PeakX;
    rtn[1] = minPeakX;
    /*
     double t1 = hist.getUpper();
     double t2 = hist.getLower();
     */
    // print out what we found
    for (int p = 0; p < numFound; p++) {
        double xp = xpeaks[p];
        int bin = hist.findBin(xp);
        int yp = hist.getBinContent(bin); // double yp
        System.err.printf("%d\t%f\t%d\n", bin, xp, yp);
        // if(yp- Math.sqrt(yp) < fline.eval(xp)) continue
    }
    return rtn;
}
From source file:com.alibaba.cobar.client.CobarSqlMapClientTemplate.java
/**
 * We reorder the collection of entities in concurrency and commit them in
 * sequence, because we have to conform to the infrastructure of spring's
 * transaction management layer.
 * <p>
 * Phase 1 routes each entity to its target data source in parallel
 * (collecting them in a {@code CobarMRBase}); phase 2 issues one batch
 * insert per data source via the concurrent request processor.
 *
 * @param statementName   iBATIS insert statement id
 * @param parameterObject expected to be a {@link BatchInsertTask}
 * @return the aggregated result of the concurrent batch inserts
 */
private Object batchInsertAfterReordering(final String statementName, final Object parameterObject) {
    // All known bucket keys: the default data source plus every configured one.
    Set<String> keys = new HashSet<String>();
    keys.add(getDefaultDataSourceName());
    keys.addAll(getCobarDataSourceService().getDataSources().keySet());
    final CobarMRBase mrbase = new CobarMRBase(keys);
    ExecutorService executor = createCustomExecutorService(Runtime.getRuntime().availableProcessors(),
            "batchInsertAfterReordering");
    try {
        final StringBuffer exceptionStaktrace = new StringBuffer();
        Collection<?> paramCollection = ((BatchInsertTask) parameterObject).getEntities();
        // One latch count per entity; await() below blocks until all routing
        // tasks have finished (successfully or not).
        final CountDownLatch latch = new CountDownLatch(paramCollection.size());
        Iterator<?> iter = paramCollection.iterator();
        while (iter.hasNext()) {
            final Object entity = iter.next();
            Runnable task = new Runnable() {
                public void run() {
                    try {
                        // Route this entity; fall back to the default data
                        // source when no rule matches.
                        SortedMap<String, DataSource> dsMap = lookupDataSourcesByRouter(statementName, entity);
                        if (MapUtils.isEmpty(dsMap)) {
                            logger.info(
                                    "can't find routing rule for {} with parameter {}, so use default data source for it.",
                                    statementName, entity);
                            mrbase.emit(getDefaultDataSourceName(), entity);
                        } else {
                            // An insert must target exactly one shard.
                            if (dsMap.size() > 1) {
                                throw new IllegalArgumentException(
                                        "unexpected routing result, found more than 1 target data source for current entity:"
                                                + entity);
                            }
                            mrbase.emit(dsMap.firstKey(), entity);
                        }
                    } catch (Throwable t) {
                        // Record failures; they are surfaced after all tasks finish.
                        exceptionStaktrace.append(ExceptionUtils.getFullStackTrace(t));
                    } finally {
                        latch.countDown();
                    }
                }
            };
            executor.execute(task);
        }
        try {
            latch.await();
        } catch (InterruptedException e) {
            throw new ConcurrencyFailureException(
                    "unexpected interruption when re-arranging parameter collection into sub-collections ", e);
        }
        // Any routing task failed: abort before issuing inserts.
        if (exceptionStaktrace.length() > 0) {
            throw new ConcurrencyFailureException(
                    "unpected exception when re-arranging parameter collection, check previous log for details.\n"
                            + exceptionStaktrace);
        }
    } finally {
        executor.shutdown();
    }
    // Phase 2: build one batch-insert request per non-empty bucket.
    List<ConcurrentRequest> requests = new ArrayList<ConcurrentRequest>();
    for (Map.Entry<String, List<Object>> entity : mrbase.getResources().entrySet()) {
        final List<Object> paramList = entity.getValue();
        if (CollectionUtils.isEmpty(paramList)) {
            continue;
        }
        String identity = entity.getKey();
        final DataSource dataSourceToUse = findDataSourceToUse(entity.getKey());
        final SqlMapClientCallback callback = new SqlMapClientCallback() {
            public Object doInSqlMapClient(SqlMapExecutor executor) throws SQLException {
                return executor.insert(statementName, paramList);
            }
        };
        ConcurrentRequest request = new ConcurrentRequest();
        request.setDataSource(dataSourceToUse);
        request.setAction(callback);
        request.setExecutor(getDataSourceSpecificExecutors().get(identity));
        requests.add(request);
    }
    return getConcurrentRequestProcessor().process(requests);
}
From source file:org.opencms.workplace.commons.CmsPreferences.java
/**
 * Builds the html for the preferred editors select boxes of the editor settings.<p>
 *
 * @param htmlAttributes optional html attributes for the &lgt;select> tag
 * @return the html for the preferred editors select boxes
 */
public String buildSelectPreferredEditors(String htmlAttributes) {
    StringBuffer result = new StringBuffer(1024);
    HttpServletRequest request = getJsp().getRequest();
    if (htmlAttributes != null) {
        // Each select box gets a name of PARAM_PREFERREDEDITOR_PREFIX + type.
        htmlAttributes += " name=\"" + PARAM_PREFERREDEDITOR_PREFIX;
    }
    Map resourceEditors = OpenCms.getWorkplaceManager().getWorkplaceEditorManager().getConfigurableEditors();
    if (resourceEditors != null) {
        // first: iterate over the resource types and consider order from configuration
        Iterator i = resourceEditors.keySet().iterator();
        // configured "new resource order" -> resource type name, sorted ascending
        SortedMap rankResources = new TreeMap();
        while (i.hasNext()) {
            String currentResourceType = (String) i.next();
            CmsExplorerTypeSettings settings = OpenCms.getWorkplaceManager()
                    .getExplorerTypeSetting(currentResourceType);
            rankResources.put(new Float(settings.getNewResourceOrder()), currentResourceType);
        }
        // Emit one table row per resource type, lowest order first.
        while (rankResources.size() > 0) {
            // get editor configuration with lowest order
            Float keyVal = (Float) rankResources.firstKey();
            String currentResourceType = (String) rankResources.get(keyVal);
            SortedMap availableEditors = (TreeMap) resourceEditors.get(currentResourceType);
            if ((availableEditors != null) && (availableEditors.size() > 0)) {
                String preSelection = computeEditorPreselection(request, currentResourceType);
                List options = new ArrayList(availableEditors.size() + 1);
                List values = new ArrayList(availableEditors.size() + 1);
                // First entry: "best available editor" default option.
                options.add(key(Messages.GUI_PREF_EDITOR_BEST_0));
                values.add(INPUT_DEFAULT);
                // second: iteration over the available editors for the resource type
                int selectedIndex = 0;
                int counter = 1;
                // NOTE(review): this loop consumes availableEditors via remove(),
                // i.e. it mutates the map obtained from the shared configurable
                // editors configuration — confirm getConfigurableEditors()
                // returns a copy, otherwise a second call would see empty maps.
                while (availableEditors.size() > 0) {
                    Float key = (Float) availableEditors.lastKey();
                    CmsWorkplaceEditorConfiguration conf = (CmsWorkplaceEditorConfiguration) availableEditors
                            .get(key);
                    options.add(keyDefault(conf.getEditorLabel(), conf.getEditorLabel()));
                    values.add(conf.getEditorUri());
                    // Remember the index of the user's preselected editor.
                    if (conf.getEditorUri().equals(preSelection)) {
                        selectedIndex = counter;
                    }
                    counter++;
                    availableEditors.remove(key);
                }
                // create the table row for the current resource type
                result.append("<tr>\n\t<td style=\"white-space: nowrap;\">");
                String localizedName = keyDefault("label.editor.preferred." + currentResourceType, "");
                if (CmsStringUtil.isEmpty(localizedName)) {
                    localizedName = CmsWorkplaceMessages.getResourceTypeName(this, currentResourceType);
                }
                result.append(localizedName);
                result.append("</td>\n\t<td>");
                result.append(buildSelect(htmlAttributes + currentResourceType + "\"", options, values,
                        selectedIndex));
                result.append("</td>\n</tr>\n");
            }
            rankResources.remove(keyVal);
        }
    }
    return result.toString();
}
From source file:org.alfresco.repo.domain.node.NodePropertyHelper.java
/**
 * Converts raw node property rows (key/value pairs, possibly several rows
 * per property for multi-valued properties) into the public
 * {@code QName -> Serializable} property map.
 * <p>
 * Rows are processed in sorted key order; consecutive rows sharing the same
 * qname id are gathered into a scratch map and collapsed into a single
 * value (a collection where the property is multi-valued).
 *
 * @param propertyValues raw persisted property rows
 * @return the assembled public property map
 */
public Map<QName, Serializable> convertToPublicProperties(
        Map<NodePropertyKey, NodePropertyValue> propertyValues) {
    Map<QName, Serializable> propertyMap = new HashMap<QName, Serializable>(propertyValues.size(), 1.0F);
    // Shortcut
    if (propertyValues.size() == 0) {
        return propertyMap;
    }
    // We need to process the properties in order
    SortedMap<NodePropertyKey, NodePropertyValue> sortedPropertyValues = new TreeMap<NodePropertyKey, NodePropertyValue>(
            propertyValues);
    // A working map. Ordering is important.
    SortedMap<NodePropertyKey, NodePropertyValue> scratch = new TreeMap<NodePropertyKey, NodePropertyValue>();
    // Iterate (sorted) over the map entries and extract values with the same qname
    Long currentQNameId = Long.MIN_VALUE;
    Iterator<Map.Entry<NodePropertyKey, NodePropertyValue>> iterator = sortedPropertyValues.entrySet()
            .iterator();
    while (true) {
        Long nextQNameId = null;
        NodePropertyKey nextPropertyKey = null;
        NodePropertyValue nextPropertyValue = null;
        // Record the next entry's values (all stay null after the last entry,
        // which forces one final flush of the scratch map below).
        if (iterator.hasNext()) {
            Map.Entry<NodePropertyKey, NodePropertyValue> entry = iterator.next();
            nextPropertyKey = entry.getKey();
            nextPropertyValue = entry.getValue();
            nextQNameId = nextPropertyKey.getQnameId();
        }
        // If the QName is going to change, and we have some entries to process, then process them.
        if (scratch.size() > 0 && (nextQNameId == null || !nextQNameId.equals(currentQNameId))) {
            QName currentQName = qnameDAO.getQName(currentQNameId).getSecond();
            PropertyDefinition currentPropertyDef = dictionaryService.getProperty(currentQName);
            // We have added something to the scratch properties but the qname has just changed
            Serializable collapsedValue = null;
            // We can shortcut if there is only one value
            if (scratch.size() == 1) {
                // There is no need to collapse list indexes
                collapsedValue = collapsePropertiesWithSameQNameAndListIndex(currentPropertyDef, scratch);
            } else {
                // There is more than one value so the list indexes need to be collapsed
                collapsedValue = collapsePropertiesWithSameQName(currentPropertyDef, scratch);
            }
            boolean forceCollection = false;
            // If the property is multi-valued then the output property must be a collection
            if (currentPropertyDef != null && currentPropertyDef.isMultiValued()) {
                forceCollection = true;
            } else if (scratch.size() == 1 && scratch.firstKey().getListIndex().intValue() > -1) {
                // This is to handle cases of collections where the property is d:any but not
                // declared as multiple.
                forceCollection = true;
            }
            if (forceCollection && collapsedValue != null && !(collapsedValue instanceof Collection<?>)) {
                // Can't use Collections.singletonList: ETHREEOH-1172
                ArrayList<Serializable> collection = new ArrayList<Serializable>(1);
                collection.add(collapsedValue);
                collapsedValue = collection;
            }
            // Store the value
            propertyMap.put(currentQName, collapsedValue);
            // Reset
            scratch.clear();
        }
        if (nextQNameId != null) {
            // Add to the current entries
            scratch.put(nextPropertyKey, nextPropertyValue);
            currentQNameId = nextQNameId;
        } else {
            // There is no next value to process
            break;
        }
    }
    // Done
    return propertyMap;
}