List of usage examples for java.util LinkedHashMap size
int size();
From source file:com.opengamma.analytics.financial.interestrate.MultipleYieldCurveFinderDataBundle.java
/**
 * Builds a data bundle for the multi-curve root finder, validating that the instrument,
 * market-value, node-point and interpolator inputs are mutually consistent.
 *
 * Iteration order of the two LinkedHashMaps is significant: node points and interpolators
 * are walked in parallel and must list the same curve names in the same order.
 *
 * @param derivatives the instruments to price; no nulls allowed
 * @param marketValues one par rate per derivative (lengths must match)
 * @param knownCurves already-fitted curves, may be null; names must not overlap the unknown set
 * @param unknownCurveNodePoints curve name -> node points, insertion order preserved
 * @param unknownCurveInterpolators curve name -> interpolator, same names/order as node points
 * @param useFiniteDifferenceByDefault whether sensitivities default to finite difference
 * @param fxMatrix FX conversion matrix, not null
 * @throws IllegalArgumentException on any inconsistency between the inputs
 */
public MultipleYieldCurveFinderDataBundle(final List<InstrumentDerivative> derivatives,
        final double[] marketValues, final YieldCurveBundle knownCurves,
        final LinkedHashMap<String, double[]> unknownCurveNodePoints,
        final LinkedHashMap<String, Interpolator1D> unknownCurveInterpolators,
        final boolean useFiniteDifferenceByDefault, final FXMatrix fxMatrix) {
    ArgumentChecker.notNull(derivatives, "derivatives");
    ArgumentChecker.noNulls(derivatives, "derivatives");
    ArgumentChecker.notNull(marketValues, "market values null");
    ArgumentChecker.notNull(unknownCurveNodePoints, "unknown curve node points");
    ArgumentChecker.notNull(unknownCurveInterpolators, "unknown curve interpolators");
    ArgumentChecker.notEmpty(unknownCurveNodePoints, "unknown curve node points");
    ArgumentChecker.notEmpty(unknownCurveInterpolators, "unknown curve interpolators");
    ArgumentChecker.isTrue(derivatives.size() == marketValues.length,
            "marketValues wrong length; must be one par rate per derivative (have {} values for {} derivatives",
            marketValues.length, derivatives.size());
    ArgumentChecker.notNull(fxMatrix, "FX matrix");
    if (knownCurves != null) {
        // A curve cannot be both "known" (fixed) and "unknown" (solved for).
        for (final String name : knownCurves.getAllNames()) {
            if (unknownCurveInterpolators.containsKey(name)) {
                throw new IllegalArgumentException("Curve name in known set matches one to be solved for");
            }
        }
        _knownCurves = knownCurves;
    } else {
        _knownCurves = null;
    }
    _derivatives = derivatives;
    _marketValues = marketValues;
    if (unknownCurveNodePoints.size() != unknownCurveInterpolators.size()) {
        throw new IllegalArgumentException("Number of unknown curves not the same as curve interpolators");
    }
    // Walk the two maps in lock-step; relies on LinkedHashMap insertion order so that
    // the i-th node-point entry corresponds to the i-th interpolator entry.
    final Iterator<Entry<String, double[]>> nodePointsIterator = unknownCurveNodePoints.entrySet().iterator();
    final Iterator<Entry<String, Interpolator1D>> unknownCurvesIterator = unknownCurveInterpolators.entrySet()
            .iterator();
    _names = new ArrayList<>();
    while (nodePointsIterator.hasNext()) {
        final Entry<String, double[]> entry1 = nodePointsIterator.next();
        final Entry<String, Interpolator1D> entry2 = unknownCurvesIterator.next();
        final String name1 = entry1.getKey();
        if (!name1.equals(entry2.getKey())) {
            throw new IllegalArgumentException("Names must be the same");
        }
        ArgumentChecker.notNull(entry1.getValue(), "curve node points for " + name1);
        ArgumentChecker.notNull(entry2.getValue(), "interpolator for " + name1);
        _names.add(name1);
    }
    // The system must not be under-determined: total unknown nodes <= instruments.
    int nNodes = 0;
    for (final double[] nodes : unknownCurveNodePoints.values()) {
        nNodes += nodes.length;
    }
    if (nNodes > derivatives.size()) {
        throw new IllegalArgumentException("Total number of nodes (" + nNodes
                + ") is greater than the number of instruments (" + derivatives.size() + ")");
    }
    _totalNodes = nNodes;
    _unknownCurveNodePoints = unknownCurveNodePoints;
    _unknownCurveInterpolators = unknownCurveInterpolators;
    _useFiniteDifferenceByDefault = useFiniteDifferenceByDefault;
    _fxMatrix = fxMatrix;
}
From source file:gov.llnl.lc.infiniband.opensm.plugin.data.OSM_FabricDeltaAnalyzer.java
public String getNodeRateUtilizationSummary(IB_Guid guid) { String NodeHeader = "%-18s t1: %s t0: %s # links: %2d max rate: %s (%s)"; StringBuffer sbuff = new StringBuffer(); IB_Vertex v = getIB_Vertex(guid);/* w w w . j av a 2 s . c o m*/ if (v == null) { return "Node (vertex) unavailable during this time period"; } LinkedHashMap<String, IB_Edge> eMap = v.getEdgeMap(); int numPorts = v.getNumPorts(); int numLinks = eMap.size(); boolean initialNode = true; boolean initialLink = true; // Look inside the PortRates for a port that matches this guid and PN // loop through the ports (in order) and try to find a matching link to display for (int portNum = 1; portNum <= numPorts; portNum++) { String key = PFM_PortChange.getPFM_PortChangeKey(guid, portNum); PFM_PortRate pr = PortRates.get(key); if (pr != null) { if (initialNode) { initialNode = false; PFM_Port port1 = pr.getPortChange().getPort1(); PFM_Port port2 = pr.getPortChange().getPort2(); sbuff.append(String.format(NodeHeader, v.getName(), port1.getCounterTimeStamp().toString(), port2.getCounterTimeStamp().toString(), numLinks, getTheoreticalMaxRate().getRateName(), this.getTheoreticalMaxRateString(PortCounterUnits.MEGABYTES)) + "\n\n"); } sbuff.append(getNodeRateUtilizationLine(pr, portNum, initialLink)); initialLink = false; } else { // its okay to have ports that don't belong to links, but we may want to keep track // TODO - dangling port list (ports without links) //logger.info("Link not found for that port number"); } } if (initialNode || initialLink) sbuff.append(" please be patient, rates not yet available\n"); return sbuff.toString(); }
From source file:me.piebridge.bible.Bible.java
private boolean addSuggest(LinkedHashMap<String, String> osiss, String value, String osis, int limit) { if (!osiss.values().contains(osis)) { String text = get(TYPE.HUMAN, bible.getPosition(TYPE.OSIS, osis)); Log.d(TAG, "add suggest, text=" + text + ", data=" + osis); osiss.put(text, osis);//from w ww . ja v a 2 s. c o m } if (limit != -1 && osiss.size() >= limit) { Log.d(TAG, "arrive limit " + limit); return true; } return false; }
From source file:ubic.gemma.datastructure.matrix.ExpressionDataMatrixColumnSort.java
/**
 * Divide the biomaterials up into chunks based on the experimental factor given, keeping
 * everybody in order.
 *
 * Biomaterials with no value for the factor are collected under a synthetic "dummy"
 * factor value (id -1), which is removed again if it ends up empty.
 *
 * @param ef the factor whose values define the chunks
 * @param bms biomaterials in their current order
 * @return ordered map of fv-&gt;bm where fv is of ef, or null if it couldn't be done properly.
 */
private static LinkedHashMap<FactorValue, List<BioMaterial>> chunkOnFactor(ExperimentalFactor ef,
        List<BioMaterial> bms) {
    if (bms == null) {
        return null;
    }
    LinkedHashMap<FactorValue, List<BioMaterial>> chunks = new LinkedHashMap<FactorValue, List<BioMaterial>>();
    /*
     * Get the factor values in the order we have things right now; LinkedHashMap keeps
     * that first-seen order for the chunks.
     */
    for (BioMaterial bm : bms) {
        for (FactorValue fv : bm.getFactorValues()) {
            // Only consider values that belong to the requested factor.
            if (!ef.getFactorValues().contains(fv)) {
                continue;
            }
            if (chunks.keySet().contains(fv)) {
                continue;
            }
            chunks.put(fv, new ArrayList<BioMaterial>());
        }
    }
    /*
     * What if bm doesn't have a value for the factorvalue. Need a dummy value.
     */
    FactorValue dummy = FactorValue.Factory.newInstance(ef);
    dummy.setValue("");
    dummy.setId(-1L);
    chunks.put(dummy, new ArrayList<BioMaterial>());
    // Second pass: assign each biomaterial to every matching chunk, or to the dummy.
    for (BioMaterial bm : bms) {
        boolean found = false;
        for (FactorValue fv : bm.getFactorValues()) {
            if (ef.getFactorValues().contains(fv)) {
                found = true;
                assert chunks.containsKey(fv);
                chunks.get(fv).add(bm);
            }
        }
        if (!found) {
            if (log.isDebugEnabled())
                log.debug(bm + " has no value for factor=" + ef + "; using dummy value");
            chunks.get(dummy).add(bm);
        }
    }
    // Drop the dummy chunk if no biomaterial needed it.
    if (chunks.get(dummy).size() == 0) {
        if (log.isDebugEnabled())
            log.debug("removing dummy");
        chunks.remove(dummy);
    }
    log.debug(chunks.size() + " chunks for " + ef + ", from current chunk of size " + bms.size());
    /*
     * Sanity check: every biomaterial must land in exactly one chunk.
     * (Note: a biomaterial with several values of this factor would be counted more
     * than once and trip this assertion.)
     */
    int total = 0;
    for (FactorValue fv : chunks.keySet()) {
        List<BioMaterial> chunk = chunks.get(fv);
        total += chunk.size();
    }
    assert total == bms.size() : "expected " + bms.size() + ", got " + total;
    return chunks;
}
From source file:com.tao.realweb.util.StringUtil.java
/**
 * Serializes an ordered map into a query-string style list of {@code key=value}
 * pairs joined by {@code '&'}, preserving insertion order
 * (e.g. {a=1, b=2} -&gt; {@code "a=1&b=2"}).
 *
 * No escaping/URL-encoding is performed; keys and values are emitted verbatim.
 *
 * @param map the ordered key/value pairs to serialize; may be null
 * @return the joined string, or null when the map is null or empty
 *         (preserves the original contract — callers may rely on the null return)
 */
public static String linkedHashMapToString(LinkedHashMap<String, String> map) {
    if (map == null || map.isEmpty()) {
        return null;
    }
    // StringBuilder avoids the O(n^2) cost of repeated String concatenation that the
    // previous implementation incurred; the raw-type Iterator is gone as well.
    StringBuilder result = new StringBuilder();
    for (String name : map.keySet()) {
        if (result.length() > 0) {
            result.append('&');
        }
        // A null value renders as "null", matching String.format("%s=%s", ...) before.
        result.append(name).append('=').append(map.get(name));
    }
    return result.toString();
}
From source file:org.bimserver.charting.SupportFunctions.java
/**
 * Collects material usage from the model's IfcRelAssociatesMaterial relationships and
 * converts it into chart-ready rows: for each material name, a cumulative-size series
 * sampled across an arbitrary 0..10000 "date" axis.
 *
 * @param structureKeyword the dimension name used as the leaf column
 * @param model the IFC model to scan
 * @param chart chart whose dimension lookup keys are configured as a side effect
 * @param subChartCount out-parameter; set to the number of distinct materials found
 * @return one row per (material, cumulative step): {leafColumnName, "date", "size"}
 */
public static ArrayList<LinkedHashMap<String, Object>> getIfcMaterialsByClassWithTreeStructure(
        String structureKeyword, IfcModelInterface model, Chart chart, MutableInt subChartCount) {
    // Derive the column name.
    String leafColumnName = structureKeyword;
    // Update the chart configuration.
    chart.setDimensionLookupKey(structureKeyword, leafColumnName);
    chart.setDimensionLookupKey("date", "date");
    chart.setDimensionLookupKey("size", "size");
    // Prepare to iterate the relationships; LinkedHashMap keeps first-seen material order.
    LinkedHashMap<String, ArrayList<Double>> materialNameWithSizes = new LinkedHashMap<>();
    // Iterate only the relationships.
    for (IfcRelAssociatesMaterial ifcRelAssociatesMaterial : model
            .getAllWithSubTypes(IfcRelAssociatesMaterial.class)) {
        // IfcMaterialSelect: IfcMaterial, IfcMaterialList, IfcMaterialLayerSetUsage,
        // IfcMaterialLayerSet, IfcMaterialLayer.
        IfcMaterialSelect materialLike = ifcRelAssociatesMaterial.getRelatingMaterial();
        // If there was a material-like object, sum it across X.
        if (materialLike != null) {
            // Get material name, like: Brick (000000), Air (000001); or, Concrete (0000000).
            String materialName = getNameOfMaterialsFromMaterialLike(materialLike, true, true);
            // Use material name if available. Otherwise, use OID of top-level material-like object.
            String name = (materialName != null) ? materialName : String.format("%d", materialLike.getOid());
            // Add entry if it doesn't exist.
            if (!materialNameWithSizes.containsKey(name))
                materialNameWithSizes.put(name, new ArrayList<Double>());
            // Get existing size data.
            ArrayList<Double> sizes = materialNameWithSizes.get(name);
            // Iterate the related objects; only IfcProduct instances contribute a volume.
            EList<IfcRoot> ifcRoots = ifcRelAssociatesMaterial.getRelatedObjects();
            for (IfcRoot ifcRoot : ifcRoots) {
                Double size = 0.0;
                if (ifcRoot instanceof IfcObjectDefinition) {
                    IfcObjectDefinition ifcObjectDefinition = (IfcObjectDefinition) ifcRoot;
                    if (ifcObjectDefinition instanceof IfcObject) {
                        IfcObject ifcObject = (IfcObject) ifcObjectDefinition;
                        if (ifcObject instanceof IfcProduct) {
                            IfcProduct ifcProduct = (IfcProduct) ifcObject;
                            Double volume = getRoughVolumeEstimateFromIfcProduct(ifcProduct);
                            size = volume;
                        }
                    }
                }
                // Only keep strictly positive volumes.
                if (size != null && size > 0)
                    sizes.add(size);
            }
        }
    }
    // Report how many sub-charts (materials) there are.
    subChartCount.setValue(materialNameWithSizes.size());
    // Build the flat row list the chart consumes.
    ArrayList<LinkedHashMap<String, Object>> rawData = new ArrayList<>();
    // One cumulative series per material.
    for (Entry<String, ArrayList<Double>> entry : materialNameWithSizes.entrySet()) {
        String name = entry.getKey();
        // Get existing size data.
        ArrayList<Double> sizes = materialNameWithSizes.get(name);
        // Sort, value ascending.
        Collections.sort(sizes, sortSmallerValuesToFront);
        sizes.add(0, 0.0);
        // Guarantee at least two points so the series renders as a line.
        if (sizes.size() == 1)
            sizes.add(0, 0.0);
        // Count including empty first entry.
        double count = Math.max(1, sizes.size() - 1);
        double step = 10000.0 / count;
        double runningSize = 0.0;
        // Add sum of zero at entry zero.
        int i = 0;
        // Iterate objects, summing them across 0 to 10000 (an arbitrary range, a way to
        // relate to other sums along X).
        for (Double size : sizes) {
            double someMeasurement = (size != null) ? size : 0.0;
            runningSize += someMeasurement;
            // Prepare to store this raw data entry.
            LinkedHashMap<String, Object> dataEntry = new LinkedHashMap<>();
            // Name the group.
            dataEntry.put(leafColumnName, name);
            dataEntry.put("date", i * step);
            dataEntry.put("size", runningSize);
            // Push the entry into the data pool.
            rawData.add(dataEntry);
            i += 1;
        }
    }
    // Send it all back.
    return rawData;
}
From source file:ca.on.oicr.pde.workflows.GATKGenotypeGVCFsWorkflow.java
/**
 * Wires up the GATK GenotypeGVCFs workflow: reads configuration, optionally batches
 * input GVCFs through CombineGVCFs, runs one GenotypeGVCFs job per chr_sizes interval
 * (in chr_sizes order), and finally either concatenates the per-interval VCFs with
 * CatVariants or provisions the single VCF directly.
 */
@Override
public void buildWorkflow() {
    final String binDir = this.getWorkflowBaseDir() + "/bin/";
    final Boolean manualOutput = BooleanUtils.toBoolean(getProperty("manual_output"), "true", "false");
    final String queue = getOptionalProperty("queue", "");
    final String java = getProperty("java");
    final String gatk = getOptionalProperty("gatk_jar", binDir);
    final String gatkKey = getProperty("gatk_key");
    final String identifier = getProperty("identifier");
    final String refFasta = getProperty("ref_fasta");
    final Double standCallConf = Double.valueOf(getProperty("stand_call_conf"));
    final Double standEmitConf = Double.valueOf(getProperty("stand_emit_conf"));
    final String dbsnpVcf = getOptionalProperty("gatk_dbsnp_vcf", null);
    final Integer gatkGenotypeGvcfsXmx = Integer.parseInt(getProperty("gatk_genotype_gvcfs_xmx"));
    final String gatkGenotypeGvcfsParams = getOptionalProperty("gatk_genotype_gvcfs_params", null);
    final Integer gatkCombineGVCFsXmx = Integer.parseInt(getProperty("gatk_combine_gvcfs_xmx"));
    final Integer gatkOverhead = Integer.parseInt(getProperty("gatk_sched_overhead_mem"));
    final Integer maxGenotypeGVCFsInputFiles = Integer
            .parseInt(getProperty("gatk_genotype_gvcfs_max_input_files"));
    final Integer maxCombineGVCFsInputFiles = Integer
            .parseInt(getProperty("gatk_combine_gvcfs_max_input_files"));
    // LinkedHashSet: rejects duplicates (checked below) while keeping chr_sizes order.
    final List<String> chrSizesList = Arrays.asList(StringUtils.split(getProperty("chr_sizes"), ","));
    final Set<String> chrSizes = new LinkedHashSet<>(chrSizesList);
    if (chrSizes.size() != chrSizesList.size()) {
        throw new RuntimeException("Duplicate chr_sizes detected.");
    }
    // one chrSize record is required, null will result in no parallelization
    if (chrSizes.isEmpty()) {
        chrSizes.add(null);
    }
    List<Pair<String, Job>> combineGvcfs = batchGVCFs(inputFiles, maxGenotypeGVCFsInputFiles,
            maxCombineGVCFsInputFiles, java, gatkCombineGVCFsXmx, gatkOverhead, tmpDir, gatk, gatkKey,
            tmpGVCFsDir, refFasta, queue);
    //use linked hashmap to keep "pairs" in sort order determined by chr_sizes
    LinkedHashMap<String, Pair<GenotypeGVCFs, Job>> vcfs = new LinkedHashMap<>();
    for (String chrSize : chrSizes) {
        //GATK Genotype VCFs( https://www.broadinstitute.org/gatk/gatkdocs/org_broadinstitute_gatk_tools_walkers_variantutils_GenotypeGVCFs.php )
        GenotypeGVCFs.Builder genotypeGvcfsBuilder = new GenotypeGVCFs.Builder(java,
                gatkGenotypeGvcfsXmx + "g", tmpDir, gatk, gatkKey, dataDir)
                        .setReferenceSequence(refFasta)
                        .setOutputFileName(
                                identifier + (chrSize != null ? "." + chrSize.replace(":", "-") : "") + ".raw")
                        .addInterval(chrSize).setStandardCallConfidence(standCallConf)
                        .setStandardEmitConfidence(standEmitConf).setDbsnpFilePath(dbsnpVcf)
                        .setExtraParameters(gatkGenotypeGvcfsParams);
        for (String f : getLeftCollection(combineGvcfs)) {
            genotypeGvcfsBuilder.addInputFile(f);
        }
        GenotypeGVCFs genotypeGvcfsCommand = genotypeGvcfsBuilder.build();
        Job genotypeGvcfsJob = getWorkflow().createBashJob("GATKGenotypeGVCFs")
                .setMaxMemory(Integer.toString((gatkGenotypeGvcfsXmx + gatkOverhead) * 1024)).setQueue(queue);
        genotypeGvcfsJob.getCommand().setArguments(genotypeGvcfsCommand.getCommand());
        // add parents, null if provision file in, not null if parent is a combine gvcf job
        for (Job j : getRightCollection(combineGvcfs)) {
            if (j != null) {
                genotypeGvcfsJob.addParent(j);
            }
        }
        if (vcfs.put(chrSize, Pair.of(genotypeGvcfsCommand, genotypeGvcfsJob)) != null) {
            throw new RuntimeException("Unexpected state: duplicate vcf.");
        }
    }
    if (vcfs.size() > 1) {
        //GATK CatVariants ( https://www.broadinstitute.org/gatk/guide/tooldocs/org_broadinstitute_gatk_tools_CatVariants.php )
        CatVariants.Builder catVariantsBuilder = new CatVariants.Builder(java, gatkCombineGVCFsXmx + "g",
                tmpDir, gatk, gatkKey, dataDir).setReferenceSequence(refFasta)
                        //individual vcf files sorted by genotype gvcfs; order of input vcf concatenation is
                        //determined by chr_sizes order (assumed to be sorted)
                        .disableSorting().setOutputFileName(identifier + ".raw");
        for (GenotypeGVCFs cmd : getLeftCollection(vcfs.values())) {
            catVariantsBuilder.addInputFile(cmd.getOutputFile());
        }
        CatVariants catVariantsCommand = catVariantsBuilder.build();
        Job combineGVCFsJob = getWorkflow().createBashJob("GATKCombineGVCFs")
                .setMaxMemory(Integer.toString((gatkCombineGVCFsXmx + gatkOverhead) * 1024)).setQueue(queue);
        // Concatenation must wait for every per-interval genotyping job.
        combineGVCFsJob.getParents().addAll(getRightCollection(vcfs.values()));
        combineGVCFsJob.getCommand().setArguments(catVariantsCommand.getCommand());
        combineGVCFsJob.addFile(
                createOutputFile(catVariantsCommand.getOutputFile(), "application/vcf-gz", manualOutput));
        combineGVCFsJob.addFile(
                createOutputFile(catVariantsCommand.getOutputIndex(), "application/tbi", manualOutput));
    } else if (vcfs.size() == 1) {
        // Single interval: no concatenation needed, provision the lone VCF directly.
        Pair<GenotypeGVCFs, Job> p = Iterables.getOnlyElement(vcfs.values());
        GenotypeGVCFs cmd = p.getLeft();
        Job genotypeGvcfsJob = p.getRight();
        genotypeGvcfsJob.addFile(createOutputFile(cmd.getOutputFile(), "application/vcf-gz", manualOutput));
        genotypeGvcfsJob.addFile(createOutputFile(cmd.getOutputIndex(), "application/tbi", manualOutput));
    } else {
        throw new RuntimeException("Unexpected state: No VCFs");
    }
}
From source file:ubic.gemma.datastructure.matrix.ExpressionDataMatrixColumnSort.java
/**
 * Divide the biomaterials up into chunks based on the experimental factor given, keeping
 * everybody in order. Value-object counterpart of chunkOnFactor: membership is tested by
 * factor-value id rather than entity equality.
 *
 * Biomaterials with no value for the factor are collected under a synthetic "dummy"
 * factor value (id -1), which is removed again if it ends up empty.
 *
 * @param ef the factor whose values define the chunks
 * @param bms biomaterial value objects in their current order
 * @return ordered map of fv-&gt;bm where fv is of ef, or null if it couldn't be done properly.
 */
private static LinkedHashMap<FactorValueValueObject, List<BioMaterialValueObject>> chunkOnFactorVO(
        ExperimentalFactor ef, List<BioMaterialValueObject> bms) {
    if (bms == null) {
        return null;
    }
    LinkedHashMap<FactorValueValueObject, List<BioMaterialValueObject>> chunks = new LinkedHashMap<FactorValueValueObject, List<BioMaterialValueObject>>();
    /*
     * Get the factor values in the order we have things right now; compare by id since
     * these are value objects.
     */
    Collection<Long> factorValueIds = EntityUtils.getIds(ef.getFactorValues());
    for (BioMaterialValueObject bm : bms) {
        for (FactorValueValueObject fv : bm.getFactorValueObjects()) {
            if (!factorValueIds.contains(fv.getId())) {
                continue;
            }
            if (chunks.keySet().contains(fv)) {
                continue;
            }
            chunks.put(fv, new ArrayList<BioMaterialValueObject>());
        }
    }
    /*
     * What if bm doesn't have a value for the factorvalue. Need a dummy value.
     */
    FactorValueValueObject dummy = new FactorValueValueObject();
    dummy.setFactorId(ef.getId());
    dummy.setValue("");
    dummy.setId(-1L);
    chunks.put(dummy, new ArrayList<BioMaterialValueObject>());
    // Second pass: assign each biomaterial to every matching chunk, or to the dummy.
    for (BioMaterialValueObject bm : bms) {
        boolean found = false;
        for (FactorValueValueObject fv : bm.getFactorValueObjects()) {
            if (factorValueIds.contains(fv.getId())) {
                found = true;
                assert chunks.containsKey(fv);
                chunks.get(fv).add(bm);
            }
        }
        if (!found) {
            if (log.isDebugEnabled())
                log.debug(bm + " has no value for factor=" + ef + "; using dummy value");
            chunks.get(dummy).add(bm);
        }
    }
    // Drop the dummy chunk if no biomaterial needed it.
    if (chunks.get(dummy).size() == 0) {
        if (log.isDebugEnabled())
            log.debug("removing dummy");
        chunks.remove(dummy);
    }
    log.debug(chunks.size() + " chunks for " + ef + ", from current chunk of size " + bms.size());
    /*
     * Sanity check: every biomaterial must land in exactly one chunk.
     */
    int total = 0;
    for (FactorValueValueObject fv : chunks.keySet()) {
        List<BioMaterialValueObject> chunk = chunks.get(fv);
        total += chunk.size();
    }
    assert total == bms.size() : "expected " + bms.size() + ", got " + total;
    return chunks;
}
From source file:dev.memento.MainActivity.java
/**
 * Creates one of the app's dialogs: a generic error dialog, or one of the three
 * cascading Memento pickers (year -> month -> date). Each picker dialog is built
 * from the current Memento data and removed on dismiss so it is rebuilt fresh
 * next time (the item lists are dynamic).
 *
 * @param id one of DIALOG_ERROR, DIALOG_MEMENTO_YEARS, DIALOG_MEMENTO_MONTHS,
 *           DIALOG_MEMENTO_DATES
 * @return the created dialog, or null if there is no data to choose from
 */
@Override
protected Dialog onCreateDialog(int id) {
    Dialog dialog = null;
    AlertDialog.Builder builder = new AlertDialog.Builder(this);
    switch (id) {
    case DIALOG_ERROR:
        builder.setMessage("error message").setCancelable(false).setPositiveButton("OK", null);
        dialog = builder.create();
        break;
    case DIALOG_MEMENTO_YEARS:
        builder.setTitle(R.string.select_year);
        final TreeMap<Integer, Integer> yearCount = mMementos.getAllYears();
        if (Log.LOG)
            Log.d(LOG_TAG, "Dialog: num of years = " + yearCount.size());
        // This shouldn't happen, but just in case
        if (yearCount.size() == 0) {
            showToast("There are no years to choose from... something is wrong.");
            if (Log.LOG)
                Log.d(LOG_TAG, "Num of mementos: " + mMementos.size());
            return null;
        }
        // Build a list that shows how many dates are available for each year
        final CharSequence[] yearText = new CharSequence[yearCount.size()];
        // Parallel arrays used to determine which entry was selected.
        // Could also have used a regular expression.
        final int years[] = new int[yearCount.size()];
        final int count[] = new int[yearCount.size()];
        int selectedYear = -1;
        int displayYear = mDateDisplayed.getYear();
        int i = 0;
        for (Map.Entry<Integer, Integer> entry : yearCount.entrySet()) {
            Integer year = entry.getKey();
            // Select the year of the Memento currently displayed
            if (displayYear == year)
                selectedYear = i;
            years[i] = year;
            count[i] = entry.getValue();
            yearText[i] = Integer.toString(year) + " (" + entry.getValue() + ")";
            i++;
        }
        builder.setSingleChoiceItems(yearText, selectedYear, new DialogInterface.OnClickListener() {
            public void onClick(DialogInterface dialog, int item) {
                dialog.dismiss();
                mSelectedYear = years[item];
                int numItems = count[item];
                // Too many mementos for one list: narrow down by month first.
                if (numItems > MAX_NUM_MEMENTOS_PER_MONTH)
                    showDialog(DIALOG_MEMENTO_MONTHS);
                else
                    showDialog(DIALOG_MEMENTO_DATES);
            }
        });
        dialog = builder.create();
        // Cause the dialog to be freed whenever it is dismissed.
        // This is necessary because the items are dynamic.
        dialog.setOnDismissListener(new OnDismissListener() {
            @Override
            public void onDismiss(DialogInterface arg0) {
                removeDialog(DIALOG_MEMENTO_YEARS);
            }
        });
        break;
    case DIALOG_MEMENTO_MONTHS:
        builder.setTitle(R.string.select_month);
        final LinkedHashMap<CharSequence, Integer> monthCount = mMementos.getMonthsForYear(mSelectedYear);
        // This shouldn't happen, but just in case
        if (monthCount.size() == 0) {
            showToast("There are no months to choose from... something is wrong.");
            if (Log.LOG)
                Log.d(LOG_TAG, "Num of mementos: " + mMementos.size());
            return null;
        }
        // Build a list that shows how many dates are available for each month
        final CharSequence[] monthText = new CharSequence[monthCount.size()];
        int selectedMonth = mDateDisplayed.getMonth() - 1;
        i = 0;
        for (Map.Entry<CharSequence, Integer> entry : monthCount.entrySet()) {
            CharSequence month = entry.getKey();
            monthText[i] = month + " (" + entry.getValue() + ")";
            i++;
        }
        builder.setSingleChoiceItems(monthText, selectedMonth, new DialogInterface.OnClickListener() {
            public void onClick(DialogInterface dialog, int item) {
                dialog.dismiss();
                // Pull the month name back out of the display text ("May (12)") with a
                // regex so it can be mapped to a month number.
                Pattern r = Pattern.compile("^(.+) ");
                Matcher m = r.matcher(monthText[item]);
                if (m.find()) {
                    String month = m.group(1);
                    mSelectedMonth = Utilities.monthStringToInt(month);
                    showDialog(DIALOG_MEMENTO_DATES);
                } else {
                    if (Log.LOG)
                        Log.e(LOG_TAG, "Could not find month in [" + monthText[item] + "]");
                }
            }
        });
        dialog = builder.create();
        // Cause the dialog to be freed whenever it is dismissed.
        // This is necessary because the items are dynamic.
        dialog.setOnDismissListener(new OnDismissListener() {
            @Override
            public void onDismiss(DialogInterface arg0) {
                removeDialog(DIALOG_MEMENTO_MONTHS);
            }
        });
        break;
    case DIALOG_MEMENTO_DATES:
        builder.setTitle(R.string.select_day);
        // Which radio button is selected?
        int selected = -1;
        final CharSequence[] dates;
        if (Log.LOG)
            Log.d(LOG_TAG, "mSelectedMonth = " + mSelectedMonth);
        if (Log.LOG)
            Log.d(LOG_TAG, "mSelectedYear = " + mSelectedYear);
        final Memento[] mementoList;
        // See if there is a month/year filter
        if (mSelectedMonth != -1 || mSelectedYear != -1) {
            if (mSelectedMonth != -1)
                mementoList = mMementos.getByMonthAndYear(mSelectedMonth, mSelectedYear);
            else
                mementoList = mMementos.getByYear(mSelectedYear);
            if (Log.LOG)
                Log.d(LOG_TAG, "Number of dates = " + mementoList.length);
            // Get dates for selected mementos
            dates = new CharSequence[mementoList.length];
            i = 0;
            for (Memento m : mementoList) {
                dates[i] = m.getDateAndTimeFormatted();
                i++;
            }
            // See if any of these items match. This could take a little while if
            // there are a large number of items unfortunately.
            Memento m = mMementos.getCurrent();
            if (m != null) {
                CharSequence searchDate = m.getDateAndTimeFormatted();
                for (i = 0; i < dates.length; i++) {
                    if (searchDate.equals(dates[i])) {
                        selected = i;
                        break;
                    }
                }
            }
        } else {
            // No filter, so get all available mementos
            dates = mMementos.getAllDates();
            if (Log.LOG)
                Log.d(LOG_TAG, "Number of dates = " + dates.length);
            selected = mMementos.getCurrentIndex();
            mementoList = mMementos.toArray(new Memento[0]);
        }
        if (Log.LOG)
            Log.d(LOG_TAG, "Selected index = " + selected);
        // Reset for future selections
        mSelectedYear = -1;
        mSelectedMonth = -1;
        builder.setSingleChoiceItems(dates, selected, new DialogInterface.OnClickListener() {
            public void onClick(DialogInterface dialog, int item) {
                dialog.dismiss();
                // Display this Memento
                Memento m = mementoList[item];
                mCurrentMemento = m;
                final SimpleDateTime dateSelected = m.getDateTime();
                mDateDisplayed = dateSelected;
                setChosenDate(mDateDisplayed);
                if (Log.LOG)
                    Log.d(LOG_TAG, "User selected Memento with date " + dateSelected.dateFormatted());
                showToast("Time traveling to " + mDateDisplayed.dateFormatted());
                refreshDisplayedDate();
                // Load memento into the browser
                String redirectUrl = m.getUrl();
                surfToUrl(redirectUrl);
                setEnableForNextPrevButtons();
                mNowButton.setEnabled(true);
                // Potentially lengthly operation, so run it off the UI thread.
                new Thread() {
                    public void run() {
                        int index = mMementos.getIndex(dateSelected);
                        if (index == -1) {
                            // This should never happen
                            if (Log.LOG)
                                Log.e(LOG_TAG, "!! Couldn't find " + dateSelected + " in the memento list!");
                        } else
                            mMementos.setCurrentIndex(index);
                    }
                }.start();
            }
        });
        dialog = builder.create();
        // Cause the dialog to be freed whenever it is dismissed.
        // This is necessary because the items are dynamic. I couldn't find
        // a better way to solve this problem.
        dialog.setOnDismissListener(new OnDismissListener() {
            @Override
            public void onDismiss(DialogInterface arg0) {
                removeDialog(DIALOG_MEMENTO_DATES);
            }
        });
        break;
    }
    return dialog;
}
From source file:com.vmware.bdd.manager.ClusterConfigManager.java
private LinkedHashMap<NetTrafficType, List<ClusterNetConfigInfo>> validateAndConvertNetNamesToNetConfigs( Map<NetTrafficType, List<String>> netNamesInfo, boolean isMaprDistro) { LinkedHashMap<NetTrafficType, List<ClusterNetConfigInfo>> netConfigs = new LinkedHashMap<NetTrafficType, List<ClusterNetConfigInfo>>(); LinkedHashMap<String, Set<String>> port2names = new LinkedHashMap<String, Set<String>>(); for (NetTrafficType type : netNamesInfo.keySet()) { netConfigs.put(type, new ArrayList<ClusterNetConfigInfo>()); for (String name : netNamesInfo.get(type)) { NetworkEntity networkEntity = networkMgr.getNetworkEntityByName(name); String pg = networkEntity.getPortGroup(); Boolean isGenerateHostname = networkEntity.getIsGenerateHostname(); String hostnamePrefix = HostnameManager.getHostnamePrefix(); ClusterNetConfigInfo netConfig = new ClusterNetConfigInfo(type, name, pg, networkEntity.getDnsType(), isGenerateHostname, hostnamePrefix); netConfigs.get(type).add(netConfig); if (!port2names.containsKey(pg)) { port2names.put(pg, new HashSet<String>()); }//w w w . ja v a2 s . c om port2names.get(pg).add(name); } } if (isMaprDistro && port2names.size() > 1) { throw BddException.MULTI_NETWORKS_FOR_MAPR_DISTRO(); } // if nw1,nw2 are both refer to pg1, should not use them in one cluster for (String pg : port2names.keySet()) { if (port2names.get(pg).size() > 1) { throw BddException.PG_REFERENCED_MULTI_TIMES(); } } return netConfigs; }