List of usage examples for java.util.Hashtable.keySet()
public Set<K> keySet()
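keySet() returns a Set view of the keys contained in the Hashtable; the usual pattern, which most of the examples below follow, is to iterate that view and look each key back up in the table. A minimal, self-contained sketch (the table contents here are illustrative only, not taken from the projects below):

import java.util.Hashtable;
import java.util.Set;

public class KeySetDemo {
    public static void main(String[] args) {
        Hashtable<String, String> table = new Hashtable<String, String>();
        table.put("host", "localhost");
        table.put("port", "8080");

        // keySet() returns a Set view backed by the Hashtable itself
        Set<String> keys = table.keySet();
        for (String key : keys) {
            System.out.println(key + " = " + table.get(key));
        }
    }
}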
From source file:edu.ku.brc.af.ui.forms.persist.ViewDef.java
/**
 * @param enableRules
 * @return
 */
protected String createEnableRulesXML(final Hashtable<String, String> enableRules) {
    if (enableRules.keySet().size() > 0) {
        StringBuilder sb = new StringBuilder("<enableRules>");
        for (String key : enableRules.keySet()) {
            sb.append("<rule id=\"");
            sb.append(key);
            sb.append("\"><![CDATA[");
            sb.append(enableRules.get(key));
            sb.append("]]></rule>");
        }
        sb.append("</enableRules>");
        return sb.toString();
    }
    return null;
}
From source file:org.globus.workspace.network.defaults.DefaultAssociationAdapter.java
public String[] getAssociationNames() throws ManageException {
    final Hashtable associations = this.persistence.currentAssociations(false);
    if (associations == null || associations.isEmpty()) {
        return zeroLen;
    } else {
        final Set keys = associations.keySet();
        return (String[]) keys.toArray(new String[keys.size()]);
    }
}
From source file:org.hyperic.hq.product.RtPlugin.java
public Collection getTimes(Integer svcID, Properties alreadyParsedFiles, String logdir, String logmask,
        String logfmt, int svcType, String transforms, ArrayList noLog, boolean collectIPs)
        throws IOException {
    Hashtable urls = new Hashtable();

    lp = getParser();
    lp.setTimeMultiplier(this.getTimeMultiplier());
    lp.urlDontLog(noLog);

    ParsedFile[] flist = generateFileList(alreadyParsedFiles, logdir, logmask);

    for (int i = 0; i < flist.length; i++) {
        long flen[] = new long[1];
        ParsedFile f = flist[i];

        this.log.debug("Parsing log: " + f.fname);
        Hashtable rv = lp.parseLog(f.fname, convertFormat(logfmt), f.oldLen, svcID, svcType, flen, collectIPs);
        this.log.debug("Done parsing log, " + rv.keySet().size() + " elements in table");

        alreadyParsedFiles.put(f.fname, Long.toString(flen[0]));
        combineUrls(rv, urls, transforms);
    }

    this.log.debug("Returning parsed data " + urls.values().size() + " entries");
    return urls.values();
}
From source file:org.mahasen.util.AbstractCommonUtil.java
/**
 * @param userDefinedProperties
 * @return
 */
public Hashtable<String, String> removeSystemMetadataFromUserMetadata(
        Hashtable<String, String> userDefinedProperties) {
    List systemDefinedProperties = new ArrayList();
    systemDefinedProperties.add(MahasenConstants.FILE_SIZE);
    systemDefinedProperties.add(MahasenConstants.UPLOADED_DATE);
    systemDefinedProperties.add(MahasenConstants.FILE_NAME);
    systemDefinedProperties.add(MahasenConstants.FILE_PATH);

    Iterator iterator = userDefinedProperties.keySet().iterator();
    while (iterator.hasNext()) {
        String userKey = (String) iterator.next();
        if (systemDefinedProperties.contains(userKey)) {
            iterator.remove();
        }
    }
    return userDefinedProperties;
}
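Because the Set returned by keySet() is a view backed by the Hashtable, removing a key through the view's iterator, as the example above does with iterator.remove(), also removes the mapping from the table itself. It is also the only safe way to drop entries mid-iteration: structurally modifying the Hashtable directly while iterating its key set would trigger a ConcurrentModificationException. A small standalone sketch of the same idiom, with illustrative key names:

import java.util.Hashtable;
import java.util.Iterator;

public class RemoveViaKeySet {
    public static void main(String[] args) {
        Hashtable<String, String> props = new Hashtable<String, String>();
        props.put("file.name", "report.pdf");   // illustrative keys only
        props.put("file.size", "1024");
        props.put("author", "alice");

        // Removing through the keySet iterator also removes the entry from the
        // Hashtable, because keySet() is a view backed by the table.
        Iterator<String> it = props.keySet().iterator();
        while (it.hasNext()) {
            if (it.next().startsWith("file.")) {
                it.remove();
            }
        }
        System.out.println(props); // only {author=alice} remains
    }
}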
From source file:org.hyperic.hq.plugin.iis.IisRtPlugin.java
/**
 * Main method for parsing the log
 *
 * Much of this is duplicated from the BaseRTPlugin, mainly due
 * to the file format being specified in the log file itself. This
 * needs to be abstracted.
 */
public Collection getTimes(Integer svcID, Properties alreadyParsedFiles, String logdir, String logmask,
        String logfmt, int svcType, String transforms, ArrayList noLog, boolean collectIPs)
        throws IOException {
    Hashtable urls = new Hashtable();

    // Setup the parser
    lp = getParser();
    lp.setTimeMultiplier(this.getTimeMultiplier());
    lp.urlDontLog(noLog);

    // Get the list of logs to parse
    ParsedFile[] flist = generateFileList(alreadyParsedFiles, logdir, logmask);

    // For each log, parse out the response time info
    for (int i = 0; i < flist.length; i++) {
        long flen[] = new long[1];
        ParsedFile f = flist[i];

        logfmt = getLogFormat(f.fname);
        if (logfmt.length() == 0) {
            // If we cannot determine the log format, don't bother
            // passing the file through the parser.
            log.debug("Not parsing " + f.fname + ": No log format");
            continue;
        }

        long start = System.currentTimeMillis();
        log.debug("Parsing log: " + f.fname);
        Hashtable rv = lp.parseLog(f.fname, logfmt, f.oldLen, svcID, svcType, flen, collectIPs);

        if (log.isDebugEnabled()) {
            long elapsed = System.currentTimeMillis() - start;
            log.debug("Done parsing log, " + rv.keySet().size() + " elements ("
                    + StringUtil.formatDuration(elapsed, 0, true) + ")");
        }

        alreadyParsedFiles.put(f.fname, Long.toString(flen[0]));
        combineUrls(rv, urls, transforms);
    }

    log.debug("Returning parsed data " + urls.values().size() + " entries");
    return urls.values();
}
From source file:org.mahasen.util.SearchUtil.java
/**
 * @param searchParameters
 * @return
 */
private ArrayList<SearchBean> createSearchBeans(Hashtable<String, Vector<String>> searchParameters) {
    ArrayList<SearchBean> searchValues = new ArrayList<SearchBean>();
    Iterator iterator = searchParameters.keySet().iterator();

    while (iterator.hasNext()) {
        String propertyName = (String) iterator.next();
        String initialValue = "";
        String finalValue = "";
        boolean isRangeBased = false;

        for (String propertyValue : searchParameters.get(propertyName)) {
            if (searchParameters.get(propertyName) != null && propertyValue.contains(",")) {
                String[] propertyValues = propertyValue.split(",");
                initialValue = propertyValues[0];
                finalValue = propertyValues[1];
                isRangeBased = true;
            } else {
                initialValue = propertyValue;
                finalValue = null;
                isRangeBased = false;
            }
            SearchBean bean = new SearchBean(propertyName, initialValue, finalValue, isRangeBased);
            searchValues.add(bean);
        }
    }
    return searchValues;
}
From source file:org.agnitas.web.CampaignAction.java
private void setSortedMailingList(Campaign.Stats stat, HttpServletRequest req, CampaignForm aForm) {
    LinkedList<Number> resultList = new LinkedList<Number>();
    MailingDao mailDao = (MailingDao) getBean("MailingDao");

    // this hashmap contains the mapping from a Date back to the Mail-ID.
    HashMap<Date, Number> tmpDate2MailIDMapping = new HashMap<Date, Number>();
    LinkedList<Date> sortedMailingList = new LinkedList<Date>();

    Hashtable map = stat.getMailingData(); // holds the complete mailing data
    map.keySet(); // all keys for the mailingData (mailIDs)

    Number tmpMailID = null;
    MaildropEntry tmpEntry = null;
    Mailing tmpMailing = null;

    // loop over all keys.
    Iterator it = map.keySet().iterator();
    while (it.hasNext()) {
        LinkedList<Date> sortDates = new LinkedList<Date>();
        tmpMailID = (Number) it.next(); // get the mailID

        // get one Mailing with tmpMailID
        tmpMailing = (Mailing) mailDao.getMailing(tmpMailID.intValue(), getCompanyID(req));

        // check if it is a World-Mailing; we have test mailings and don't care about them.
        if (tmpMailing.isWorldMailingSend()) {
            // loop over all maildrop status entries and take the first send time for each mailing.
            // Unfortunately the set is not sorted, so we have to sort it ourselves.
            Iterator it2 = tmpMailing.getMaildropStatus().iterator();
            while (it2.hasNext()) {
                tmpEntry = (MaildropEntry) it2.next();
                sortDates.add(tmpEntry.getSendDate());
            }
            // check if sortDates has entries and put the first one into the HashMap.
            if (sortDates.size() != 0) {
                Collections.sort(sortDates);
                tmpDate2MailIDMapping.put(sortDates.get(0), tmpMailID);
                sortedMailingList.add(sortDates.get(0));
            }
        }
    }

    // at this point we have a HashMap with all dates and mailing IDs and a list with all dates.
    // Now sort this list in reverse order and put the result into the form.
    Collections.sort(sortedMailingList, Collections.reverseOrder());

    // loop over the sorted list and put the corresponding mail ID into the result list.
    for (int i = 0; i < sortedMailingList.size(); i++) {
        resultList.add(tmpDate2MailIDMapping.get(sortedMailingList.get(i)));
    }
    aForm.setSortedKeys(resultList);
}
From source file:org.kepler.kar.KARBuilder.java
/**
 * @param entries
 * @return
 */
public Vector<KeplerLSID> getKAREntryLSIDs(Hashtable<KAREntry, InputStream> entries) {
    Vector<KeplerLSID> lsids = new Vector<KeplerLSID>();
    for (KAREntry entry : entries.keySet()) {
        KeplerLSID lsid = entry.getLSID();
        lsids.add(lsid);
    }
    return lsids;
}
From source file:unalcol.termites.boxplots.BestAgentsPercentageInfoCollected.java
private BoxAndWhiskerCategoryDataset addDataSet(Hashtable<String, List> info) {
    DefaultBoxAndWhiskerCategoryDataset dataset = new DefaultBoxAndWhiskerCategoryDataset();
    for (String key : info.keySet()) {
        System.out.println(key + ":" + info.get(key).size());
        String[] keyData = key.split(Pattern.quote("+"));
        dataset.add(info.get(key), keyData[1], getTechniqueName(keyData[0]));
    }
    //System.out.println("dataset" + dataset);
    return dataset;
}
From source file:org.kepler.kar.KARBuilder.java
private void addEntriesToPrivateItems(Hashtable<KAREntry, InputStream> entries) {
    if (isDebugging)
        log.debug("addEntriesToPrivateItems(" + entries.size() + ")");
    for (KAREntry karEntryKey : entries.keySet()) {
        _karItems.put(karEntryKey, entries.get(karEntryKey));
        _karItemLSIDs.add(karEntryKey.getLSID());
        _karItemNames.add(karEntryKey.getName());
    }
}