List of usage examples for java.util.HashSet#iterator()
public Iterator<E> iterator()
From source file:importer.handler.post.stages.Splitter.java
/** * Convert a hashset of versions to a string * @param set the set of version names/*from ww w.j a va 2s.c o m*/ * @return a simple space-delimited string */ private String hashsetToString(HashSet<String> set) { StringBuilder sb = new StringBuilder(); if (set.size() > 0) { sb.append(" "); Iterator<String> iter = set.iterator(); while (iter.hasNext()) { sb.append(iter.next()); if (iter.hasNext()) sb.append(" "); } } return sb.toString(); }
From source file:wilos.business.services.misc.project.ProjectService.java
/** * This method returns all the projects with no process. * /*from w w w . ja va 2s .co m*/ * @return A set of Project */ @Transactional(readOnly = true) public Set<Project> getAllProjectsWithNoProcess() { HashSet<Project> projectList = new HashSet<Project>(); HashSet<Project> tmpList = new HashSet<Project>(); tmpList = (HashSet<Project>) this.projectDao.getAllProjects(); for (Iterator iter = tmpList.iterator(); iter.hasNext();) { Project project = (Project) iter.next(); if (project.getProcess() == null) projectList.add(project); } return projectList; }
From source file:wilos.business.services.misc.project.ProjectService.java
/** * Returns the projects that aren't associated to a process. * /* w ww .j av a 2 s.c om*/ * @return A set of Project */ @Transactional(readOnly = true) public Set<Project> getAllProjectsWithProcess() { HashSet<Project> projectList = new HashSet<Project>(); HashSet<Project> tmpList = new HashSet<Project>(); tmpList = (HashSet<Project>) this.projectDao.getAllProjects(); for (Iterator iter = tmpList.iterator(); iter.hasNext();) { Project project = (Project) iter.next(); if (project.getProcess() != null) projectList.add(project); } return projectList; }
From source file:org.unitime.timetable.test.StudentSectioningTest.java
/**
 * Recursively exports the dependencies of a student's section choice as
 * {@code <depends>} child elements of the given XML element.
 *
 * Two cases:
 * - No parent sections: dependencies are derived from the offering's
 *   configurations that this choice's sections belong to.
 * - Parent sections present: one {@code <depends>} element is emitted per
 *   distinct parent choice, then the method recurses one level up the
 *   parent chain.
 *
 * @param choiceEl       XML element to attach {@code <depends>} children to
 * @param choice         the choice whose dependencies are exported
 * @param parentSections parent sections of the sections of this choice;
 *                       may be null or empty (raw Set of Section)
 */
private static void exportDependencies(Element choiceEl, Choice choice, Set parentSections) {
    if (parentSections == null || parentSections.isEmpty()) {
        // Single-config offerings carry no config ambiguity: nothing to export.
        if (choice.getOffering().getConfigs().size() == 1)
            return;
        // Collect the configs actually used by this choice's sections.
        HashSet configs = new HashSet();
        for (Iterator i = choice.getSections().iterator(); i.hasNext();) {
            Section section = (Section) i.next();
            configs.add(section.getSubpart().getConfig());
        }
        // If the choice spans every config of the offering, the choice does
        // not constrain the config, so there is no dependency to record.
        if (choice.getOffering().getConfigs().size() == configs.size())
            return;
        // Tracks choices already written, so each dependency appears once.
        HashSet depends = new HashSet();
        for (Iterator e = choice.getOffering().getConfigs().iterator(); e.hasNext();) {
            Config config = (Config) e.next();
            // Only configs reachable through this choice matter.
            if (!configs.contains(config))
                continue;
            // Find the subpart of this config matching the choice's
            // instructional type (may stay null if none matches).
            Subpart subpartThisConfig = null;
            for (Iterator f = config.getSubparts().iterator(); f.hasNext();) {
                Subpart subpart = (Subpart) f.next();
                if (subpart.getInstructionalType().equals(choice.getInstructionalType())) {
                    subpartThisConfig = subpart;
                    break;
                }
            }
            for (Iterator f = config.getSubparts().iterator(); f.hasNext();) {
                Subpart subpart = (Subpart) f.next();
                // Only top-level subparts that sort strictly before the
                // matching subpart are treated as dependencies.
                if (subpart.compareTo(subpartThisConfig) >= 0)
                    continue;
                if (subpart.getParent() != null)
                    continue;
                for (Iterator g = subpart.getSections().iterator(); g.hasNext();) {
                    Section section = (Section) g.next();
                    // add() returning true means this choice is new here.
                    if (depends.add(section.getChoice())) {
                        Element depEl = choiceEl.addElement("depends");
                        depEl.addAttribute("class", section.getChoice().getInstructionalType());
                        depEl.addAttribute("choice", section.getChoice().getId());
                        //depEl.addAttribute("name", section.getChoice().getName());
                    }
                }
            }
        }
    } else {
        // De-duplicate the parents' choices before emitting elements.
        HashSet parentChoices = new HashSet();
        for (Iterator i = parentSections.iterator(); i.hasNext();) {
            Section parentSection = (Section) i.next();
            parentChoices.add(parentSection.getChoice());
        }
        for (Iterator i = parentChoices.iterator(); i.hasNext();) {
            Choice parentChoice = (Choice) i.next();
            Element depEl = choiceEl.addElement("depends");
            depEl.addAttribute("class", parentChoice.getInstructionalType());
            depEl.addAttribute("choice", parentChoice.getId());
            //depEl.addAttribute("name", parentChoice.getName());
            // Walk one level up: gather the grandparent sections belonging
            // to this parent choice and recurse on them.
            HashSet parentParentSections = new HashSet();
            for (Iterator j = parentSections.iterator(); j.hasNext();) {
                Section parentSection = (Section) j.next();
                if (parentSection.getChoice().equals(parentChoice) && parentSection.getParent() != null)
                    parentParentSections.add(parentSection.getParent());
            }
            exportDependencies(depEl, parentChoice, parentParentSections);
        }
    }
}
From source file:web.SearchphotosController.java
/**
 * This method is called by the spring framework. The configuration for this
 * controller to be invoked is based on the pagetype and is set in the
 * urlMapping property in the spring config file.
 *
 * Searches photos (and, when the directory feature is enabled, directory
 * blobs) matching the request's search text, pads the tag list from global
 * hits when fewer than 10 tags were found, and renders the result model.
 *
 * @param request  the <code>HttpServletRequest</code>
 * @param response the <code>HttpServletResponse</code>
 * @throws ServletException
 * @throws IOException
 * @return ModelAndView this instance is returned to spring
 */
public synchronized ModelAndView handleRequest(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {
    try {
        // NOTE(review): the superclass result is discarded; only its side
        // effects (presumably session/login setup — confirm) are used here.
        ModelAndView m = super.handleRequest(request, response);
    } catch (Exception e) {
        return handleError("error in handleRequest");
    }
    //outOfSession(request, response);
    String searchText = request.getParameter(DbConstants.SEARCH);
    // Empty search: render the plain search view with the user context only.
    if (RegexStrUtil.isNull(searchText)) {
        Map myModel = new HashMap();
        String viewName = DbConstants.SEARCH;
        myModel.put(DbConstants.LOGIN_INFO, loginInfo);
        myModel.put(DbConstants.DIR_EXISTS, rbDirectoryExists);
        myModel.put(DbConstants.USER_PAGE, userpage);
        myModel.put(DbConstants.SHARE_INFO, shareInfo);
        myModel.put(DbConstants.VISITOR_PAGE, memberUserpage);
        myModel.put(DbConstants.BUSINESS_EXISTS, isBizExists(login));
        return new ModelAndView(viewName, "model", myModel);
    }
    // Sanitize the user-supplied search text before use.
    searchText = RegexStrUtil.goodText(searchText);
    SearchDao searchDao = (SearchDao) daoMapper.getDao(DbConstants.SEARCH);
    if (searchDao == null) {
        return handleError("searchDao is null in SearchphotosController");
    }
    // admin can search in any business; others are scoped to their own bid.
    boolean isBizAware = false;
    String bid = null;
    if (WebUtil.isProductPremiumSubscription()) {
        if (DiaryAdmin.isDiaryAdmin(login)) {
            isBizAware = false;
        } else {
            isBizAware = true;
        }
        if (login != null && userpage != null) {
            bid = userpage.getValue(DbConstants.BID);
        }
    }
    DirectoryDao dirDao = (DirectoryDao) daoMapper.getDao(DbConstants.DIRECTORY);
    if (dirDao == null) {
        return handleError("dirDao is null in SearchphotosController");
    }
    HashSet photoSet = null;
    CarryonDao carryonDao = (CarryonDao) daoMapper.getDao(DbConstants.CARRYON);
    if (carryonDao == null) {
        return handleError("carryonDao is null in SearchphotosController");
    }
    HashSet tagList = null;
    HashSet dirBlobResult = null;
    List carryonResult = null;
    try {
        // Photo search, scoped to the business when applicable.
        if (isBizAware && bid != null) {
            photoSet = searchDao.bizSearchCarryon(DbConstants.READ_FROM_MASTER, searchText, bid, login);
        } else {
            photoSet = searchDao.searchCarryon(DbConstants.READ_FROM_MASTER, searchText);
        }
        // Directory blob search only when the directory feature is enabled.
        if (rbDirectoryExists.equals("1")) {
            HashSet dirTags = searchDao.searchDirectoryBlobs(DbConstants.READ_FROM_SLAVE, searchText);
            if (dirTags != null && dirTags.size() > 0) {
                dirBlobResult = dirDao.getDirBlobsFromTags(dirTags);
            }
        }
        // Record the search term (presumably for tag statistics — confirm).
        searchDao.addTags(searchText);
        if (photoSet != null) {
            carryonResult = new ArrayList(photoSet);
            tagList = carryonDao.getUniqueTags(carryonResult);
            if (tagList != null) {
                // Fewer than 10 tags: pad with tags from global photo hits.
                if (tagList.size() < 10) {
                    List hitList = null;
                    if (isBizAware && bid != null) {
                        hitList = carryonDao.getCarryonHitsBizAware(bid, DbConstants.READ_FROM_SLAVE);
                    } else {
                        hitList = carryonDao.getCarryonHits();
                    }
                    HashSet hitSet = carryonDao.getUniqueTags(hitList);
                    if (hitSet != null) {
                        // NOTE(review): mutates the DAO-returned tagList in
                        // place — assumes the DAO hands back a private copy.
                        Iterator it1 = hitSet.iterator();
                        for (int i = 0; i < hitSet.size(); i++) {
                            tagList.add((String) it1.next());
                        }
                    }
                } // tagList.size() < 10
            }
        }
    } catch (BaseDaoException e) {
        return handleError("Exception in either searchDirectory" + searchText, e);
    }
    Map myModel = new HashMap();
    String viewName = DbConstants.PHOTOS;
    // No photos and no directory blobs, but tags found: show the tags view.
    if (dirBlobResult == null && carryonResult == null) {
        if (tagList != null) {
            viewName = DbConstants.TAGS;
        }
    }
    if (tagList != null) {
        myModel.put(DbConstants.USER_TAGS, RegexStrUtil.goodText(tagList.toString()));
    }
    myModel.put(DbConstants.TOP_CARRYON, carryonResult);
    myModel.put(DbConstants.TAG_LIST, tagList);
    myModel.put(DbConstants.LOGIN_INFO, loginInfo);
    myModel.put(DbConstants.USER_PAGE, userpage);
    myModel.put(DbConstants.SHARE_INFO, shareInfo);
    myModel.put(DbConstants.VISITOR_PAGE, memberUserpage);
    myModel.put(DbConstants.BUSINESS_EXISTS, isBizExists(login));
    myModel.put(DbConstants.DIR_EXISTS, rbDirectoryExists);
    if (rbDirectoryExists.equals("1") && dirBlobResult != null) {
        myModel.put(DbConstants.DIR_BLOB, dirBlobResult);
    }
    return new ModelAndView(viewName, "model", myModel);
}
From source file:org.squale.squalix.tools.mccabe.CppMcCabeTask.java
/** * Obtention des fichiers headers/*from w ww . j ava2 s .com*/ * * @return liste des fichiers header */ protected Collection getHeaderFiles() { // Construction de la liste des fichiers .h non exclus // On parcourt chaque rpertoire source sous la vue avec // l'extension requise File root = new File((String) getData().getData(TaskData.VIEW_PATH)); List srcs = ((ListParameterBO) getProject().getParameters().getParameters() .get(ParametersConstants.SOURCES)).getParameters(); List paths = BuildProjectPath.buildProjectPath((String) getData().getData(TaskData.VIEW_PATH), srcs); HashSet filesList = new HashSet(); // Parcours de chaque rpertoire source for (int i = 0; i < paths.size(); i++) { McCabeFileFilter filter = new McCabeFileFilter(root.getAbsolutePath(), mConfiguration.getEntetes()); HashSet fileList = new HashSet(); File pDirectory = new File((String) paths.get(i)); FileUtility.createRecursiveListOfFiles(pDirectory, filter, fileList); Iterator it = fileList.iterator(); String filename = null; int rootLength = root.getAbsolutePath().length() + File.separator.length(); // On ne retient que le nom du fichier while (it.hasNext()) { filename = new File((String) it.next()).getName(); filesList.add(filename); } } return filesList; }
From source file:org.skb.util.types.composite.util.TSPropertyMap.java
public String writeToFile(String fn) { try {//from w ww . j av a 2 s . c om AbstractConfiguration cfg; String prefix = ""; if (fn.endsWith(".ini")) { cfg = new INIConfiguration(); prefix = "tribe."; } else if (fn.endsWith(".xml")) cfg = new XMLConfiguration(); else if (fn.endsWith(".properties")) cfg = new PropertiesConfiguration(); else return "unknown configuration file format, use '.ini' or '.xml' or '.properties'"; File file = new File(fn); file.createNewFile(); if (!file.canWrite()) return "can't write configuration file <" + fn + ">"; HashSet<String> rows = new HashSet<String>(this.getRows()); for (Iterator<String> i = rows.iterator(); i.hasNext(); i.hasNext()) { String row = i.next(); if (this.get(row, TSPropertyMap.pmValCliOptionShort) != null || this.get(row, TSPropertyMap.pmValCliOptionLong) != null) cfg.setProperty(prefix + row, this.getValue(row)); } if (fn.endsWith(".ini")) ((INIConfiguration) cfg).save(file); if (fn.endsWith(".xml")) ((XMLConfiguration) cfg).save(file); if (fn.endsWith(".properties")) ((PropertiesConfiguration) cfg).save(file); } catch (Exception e) { // ReportManager repMgr=ReportManager.getInstance(); // repMgr.reportErrorNoFile(e.toString()); } return null; }
From source file:org.hyperic.hq.ui.action.resource.common.monitor.alerts.config.RemoveOthersAction.java
/**
 * Handles the actual work of removing emails from the action.
 *
 * Drops each submitted email from the action's stored recipient set,
 * re-encodes the action configuration and pushes the update through the
 * events boss.
 */
protected ActionForward handleRemove(ActionMapping mapping, HttpServletRequest request,
        Map<String, Object> params, Integer sessionID, ActionValue action, EmailActionConfig ea, EventsBoss eb,
        RemoveNotificationsForm rnForm) throws Exception {
    String[] emails = rnForm.getEmails();
    if (emails != null) {
        log.debug("emails.length=" + emails.length);
        // Snapshot the currently stored recipients, then subtract the
        // submitted ones.
        HashSet<Object> remaining = new HashSet<Object>();
        remaining.addAll(ea.getUsers());
        log.debug("storedEmails (pre): " + remaining);
        for (String email : emails) {
            remaining.remove(email);
        }
        log.debug("storedEmails (post): " + remaining);
        // Persist the reduced recipient list back into the action config.
        ea.setNames(StringUtil.iteratorToString(remaining.iterator(), ","));
        action.setConfig(ea.getConfigResponse().encode());
        eb.updateAction(sessionID.intValue(), action);
    }
    return returnSuccess(request, mapping, params);
}
From source file:main.java.workload.WorkloadExecutor.java
/**
 * Streams one transaction into the workload: either births a brand-new
 * transaction (with probability {@code percentageChangeInWorkload}, or when
 * no transaction of the sampled type exists yet) or repeats a recently used
 * one, updating its exponentially averaged period. Afterwards it refreshes
 * span/impact metrics, maintains the observation-window statistics and adds
 * the transaction as a hyperedge to the workload hypergraph.
 *
 * Implements the Workload Generation model finalised November 20, 2014 and
 * improved February 13-14, 2015.
 *
 * @param db      the database the transaction tuples are drawn from
 * @param cluster the cluster used for span/incidence bookkeeping
 * @param wrl     the workload definition (tuple sets per transaction type)
 * @param wb      the workload batch being built
 * @return the streamed (new or repeated) transaction
 */
public static Transaction streamOneTransaction(Database db, Cluster cluster, Workload wrl, WorkloadBatch wb) {
    Set<Integer> trTupleSet = null;
    Set<Integer> trDataSet = null;
    int min = 0, i = 0, n = 0, tr_id = 0;
    // Sample which transaction type this stream event belongs to.
    int type = trDistribution.sample();
    Transaction tr = null;
    if (!wb.getTrMap().containsKey(type))
        wb.getTrMap().put(type, new TreeMap<Integer, Transaction>());
    // new
    double rand_val = Global.rand.nextDouble();
    int toBeRemovedKey = -1;
    ++Global.global_trCount;
    // Transaction birth
    if (wb.getTrMap().get(type).isEmpty() || rand_val <= Global.percentageChangeInWorkload) {
        trTupleSet = wrl.getTrTupleSet(db, type);
        trDataSet = Workload.getTrDataSet(db, cluster, wb, trTupleSet);
        ++Global.global_trSeq;
        tr = new Transaction(Global.global_trSeq, type, trDataSet, Sim.time());
        // Add the incident transaction id
        wb.addIncidentTrId(cluster, trDataSet, Global.global_trSeq);
        // Add the newly created Transaction in the Workload Transaction map
        wb.getTrMap().get(type).put(tr.getTr_id(), tr);
        // A newborn transaction starts with the maximum period uNmax.
        double initial_period = (double) WorkloadExecutor.uNmax; // initialisation
        tr.setTr_period(initial_period);
        perfm.Period.put(tr.getTr_id(), initial_period);
        Time.put(tr.getTr_id(), Sim.time());
        // Transaction repetition and retention of old transaction
    } else {
        ArrayList<Integer> idx2_id = new ArrayList<Integer>();
        ArrayList<Integer> idx_value = new ArrayList<Integer>();
        ArrayList<Integer> uT = new ArrayList<Integer>();
        // idx sorted by value (recency rank) — the comparator orders keys
        // by their mapped counter values.
        TreeMap<Integer, Integer> idx2 = new TreeMap<Integer, Integer>(new ValueComparator<Integer>(idx));
        idx2.putAll(idx);
        min = Math.min(idx.size(), uNmax); // uNmax or uNmaxT
        i = 0;
        // Pick the first `min` transaction ids in sorted order.
        Iterator<Entry<Integer, Integer>> itr = idx2.entrySet().iterator();
        while (i < min) {
            idx2_id.add(itr.next().getKey());
            ++i;
        }
        // Deleting old Transactions
        if (idx2.size() > min) {
            toBeRemovedKey = idx2.lastKey();
            Transaction tr_old = wb.getTransaction(toBeRemovedKey);
            tr_old.calculateSpans(cluster);
            wb.removeTransaction(cluster, tr_old);
            idx.remove(toBeRemovedKey);
        }
        // Map the selected ids to their counters, then to entries of T.
        i = 0;
        while (i < idx2_id.size()) {
            idx_value.add(idx.get(idx2_id.get(i)));
            ++i;
        }
        i = 0;
        while (i < idx_value.size()) {
            uT.add(T.get(idx_value.get(i) - 1));
            ++i;
        }
        // Choose one candidate uniformly at random for repetition.
        if (uT.size() == 1)
            n = 0;
        else
            n = Global.rand.nextInt(uT.size());
        tr_id = uT.get(n);
        tr = wb.getTransaction(tr_id);
        tr.setProcessed(false);
        // Exponentially averaged inter-arrival period update.
        double prev_period = perfm.Period.get(tr.getTr_id());
        double prev_time = Time.get(tr.getTr_id());
        double new_period = Global.expAvgWt * prev_period + (1 - Global.expAvgWt) * (Sim.time() - prev_time);
        tr.setTr_period(new_period);
        perfm.Period.remove(tr.getTr_id());
        perfm.Period.put(tr.getTr_id(), new_period);
        Time.remove(tr.getTr_id());
        Time.put(tr.getTr_id(), Sim.time());
    } // end-if-else()
    // Calculate latest Span
    tr.calculateSpans(cluster);
    // Update Idt
    tr.calculateIdt();
    if (perfm.Span.containsKey(tr.getTr_id()))
        perfm.Span.remove(tr.getTr_id());
    perfm.Span.put(tr.getTr_id(), tr.getTr_serverSpanCost());
    // Create an index entry for each newly created Transaction
    idx.put(tr.getTr_id(), Global.global_trCount);
    T.add(tr.getTr_id());
    // Observation-window statistics.
    if (Global.global_trCount > Global.observationWindow) {
        _i = Global.global_trCount; // _i ~ Sim.time()
        _W = Global.observationWindow; // _W ~ time
        HashSet<Integer> unq = new HashSet<Integer>(T);
        // NOTE(review): this loop tests and increments `n` (the repetition
        // index above) instead of the declared `_n` — `_n` never advances,
        // so T.get(_n) is added repeatedly. Likely intended
        // `_n < _i; _n++`. Also, `unq` is already seeded with ALL of T on
        // the previous line, which makes the window loop redundant —
        // possibly `unq` was meant to start empty. Needs confirmation
        // before fixing.
        for (int _n = (_i - _W); n <= _i; n++) {
            unq.add(T.get(_n));
        }
        // Captures the number of total unique transaction for this observation window
        perfm.Unqlen.put((_i - _W), unq.size());
        // Calculate the impact of distributed transaction per transaction basis
        double sum_of_span_by_period = 0.0;
        sum_of_one_by_period = 0.0;
        Iterator<Integer> unq_itr = unq.iterator();
        while (unq_itr.hasNext()) {
            int unq_T = unq_itr.next();
            int span = perfm.Span.get(unq_T);
            double period = perfm.Period.get(unq_T);
            double span_by_period = span / period; // Frequency = 1/Period (f=1/t) per unit time (i.e. 1 second)
            double one_by_period = 1 / period; // Frequency = 1/Period (f=1/t) per unit time (i.e. 1 second)
            sum_of_span_by_period += span_by_period;
            sum_of_one_by_period += one_by_period;
        }
        double i_dt = (sum_of_span_by_period) / (Global.servers * sum_of_one_by_period);
        perfm.I_Dt.put((_i - _W), i_dt);
        if (Double.isNaN(i_dt))
            currentIDt = 0;
        else
            currentIDt = i_dt;
        // Reset repartitioning cooling off period
        if (WorkloadExecutor.repartitioningCoolingOff
                && Sim.time() >= WorkloadExecutor.RepartitioningCoolingOffPeriod) {
            WorkloadExecutor.repartitioningCoolingOff = false;
            Global.LOGGER.info("-----------------------------------------------------------------------------");
            Global.LOGGER.info("Simulation time: " + Sim.time() / (double) Global.observationWindow + " hrs");
            Global.LOGGER.info("Repartitioning cooling off period ends.");
            Global.LOGGER
                    .info("System will now check whether another repartitioning is required at this moment.");
            Global.LOGGER.info("Current IDt: " + currentIDt);
            Global.LOGGER.info("User defined IDt threshold: " + Global.userDefinedIDtThreshold);
            if (currentIDt < Global.userDefinedIDtThreshold) {
                Global.LOGGER.info("Repartitioning is not required at this moment.");
                //This is to disable on-demand atomic repartitioning for A-ARHC only
                if (Global.adaptive) {
                    Global.LOGGER.info("Disabling on-demand atomic repartitioning for A-ARHC ...");
                    WorkloadExecutor.isAdaptive = false;
                }
                Global.LOGGER.info("Continuing transaction processing ...");
            }
        }
        perfm.time.put((_i - _W), Sim.time());
    }
    // Add a hyperedge to workload hypergraph
    wb.addHGraphEdge(cluster, tr);
    // Collect transactional streams if data stream mining is enabled
    if (Global.streamCollection)
        Global.dsm.collectStream(cluster, tr);
    return tr;
}
From source file:org.apache.torque.util.SqlExpression.java
/** * Takes a columnName and criteria (which must be an array) and builds a SQL 'IN' expression taking into account the ignoreCase * flag./*from w w w . j a va 2s . c om*/ * * @param columnName A column. * @param criteria The value to compare the column against. * @param comparison Either " IN " or " NOT IN ". * @param ignoreCase If true and columns represent Strings, the appropriate function defined for the database will be used to * ignore differences in case. * @param db Represents the database in use, for vendor specific functions. * @param whereClause A StringBuffer to which the sql expression will be appended. */ static void buildIn(String columnName, Object criteria, SqlEnum comparison, boolean ignoreCase, DB db, StringBuffer whereClause) { if (ignoreCase) { whereClause.append(db.ignoreCase(columnName)); } else { whereClause.append(columnName); } whereClause.append(comparison); HashSet inClause = new HashSet(); if (criteria instanceof List) { Iterator iter = ((List) criteria).iterator(); while (iter.hasNext()) { Object value = iter.next(); // The method processInValue() quotes the string // and/or wraps it in UPPER(). inClause.add(processInValue(value, ignoreCase, db)); } } else if (criteria instanceof String) { // subquery inClause.add(criteria); } else { // Assume array. for (int i = 0; i < Array.getLength(criteria); i++) { Object value = Array.get(criteria, i); // The method processInValue() quotes the string // and/or wraps it in UPPER(). inClause.add(processInValue(value, ignoreCase, db)); } } whereClause.append('(').append(StringUtils.join(inClause.iterator(), ",")).append(')'); }