List of usage examples for java.util.HashSet.size()
public int size()
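size() returns the number of elements currently stored in the set. Before the real-world examples below, here is a minimal standalone sketch (not taken from any of the source files listed on this page; class and variable names are illustrative only) showing what size() reports as elements are added, duplicated, and removed:

import java.util.HashSet;

public class HashSetSizeDemo {
    public static void main(String[] args) {
        HashSet<String> names = new HashSet<>();
        System.out.println(names.size());   // 0 - a newly created set is empty

        names.add("alice");
        names.add("bob");
        names.add("alice");                  // duplicate, not stored again
        System.out.println(names.size());   // 2 - duplicates do not increase the count

        names.remove("bob");
        System.out.println(names.size());   // 1
    }
}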
From source file:com.juick.android.MainActivity.java
private void logoutFromSomeServices() {
    new Thread("Get logged in accounts") {
        @Override
        public void run() {
            final HashSet<AccountProof> accountProofs = JAXMPPClient.getAccountProofs(MainActivity.this, false);
            runOnUiThread(new Runnable() {
                @Override
                public void run() {
                    if (accountProofs.size() == 0) {
                        Toast.makeText(MainActivity.this, getString(R.string.NoAuth), Toast.LENGTH_LONG).show();
                        return;
                    }
                    final CharSequence[] arr = new CharSequence[accountProofs.size()];
                    int ix = 0;
                    for (AccountProof accountProof : accountProofs) {
                        arr[ix++] = accountProof.getProofAccountType();
                    }
                    new AlertDialog.Builder(MainActivity.this)
                            .setItems(arr, new DialogInterface.OnClickListener() {
                                @Override
                                public void onClick(DialogInterface dialog, int which) {
                                    String what = arr[which].toString();
                                    ArrayList<Utils.URLAuth> authorizers = Utils.authorizers;
                                    for (Utils.URLAuth authorizer : authorizers) {
                                        if (authorizer.isForBlog(what)) {
                                            authorizer.reset(MainActivity.this, handler);
                                        }
                                    }
                                    Toast.makeText(MainActivity.this,
                                            getString(R.string.AuthHasBeenClearedFor) + " " + what,
                                            Toast.LENGTH_LONG).show();
                                }
                            }).setCancelable(true).create().show();
                }
            });
        }
    }.start();
}
From source file:org.kuali.kfs.sys.context.DataDictionaryConfigurationTest.java
public void testAllDataDictionaryDocumentTypesExistInWorkflowDocumentTypeTable() throws Exception {
    HashSet<String> workflowDocumentTypeNames = new HashSet<String>();
    DataSource mySource = SpringContext.getBean(DataSource.class);
    Connection dbCon = null;
    try {
        dbCon = mySource.getConnection();
        Statement dbAsk = dbCon.createStatement();
        ResultSet dbAnswer = dbAsk.executeQuery("select DOC_TYP_NM from KREW_DOC_TYP_T where CUR_IND = 1");
        while (dbAnswer.next()) {
            String docName = dbAnswer.getString(1);
            if (StringUtils.isNotBlank(docName)) {
                workflowDocumentTypeNames.add(docName);
            }
        }
    } catch (Exception e) {
        throw (e);
    }
    // Using HashSet since duplicate objects would otherwise be returned
    HashSet<DocumentEntry> documentEntries = new HashSet<DocumentEntry>(
            dataDictionary.getDocumentEntries().values());
    List<String> ddEntriesWithMissingTypes = new ArrayList<String>();
    for (DocumentEntry documentEntry : documentEntries) {
        String name = documentEntry.getDocumentTypeName();
        String testName = new String(" ");
        if (documentEntry instanceof FinancialSystemMaintenanceDocumentEntry) {
            testName = ((FinancialSystemMaintenanceDocumentEntry) documentEntry).getBusinessObjectClass()
                    .getName();
        } else {
            testName = documentEntry.getDocumentClass().getName();
        }
        if (!workflowDocumentTypeNames.contains(name) && !"RiceUserMaintenanceDocument".equals(name)
                && !testName.contains("rice")) {
            ddEntriesWithMissingTypes.add(name);
        } else {
            workflowDocumentTypeNames.remove(name);
        }
    }
    if (workflowDocumentTypeNames.size() > 0) {
        try {
            // If documents are parent docs, then they aren't superfluous.
            String queryString = "select distinct doc_typ_nm from KREW_DOC_TYP_T"
                    + " where doc_typ_id in (select parnt_id from KREW_DOC_TYP_T"
                    + " where actv_ind = 1"
                    + " and cur_ind = 1)";
            Statement dbAsk = dbCon.createStatement();
            ResultSet dbAnswer = dbAsk.executeQuery(queryString);
            while (dbAnswer.next()) {
                String docName = dbAnswer.getString(1);
                if (StringUtils.isNotBlank(docName)) {
                    workflowDocumentTypeNames.remove(docName);
                }
            }
        } catch (Exception e) {
            throw (e);
        }
        System.err.print("superfluousTypesDefinedInWorkflowDatabase: " + workflowDocumentTypeNames);
    }
    assertEquals("documentTypesNotDefinedInWorkflowDatabase: " + ddEntriesWithMissingTypes, 0,
            ddEntriesWithMissingTypes.size());
}
From source file:com.joyent.manta.http.MantaHttpHeaders.java
/**
 * Gets the header defining RBAC roles used for this object.
 *
 * @return roles associated with object
 */
public Set<String> getRoles() {
    final Object value = get(HTTP_ROLE_TAG);
    if (value == null) {
        return Collections.emptySet();
    }
    final HashSet<String> roles = new HashSet<>();
    if (value instanceof Iterable<?>) {
        ((Iterable<?>) value).forEach(o -> {
            if (o != null) {
                roles.add(o.toString());
            }
        });
    } else if (value.getClass().isArray()) {
        for (Object o : (Object[]) value) {
            if (o != null) {
                roles.add(o.toString());
            }
        }
    } else {
        String line = value.toString();
        roles.addAll(MantaUtils.fromCsv(line));
    }
    /* The result may come to us as a CSV. In that case we treat each
     * value separated by a comma as a single role. */
    if (roles.size() == 1) {
        String line = roles.iterator().next();
        roles.clear();
        roles.addAll(MantaUtils.fromCsv(line));
    }
    return Collections.unmodifiableSet(roles);
}
From source file:ded.ui.DiagramController.java
/** Deselect all controllers and return the number that were previously selected. */
public int deselectAll() {
    // Get selection set first, so we only change state after iterating.
    HashSet<Controller> toDeselect = getSelectionSet();

    // Unselect them.
    setMultipleSelected(toDeselect, SelectionState.SS_UNSELECTED);

    return toDeselect.size();
}
From source file:org.gcaldaemon.gui.config.MainConfig.java
public final String[] getCalendarURLs(AccountInfo account, boolean loadFromGoogle) throws Exception {
    if (account == null || account.username == null) {
        return new String[0];
    }
    HashSet set = new HashSet();
    String[] urls = null;
    int i;
    if (loadFromGoogle) {
        Request request = new Request();
        request.username = account.username;
        request.password = account.password;
        urls = GCalUtilities.getCalendarURLs(request, getWorkDirectory());
        urlMap.put(account.username, urls);
    } else {
        urls = (String[]) urlMap.get(account.username);
    }
    if (urls != null) {
        for (i = 0; i < urls.length; i++) {
            set.add(urls[i]);
        }
    }
    FileSync[] configs = getFileSyncConfigs();
    FileSync config;
    for (i = 0; i < configs.length; i++) {
        config = configs[i];
        if (account.username.equals(config.username)) {
            if (config.privateIcalUrl != null && config.privateIcalUrl.endsWith(".ics")
                    && !containsURL(set, config.privateIcalUrl)) {
                set.add(config.privateIcalUrl);
            }
        }
    }
    String[] array = new String[set.size()];
    set.toArray(array);
    Arrays.sort(array, String.CASE_INSENSITIVE_ORDER);
    return array;
}
From source file:org.oscarehr.web.MisReportUIBean.java
private void addBedProgramInfo(ArrayList<DataRow> results) {
    // only pertains to bed programs
    int totalDaysElderly = 0;
    int totalDaysAdult = 0;
    int totalNewAdmissionsElderly = 0;
    int totalNewAdmissionsAdult = 0;
    int totalNewDischargeAdult = 0;
    HashSet<Integer> uniqueElderlyClients = new HashSet<Integer>();
    HashSet<Integer> uniqueAdultClients = new HashSet<Integer>();

    for (Admission admission : admissions) {
        if (isProgramType(admission.getProgramId(), Program.BED_TYPE)) {
            int residentDays = calculateResidentDays(admission);
            boolean isNewAdmission = isNewAdmission(admission);
            Demographic demographic = demographicDao.getDemographicById(admission.getClientId());
            Integer age = DateUtils.getAge(demographic.getBirthDay(), endDate);

            if (age != null && age.intValue() >= ELDERLY_AGE) {
                totalDaysElderly = totalDaysElderly + residentDays;
                if (isNewAdmission)
                    totalNewAdmissionsElderly++;
                uniqueElderlyClients.add(admission.getClientId());
            } else {
                totalDaysAdult = totalDaysAdult + residentDays;
                if (isNewAdmission)
                    totalNewAdmissionsAdult++;
                if (isNewDischarge(admission))
                    totalNewDischargeAdult++;
                uniqueAdultClients.add(admission.getClientId());
            }
        }
    }

    results.add(new DataRow(4034520, "Resident days - Elderly", totalDaysElderly));
    results.add(new DataRow(4034540, "Resident days - Adult", totalDaysAdult));
    results.add(new DataRow(4014520, "New Resident Admission - Elderly", totalNewAdmissionsElderly));
    results.add(new DataRow(4014540, "New Resident Admission - Adult", totalNewAdmissionsAdult));
    results.add(new DataRow(4104540, "New Resident discharged - Adult", totalNewDischargeAdult));
    results.add(new DataRow(4554520, "Unique Individuals Served by FC - Elderly", uniqueElderlyClients.size()));
    results.add(new DataRow(4554540, "Unique Individuals Served by FC - Adult", uniqueAdultClients.size()));
}
From source file:com.chinamobile.bcbsp.bspcontroller.JobInProgress.java
@Override
public int getCheckNum() {
    // ljn add for migrate staff.
    if (this.openMigrateMode && this.migrateFlag) {
        LOG.info("ljn test : migrateFlag is true");
    }
    HashSet<String> workersAfter = new HashSet<String>();
    workersAfter.addAll(convert().values());
    // if (this.openMigrateMode && this.migrateFlag) {
    if (workersAfter.size() == this.workersToStaffs.size()) {
        return this.workersToStaffs.size();
    } else {
        return workersAfter.size();
    }
    // } else {
    //     return this.workersToStaffs.size();
    // }
}
From source file:dao.DirectoryAuthorDaoDb.java
/**
 * This method lists all users that match this alphabet.
 * @param alphabet - alphabet
 * @param directoryId - directoryId
 * @param userId - userId
 * @param userLogin - userLogin
 * @param accessFlag the access flag to read slave(0) or master (1)
 * @return List - list of all users
 * @throws BaseDaoException
 */
public List getUsers(String alphabet, String directoryId, String userId, String userLogin, int accessFlag)
        throws BaseDaoException {
    if (RegexStrUtil.isNull(directoryId) || RegexStrUtil.isNull(userId) || RegexStrUtil.isNull(userLogin)) {
        throw new BaseDaoException("params are null");
    }

    /** alphabet default is "a" */
    if (RegexStrUtil.isNull(alphabet)) {
        alphabet = "A";
    }

    /**
     * check user permissions
     */
    if (!diaryAdmin.isDiaryAdmin(userLogin) && !isAuthor(directoryId, userId)) {
        throw new BaseDaoException("User does not have permission to list users for this directory, "
                + directoryId + " userId = " + userId);
    }

    Fqn fqn = cacheUtil.fqn(DbConstants.USERS_ALPHABET);
    if (treeCache.exists(fqn, alphabet)) {
        HashSet dirauthors = new HashSet();
        List result = (List) treeCache.get(fqn, alphabet);
        if (result != null && result.size() > 0) {
            HashSet authorSet = listAuthorsOfDirectory(directoryId, userId, userLogin, accessFlag);
            if (authorSet != null && authorSet.size() > 0) {
                Iterator it = authorSet.iterator();
                while (it.hasNext()) {
                    Directory author = (Directory) it.next();
                    if (author != null) {
                        dirauthors.add(author.getValue(DbConstants.LOGIN));
                    }
                }
            }
        }
        if (result != null && result.size() > 0 && dirauthors.size() > 0) {
            List newUsers = new ArrayList();
            for (int i = 0; i < result.size(); i++) {
                Hdlogin hdlogin = (Hdlogin) result.get(i);
                if (hdlogin != null) {
                    if (!RegexStrUtil.isNull(hdlogin.getValue(DbConstants.LOGIN))) {
                        if (!dirauthors.contains(hdlogin.getValue(DbConstants.LOGIN))) {
                            newUsers.add(result.get(i));
                            // hdlogin.setValue(DbConstants.AUTHOR, "0");
                        }
                    }
                }
            }
            treeCache.put(fqn, alphabet, newUsers);
            return newUsers;
        }
    }

    String queryName = null;
    if (accessFlag == 1) {
        queryName = scalabilityManager.getWriteZeroScalability("showusersquery");
    } else {
        queryName = scalabilityManager.getReadZeroScalability("showusersquery");
    }
    showUsersQuery = getQueryMapper().getQuery(queryName);

    StringBuffer alphabetString = new StringBuffer("%");
    alphabetString.append(alphabet);
    alphabetString.append("%");

    List result = null;
    try {
        Object[] params = { (Object) alphabetString.toString(), (Object) alphabetString.toString(),
                (Object) alphabetString.toString() };
        result = showUsersQuery.execute(params);
    } catch (Exception e) {
        throw new BaseDaoException("error in getUsers() " + showUsersQuery.getSql(), e);
    }

    HashSet authorSet = listAuthorsOfDirectory(directoryId, userId, userLogin, accessFlag);
    if (authorSet != null && authorSet.size() > 0) {
        HashSet dirauthors = new HashSet();
        Iterator it = authorSet.iterator();
        while (it.hasNext()) {
            Directory author = (Directory) it.next();
            if (author != null) {
                dirauthors.add(author.getValue(DbConstants.LOGIN));
            }
        }
        if (result != null && result.size() > 0 && dirauthors.size() > 0) {
            List newUsers = new ArrayList();
            for (int i = 0; i < result.size(); i++) {
                Hdlogin hdlogin = (Hdlogin) result.get(i);
                if (hdlogin != null) {
                    if (!RegexStrUtil.isNull(hdlogin.getValue(DbConstants.LOGIN))) {
                        if (!dirauthors.contains(hdlogin.getValue(DbConstants.LOGIN))) {
                            newUsers.add(result.get(i));
                            // hdlogin.setValue(DbConstants.AUTHOR, "0");
                        }
                    }
                }
            }
            treeCache.put(fqn, alphabet, newUsers);
            return newUsers;
        }
    }
    return null;
}
From source file:dao.DirectoryAuthorDaoDb.java
/**
 * This method lists all users.
 * @param directoryId - directoryId
 * @param userId - userId
 * @param userLogin - userLogin
 * @param accessFlag the access flag to read slave(0) or master (1)
 * @return List - list of all users alphabets (first,last,login)
 * @throws BaseDaoException
 */
public List getAllUsersAlphabet(String directoryId, String userId, String userLogin, int accessFlag)
        throws BaseDaoException {
    if (RegexStrUtil.isNull(userLogin) || RegexStrUtil.isNull(userId) || RegexStrUtil.isNull(directoryId)) {
        throw new BaseDaoException("params are null");
    }

    /**
     * check user permissions
     */
    if (!diaryAdmin.isDiaryAdmin(userLogin) && !isAuthor(directoryId, userId)) {
        throw new BaseDaoException("User does not have permission to list users for this directory, "
                + directoryId + " userId = " + userId);
    }

    List result = null;
    Fqn fqn = cacheUtil.fqn(DbConstants.ALPHABET_ALL_USERS);
    if (treeCache.exists(fqn, DbConstants.ALPHABET_ALL_USERS)) {
        // return (List) treeCache.get(fqn, DbConstants.ALPHABET_ALL_USERS);
        result = (List) treeCache.get(fqn, DbConstants.ALPHABET_ALL_USERS);
    } else {
        String queryName = null;
        if (accessFlag == 1) {
            queryName = scalabilityManager.getWriteZeroScalability("showallusersalphabetquery");
        } else {
            queryName = scalabilityManager.getReadZeroScalability("showallusersalphabetquery");
        }
        showAllUsersAlphabetQuery = getQueryMapper().getQuery(queryName);
        try {
            result = showAllUsersAlphabetQuery.execute();
        } catch (Exception e) {
            throw new BaseDaoException("error in getAllUsersAlphabet() " + showAllUsersAlphabetQuery.getSql(),
                    e);
        }
    }

    if (result == null) {
        return null;
    } else {
        HashSet authorSet = listAuthorsOfDirectory(directoryId, userId, userLogin, accessFlag);
        HashSet dirauthors = new HashSet();
        if (authorSet != null && authorSet.size() > 0) {
            Iterator it = authorSet.iterator();
            while (it.hasNext()) {
                Directory author = (Directory) it.next();
                if (author != null) {
                    dirauthors.add(author.getValue(DbConstants.LOGIN));
                }
            }
            if (result != null && result.size() > 0 && dirauthors.size() > 0) {
                List newUsers = new ArrayList();
                for (int i = 0; i < result.size(); i++) {
                    Hdlogin hdlogin = (Hdlogin) result.get(i);
                    if (hdlogin != null) {
                        if (!RegexStrUtil.isNull(hdlogin.getValue(DbConstants.LOGIN))) {
                            if (!dirauthors.contains(hdlogin.getValue(DbConstants.LOGIN))) {
                                newUsers.add(result.get(i));
                                // hdlogin.setValue(DbConstants.AUTHOR, "0");
                            }
                        }
                    }
                }
                treeCache.put(fqn, DbConstants.ALPHABET_ALL_USERS, newUsers);
                return newUsers;
            }
        }
    }
    return null;
}
From source file:org.mule.devkit.doclet.ClassInfo.java
public ClassInfo[] getInterfaces() {
    if (mInterfaces == null) {
        if (checkLevel()) {
            HashSet<ClassInfo> interfaces = new HashSet<ClassInfo>();
            ClassInfo superclass = mRealSuperclass;
            while (superclass != null && !superclass.checkLevel()) {
                gatherHiddenInterfaces(superclass, interfaces);
                superclass = superclass.mRealSuperclass;
            }
            gatherHiddenInterfaces(this, interfaces);
            mInterfaces = interfaces.toArray(new ClassInfo[interfaces.size()]);
        } else {
            // put something here in case someone uses it
            mInterfaces = new ClassInfo[mRealInterfaces.size()];
            mRealInterfaces.toArray(mInterfaces);
        }
        Arrays.sort(mInterfaces, ClassInfo.qualifiedComparator);
    }
    return mInterfaces;
}