List of usage examples for java.util.HashSet.addAll
boolean addAll(Collection<? extends E> c);
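Before the project examples below, a minimal self-contained sketch of the method's behavior (the class name and values here are illustrative, not taken from any of the listed projects): addAll copies every element of the given collection into the set, silently skips elements already present, and returns true only if the set actually changed.

import java.util.Arrays;
import java.util.HashSet;
import java.util.List;

public class HashSetAddAllDemo {
    public static void main(String[] args) {
        // Start with a set containing two elements.
        HashSet<String> set = new HashSet<>(Arrays.asList("a", "b"));

        // addAll adds every element of the collection, ignoring duplicates,
        // and reports whether the set was modified.
        List<String> more = Arrays.asList("b", "c", "d");
        boolean changed = set.addAll(more);

        System.out.println(changed); // true ("c" and "d" were new)
        System.out.println(set);     // e.g. [a, b, c, d] (iteration order is unspecified)
    }
}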
From source file:org.hyperic.hq.authz.server.session.RoleManagerImpl.java
/**
 * List the roles that this subject is not in and that are not one of the
 * specified roles.
 *
 * @param whoami The current running user.
 * @param system If true, then only system roles are returned. If false,
 *        then only non-system roles are returned.
 * @param groupId The id of the subject.
 * @return List of roles.
 * @throws NotFoundException Unable to find a given or dependent entities.
 * @throws PermissionException whoami is not allowed to perform listRoles on
 *         this role.
 * @throws NotFoundException if the sort attribute was not recognized
 */
@Transactional(readOnly = true)
public PageList<RoleValue> getAvailableGroupRoles(AuthzSubject whoami, Integer groupId, Integer[] roleIds,
        PageControl pc) throws PermissionException, NotFoundException {
    Collection<Role> foundRoles;
    pc = PageControl.initDefaults(pc, SortAttribute.ROLE_NAME);
    int attr = pc.getSortattribute();
    switch (attr) {
    case SortAttribute.ROLE_NAME:
        foundRoles = roleDAO.findAvailableForGroup(false, groupId);
        break;
    default:
        throw new NotFoundException("Unrecognized sort attribute: " + attr);
    }
    log.debug("Found " + foundRoles.size() + " available roles for group " + groupId
            + " before permission checking");
    HashSet<Integer> index = new HashSet<Integer>();
    if (roleIds != null) {
        index.addAll(Arrays.asList(roleIds));
    }
    // Grep out the specified roles
    ArrayList<Role> roles = new ArrayList<Role>();
    for (Role r : foundRoles) {
        if (!index.contains(r.getId())) {
            roles.add(r);
        }
    }
    log.debug("Found " + roles.size() + " available roles for group " + groupId + " after exclusions");
    // AUTHZ Check - filter the viewable roles
    roles = (ArrayList) filterViewableRoles(whoami, roles);
    if (isRootRoleMember(whoami) && pc.getPagenum() == 0 && !index.contains(AuthzConstants.rootRoleId)) {
        foundRoles = roleDAO.findAvailableForGroup(true, groupId);
        for (Role role : foundRoles) {
            if (role.getId().equals(AuthzConstants.rootRoleId)) {
                roles.add(role);
            }
        }
    }
    if (pc.isDescending()) {
        Collections.reverse(roles);
    }
    log.debug("Found " + roles.size() + " available roles for group " + groupId + " after permission checking");
    PageList<RoleValue> plist = rolePager.seek(roles, pc.getPagenum(), pc.getPagesize());
    plist.setTotalSize(roles.size());
    return plist;
}
From source file:ch.unil.genescore.pathway.GeneSetLibrary.java
/**
 * Calculates the weights for sampling the genes:
 *
 * for each gene x:
 * 1) we use samplingWeightHelper_ to find all gene sets where the gene is present (as gene or as part of a metagene)
 * 2) for each of these gene sets: we update gene x's samplingWeight by adding 1/(sizeOfGeneSet^2)
 *
 * for each metaGene: replace 1) above with: find all gene sets where any of the subgenes is present.
 */
protected void calculateSamplingWeights() {
    calculateSamplingWeightHelper();
    HashSet<GeneSet> gSetSet = null;
    double size;
    double weight;
    for (Gene g : genes_) {
        if (getSamplingWeightHelper().containsKey(g.id_)) {
            gSetSet = getSamplingWeightHelper().get(g.id_);
            for (GeneSet gSet : gSetSet) {
                size = gSet.genes_.size();
                weight = 1.0 / (size * size);
                g.updateSamplingWeight(weight);
            }
        }
    }
    if (metaGenes_ != null) {
        for (MetaGene mg : metaGenes_.values()) {
            // Union of the gene sets containing any subgene of the metagene
            gSetSet = new HashSet<GeneSet>();
            for (Gene g : mg.getGenes()) {
                gSetSet.addAll(getSamplingWeightHelper().get(g.id_));
            }
            for (GeneSet gSet : gSetSet) {
                size = gSet.genes_.size();
                weight = 1.0 / (size * size);
                mg.updateSamplingWeight(weight);
            }
        }
    }
}
From source file:org.apache.axis.encoding.SerializationContext.java
/**
 * The serialize method uses hrefs to reference all non-primitive
 * values. These values are stored and serialized by calling
 * outputMultiRefs after the serialize method completes.
 */
public void outputMultiRefs() throws IOException {
    if (!doMultiRefs || (multiRefValues == null) || soapConstants == SOAPConstants.SOAP12_CONSTANTS)
        return;
    outputMultiRefsFlag = true;
    AttributesImpl attrs = new AttributesImpl();
    attrs.addAttribute("", "", "", "", "");
    String encodingURI = soapConstants.getEncodingURI();
    // explicitly state that this attribute is not a root
    String prefix = getPrefixForURI(encodingURI);
    String root = prefix + ":root";
    attrs.addAttribute(encodingURI, Constants.ATTR_ROOT, root, "CDATA", "0");

    // Make sure we put the encodingStyle on each multiref element we output.
    String encodingStyle;
    if (msgContext != null) {
        encodingStyle = msgContext.getEncodingStyle();
    } else {
        encodingStyle = soapConstants.getEncodingURI();
    }
    String encStyle = getPrefixForURI(soapConstants.getEnvelopeURI()) + ':' + Constants.ATTR_ENCODING_STYLE;
    attrs.addAttribute(soapConstants.getEnvelopeURI(), Constants.ATTR_ENCODING_STYLE, encStyle, "CDATA",
            encodingStyle);

    // Make a copy of the keySet because it could be updated during processing
    HashSet keys = new HashSet();
    keys.addAll(multiRefValues.keySet());
    Iterator i = keys.iterator();
    while (i.hasNext()) {
        while (i.hasNext()) {
            AttributesImpl attrs2 = new AttributesImpl(attrs);
            Object val = i.next();
            MultiRefItem mri = (MultiRefItem) multiRefValues.get(val);
            attrs2.setAttribute(0, "", Constants.ATTR_ID, "id", "CDATA", mri.id);
            forceSer = mri.value;

            // Now serialize the value.
            // The sendType parameter is defaulted for interop purposes.
            // Some of the remote services do not know how to
            // ascertain the type in these circumstances (though Axis does).
            serialize(multirefQName, attrs2, mri.value, mri.xmlType, null, this.sendNull, Boolean.TRUE); // mri.sendType
        }

        // Done processing the iterated values. During the serialization
        // of the values, we may have run into new nested values. These
        // were placed in the secondLevelObjects map, which we will now
        // process by changing the iterator to locate these values.
        if (secondLevelObjects != null) {
            i = secondLevelObjects.iterator();
            secondLevelObjects = null;
        }
    }

    // Reset maps and flags
    forceSer = null;
    outputMultiRefsFlag = false;
    multiRefValues = null;
    multiRefIndex = -1;
    secondLevelObjects = null;
}
From source file:com.yahoo.ycsb.workloads.MailAppCassandraWorkload.java
public void doTransactionPopDelete(DB db) throws WorkloadException {
    printDebug("------------POP Delete Transaction-------------");
    // choose a random key
    int keynum = nextKeynum();
    String keynameInbox = buildKeyName(keynum, inboxSuffix);
    HashSet<String> fields = null;
    HashSet<String> counterColumnNames = new HashSet<String>();
    counterColumnNames.add(messagecountfieldkey);
    counterColumnNames.add(mailboxsizefieldkey);
    HashMap<String, ByteIterator> counterResult = new HashMap<String, ByteIterator>();
    HashMap<String, ByteIterator> uidlResult = new HashMap<String, ByteIterator>();
    HashMap<String, ByteIterator> result = new HashMap<String, ByteIterator>();
    HashSet<String> uidlKeySet = new HashSet<String>();
    long st = System.nanoTime();
    // read counter table (POP3 STAT command)
    db.read(incrementTag + countertable, keynameInbox, counterColumnNames, counterResult);
    int statCount = Integer.valueOf(counterResult.get(messagecountfieldkey).toString());
    if (statCount <= 0) {
        long en = System.nanoTime();
        printDebug("POP Delete: Inbox empty, closing...");
        Measurements.getMeasurements().measure("POP_DELETE-Transaction", (int) ((en - st) / 1000));
        return;
    }
    // get UIDL/LIST list
    printDebug("POP Delete: reading uidl-List");
    db.read(sizetable, keynameInbox, fields, uidlResult);
    // DELE d times
    // delete messages. DB.delete() only allows deletion of whole rows, so columnNames are appended with a delimiter
    // min. deletion: 1, max. deletion: messagecount in mailbox
    uidlKeySet.addAll(uidlResult.keySet());
    printDebug("POP Read: setting up delete count generator...");
    int deleteCount = getMessageDeleteCountGenerator(0, uidlKeySet.size()).nextInt();
    printDebug("POP Read: deleting " + deleteCount + " messages from " + keynameInbox);
    if (deleteCount > 0) {
        Iterator<String> retrievedIdsIterator = uidlResult.keySet().iterator();
        for (int i = 0; i < deleteCount; i++) {
            String columnNameDelete = hashAndRangeKeyDelimiter + retrievedIdsIterator.next();
            db.delete(mailboxtable, keynameInbox + columnNameDelete);
            db.delete(sizetable, keynameInbox + columnNameDelete);
        }
        // update counter
        HashMap<String, ByteIterator> countUpdateValue = new HashMap<String, ByteIterator>();
        countUpdateValue.put(messagecountfieldkey, new StringByteIterator("-" + deleteCount));
        countUpdateValue.put(mailboxsizefieldkey, new StringByteIterator("-" + deleteCount * 100)); // 100: dummy size
        // indicate use of counter columns with incrementTag in front of the table name
        db.update(incrementTag + countertable, keynameInbox, countUpdateValue);
    }
    long en = System.nanoTime();
    Measurements.getMeasurements().measure("POP_DELETE-Transaction", (int) ((en - st) / 1000));
}
From source file:com.eTilbudsavis.etasdk.SyncManager.java
private void mergeListsToDb(List<Shoppinglist> serverList, List<Shoppinglist> localList, User user) {
    if (serverList.isEmpty() && localList.isEmpty()) {
        return;
    }
    DbHelper db = DbHelper.getInstance();
    HashMap<String, Shoppinglist> localMap = new HashMap<String, Shoppinglist>();
    HashMap<String, Shoppinglist> serverMap = new HashMap<String, Shoppinglist>();
    HashSet<String> union = new HashSet<String>();
    for (Shoppinglist sl : localList) {
        localMap.put(sl.getId(), sl);
    }
    for (Shoppinglist sl : serverList) {
        serverMap.put(sl.getId(), sl);
    }
    // Union of every list id seen either locally or on the server
    union.addAll(serverMap.keySet());
    union.addAll(localMap.keySet());
    for (String key : union) {
        if (localMap.containsKey(key)) {
            Shoppinglist localSl = localMap.get(key);
            if (serverMap.containsKey(key)) {
                Shoppinglist serverSl = serverMap.get(key);
                if (localSl.getModified().before(serverSl.getModified())) {
                    serverSl.setState(SyncState.SYNCED);
                    mNotification.edit(serverSl);
                    db.editList(serverSl, user);
                    db.cleanShares(serverSl, user);
                } else {
                    // Don't do anything, next iteration will put local changes to API
                }
            } else {
                mNotification.del(localSl);
                for (ShoppinglistItem sli : db.getItems(localSl, user)) {
                    mNotification.del(sli);
                }
                db.deleteItems(localSl.getId(), null, user);
                db.deleteList(localSl, user);
            }
        } else {
            Shoppinglist add = serverMap.get(key);
            add.setState(SyncState.TO_SYNC);
            mNotification.add(add);
            db.insertList(add, user);
        }
    }
    for (Shoppinglist sl : mNotification.getAddedLists()) {
        syncItems(sl, user);
    }
    for (Shoppinglist sl : mNotification.getEditedLists()) {
        syncItems(sl, user);
    }
}
From source file:com.eTilbudsavis.etasdk.SyncManager.java
private void mergeItemsToDb(List<ShoppinglistItem> serverItems, List<ShoppinglistItem> localItems, User user) {
    if (serverItems.isEmpty() && localItems.isEmpty()) {
        return;
    }
    DbHelper db = DbHelper.getInstance();
    HashMap<String, ShoppinglistItem> localMap = new HashMap<String, ShoppinglistItem>();
    HashMap<String, ShoppinglistItem> serverMap = new HashMap<String, ShoppinglistItem>();
    HashSet<String> union = new HashSet<String>();
    for (ShoppinglistItem sli : localItems) {
        localMap.put(sli.getId(), sli);
    }
    for (ShoppinglistItem sli : serverItems) {
        serverMap.put(sli.getId(), sli);
    }
    union.addAll(serverMap.keySet());
    union.addAll(localMap.keySet());
    for (String key : union) {
        if (localMap.containsKey(key)) {
            ShoppinglistItem localSli = localMap.get(key);
            if (serverMap.containsKey(key)) {
                ShoppinglistItem serverSli = serverMap.get(key);
                if (localSli.getModified().before(serverSli.getModified())) {
                    mNotification.edit(serverSli);
                    db.editItem(serverSli, user);
                } else if (!localSli.getMeta().toString().equals(serverSli.getMeta().toString())) {
                    // Migration code, to get comments into the DB
                    mNotification.edit(serverSli);
                    db.editItem(serverSli, user);
                } else if (localSli.equals(serverSli)) {
                    EtaLog.d(TAG, "We have a mismatch");
                }
            } else {
                ShoppinglistItem delSli = localMap.get(key);
                if (delSli.getState() == SyncState.TO_SYNC) {
                    /*
                     * Item have been added while request was in flight,
                     * ignore it for now
                     */
                } else {
                    /* Else delete the item */
                    mNotification.del(delSli);
                    db.deleteItem(delSli, user);
                }
            }
        } else {
            ShoppinglistItem serverSli = serverMap.get(key);
            mNotification.add(serverSli);
            db.insertItem(serverSli, user);
        }
    }
}
From source file:org.kuali.kra.award.budget.AwardBudgetServiceImpl.java
public List<AwardBudgetExt> getAllBudgetsForAward(Award award) {
    // HashSet de-duplicates budgets that appear in more than one award version
    HashSet<AwardBudgetExt> result = new HashSet<>();
    List<VersionHistory> versions = getVersionHistoryService().loadVersionHistory(Award.class,
            award.getAwardNumber());
    for (VersionHistory version : versions) {
        if (version.getSequenceOwnerSequenceNumber() <= award.getSequenceNumber()
                && !(version.getSequenceOwner() == null)
                && !(((Award) version.getSequenceOwner()).getAwardDocument() == null)) {
            result.addAll(((Award) version.getSequenceOwner()).getCurrentVersionBudgets());
        }
    }
    List<AwardBudgetExt> listResult = new ArrayList<>(result);
    Collections.sort(listResult);
    return listResult;
}
From source file:com.pinterest.arcee.aws.EC2HostInfoDAOImpl.java
@Override
public Set<String> getTerminatedHosts(Set<String> staleIds) throws Exception {
    HashSet<String> ids = new HashSet<>(staleIds);
    HashSet<String> terminatedHosts = new HashSet<>();
    while (!ids.isEmpty()) {
        DescribeInstancesRequest request = new DescribeInstancesRequest();
        request.setInstanceIds(ids);
        try {
            do {
                DescribeInstancesResult results = ec2Client.describeInstances(request);
                List<Reservation> reservations = results.getReservations();
                for (Reservation reservation : reservations) {
                    for (Instance instance : reservation.getInstances()) {
                        int stateCode = instance.getState().getCode();
                        String id = instance.getInstanceId();
                        if (stateCode == TERMINATED_CODE || stateCode == STOPPED_CODE) {
                            LOG.info(String.format("Instance %s has already been terminated or stopped.", id));
                            terminatedHosts.add(id);
                        }
                        ids.remove(id);
                    }
                }
                if (results.getNextToken() == null || results.getNextToken().isEmpty()) {
                    break;
                }
                request = new DescribeInstancesRequest();
                request.setInstanceIds(ids);
                request.setNextToken(results.getNextToken());
            } while (true);
            LOG.debug("Cannot find the following ids in AWS:", ids);
            terminatedHosts.addAll(ids);
            return terminatedHosts;
        } catch (AmazonServiceException ex) {
            Collection<String> invalidHostIds = handleInvalidInstanceId(ex);
            ids.removeAll(invalidHostIds);
            // add invalid host ids to the terminated host list.
            terminatedHosts.addAll(invalidHostIds);
        } catch (AmazonClientException ex) {
            LOG.error(String.format("Get AmazonClientException, exit with terminiatedHost %s",
                    terminatedHosts.toString()), ex);
            return terminatedHosts;
        }
    }
    return terminatedHosts;
}
From source file:com.thoughtworks.go.config.BasicCruiseConfig.java
@Override
public Set<ResourceConfig> getAllResources() {
    final HashSet<ResourceConfig> resourceConfigs = new HashSet<>();
    accept((pipelineConfig, stageConfig, jobConfig) -> resourceConfigs.addAll(jobConfig.resourceConfigs()));
    for (AgentConfig agent : agents) {
        resourceConfigs.addAll(agent.getResourceConfigs());
    }
    return resourceConfigs;
}
From source file:nl.mpcjanssen.simpletask.Simpletask.java
private void deleteSavedFilter(String prefsName) {
    SharedPreferences saved_filters = getSharedPreferences("filters", MODE_PRIVATE);
    // Copy the persisted id set, drop the filter being deleted, and write it back
    HashSet<String> ids = new HashSet<String>();
    ids.addAll(saved_filters.getStringSet("ids", new HashSet<String>()));
    ids.remove(prefsName);
    saved_filters.edit().putStringSet("ids", ids).apply();
    SharedPreferences filter_prefs = getSharedPreferences(prefsName, MODE_PRIVATE);
    ActiveFilter deleted_filter = new ActiveFilter();
    deleted_filter.initFromPrefs(filter_prefs);
    filter_prefs.edit().clear().apply();
    // Remove the backing shared_prefs XML file from disk
    File prefs_path = new File(this.getFilesDir(), "../shared_prefs");
    File prefs_xml = new File(prefs_path, prefsName + ".xml");
    final boolean deleted = prefs_xml.delete();
    if (!deleted) {
        Log.w(TAG, "Failed to delete saved filter: " + deleted_filter.getName());
    }
    updateRightDrawer();
}