List of usage examples for java.util.HashSet.removeAll
boolean removeAll(Collection<?> c);
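Before the examples from real projects, here is a minimal, self-contained sketch (not taken from any of the source files below) of what removeAll does: it removes from the receiving set every element that is contained in the argument collection and returns true only if the set changed, which makes it a convenient in-place set difference.

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public class RemoveAllDemo {
    public static void main(String[] args) {
        Set<String> members = new HashSet<>(Arrays.asList("alice", "bob", "carol"));
        Set<String> existing = new HashSet<>(Arrays.asList("bob", "dave"));

        // Removes every element of 'existing' from 'members'; returns true
        // because the set changed ("bob" was removed).
        boolean changed = members.removeAll(existing);

        System.out.println(changed);  // true
        System.out.println(members);  // [alice, carol] (iteration order not guaranteed)
    }
}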
From source file:dao.DirectoryUserDaoDb.java
/**
 * Adds the users in a directory's access list.
 * @param memberList the list of member logins to add
 * @param directoryId directoryId
 * @param userId the id of the user; used to check whether this user has permission to add users
 * @param userLogin the login of the user; used to check whether this user has permission to add users
 * @return String - space-separated logins of users who are not members (no matching account was found)
 * @throws BaseDaoException
 */
public String addUsers(List memberList, String directoryId, String userId, String userLogin)
        throws BaseDaoException {
    if ((memberList == null) || RegexStrUtil.isNull(directoryId) || RegexStrUtil.isNull(userId)
            || RegexStrUtil.isNull(userLogin)) {
        throw new BaseDaoException("params are null");
    }

    /* get existing users */
    List userList = listUsers(directoryId, DbConstants.READ_FROM_SLAVE);
    List existingUserList = null;
    if (userList != null) {
        existingUserList = new ArrayList();
        for (int i = 0; i < userList.size(); i++) {
            if ((Directory) userList.get(i) != null) {
                existingUserList.add(((Directory) userList.get(i)).getValue(DbConstants.LOGIN));
            }
        }
    }
    logger.info("memberList = " + memberList.toString());
    logger.info("existingUserList = " + existingUserList.toString());

    /* remove existing users from the memberList */
    List idList = null;
    if ((existingUserList != null) && (existingUserList.size() > 0)) {
        HashSet hs1 = new HashSet(memberList);
        HashSet hs2 = new HashSet(existingUserList);
        if (hs1.removeAll(hs2)) {
            if (hs1 != null) {
                logger.info("hs1 = " + hs1.toString());
            }
            idList = new ArrayList(hs1);
        } else {
            idList = memberList;
        }
    } else {
        idList = memberList;
    }

    /* add only new valid users in the directory */
    StringBuffer notMembers = new StringBuffer();
    try {
        if (idList != null && idList.size() > 0) {
            logger.info("idList = " + idList.toString());
            for (int i = 0; i < idList.size(); i++) {
                if (idList.get(i) != null) {
                    logger.info("idList.get(i)=" + idList.get(i) + " i = " + i);
                    String mLogin = (String) idList.get(i);
                    Hdlogin hdlogin = getLoginid(mLogin);
                    if (hdlogin == null) {
                        notMembers.append(mLogin);
                        notMembers.append(" ");
                    } else {
                        addUser(directoryId, mLogin, userId, userLogin);
                    }
                }
            }
        }
    } catch (BaseDaoException e) {
        throw new BaseDaoException(
                "Exception occurred in addUser(), DirectoryAddUser for userLogin " + userLogin, e);
    }
    logger.info("notMembers = " + notMembers);
    if (notMembers != null) {
        return notMembers.toString();
    } else {
        return null;
    }
}
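A note on the branch around hs1.removeAll(hs2) above: removeAll reports only whether the receiving set changed, so when the new members and the existing users do not overlap it returns false and the code keeps the original memberList. A minimal sketch of that case, using hypothetical stand-in values rather than the DAO's real data:

import java.util.Arrays;
import java.util.HashSet;

class RemoveAllNoOverlap {
    public static void main(String[] args) {
        // Hypothetical stand-ins for memberList / existingUserList above.
        HashSet<String> requested = new HashSet<>(Arrays.asList("newUser1", "newUser2"));
        HashSet<String> existing = new HashSet<>(Arrays.asList("otherUser"));

        // No element of 'existing' is present in 'requested', so nothing is
        // removed, removeAll returns false, and a caller like the DAO above
        // would fall back to the original list unchanged.
        System.out.println(requested.removeAll(existing)); // false
        System.out.println(requested);                     // [newUser1, newUser2]
    }
}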
From source file:com.enonic.cms.business.core.content.index.ContentIndexServiceImpl.java
public Map<String, List<ContentIndexEntity>> createIndexLists(
        final HashMap<String, List<ContentIndexEntity>> existingEntities,
        final HashMap<String, List<ContentIndexEntity>> newEntities) {
    Map<String, List<ContentIndexEntity>> values = new HashMap<String, List<ContentIndexEntity>>();

    final List<ContentIndexEntity> unchanged = new ArrayList<ContentIndexEntity>();
    final List<ContentIndexEntity> changed = new ArrayList<ContentIndexEntity>();
    final List<ContentIndexEntity> newIndexes = new ArrayList<ContentIndexEntity>();
    List<ContentIndexEntity> removed = new ArrayList<ContentIndexEntity>();

    // for ( ContentIndexEntity contentIndexEntity : newEntities.keySet() )
    for (String path : newEntities.keySet()) {
        IndexAndState indexAndState = findMatchingContentIndexEntityAndCheckIndexState(
                existingEntities.get(path), newEntities.get(path));
        if (indexAndState.state == IndexState.UNCHANGED) {
            unchanged.addAll(indexAndState.entity);
        } else if (indexAndState.state == IndexState.CHANGED) {
            changed.addAll(indexAndState.entity);
        } else if (indexAndState.state == IndexState.NEW) {
            newIndexes.addAll(indexAndState.entity);
        } else if (indexAndState.state == IndexState.CHANGED_AND_LENGTHENED) {
            changed.addAll(indexAndState.entity);
            newIndexes.addAll(indexAndState.extra);
        } else if (indexAndState.state == IndexState.CHANGED_AND_SHORTENED) {
            changed.addAll(indexAndState.entity);
            removed.addAll(indexAndState.removed);
        }
    }

    HashSet<String> removedPaths = new HashSet<String>(existingEntities.keySet());
    removedPaths.removeAll(newEntities.keySet());
    for (String removedPath : removedPaths) {
        removed.addAll(existingEntities.get(removedPath));
    }

    values.put("unchanged", unchanged);
    values.put("changed", changed);
    values.put("new", newIndexes);
    values.put("removed", removed);
    return values;
}
From source file:org.apache.hyracks.algebricks.rewriter.rules.subplan.IntroduceGroupByForSubplanRule.java
@Override
public boolean rewritePost(Mutable<ILogicalOperator> opRef, IOptimizationContext context)
        throws AlgebricksException {
    AbstractLogicalOperator op0 = (AbstractLogicalOperator) opRef.getValue();
    if (op0.getOperatorTag() != LogicalOperatorTag.SUBPLAN) {
        return false;
    }
    SubplanOperator subplan = (SubplanOperator) op0;

    Iterator<ILogicalPlan> plansIter = subplan.getNestedPlans().iterator();
    ILogicalPlan p = null;
    while (plansIter.hasNext()) {
        p = plansIter.next();
    }
    if (p == null) {
        return false;
    }
    if (p.getRoots().size() != 1) {
        return false;
    }
    Mutable<ILogicalOperator> subplanRoot = p.getRoots().get(0);
    AbstractLogicalOperator op1 = (AbstractLogicalOperator) subplanRoot.getValue();

    Mutable<ILogicalOperator> botRef = subplanRoot;
    AbstractLogicalOperator op2;
    // Project is optional
    if (op1.getOperatorTag() != LogicalOperatorTag.PROJECT) {
        op2 = op1;
    } else {
        ProjectOperator project = (ProjectOperator) op1;
        botRef = project.getInputs().get(0);
        op2 = (AbstractLogicalOperator) botRef.getValue();
    }
    if (op2.getOperatorTag() != LogicalOperatorTag.AGGREGATE) {
        return false;
    }
    AggregateOperator aggregate = (AggregateOperator) op2;

    Set<LogicalVariable> free = new HashSet<LogicalVariable>();
    VariableUtilities.getUsedVariables(aggregate, free);

    Mutable<ILogicalOperator> op3Ref = aggregate.getInputs().get(0);
    AbstractLogicalOperator op3 = (AbstractLogicalOperator) op3Ref.getValue();

    while (op3.getInputs().size() == 1) {
        Set<LogicalVariable> prod = new HashSet<LogicalVariable>();
        VariableUtilities.getProducedVariables(op3, prod);
        free.removeAll(prod);
        VariableUtilities.getUsedVariables(op3, free);
        botRef = op3Ref;
        op3Ref = op3.getInputs().get(0);
        op3 = (AbstractLogicalOperator) op3Ref.getValue();
    }

    if (op3.getOperatorTag() != LogicalOperatorTag.INNERJOIN
            && op3.getOperatorTag() != LogicalOperatorTag.LEFTOUTERJOIN) {
        return false;
    }
    AbstractBinaryJoinOperator join = (AbstractBinaryJoinOperator) op3;
    if (join.getCondition().getValue() == ConstantExpression.TRUE) {
        return false;
    }
    VariableUtilities.getUsedVariables(join, free);

    AbstractLogicalOperator b0 = (AbstractLogicalOperator) join.getInputs().get(0).getValue();
    // see if there's an NTS at the end of the pipeline
    NestedTupleSourceOperator outerNts = getNts(b0);
    if (outerNts == null) {
        AbstractLogicalOperator b1 = (AbstractLogicalOperator) join.getInputs().get(1).getValue();
        outerNts = getNts(b1);
        if (outerNts == null) {
            return false;
        }
    }

    Set<LogicalVariable> pkVars = computeGbyVars(outerNts, free, context);
    if (pkVars == null || pkVars.size() < 1) {
        // there is no non-trivial primary key, group-by keys are all live variables
        // that were produced by descendant or self
        ILogicalOperator subplanInput = subplan.getInputs().get(0).getValue();
        pkVars = new HashSet<LogicalVariable>();
        // get live variables
        VariableUtilities.getLiveVariables(subplanInput, pkVars);
        // get produced variables
        Set<LogicalVariable> producedVars = new HashSet<LogicalVariable>();
        VariableUtilities.getProducedVariablesInDescendantsAndSelf(subplanInput, producedVars);
        // retain the intersection
        pkVars.retainAll(producedVars);
    }
    AlgebricksConfig.ALGEBRICKS_LOGGER.fine("Found FD for introducing group-by: " + pkVars);

    Mutable<ILogicalOperator> rightRef = join.getInputs().get(1);
    LogicalVariable testForNull = null;
    AbstractLogicalOperator right = (AbstractLogicalOperator) rightRef.getValue();
    switch (right.getOperatorTag()) {
        case UNNEST: {
            UnnestOperator innerUnnest = (UnnestOperator) right;
            // Select [ $y != null ]
            testForNull = innerUnnest.getVariable();
            break;
        }
        case RUNNINGAGGREGATE: {
            ILogicalOperator inputToRunningAggregate = right.getInputs().get(0).getValue();
            Set<LogicalVariable> producedVars = new ListSet<LogicalVariable>();
            VariableUtilities.getProducedVariables(inputToRunningAggregate, producedVars);
            if (!producedVars.isEmpty()) {
                // Select [ $y != null ]
                testForNull = producedVars.iterator().next();
            }
            break;
        }
        case DATASOURCESCAN: {
            DataSourceScanOperator innerScan = (DataSourceScanOperator) right;
            // Select [ $y != null ]
            if (innerScan.getVariables().size() == 1) {
                testForNull = innerScan.getVariables().get(0);
            }
            break;
        }
        default:
            break;
    }

    if (testForNull == null) {
        testForNull = context.newVar();
        AssignOperator tmpAsgn = new AssignOperator(testForNull,
                new MutableObject<ILogicalExpression>(ConstantExpression.TRUE));
        tmpAsgn.getInputs().add(new MutableObject<ILogicalOperator>(rightRef.getValue()));
        rightRef.setValue(tmpAsgn);
        context.computeAndSetTypeEnvironmentForOperator(tmpAsgn);
    }

    IFunctionInfo finfoEq = context.getMetadataProvider().lookupFunction(AlgebricksBuiltinFunctions.IS_MISSING);
    ILogicalExpression isNullTest = new ScalarFunctionCallExpression(finfoEq,
            new MutableObject<ILogicalExpression>(new VariableReferenceExpression(testForNull)));
    IFunctionInfo finfoNot = context.getMetadataProvider().lookupFunction(AlgebricksBuiltinFunctions.NOT);
    ScalarFunctionCallExpression nonNullTest = new ScalarFunctionCallExpression(finfoNot,
            new MutableObject<ILogicalExpression>(isNullTest));
    SelectOperator selectNonNull = new SelectOperator(new MutableObject<ILogicalExpression>(nonNullTest), false,
            null);
    GroupByOperator g = new GroupByOperator();
    Mutable<ILogicalOperator> newSubplanRef = new MutableObject<ILogicalOperator>(subplan);
    NestedTupleSourceOperator nts = new NestedTupleSourceOperator(new MutableObject<ILogicalOperator>(g));
    opRef.setValue(g);

    selectNonNull.getInputs().add(new MutableObject<ILogicalOperator>(nts));

    List<Mutable<ILogicalOperator>> prodInpList = botRef.getValue().getInputs();
    prodInpList.clear();
    prodInpList.add(new MutableObject<ILogicalOperator>(selectNonNull));

    ILogicalPlan gPlan = new ALogicalPlanImpl(new MutableObject<ILogicalOperator>(subplanRoot.getValue()));
    g.getNestedPlans().add(gPlan);
    subplanRoot.setValue(op3Ref.getValue());
    g.getInputs().add(newSubplanRef);

    HashSet<LogicalVariable> underVars = new HashSet<LogicalVariable>();
    VariableUtilities.getLiveVariables(subplan.getInputs().get(0).getValue(), underVars);
    underVars.removeAll(pkVars);

    Map<LogicalVariable, LogicalVariable> mappedVars = buildVarExprList(pkVars, context, g, g.getGroupByList());
    context.updatePrimaryKeys(mappedVars);
    for (LogicalVariable uv : underVars) {
        g.getDecorList().add(new Pair<LogicalVariable, Mutable<ILogicalExpression>>(null,
                new MutableObject<ILogicalExpression>(new VariableReferenceExpression(uv))));
    }
    OperatorPropertiesUtil.typeOpRec(subplanRoot, context);
    OperatorPropertiesUtil.typeOpRec(gPlan.getRoots().get(0), context);
    context.computeAndSetTypeEnvironmentForOperator(g);
    return true;
}
From source file:edu.uga.cs.fluxbuster.clustering.DomainCluster.java
/**
 * Adds the candidate flux domain to the cluster.
 *
 * @param cfd the candidate flux domain to add
 */
public void addCandidateFluxDomain(CandidateFluxDomain cfd) {
    this.candidateDomains.add(cfd);
    this.domains.add(cfd.getDomainName());
    this.ips.addAll(cfd.getIps());

    // NOTE bases diversity solely on IPv4 addresses
    this.setIpDiversity(IPDiversityCalculator.ipDiversity(IPDiversityCalculator.getV4Ips(ips)));

    this.queries += cfd.getNumQueries();
    this.avgTTLs.add(cfd.getAvgTTL());
    this.growthRatios.add((double) cfd.getNumIPs() / (double) cfd.getNumQueries());

    if (cfd.getLastGrowthRatioSingleEntry() != null) {
        this.lastGrowthRatioSingleEntries.add(cfd.getLastGrowthRatioSingleEntry());
    }

    if (cfd.getLastGrowthEntriesIPs().size() > 0) {
        this.lastGrowthEntriesIPs.add(cfd.getLastGrowthEntriesIPs());
        this.lastGrowthEntriesQueries.add(cfd.getLastGrowthEntriesQueries());
    }

    if (this.candidateDomains.size() > 1) {
        Collections.sort(this.candidateDomains, new Comparator<CandidateFluxDomain>() {
            @Override
            public int compare(CandidateFluxDomain arg0, CandidateFluxDomain arg1) {
                return arg0.getLastSeen().compareTo(arg1.getLastSeen());
            }
        });

        HashSet<InetAddress> prevIps = new HashSet<InetAddress>();
        for (int i = 0; i < this.candidateDomains.size() - 1; i++) {
            prevIps.addAll(this.candidateDomains.get(i).getIps());
        }

        CandidateFluxDomain lastCFD = this.candidateDomains.get(this.candidateDomains.size() - 1);

        HashSet<InetAddress> temp = new HashSet<InetAddress>();
        temp.addAll(lastCFD.getIps());
        temp.removeAll(prevIps);

        this.lastGrowthClusterIPs = temp;
        this.lastGrowthClusterQueries = lastCFD.getNumQueries();
    }
}
From source file:com.trigger_context.Main_Service.java
public void senderSync(DataInputStream in, DataOutputStream out, String folder) {
    String tfolder = folder + (folder.charAt(folder.length() - 1) == '/' ? "" : "/");
    File f = new File(folder);
    File file[] = f.listFiles();
    // noti(file.toString(),"");
    String md5 = null;
    HashMap<String, File> hm = new HashMap<String, File>();

    HashSet<String> A = new HashSet<String>();
    for (File element : file) {
        hm.put(md5 = calculateMD5(element), element);
        A.add(md5);
    }
    // noti(hm.toString(),"");
    int numB = 0;
    try {
        numB = in.readInt();
    } catch (IOException e) {
        // TODO Auto-generated catch block
        noti("error reading 1st int in sendersync", "");
        e.printStackTrace();
    }

    HashSet<String> B = new HashSet<String>();
    for (int i = 0; i < numB; i++) {
        try {
            B.add(in.readUTF());
        } catch (IOException e1) {
            noti("error in reading md5s", "");
            e1.printStackTrace();
        }
    }

    HashSet<String> aMb = new HashSet<String>(A);
    aMb.removeAll(B);
    int l1 = aMb.size();
    try {
        out.writeInt(l1);
    } catch (IOException e) {
        // TODO Auto-generated catch block
        noti("error in writing 1st int", "");
        e.printStackTrace();
    }
    Iterator<String> itr = aMb.iterator();
    while (itr.hasNext()) {
        f = hm.get(itr.next());
        sendFile(out, f.getPath());
    }

    HashSet<String> bMa = new HashSet<String>(B);
    bMa.removeAll(A);
    int l2 = bMa.size();
    try {
        out.writeInt(l2);
    } catch (IOException e) {
        // TODO Auto-generated catch block
        noti("error in writing 2nd int", "");
        e.printStackTrace();
    }
    itr = bMa.iterator();
    while (itr.hasNext()) {
        md5 = itr.next();
        try {
            out.writeUTF(md5);
        } catch (IOException e) {
            // TODO Auto-generated catch block
            noti("error in sending md5", "");
            e.printStackTrace();
        }
        recvFile(in, folder);
    }
}
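The sync logic above boils down to computing both one-sided differences of two MD5 sets with removeAll: A minus B is what only the sender has (files to send), and B minus A is what only the receiver has (files to request back). A minimal sketch of just that set logic, with hypothetical hash values standing in for real MD5 strings:

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

class TwoWayDiff {
    public static void main(String[] args) {
        Set<String> senderHashes = new HashSet<>(Arrays.asList("md5-a", "md5-b", "md5-c"));
        Set<String> receiverHashes = new HashSet<>(Arrays.asList("md5-b", "md5-d"));

        // Files the sender has but the receiver lacks: send these.
        Set<String> toSend = new HashSet<>(senderHashes);
        toSend.removeAll(receiverHashes);   // md5-a, md5-c

        // Files the receiver has but the sender lacks: request these.
        Set<String> toRequest = new HashSet<>(receiverHashes);
        toRequest.removeAll(senderHashes);  // md5-d

        System.out.println(toSend);
        System.out.println(toRequest);
    }
}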
From source file:org.paxle.parser.impl.SubParserManager.java
/**
 * @see ISubParserManager#disabledMimeType()
 */
public Set<String> disabledMimeTypes() {
    // get all available mime-types and remove enabled mime-types
    HashSet<String> mimeTypes = new HashSet<String>(this.subParserList.keySet());
    mimeTypes.removeAll(enabledMimeTypes());
    return mimeTypes;
}
From source file:edu.cens.loci.classes.LociWifiFingerprint.java
public boolean hasNewBssid(LociWifiFingerprint sig) {
    HashSet<String> diff = new HashSet<String>(sig.getBssids());
    diff.removeAll(this.getBssids());
    /*
    Iterator<String> iter = diff.iterator();
    String debugMsg = "";
    while (iter.hasNext()) {
        String key = iter.next();
        debugMsg = debugMsg + " " + this.getSsid(key);
    }
    */
    return !diff.isEmpty();
}
From source file:pathwaynet.PathwayCalculator.java
private <E> HashMap<E, TestResultForEachVertex> testForEachComponent(Graph<E, String> graph,
        Collection<E> componentsInGroup, Collection<E> componentsConsidered, boolean onlyFromSource) {
    HashMap<E, TestResultForEachVertex> significance = new HashMap<>();

    // calculate and cache all distances
    DijkstraDistance<E, String> distances = new DijkstraDistance<>(graph);
    HashMap<E, Map<E, Number>> distancesMap = new HashMap<>();
    graph.getVertices().stream().forEach((component) -> {
        Map<E, Number> distancesFromThis = distances.getDistanceMap(component);
        distancesMap.put(component, distancesFromThis);
    });

    // calculate real in-group and out-group distances
    HashMap<E, Map<E, Number>> distancesInsideGroup = getDistancesWithGroup(distancesMap, componentsInGroup,
            componentsConsidered, onlyFromSource, true);
    HashMap<E, Map<E, Number>> distancesOutsideGroup = getDistancesWithGroup(distancesMap, componentsInGroup,
            componentsConsidered, onlyFromSource, false);

    if (distancesInsideGroup.isEmpty() || distancesOutsideGroup.isEmpty()) {
        System.err.println("WARNING: Please double check the enzyme list!");
    } else {
        HashMap<E, ArrayList<Double>> differencesProp = new HashMap<>();
        distancesInsideGroup.keySet().stream().forEach((component) -> {
            ArrayList<Double> diffIncreaseProp = estimateDifferenceOfProportionAtDistances(
                    distancesInsideGroup.get(component).values(), distancesOutsideGroup.get(component).values());
            differencesProp.put(component, diffIncreaseProp);
            //System.err.println(enzyme.getID()+"\t"+diffIncreaseProp);
        });

        // for each enzyme in the given group, estimate its significance of neighbor enrichment of enzymes in the group
        distancesInsideGroup.keySet().stream().forEach((component) -> {
            // do permutation (for numPermutations times) to generate random group with the same size and with this enzyme
            HashSet<E> allComponentsAvailable = new HashSet<>();
            allComponentsAvailable.addAll(graph.getVertices());
            allComponentsAvailable.retainAll(componentsConsidered);
            ArrayList<HashSet<E>> componentsInGroupPermutations = generatePermutatedGroupsWithFixedNode(component,
                    allComponentsAvailable, distancesInsideGroup.size());

            // for each permutation, calculate the differences of proportion between within-group and between-group path at each path length
            ArrayList<ArrayList<Double>> differencesPropPermutations = new ArrayList<>();
            componentsInGroupPermutations.stream().forEach((componentsInGroupThisPermutation) -> {
                HashSet<E> componentsOutGroupThisPermutation = new HashSet<>();
                componentsOutGroupThisPermutation.addAll(graph.getVertices());
                componentsOutGroupThisPermutation.removeAll(componentsInGroupThisPermutation);

                HashMap<E, Number> distancesInPermut = new HashMap<>();
                HashMap<E, Number> distancesOutPermut = new HashMap<>();
                allComponentsAvailable.forEach((component2) -> {
                    Number minDist = getShortestDistance(distancesMap, component, component2, onlyFromSource);
                    if (componentsInGroupThisPermutation.contains(component2) && (!component.equals(component2))
                            && minDist != null)
                        distancesInPermut.put(component2, minDist);
                    else if (componentsOutGroupThisPermutation.contains(component2) && minDist != null)
                        distancesOutPermut.put(component2, minDist);
                });
                differencesPropPermutations.add(estimateDifferenceOfProportionAtDistances(
                        distancesInPermut.values(), distancesOutPermut.values()));
            });

            // calculate the significance
            // P: based on Pearson's correlation between differences of proportions and distances
            // domain: based on the quantile of difference at each distance
            //System.err.println(component);
            double p = calculatePValue(differencesProp.get(component), differencesPropPermutations);
            int radius = estimateDomainRadius(differencesProp.get(component), differencesPropPermutations, 0.9);
            significance.put(component, new TestResultForEachVertex(p, radius));

            if (cache) {
            }
        });
    }
    return significance;
}
From source file:pathwaynet.PathwayCalculator.java
private <T> HashMap<T, Map<T, Number>> getDistancesWithGroup(HashMap<T, Map<T, Number>> distancesMap,
        Collection<T> componentsInGroup, Collection<T> componentsConsidered, boolean onlyFromSource,
        boolean insideGroup) {
    // get the in-group set and out-group set of enzymes
    HashSet<T> componentsOutsideGroupAvailable = new HashSet<>();
    componentsOutsideGroupAvailable.addAll(distancesMap.keySet());
    componentsOutsideGroupAvailable.retainAll(componentsConsidered);
    componentsOutsideGroupAvailable.removeAll(componentsInGroup);
    HashSet<T> componentsInGroupAvailable = new HashSet<>();
    componentsInGroupAvailable.addAll(distancesMap.keySet());
    componentsInGroupAvailable.retainAll(componentsConsidered);
    componentsInGroupAvailable.removeAll(componentsOutsideGroupAvailable);

    // obtain distance
    HashMap<T, Map<T, Number>> distancesFromGroup = new HashMap<>();
    if (insideGroup && componentsInGroupAvailable.size() < 2)
        System.err.println("WARNING: Fewer than TWO given components are involved in the pathway.");
    else if ((!insideGroup)
            && (componentsInGroupAvailable.isEmpty() || componentsOutsideGroupAvailable.isEmpty()))
        System.err.println(
                "WARNING: There is either no or full overlap between the given components and the ones involved in the pathway.");
    else {
        componentsInGroupAvailable.stream().forEach((component1) -> {
            distancesFromGroup.put(component1, new HashMap<>());
            distancesMap.keySet().stream().forEach((component2) -> {
                Number minDist = getShortestDistance(distancesMap, component1, component2, onlyFromSource);
                if (insideGroup
                        && (componentsInGroupAvailable.contains(component2) && (!component1.equals(component2)))
                        && minDist != null) {
                    distancesFromGroup.get(component1).put(component2, minDist);
                } else if ((!insideGroup) && componentsOutsideGroupAvailable.contains(component2)
                        && minDist != null) {
                    distancesFromGroup.get(component1).put(component2, minDist);
                }
            });
            //System.err.println(component1 + "\t" + componentsInGroupAvailable.size() + "\t" + componentsOutsideGroupAvailable.size() + "\t" + distancesFromGroup.get(component1).values());
        });
    }
    return distancesFromGroup;
}
From source file:org.hippoecm.repository.jackrabbit.SessionImplHelper.java
/**
 * Before this method is called, the Jackrabbit Session.checkPermission has already been invoked.
 * That method checks the validity of absPath and the default JCR permissions
 * (read, remove, add_node and set_property), so we don't have to check those again here.
 *
 * @param absPath
 * @param actions
 * @throws AccessControlException
 * @throws RepositoryException
 */
public void checkPermission(String absPath, String actions) throws AccessControlException, RepositoryException {
    AccessControlManager acMgr = session.getAccessControlManager();

    // build the set of actions to be checked
    HashSet<Privilege> privileges = new HashSet<Privilege>();
    for (String action : actions.split(",")) {
        privileges.add(acMgr.privilegeFromName(action));
    }
    privileges.removeAll(jcrPrivileges);
    if (privileges.size() > 0) {
        if (!acMgr.hasPrivileges(absPath, privileges.toArray(new Privilege[privileges.size()]))) {
            throw new AccessControlException("Privileges '" + actions + "' denied for " + absPath);
        }
    }
}