Example usage for java.util.Set.removeAll

List of usage examples for java.util.Set.removeAll

Introduction

On this page you can find example usage for java.util.Set.removeAll.

Prototype

boolean removeAll(Collection<?> c);

Document

Removes from this set all of its elements that are contained in the specified collection (optional operation).
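
It returns true if the set changed as a result of the call. A minimal standalone sketch of the contract (illustrative only, not taken from any of the projects below):

import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

Set<String> colors = new HashSet<>(Arrays.asList("red", "green", "blue"));
List<String> toRemove = Arrays.asList("green", "blue", "yellow");
boolean changed = colors.removeAll(toRemove); // true: the set was modified
// colors now contains only "red"; elements of toRemove that were never in
// the set ("yellow") are simply ignored.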

Usage

From source file:com.evolveum.icf.dummy.resource.DummyObject.java

public void removeAttributeValues(String name, Collection<Object> values)
        throws SchemaViolationException, ConnectException, FileNotFoundException {
    checkModifyBreak();
    Set<Object> currentValues = attributes.get(name);
    if (currentValues == null) {
        currentValues = new HashSet<Object>();
        attributes.put(name, currentValues);
    }

    Set<Object> valuesToCheck = new HashSet<Object>();
    valuesToCheck.addAll(currentValues);
    valuesToCheck.removeAll(values);
    checkSchema(name, valuesToCheck, "remove");

    Iterator<Object> iterator = currentValues.iterator();
    while (iterator.hasNext()) {
        Object currentValue = iterator.next();
        boolean found = false;
        for (Object value : values) {
            if (resource.isCaseIgnoreValues() && currentValue instanceof String && value instanceof String) {
                if (StringUtils.equalsIgnoreCase((String) currentValue, (String) value)) {
                    found = true;
                    break;
                }
            } else {
                if (currentValue.equals(value)) {
                    found = true;
                    break;
                }
            }
        }
        if (found) {
            iterator.remove();
        }
    }

    recordModify();
}
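
A design note on the example above: calling currentValues.removeAll(values) directly would only drop exact equals() matches, which is why the case-ignoring path iterates by hand. A standalone sketch of the limitation (illustrative, not project code):

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
import java.util.TreeSet;

Set<String> current = new HashSet<>(Arrays.asList("Foo", "Bar"));
current.removeAll(Arrays.asList("foo")); // removes nothing: "Foo".equals("foo") is false

Set<String> ci = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
ci.addAll(Arrays.asList("Foo", "Bar"));
ci.removeAll(Arrays.asList("foo")); // removes "Foo" here, but only because the receiver
// is larger than the argument; otherwise AbstractSet.removeAll consults the argument's
// contains(), which is case-sensitive again.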

From source file:com.rmn.qa.servlet.BmpServlet.java

/**
 * Starts a new BrowserMobProxy. Note that either recordHar must be set to true or credentials must be
 * provided; otherwise a proxy will not be created.
 *
 * Content should be a json object in the following form.
 * 
 * <pre>
{
  "uuid": "my-uuid",          // required
  "recordHar": "true",
  "credentials": [{
    "domain": "",
    "username": "",
    "password": ""
  }]
}
 * </pre>
 * 
 * @return Responds with 201 Created and the URL in the Location header if the proxy is created, or with
 *         400 Bad Request if the uuid is not specified or there is no reason to create the proxy (see above).
 * 
 */
@Override
protected void doPost(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException {
    try {
        JsonNode input = getNodeFromRequest(request);
        String uuid = getJsonString(input, "uuid");
        if (StringUtils.isBlank(uuid)) {
            log.error("uuid not  present");
            response.sendError(HttpServletResponse.SC_BAD_REQUEST, "uuid must be specified in json");
            return;
        }

        JsonNode harRecording = input.get("recordHar");
        boolean recordingHar = harRecording != null && harRecording.asBoolean(false);
        BrowserMobProxy proxy = null;
        if (recordingHar) {
            proxy = new BrowserMobProxyServer();
            Set<CaptureType> set = new HashSet<CaptureType>(CaptureType.getRequestCaptureTypes());
            set.addAll(CaptureType.getResponseCaptureTypes());
            set.removeAll(CaptureType.getBinaryContentCaptureTypes());
            proxy.setHarCaptureTypes(set);
        }
        JsonNode creds = input.get("credentials");
        if (creds != null) {
            if (proxy == null) {
                proxy = new BrowserMobProxyServer();
            }
            if (creds.isArray()) {
                ArrayNode array = (ArrayNode) creds;
                Iterator<JsonNode> elements = array.elements();
                while (elements.hasNext()) {
                    JsonNode cred = elements.next();
                    addCredentials(proxy, cred);
                }
            } else {
                addCredentials(proxy, creds);
            }
        }
        if (proxy == null) {
            log.error("Nothing for proxy to do");
            response.sendError(HttpServletResponse.SC_BAD_REQUEST,
                    "Har recording or credentials not specified. There is no reason to start a proxy.");
            return;
        } else {
            String localhostname;
            // Try and get the IP address from the system property
            String runTimeHostName = System.getProperty(AutomationConstants.IP_ADDRESS);
            try {
                if (runTimeHostName == null) {
                    log.warn("Host name could not be determined from system property.");
                }
                localhostname = (runTimeHostName != null) ? runTimeHostName
                        : InetAddress.getLocalHost().getHostName();
            } catch (UnknownHostException e) {
                log.error("Error parsing out host name", e);
                response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR,
                        "Host name could not be determined: " + e);
                return;
            }

            // build the response
            BmpProxyRegistry.getInstance().addProxy(uuid, proxy);
            proxy.start();
            response.setStatus(HttpServletResponse.SC_CREATED);
            response.setHeader("Location", localhostname + ":" + proxy.getPort());
        }
    } catch (Exception e) {
        log.error("Error starting proxy: " + e, e);
        response.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, "Error starting proxy: " + e);
    }
}
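
The removeAll call above carves the binary content types out of the union of request and response capture types, so HAR recording captures everything except binary bodies. Assuming the usual BrowserMob packaging where CaptureType is an enum, the same set algebra can be written with EnumSet (a sketch, not the project's code):

import java.util.EnumSet;

import net.lightbody.bmp.proxy.CaptureType;

EnumSet<CaptureType> harTypes = EnumSet.noneOf(CaptureType.class);
harTypes.addAll(CaptureType.getRequestCaptureTypes());
harTypes.addAll(CaptureType.getResponseCaptureTypes());
harTypes.removeAll(CaptureType.getBinaryContentCaptureTypes());
proxy.setHarCaptureTypes(harTypes);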

From source file:main.java.edu.isistan.genCom.redSocial.RedSocial.java

/**
 * Returns the degree of reachability by computing the distance between every pair of vertices remaining in
 * the network after a given set of vertices is removed.
 * 
 * Degree of reachability by Borgatti
 * 
 * @param comission
 *            Nodes to be removed
 * @return List of distances
 */
public List<Double> getDegreeOfReachability(List<Investigador> comission) {
    List<Double> distancias = new ArrayList<>();

    try {
        RedSocial reduced = null;

        reduced = (RedSocial) this.clone();

        Set<Investigador> invSet = new HashSet<>(getNodos());
        invSet.removeAll(comission);

        reduced.reducirA(invSet);

        UnweightedShortestPath<Investigador, String> uWSP =
                new UnweightedShortestPath<Investigador, String>(reduced.getRed());

        List<Investigador> nodos = reduced.getNodos();

        for (int i = 0; i < nodos.size(); i++) {
            Investigador inv1 = nodos.get(i);

            for (int j = 0; j < i; j++) {
                Investigador inv2 = nodos.get(j);

                Number dist = uWSP.getDistance(inv1, inv2);

                Double d = dist != null ? dist.doubleValue() : Double.POSITIVE_INFINITY;

                distancias.add(d);
            }
        }

    } catch (CloneNotSupportedException e) {
        e.printStackTrace();
    }

    return distancias;
}
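
Two details of the removeAll call above are worth noting: the parameter is typed Collection<?>, so the List comission can be subtracted from the Set directly, and membership is decided by the elements' equals()/hashCode(). A standalone sketch:

import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

Set<String> nodes = new HashSet<>(Arrays.asList("a", "b", "c"));
List<String> commission = Arrays.asList("b", "c");
nodes.removeAll(commission); // nodes is now [a]
// Caveat: when the set is not larger than the argument, AbstractSet.removeAll calls
// the argument's contains() per element, a linear scan for a List; wrapping a large
// list in a HashSet first avoids the quadratic worst case.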

From source file:com.puppycrawl.tools.checkstyle.checks.TranslationCheck.java

/**
 * Compares the key sets of the given property files (arranged in a map)
 * with the specified key set. All missing keys are reported.
 * @param keys the set of keys to compare with
 * @param fileMap a Map from property files to their key sets
 */
private void compareKeySets(Set<Object> keys, SetMultimap<File, Object> fileMap) {

    for (File currentFile : fileMap.keySet()) {
        final MessageDispatcher dispatcher = getMessageDispatcher();
        final String path = currentFile.getPath();
        dispatcher.fireFileStarted(path);
        final Set<Object> currentKeys = fileMap.get(currentFile);

        // Clone the keys so that they are not lost
        final Set<Object> keysClone = Sets.newHashSet(keys);
        keysClone.removeAll(currentKeys);

        // Remaining elements in the key set are missing in the current file
        if (!keysClone.isEmpty()) {
            for (Object key : keysClone) {
                log(0, MSG_KEY, key);
            }
        }
        fireErrors(path);
        dispatcher.fireFileFinished(path);
    }
}
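
The clone-then-removeAll step above protects the shared keys set, since removeAll mutates its receiver. Because Guava is already in use here (Sets.newHashSet), Sets.difference would be a non-destructive alternative that returns a live view rather than a copy; a sketch, not the project's actual code:

// Unmodifiable view of the keys missing from currentKeys; no copy is made.
final Set<Object> missing = Sets.difference(keys, currentKeys);
for (Object key : missing) {
    log(0, MSG_KEY, key);
}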

From source file:com.netflix.spinnaker.halyard.config.config.v1.HalconfigParser.java

/**
 * Deletes all files in the staging directory that are not referenced in the hal config.
 */
public void cleanLocalFiles(Path stagingDirectoryPath) {
    if (!GlobalApplicationOptions.getInstance().isUseRemoteDaemon()) {
        return;
    }
    Halconfig halconfig = getHalconfig();
    Set<String> referencedFiles = new HashSet<String>();
    Consumer<Node> fileFinder = n -> referencedFiles.addAll(n.localFiles().stream().map(f -> {
        try {
            f.setAccessible(true);
            return (String) f.get(n);
        } catch (IllegalAccessException e) {
            throw new RuntimeException("Failed to clean staging directory: " + e.getMessage(), e);
        } finally {
            f.setAccessible(false);
        }
    }).filter(Objects::nonNull).collect(Collectors.toSet()));
    halconfig.recursiveConsume(fileFinder);

    Set<String> existingStagingFiles = ((List<File>) FileUtils.listFiles(stagingDirectoryPath.toFile(),
            TrueFileFilter.INSTANCE, TrueFileFilter.INSTANCE)).stream().map(f -> f.getAbsolutePath())
                    .collect(Collectors.toSet());

    existingStagingFiles.removeAll(referencedFiles);

    try {
        for (String f : existingStagingFiles) {
            FileUtils.forceDelete(new File(f));
        }
    } catch (IOException e) {
        throw new HalException(FATAL, "Failed to clean staging directory: " + e.getMessage(), e);
    }
}

From source file:hudson.plugins.clearcase.action.AbstractCheckoutAction.java

protected AbstractCheckoutAction.LoadRulesDelta getLoadRulesDelta(Set<String> configSpecLoadRules,
        Launcher launcher) {
    Set<String> removedLoadRules = new LinkedHashSet<String>(configSpecLoadRules);
    Set<String> addedLoadRules = new LinkedHashSet<String>();
    if (!ArrayUtils.isEmpty(loadRules)) {
        for (String loadRule : loadRules) {
            addedLoadRules.add(ConfigSpec.cleanLoadRule(loadRule, launcher.isUnix()));
        }
        removedLoadRules.removeAll(addedLoadRules);
        addedLoadRules.removeAll(configSpecLoadRules);
        PrintStream logger = launcher.getListener().getLogger();
        for (String removedLoadRule : removedLoadRules) {
            logger.println("Removed load rule : " + removedLoadRule);
        }
        for (String addedLoadRule : addedLoadRules) {
            logger.println("Added load rule : " + addedLoadRule);
        }
    }
    return new AbstractCheckoutAction.LoadRulesDelta(removedLoadRules, addedLoadRules);
}
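
The pair of removeAll calls above computes both sides of a symmetric difference between the config spec's load rules and the cleaned incoming ones. Reduced to its essence (standalone sketch):

import java.util.Arrays;
import java.util.LinkedHashSet;
import java.util.Set;

Set<String> before = new LinkedHashSet<>(Arrays.asList("a", "b"));
Set<String> after = new LinkedHashSet<>(Arrays.asList("b", "c"));

Set<String> removed = new LinkedHashSet<>(before);
removed.removeAll(after); // [a]: present before, gone now

Set<String> added = new LinkedHashSet<>(after);
added.removeAll(before); // [c]: newly introduced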

From source file:org.syncope.core.rest.data.UserDataBinder.java

@Transactional(readOnly = true)
public SyncopeUser getUserFromId(final Long userId) throws NotFoundException, UnauthorizedRoleException {

    if (userId == null) {
        throw new NotFoundException("Null user id");
    }

    SyncopeUser user = userDAO.find(userId);
    if (user == null) {
        throw new NotFoundException("User " + userId);
    }

    Set<Long> roleIds = user.getRoleIds();
    Set<Long> adminRoleIds = EntitlementUtil.getRoleIds(EntitlementUtil.getOwnedEntitlementNames());
    roleIds.removeAll(adminRoleIds);
    if (!roleIds.isEmpty()) {
        throw new UnauthorizedRoleException(roleIds);
    }

    return user;
}

From source file:org.syncope.core.rest.data.UserDataBinder.java

@Transactional(readOnly = true)
public SyncopeUser getUserFromUsername(final String username)
        throws NotFoundException, UnauthorizedRoleException {

    if (username == null) {
        throw new NotFoundException("Null username");
    }

    SyncopeUser user = userDAO.find(username);
    if (user == null) {
        throw new NotFoundException("User " + username);
    }

    Set<Long> roleIds = user.getRoleIds();
    Set<Long> adminRoleIds = EntitlementUtil.getRoleIds(EntitlementUtil.getOwnedEntitlementNames());
    roleIds.removeAll(adminRoleIds);
    if (!roleIds.isEmpty()) {
        throw new UnauthorizedRoleException(roleIds);
    }

    return user;
}
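
In both methods above, removeAll mutates the set returned by user.getRoleIds(), which is only safe if getRoleIds() hands back a copy; if it exposed the entity's live collection, the authorization check would silently strip roles from the user. Whether it copies is an assumption about this codebase, so a defensive copy makes the intent explicit (sketch):

// Assumption: getRoleIds() may return a live collection, so copy before mutating.
Set<Long> roleIds = new HashSet<>(user.getRoleIds());
roleIds.removeAll(adminRoleIds);
if (!roleIds.isEmpty()) {
    throw new UnauthorizedRoleException(roleIds);
}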

From source file:edu.stanford.muse.index.Lexicon.java

private Set<Document> getDocsWithNoEmotions(Indexer indexer, Collection<Document> docs,
        boolean originalContentOnly) {
    Set<Document> result = new LinkedHashSet<Document>(docs);
    result.removeAll(getDocsWithAnyEmotions(indexer, docs, originalContentOnly));
    return result;
}

From source file:dk.netarkivet.harvester.harvesting.HarvestController.java

/**
 * Get an index for deduplication. This will make a call to the index server, requesting an index for the given IDs.
 * The files will then be cached locally.
 * <p>
 * If we request an index for IDs that don't exist or have problems, we get a smaller set of IDs in our cache
 * files, and the next time we ask for the same index we will call the index server again. This is handled
 * well, though: if the IDs are still missing, the reply tells us to use the cached, smaller index anyway.
 *
 * @param metadataEntries list of metadataEntries to get jobIDs from.
 * @return a directory containing the index itself.
 * @throws IOFailure on errors retrieving the index from the client. FIXME: handle a missing index more
 * gracefully, and add a setting to disable deduplication when no index is available.
 */
private File fetchDeduplicateIndex(List<MetadataEntry> metadataEntries) {
    // Get list of jobs, which should be used for duplicate reduction
    // and retrieve a luceneIndex from the IndexServer
    // based on the crawl.logs from these jobs and their CDX'es.
    Set<Long> jobIDsForDuplicateReduction = new HashSet<Long>(
            parseJobIDsForDuplicateReduction(metadataEntries));

    // The client for requesting job index.
    JobIndexCache jobIndexCache = IndexClientFactory.getDedupCrawllogInstance();

    // Request the index and return the index file.
    Index<Set<Long>> jobIndex = jobIndexCache.getIndex(jobIDsForDuplicateReduction);
    // Check which jobs didn't become part of the index.
    Set<Long> diffSet = new HashSet<Long>(jobIDsForDuplicateReduction);
    diffSet.removeAll(jobIndex.getIndexSet());
    if (log.isDebugEnabled()) {
        log.debug("Received deduplication index containing {} jobs. {}", jobIndex.getIndexSet().size(),
                ((diffSet.size() > 0) ? "Missing jobs: " + StringUtils.conjoin(",", diffSet) : ""));
    }

    return jobIndex.getIndexFile();
}
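
As in several examples above, copying into diffSet keeps jobIDsForDuplicateReduction intact for later use, and removeAll then leaves exactly the requested job IDs that the returned index does not cover. The essence of the check (standalone sketch):

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

Set<Long> requested = new HashSet<>(Arrays.asList(1L, 2L, 3L));
Set<Long> indexed = new HashSet<>(Arrays.asList(1L, 3L));
Set<Long> missing = new HashSet<>(requested); // copy so 'requested' survives
missing.removeAll(indexed); // [2]: jobs absent from the returned index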