Example usage for com.google.common.collect Sets intersection

Introduction

On this page you can find usage examples for com.google.common.collect Sets intersection.

Prototype

public static <E> SetView<E> intersection(final Set<E> set1, final Set<?> set2) 

Document

Returns an unmodifiable view of the intersection of two sets.
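
The returned SetView is a live, unmodifiable view rather than a copy: it reflects later changes to either backing set, its iteration order follows set1, and immutableCopy() takes a snapshot. A minimal sketch (the set contents are illustrative):

import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;
import com.google.common.collect.Sets.SetView;

import java.util.Set;

public class IntersectionExample {
    public static void main(String[] args) {
        Set<String> required = ImmutableSet.of("host", "port", "user");
        Set<String> provided = ImmutableSet.of("host", "user", "timeout");

        // Unmodifiable view of the elements common to both sets.
        SetView<String> common = Sets.intersection(required, provided);
        System.out.println(common); // [host, user]

        // Take an immutable snapshot if the backing sets may change later.
        Set<String> snapshot = common.immutableCopy();
        System.out.println(snapshot); // [host, user]
    }
}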

Usage

From source file:org.apache.cassandra.cql.CFPropDefs.java
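
This validate() method splits compaction and compression sub-options out of the main property map, then uses Sets.difference to reject unrecognized keywords and Sets.intersection to warn about obsolete ones.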

public void validate() throws InvalidRequestException, ConfigurationException {
    compactionStrategyClass = CFMetaData.DEFAULT_COMPACTION_STRATEGY_CLASS;

    // we need to remove parent:key = value pairs from the main properties
    Set<String> propsToRemove = new HashSet<String>();

    // check if we have compaction/compression options
    for (String property : properties.keySet()) {
        if (!property.contains(":"))
            continue;

        String key = property.split(":")[1];
        String val = properties.get(property);

        if (property.startsWith(COMPACTION_OPTIONS_PREFIX)) {
            compactionStrategyOptions.put(key, val);
            propsToRemove.add(property);
        }

        if (property.startsWith(COMPRESSION_PARAMETERS_PREFIX)) {
            compressionParameters.put(key, val);
            propsToRemove.add(property);
        }
    }

    for (String property : propsToRemove)
        properties.remove(property);
    // Catch the case where someone passed a kwarg that is not recognized.
    for (String bogus : Sets.difference(properties.keySet(), allowedKeywords))
        throw new InvalidRequestException(bogus + " is not a valid keyword argument for CREATE COLUMNFAMILY");
    for (String obsolete : Sets.intersection(properties.keySet(), obsoleteKeywords))
        logger.warn("Ignoring obsolete property {}", obsolete);

    // Validate min/max compaction thresholds
    Integer minCompaction = getPropertyInt(KW_MINCOMPACTIONTHRESHOLD, null);
    Integer maxCompaction = getPropertyInt(KW_MAXCOMPACTIONTHRESHOLD, null);

    if ((minCompaction != null) && (maxCompaction != null)) // Both min and max are set
    {
        if ((minCompaction > maxCompaction) && (maxCompaction != 0))
            throw new InvalidRequestException(String.format("%s cannot be larger than %s",
                    KW_MINCOMPACTIONTHRESHOLD, KW_MAXCOMPACTIONTHRESHOLD));
    } else if (minCompaction != null) // Only the min threshold is set
    {
        if (minCompaction > CFMetaData.DEFAULT_MAX_COMPACTION_THRESHOLD)
            throw new InvalidRequestException(
                    String.format("%s cannot be larger than %s, (default %s)", KW_MINCOMPACTIONTHRESHOLD,
                            KW_MAXCOMPACTIONTHRESHOLD, CFMetaData.DEFAULT_MAX_COMPACTION_THRESHOLD));
    } else if (maxCompaction != null) // Only the max threshold is set
    {
        if ((maxCompaction < CFMetaData.DEFAULT_MIN_COMPACTION_THRESHOLD) && (maxCompaction != 0))
            throw new InvalidRequestException(
                    String.format("%s cannot be smaller than %s, (default %s)", KW_MAXCOMPACTIONTHRESHOLD,
                            KW_MINCOMPACTIONTHRESHOLD, CFMetaData.DEFAULT_MIN_COMPACTION_THRESHOLD));
    }

    Integer defaultTimeToLive = getPropertyInt(KW_DEFAULT_TIME_TO_LIVE, null);

    if (defaultTimeToLive != null) {
        if (defaultTimeToLive < 0)
            throw new InvalidRequestException(String.format("%s cannot be smaller than %s, (default %s)",
                    KW_DEFAULT_TIME_TO_LIVE, 0, CFMetaData.DEFAULT_DEFAULT_TIME_TO_LIVE));
    }

    CFMetaData.validateCompactionOptions(compactionStrategyClass, compactionStrategyOptions);
}

From source file:com.opengamma.livedata.server.AbstractPersistentSubscriptionManager.java
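
Here, Sets.intersection narrows each partition of subscription specifications down to those not already covered by an existing market-data distributor before creating the remaining persistent subscriptions in bulk.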

/**
 * Creates a persistent subscription on the server for any persistent
 * subscriptions which are not yet there.
 */
private synchronized void updateServer(boolean catchExceptions) {
    Collection<LiveDataSpecification> specs = getSpecs(_persistentSubscriptions);
    Set<LiveDataSpecification> persistentSubscriptionsToMake = new HashSet<LiveDataSpecification>(specs);

    OperationTimer operationTimer = new OperationTimer(s_logger,
            "Updating server's persistent subscriptions {}", persistentSubscriptionsToMake.size());

    int partitionSize = 50; // Aim: convert subscriptions quickly enough that nothing expires, keep the server responsive, and keep retries short

    List<List<LiveDataSpecification>> partitions = Lists
            .partition(Lists.newArrayList(persistentSubscriptionsToMake), partitionSize);
    for (List<LiveDataSpecification> partition : partitions) {

        Map<LiveDataSpecification, MarketDataDistributor> marketDataDistributors = _server
                .getMarketDataDistributors(persistentSubscriptionsToMake);
        for (Entry<LiveDataSpecification, MarketDataDistributor> distrEntry : marketDataDistributors
                .entrySet()) {
            if (distrEntry.getValue() != null) {
                // An upgrade or no-op should be fast; do it now to avoid expiry
                createPersistentSubscription(catchExceptions, distrEntry.getKey());
                persistentSubscriptionsToMake.remove(distrEntry.getKey());
            }
        }

        SetView<LiveDataSpecification> toMake = Sets.intersection(new HashSet<LiveDataSpecification>(partition),
                persistentSubscriptionsToMake);
        if (!toMake.isEmpty()) {
            createPersistentSubscription(catchExceptions, toMake); //PLAT-1632 
            persistentSubscriptionsToMake.removeAll(toMake);
        }
    }
    operationTimer.finished();
    s_logger.info("Server updated");
}

From source file:com.opengamma.strata.engine.config.FunctionConfig.java
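
Sets.intersection over the two key sets detects colliding argument names, so built-in function arguments cannot be silently overridden when the maps are merged.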

private Map<String, Object> mergedArguments(Map<String, Object> arguments) {
    Set<String> intersection = Sets.intersection(this.arguments.keySet(), arguments.keySet());

    if (!intersection.isEmpty()) {
        throw new IllegalArgumentException(
                Messages.format("Built-in function arguments cannot be overridden: {}", intersection));
    }
    return ImmutableMap.<String, Object>builder().putAll(this.arguments).putAll(arguments).build();
}

From source file:tech.beshu.ror.acl.blocks.rules.impl.JwtAuthSyncRule.java
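
After parsing and validating the JWT, Sets.intersection checks whether the roles extracted from the token's claims overlap with the roles the rule requires.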

@Override
public CompletableFuture<RuleExitResult> match(RequestContext rc) {
    Optional<String> token = Optional.of(rc.getHeaders()).map(m -> m.get(settings.getHeaderName()))
            .flatMap(JwtAuthSyncRule::extractToken);

    /*
      JWT ALGO    FAMILY
      =======================
      NONE        None
            
      HS256       HMAC
      HS384       HMAC
      HS512       HMAC
            
      RS256       RSA
      RS384       RSA
      RS512       RSA
      PS256       RSA
      PS384       RSA
      PS512       RSA
            
      ES256       EC
      ES384       EC
      ES512       EC
    */

    if (!token.isPresent()) {
        logger.debug("Authorization header is missing or does not contain a bearer token");
        return CompletableFuture.completedFuture(NO_MATCH);
    }

    try {

        JwtParser parser = Jwts.parser();

        // Defaulting to HMAC for backward compatibility
        String algoFamily = settings.getAlgo().map(String::toUpperCase).orElse("HMAC");
        if (settings.getKey() == null) {
            algoFamily = "NONE";
        }

        if (!"NONE".equals(algoFamily)) {

            if ("RSA".equals(algoFamily)) {
                try {
                    byte[] keyBytes = Base64.decodeBase64(settings.getKey());
                    KeyFactory kf = KeyFactory.getInstance("RSA");
                    PublicKey pubKey = kf.generatePublic(new X509EncodedKeySpec(keyBytes));
                    parser.setSigningKey(pubKey);
                } catch (GeneralSecurityException gso) {
                    throw new RuntimeException(gso);
                }
            }

            else if ("EC".equals(algoFamily)) {
                try {
                    byte[] keyBytes = Base64.decodeBase64(settings.getKey());
                    KeyFactory kf = KeyFactory.getInstance("EC");
                    PublicKey pubKey = kf.generatePublic(new X509EncodedKeySpec(keyBytes));
                    parser.setSigningKey(pubKey);
                } catch (GeneralSecurityException gso) {
                    throw new RuntimeException(gso);
                }
            }

            else if ("HMAC".equals(algoFamily)) {
                parser.setSigningKey(settings.getKey());
            } else {
                throw new RuntimeException("unrecognised algorithm family " + algoFamily
                        + ". Should be either of: HMAC, EC, RSA, NONE");
            }
        }

        Claims jws;
        if (settings.getKey() != null) {
            jws = parser.parseClaimsJws(token.get()).getBody();
        } else {
            String[] ar = token.get().split("\\.");
            if (ar.length < 2) {
                // token is not a valid JWT
                return CompletableFuture.completedFuture(NO_MATCH);
            }
            String tokenNoSig = ar[0] + "." + ar[1] + ".";
            jws = parser.parseClaimsJwt(tokenNoSig).getBody();
        }

        Claims finalJws = jws;
        Optional<String> user = settings.getUserClaim().map(claim -> finalJws.get(claim, String.class));
        if (settings.getUserClaim().isPresent())
            if (!user.isPresent()) {
                return CompletableFuture.completedFuture(NO_MATCH);
            } else {
                rc.setLoggedInUser(new LoggedUser(user.get()));
            }

        Optional<Set<String>> roles = this.extractRoles(jws);
        if (settings.getRolesClaim().isPresent() && !roles.isPresent()) {
            return CompletableFuture.completedFuture(NO_MATCH);
        }
        if (!settings.getRoles().isEmpty()) {
            if (!roles.isPresent()) {
                return CompletableFuture.completedFuture(NO_MATCH);
            } else {
                Set<String> r = roles.get();
                if (r.isEmpty() || Sets.intersection(r, settings.getRoles()).isEmpty())
                    return CompletableFuture.completedFuture(NO_MATCH);
            }
        }

        if (settings.getExternalValidator().isPresent()) {
            return httpClient.authenticate("x", token.get()).thenApply(resp -> resp ? MATCH : NO_MATCH);
        }
        return CompletableFuture.completedFuture(MATCH);

    } catch (ExpiredJwtException | UnsupportedJwtException | MalformedJwtException | SignatureException e) {
        return CompletableFuture.completedFuture(NO_MATCH);
    }
}

From source file:org.wso2.msf4j.internal.router.HttpResourceModel.java
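
Sets.intersection enforces that each method parameter carries at most one of the supported JAX-RS parameter annotations.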

/**
 * Gathers all parameters' annotations for the given method.
 */
private List<ParameterInfo<?>> makeParamInfoList(Method method) {
    List<ParameterInfo<?>> paramInfoList = new ArrayList<>();

    Type[] paramTypes = method.getGenericParameterTypes();
    Annotation[][] paramAnnotations = method.getParameterAnnotations();

    for (int i = 0; i < paramAnnotations.length; i++) {
        Annotation[] annotations = paramAnnotations[i];

        //Can have only one from @PathParam, @QueryParam, @HeaderParam or @Context.
        if (Sets.intersection(SUPPORTED_PARAM_ANNOTATIONS, ImmutableSet.of(annotations)).size() > 1) {
            throw new IllegalArgumentException(
                    String.format("Must have exactly one annotation from %s for parameter %d in method %s",
                            SUPPORTED_PARAM_ANNOTATIONS, i, method));
        }

        Annotation annotation = null;
        Type parameterType = paramTypes[i];
        Function<?, Object> converter = null;
        String defaultVal = null;
        for (Annotation annotation0 : annotations) {
            annotation = annotation0;
            Class<? extends Annotation> annotationType = annotation.annotationType();
            if (PathParam.class.isAssignableFrom(annotationType)) {
                converter = ParamConvertUtils.createPathParamConverter(parameterType);
            } else if (QueryParam.class.isAssignableFrom(annotationType)) {
                converter = ParamConvertUtils.createQueryParamConverter(parameterType);
            } else if (FormParam.class.isAssignableFrom(annotationType)) {
                converter = ParamConvertUtils.createFormParamConverter(parameterType);
            } else if (FormDataParam.class.isAssignableFrom(annotationType)) {
                converter = ParamConvertUtils.createFormDataParamConverter(parameterType);
            } else if (HeaderParam.class.isAssignableFrom(annotationType)) {
                converter = ParamConvertUtils.createHeaderParamConverter(parameterType);
            } else if (CookieParam.class.isAssignableFrom(annotationType)) {
                converter = ParamConvertUtils.createCookieParamConverter(parameterType);
            } else if (DefaultValue.class.isAssignableFrom(annotationType)) {
                defaultVal = ((DefaultValue) annotation).value();
            }
        }
        ParameterInfo<?> parameterInfo = ParameterInfo.create(parameterType, annotation, defaultVal, converter);
        paramInfoList.add(parameterInfo);
    }

    return Collections.unmodifiableList(paramInfoList);
}

From source file:org.locationtech.geogig.geotools.plumbing.ExportOp.java
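
Before writing any features, Sets.intersection compares the attribute names of the source and target feature types and aborts the export if they share none.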

/**
 * Executes the export operation using the parameters that have been specified.
 *
 * @return a FeatureCollection with the specified features
 */
@Override
protected SimpleFeatureStore _call() {
    final ObjectDatabase database = objectDatabase();
    if (filterFeatureTypeId != null) {
        RevObject filterType = database.getIfPresent(filterFeatureTypeId);
        checkArgument(filterType instanceof RevFeatureType, "Provided filter feature type does not exist");
    }

    final SimpleFeatureStore targetStore = getTargetStore();

    final String refspec = resolveRefSpec();
    final String treePath = refspec.substring(refspec.indexOf(':') + 1);
    final RevTree rootTree = resolveRootTree(refspec);
    final NodeRef typeTreeRef = resolTypeTreeRef(refspec, treePath, rootTree);

    final ObjectId defaultMetadataId = typeTreeRef.getMetadataId();

    final RevTree typeTree = database.getTree(typeTreeRef.objectId());

    final ProgressListener progressListener = getProgressListener();

    progressListener.started();
    progressListener
            .setDescription("Exporting from " + path + " to " + targetStore.getName().getLocalPart() + "... ");

    final Iterator<SimpleFeature> filtered;
    {
        final Iterator<SimpleFeature> plainFeatures = getFeatures(typeTree, database,
                defaultMetadataId, progressListener);

        Iterator<SimpleFeature> adaptedFeatures = adaptToArguments(plainFeatures, defaultMetadataId);

        Iterator<Optional<Feature>> transformed = Iterators.transform(adaptedFeatures, ExportOp.this.function);

        Iterator<SimpleFeature> result = Iterators
                .filter(Iterators.transform(transformed, new Function<Optional<Feature>, SimpleFeature>() {
                    @Override
                    public SimpleFeature apply(Optional<Feature> input) {
                        return (SimpleFeature) input.orNull();
                    }
                }), Predicates.notNull());

        // check the resulting schema has something to contribute
        PeekingIterator<SimpleFeature> peekingIt = Iterators.peekingIterator(result);
        if (peekingIt.hasNext()) {
            Function<AttributeDescriptor, String> toString = new Function<AttributeDescriptor, String>() {
                @Override
                public String apply(AttributeDescriptor input) {
                    return input.getLocalName();
                }
            };
            SimpleFeature peek = peekingIt.peek();
            Set<String> sourceAtts = new HashSet<String>(
                    Lists.transform(peek.getFeatureType().getAttributeDescriptors(), toString));
            Set<String> targetAtts = new HashSet<String>(
                    Lists.transform(targetStore.getSchema().getAttributeDescriptors(), toString));
            if (Sets.intersection(sourceAtts, targetAtts).isEmpty()) {
                throw new GeoToolsOpException(StatusCode.UNABLE_TO_ADD,
                        "No common attributes between source and target feature types");
            }
        }

        filtered = peekingIt;
    }
    FeatureCollection<SimpleFeatureType, SimpleFeature> asFeatureCollection = new BaseFeatureCollection<SimpleFeatureType, SimpleFeature>() {

        @Override
        public FeatureIterator<SimpleFeature> features() {

            return new DelegateFeatureIterator<SimpleFeature>(filtered);
        }
    };

    // add the feature collection to the feature store
    final Transaction transaction;
    if (transactional) {
        transaction = new DefaultTransaction("create");
    } else {
        transaction = Transaction.AUTO_COMMIT;
    }
    try {
        targetStore.setTransaction(transaction);
        try {
            targetStore.addFeatures(asFeatureCollection);
            transaction.commit();
        } catch (final Exception e) {
            if (transactional) {
                transaction.rollback();
            }
            Throwables.propagateIfInstanceOf(e, GeoToolsOpException.class);
            throw new GeoToolsOpException(e, StatusCode.UNABLE_TO_ADD);
        } finally {
            transaction.close();
        }
    } catch (IOException e) {
        throw new GeoToolsOpException(e, StatusCode.UNABLE_TO_ADD);
    }

    progressListener.complete();

    return targetStore;

}

From source file:com.addthis.hydra.task.pipeline.PipelineTask.java
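
Sets.intersection detects output directories shared between two pipeline phases; every overlap found is collected into a single error message.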

public void validateWritableRootPaths() {
    if (!validateDirs) {
        return;
    }
    for (StreamMapper phase : phases) {
        phase.validateWritableRootPaths();
    }
    Set<Path>[] outputDirs = new Set[phases.length];
    StringBuilder builder = new StringBuilder();
    for (int i = 0; i < phases.length; i++) {
        if ((disable != null) && disable[i]) {
            continue;
        }
        outputDirs[i] = new HashSet<>();
        outputDirs[i].addAll(phases[i].writableRootPaths());
        for (int j = 0; j < i; j++) {
            Sets.SetView<Path> intersect = Sets.intersection(outputDirs[i], outputDirs[j]);
            if (intersect.size() > 0) {
                String message = String.format("Phases %d and %d have overlapping output directories: \"%s\"\n",
                        (j + 1), (i + 1), intersect.toString());
                builder.append(message);
            }
        }
    }
    if (builder.length() > 0) {
        throw new IllegalArgumentException(builder.toString());
    }
}

From source file:bear.core.Stage.java
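
Sets.intersection filters the addresses gathered from the requested roles down to those that actually belong to this stage.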

public Collection<? extends Address> getHostsForRoles(List<String> stringRoles) {
    List<Role> roles;

    try {
        roles = newArrayList(transform(stringRoles, forMap(stages.get().rolenameToRole)));
    } catch (IllegalArgumentException e) {
        throw new StagesException("role not found: " + e.getMessage() + " on stage '" + name + "'");
    }

    LinkedHashSet<Address> hashSet = null;

    hashSet = Sets.newLinkedHashSet(concat(transform(roles, forMap(stages.get().roleToAddresses.asMap()))));

    return Lists.newArrayList(Sets.intersection(hashSet, addresses));
}

From source file:org.apache.cassandra.service.ActiveRepairService.java
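
When data centers are specified, Sets.intersection restricts the candidate repair neighbors to the endpoints located in those data centers.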

/**
 * Return all of the neighbors with whom we share the provided range.
 *
 * @param keyspaceName keyspace to repair
 * @param toRepair token to repair
 * @param dataCenters the data centers to involve in the repair
 *
 * @return neighbors with whom we share the provided range
 */
public static Set<InetAddress> getNeighbors(String keyspaceName, Range<Token> toRepair,
        Collection<String> dataCenters, Collection<String> hosts) {
    StorageService ss = StorageService.instance;
    Map<Range<Token>, List<InetAddress>> replicaSets = ss.getRangeToAddressMap(keyspaceName);
    Range<Token> rangeSuperSet = null;
    for (Range<Token> range : ss.getLocalRanges(keyspaceName)) {
        if (range.contains(toRepair)) {
            rangeSuperSet = range;
            break;
        } else if (range.intersects(toRepair)) {
            throw new IllegalArgumentException(
                    "Requested range intersects a local range but is not fully contained in one; this would lead to imprecise repair");
        }
    }
    if (rangeSuperSet == null || !replicaSets.containsKey(rangeSuperSet))
        return Collections.emptySet();

    Set<InetAddress> neighbors = new HashSet<>(replicaSets.get(rangeSuperSet));
    neighbors.remove(FBUtilities.getBroadcastAddress());

    if (dataCenters != null && !dataCenters.isEmpty()) {
        TokenMetadata.Topology topology = ss.getTokenMetadata().cloneOnlyTokenMap().getTopology();
        Set<InetAddress> dcEndpoints = Sets.newHashSet();
        Multimap<String, InetAddress> dcEndpointsMap = topology.getDatacenterEndpoints();
        for (String dc : dataCenters) {
            Collection<InetAddress> c = dcEndpointsMap.get(dc);
            if (c != null)
                dcEndpoints.addAll(c);
        }
        return Sets.intersection(neighbors, dcEndpoints);
    } else if (hosts != null && !hosts.isEmpty()) {
        Set<InetAddress> specifiedHost = new HashSet<>();
        for (final String host : hosts) {
            try {
                final InetAddress endpoint = InetAddress.getByName(host.trim());
                if (endpoint.equals(FBUtilities.getBroadcastAddress()) || neighbors.contains(endpoint))
                    specifiedHost.add(endpoint);
            } catch (UnknownHostException e) {
                throw new IllegalArgumentException("Unknown host specified " + host, e);
            }
        }

        if (!specifiedHost.contains(FBUtilities.getBroadcastAddress()))
            throw new IllegalArgumentException("The current host must be part of the repair");

        if (specifiedHost.size() <= 1) {
            String msg = "Repair requires at least two endpoints that are neighbours before it can continue, the endpoint used for this repair is %s, "
                    + "other available neighbours are %s but these neighbours were not part of the supplied list of hosts to use during the repair (%s).";
            throw new IllegalArgumentException(String.format(msg, specifiedHost, neighbors, hosts));
        }

        specifiedHost.remove(FBUtilities.getBroadcastAddress());
        return specifiedHost;

    }

    return neighbors;
}

From source file:org.eclipse.tracecompass.internal.segmentstore.core.treemap.TreeMapStore.java
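
Sets.intersection combines two index lookups, keeping only the segments that start before the end of the query window and also end after its start.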

@Override
public Iterable<E> getIntersectingElements(long start, long end) {
    fLock.readLock().lock();
    try {
        Iterable<E> matchStarts = Iterables.concat(fStartTimesIndex.asMap().headMap(end, true).values());
        Iterable<E> matchEnds = Iterables.concat(fEndTimesIndex.asMap().tailMap(start, true).values());
        return checkNotNull(Sets.intersection(Sets.newHashSet(matchStarts), Sets.newHashSet(matchEnds)));
    } finally {
        fLock.readLock().unlock();
    }
}