Example usage for java.util.Set.addAll

List of usage examples for java.util.Set.addAll

Introduction

On this page you can find example usage for java.util.Set.addAll.

Prototype

boolean addAll(Collection<? extends E> c);

Document

Adds all of the elements in the specified collection to this set if they're not already present (optional operation).
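
For example, here is a minimal self-contained sketch of this behavior (the class and variable names are illustrative and not taken from the usage examples below):

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

public class Main {
    public static void main(String[] args) {
        Set<String> colors = new HashSet<>(Arrays.asList("red", "green"));
        // "green" is already present, so only "blue" is actually added
        boolean changed = colors.addAll(Arrays.asList("green", "blue"));
        System.out.println(changed); // true, because the set was modified
        System.out.println(colors);  // red, green, blue (HashSet does not guarantee order)
    }
}

The boolean return value is true whenever the set changed as a result of the call, and false if every element was already present.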

Usage

From source file:Main.java

public static Set<String> findMatches(List<String> searchList, Set<String> targetSet)
        throws InterruptedException, ExecutionException {
    Set<String> locatedMatchSet = new HashSet<String>();

    int threadCount = Runtime.getRuntime().availableProcessors();
    List<List<String>> partitionList = getChunkList(searchList, threadCount);

    if (partitionList.size() == 1) {
        // if we only have one "chunk" then don't bother with a thread-pool
        locatedMatchSet = new ListSearcher(searchList, targetSet).call();
    } else {
        ExecutorService executor = Executors.newFixedThreadPool(threadCount);
        CompletionService<Set<String>> completionService = new ExecutorCompletionService<Set<String>>(executor);
        for (List<String> chunkList : partitionList)
            completionService.submit(new ListSearcher(chunkList, targetSet));

        for (int x = 0; x < partitionList.size(); x++) {
            Set<String> threadMatchSet = completionService.take().get();
            locatedMatchSet.addAll(threadMatchSet);
        }
        executor.shutdown();
    }
    return locatedMatchSet;
}

From source file:amie.keys.CombinationsExplorationNew.java

/**
 * It determines whether there exists a more general version of the given
 * conditional key, a version of the key with the exact same relations but
 * fewer instantiations. For instance if the key states lastname |
 * nationality=French, field=Databases but the map contains a key lastname
 * nationality | field=Databases (here nationality is not instantiated), the
 * method will report this as a subsumption case and return true.
 *
 * @param conditionalKey
 * @param conditionRule
 * @param conditions2Keys2
 * @return true if the map contains a more general version of the key, false otherwise
 */
private static boolean isSubsumedByKey(Rule conditionalKey, Rule conditionRule,
        MultiMap<Rule, Rule> conditions2Keys2) {
    if (conditionRule.getLength() < 2) {
        return false;
    }

    Set<ByteString> instantiations = new LinkedHashSet<>();
    Set<ByteString> instantiatedRelations = new LinkedHashSet<>();
    Set<ByteString> nonInstantiatedRelations = new LinkedHashSet<>();
    Utilities.parseConditionalKey(conditionalKey, nonInstantiatedRelations, instantiations,
            instantiatedRelations);

    /**
     * Now get all possible simpler versions of the condition. If the
     * condition is field=Databases, residence=Paris, gender=female, the
     * method returns:
     * field=Databases, residence=Paris
     * field=Databases, gender=female
     * residence=Paris, gender=female
     * field=Databases
     * residence=Paris
     * gender=female
     */
    List<Rule> properSubconditions = getAllProperSubconditions(conditionRule);
    for (Rule subCondition : properSubconditions) {
        List<Rule> potentialParents = conditions2Keys2.get(subCondition);
        if (potentialParents != null) {
            for (Rule potentialParent : potentialParents) {
                if (potentialParent.getLength() != conditionalKey.getLength()) {
                    // System.out.println("potentialParent:" + potentialParent);
                    continue;
                }
                Set<ByteString> instantiatedRelationsParent = new LinkedHashSet<>();
                Set<ByteString> nonInstantiatedRelationsParent = new LinkedHashSet<>();
                Set<ByteString> instantiationsParent = new LinkedHashSet<>();
                Utilities.parseConditionalKey(potentialParent, nonInstantiatedRelationsParent,
                        instantiationsParent, instantiatedRelationsParent);
                Set<ByteString> instantiatedNonInstantiatedRelations = new LinkedHashSet<>();
                instantiatedNonInstantiatedRelations.addAll(instantiatedRelations);
                instantiatedNonInstantiatedRelations.addAll(nonInstantiatedRelations);
                Set<ByteString> instantiatedNonInstantiatedRelationsParent = new LinkedHashSet<>();
                instantiatedNonInstantiatedRelationsParent.addAll(instantiatedRelationsParent);
                instantiatedNonInstantiatedRelationsParent.addAll(nonInstantiatedRelationsParent);
                if (instantiatedRelations.containsAll(instantiatedRelationsParent)
                        && nonInstantiatedRelationsParent.containsAll(nonInstantiatedRelations)
                        && instantiatedNonInstantiatedRelationsParent
                                .containsAll(instantiatedNonInstantiatedRelations)) {
                    return true;
                }
            }
        }
    }

    return false;
}

From source file:ch.puzzle.itc.mobiliar.business.generator.control.TemplateUtils.java

private static void getPropertyDescriptorsForContext(ContextEntity context,
        Set<PropertyDescriptorEntity> result) {
    Set<PropertyDescriptorEntity> properties = context.getPropertyDescriptors();
    if (properties != null) {
        result.addAll(properties);
    }
}

From source file:edu.rice.cs.bioinfo.programs.phylonet.algos.network.NetworkPseudoLikelihoodFromGTT.java

public static Map<String, double[]> computeTripleFrequenciesFromSingleGT(Tree gt,
        Map<String, String> allele2species) {
    Set<String> allAlleles = new HashSet<>();
    for (String allele : gt.getLeaves()) {
        allAlleles.add(allele);
    }

    Map<TNode, Set<String>> node2leaves = new HashMap<>();
    Map<String, double[]> triple2counts = new HashMap<>();
    for (TNode node : gt.postTraverse()) {
        Set<String> leavesUnder = new HashSet<>();
        node2leaves.put(node, leavesUnder);
        if (node.isLeaf()) {
            leavesUnder.add(node.getName());
        } else {
            List<Set<String>> childLeavesList = new ArrayList<>();
            for (TNode child : node.getChildren()) {
                Set<String> childLeaves = node2leaves.get(child);
                leavesUnder.addAll(childLeaves);
                childLeavesList.add(childLeaves);
            }

            allAlleles.removeAll(leavesUnder);

            for (int i = 0; i < childLeavesList.size(); i++) {
                Set<String> childLeaves1 = childLeavesList.get(i);
                for (int j = i + 1; j < childLeavesList.size(); j++) {
                    Set<String> childLeaves2 = childLeavesList.get(j);
                    for (String allele1 : childLeaves1) {
                        String species1 = allele2species.get(allele1);
                        for (String allele2 : childLeaves2) {
                            String species2 = allele2species.get(allele2);
                            if (!species1.equals(species2)) {
                                for (String allele3 : allAlleles) {
                                    String species3 = allele2species.get(allele3);
                                    if (!species1.equals(species3) && !species2.equals(species3)) {
                                        addHighestFrequency(species1, species2, species3, triple2counts);
                                    }
                                }
                            }
                        }
                    }
                    //non-binary node
                    for (int k = j + 1; k < childLeavesList.size(); k++) {
                        Set<String> childLeaves3 = childLeavesList.get(k);
                        for (String allele1 : childLeaves1) {
                            String species1 = allele2species.get(allele1);
                            for (String allele2 : childLeaves2) {
                                String species2 = allele2species.get(allele2);
                                if (!species1.equals(species2)) {
                                    for (String allele3 : childLeaves3) {
                                        String species3 = allele2species.get(allele3);
                                        if (!species1.equals(species3) && !species2.equals(species3)) {
                                            addEqualFrequency(species1, species2, species3, triple2counts);
                                        }
                                    }
                                }
                            }
                        }
                    }
                }

            }

            allAlleles.addAll(leavesUnder);
        }

    }
    return triple2counts;
}

From source file:com.hurence.logisland.plugin.PluginManager.java

private static void installPlugin(String artifact, String logislandHome) {
    Optional<ModuleInfo> moduleInfo = findPluginMeta().entrySet().stream()
            .filter(e -> artifact.equals(e.getKey().getArtifact())).map(Map.Entry::getKey).findFirst();
    if (moduleInfo.isPresent()) {
        System.err
                .println("A component already matches the artifact " + artifact + ". Please remove it first.");
        System.exit(-1);
    }

    try {

        IvySettings settings = new IvySettings();
        settings.load(new File(logislandHome, "conf/ivy.xml"));

        Ivy ivy = Ivy.newInstance(settings);
        ivy.bind();

        System.out.println("\nDownloading dependencies. Please hold on...\n");

        String[] parts = Arrays.stream(artifact.split(":")).map(String::trim).toArray(a -> new String[a]);
        if (parts.length != 3) {
            throw new IllegalArgumentException(
                    "Unrecognized artifact format. It should be groupId:artifactId:version");
        }
        ModuleRevisionId revisionId = new ModuleRevisionId(new ModuleId(parts[0], parts[1]), parts[2]);
        Set<ArtifactDownloadReport> toBePackaged = downloadArtifacts(ivy, revisionId,
                new String[] { "default", "compile", "runtime" });

        ArtifactDownloadReport artifactJar = toBePackaged.stream()
                .filter(a -> a.getArtifact().getModuleRevisionId().equals(revisionId)).findFirst()
                .orElseThrow(() -> new IllegalStateException("Unable to find artifact " + artifact
                        + ". Please check the name is correct and the repositories on ivy.xml are correctly configured"));

        Manifest manifest = new JarFile(artifactJar.getLocalFile()).getManifest();
        File libDir = new File(logislandHome, "lib");

        if (manifest.getMainAttributes().containsKey(ManifestAttributes.MODULE_ARTIFACT)) {
            org.apache.commons.io.FileUtils.copyFileToDirectory(artifactJar.getLocalFile(), libDir);
            //we have a logisland plugin. Just copy it
            System.out.println(String.format("Found logisland plugin %s version %s\n" + "It will provide:",
                    manifest.getMainAttributes().getValue(ManifestAttributes.MODULE_NAME),
                    manifest.getMainAttributes().getValue(ManifestAttributes.MODULE_VERSION)));
            Arrays.stream(manifest.getMainAttributes().getValue(ManifestAttributes.MODULE_EXPORTS).split(","))
                    .map(String::trim).forEach(s -> System.out.println("\t" + s));

        } else {
            System.out.println("Repackaging artifact and its dependencies");
            Set<ArtifactDownloadReport> environment = downloadArtifacts(ivy, revisionId,
                    new String[] { "provided" });
            Set<ArtifactDownloadReport> excluded = toBePackaged.stream()
                    .filter(adr -> excludeGroupIds.stream()
                            .anyMatch(s -> s.matches(adr.getArtifact().getModuleRevisionId().getOrganisation()))
                            || excludedArtifactsId.stream().anyMatch(
                                    s -> s.matches(adr.getArtifact().getModuleRevisionId().getName())))
                    .collect(Collectors.toSet());

            toBePackaged.removeAll(excluded);
            environment.addAll(excluded);

            Repackager rep = new Repackager(artifactJar.getLocalFile(), new LogislandPluginLayoutFactory());
            rep.setMainClass("");
            File destFile = new File(libDir, "logisland-component-" + artifactJar.getLocalFile().getName());
            rep.repackage(destFile, callback -> toBePackaged.stream().filter(adr -> adr.getLocalFile() != null)
                    .filter(adr -> !adr.getArtifact().getModuleRevisionId().equals(revisionId))
                    .map(adr -> new Library(adr.getLocalFile(), LibraryScope.COMPILE)).forEach(library -> {
                        try {
                            callback.library(library);
                        } catch (IOException e) {
                            throw new UncheckedIOException(e);
                        }
                    }));
            Thread.currentThread().setContextClassLoader(new URLClassLoader(
                    environment.stream().filter(adr -> adr.getLocalFile() != null).map(adr -> {
                        try {
                            return adr.getLocalFile().toURI().toURL();
                        } catch (Exception e) {
                            throw new RuntimeException(e);
                        }
                    }).toArray(a -> new URL[a]), Thread.currentThread().getContextClassLoader()));
            //now clean up package and write the manifest
            String newArtifact = "com.hurence.logisland.repackaged:" + parts[1] + ":" + parts[2];
            LogislandRepackager.execute(destFile.getAbsolutePath(), "BOOT-INF/lib-provided", parts[2],
                    newArtifact, "Logisland Component for " + artifact, "Logisland Component for " + artifact,
                    new String[] { "org.apache.kafka.*" }, new String[0],
                    "org.apache.kafka.connect.connector.Connector");
        }
        System.out.println("Install done!");
    } catch (Exception e) {
        System.err.println("Unable to install artifact " + artifact);
        e.printStackTrace();
        System.exit(-1);
    }

}

From source file:Fasta.java

/**
 * Splits the FASTA sequence into the set of every possible subsequence
 * of a given length that can be found in the sequence, including the
 * reverse strand.
 */
public static Set<CharSequence> splitFasta(String[] seq, int length) {

    Set<CharSequence> collect = IntStream.range(0, length).mapToObj(start -> {
        List<CharSequence> primers = new ArrayList<>();
        for (int i = start; i < seq[0].length() - length; i += length) {
            CharSequence s = seq[0].substring(i, i + length);
            primers.add(s);
        }
        return primers;
    }).flatMap((i) -> i.stream()).collect(Collectors.toSet());
    Set<CharSequence> collect2 = IntStream.range(0, length).mapToObj(start -> {
        List<CharSequence> primers = new ArrayList<>();
        for (int i = start; i < seq[1].length() - length; i += length) {
            CharSequence s = seq[1].substring(i, i + length);
            primers.add(s);
        }
        return primers;
    }).flatMap((i) -> i.stream()).collect(Collectors.toSet());
    collect.addAll(collect2);
    return collect;
}

From source file:de.tudarmstadt.ukp.dkpro.tc.core.util.TaskUtils.java

/**
 * Get a list of MetaCollector classes from a list of feature extractors.
 */
public static Set<Class<? extends MetaCollector>> getMetaCollectorsFromFeatureExtractors(
        List<String> featureSet) throws InstantiationException, IllegalAccessException, ClassNotFoundException {
    Set<Class<? extends MetaCollector>> metaCollectorClasses = new HashSet<Class<? extends MetaCollector>>();

    for (String element : featureSet) {
        FeatureExtractorResource_ImplBase featureExtractor = (FeatureExtractorResource_ImplBase) Class
                .forName(element).newInstance();
        if (featureExtractor instanceof MetaDependent) {
            MetaDependent metaDepFeatureExtractor = (MetaDependent) featureExtractor;
            metaCollectorClasses.addAll(metaDepFeatureExtractor.getMetaCollectorClasses());
        }
    }

    return metaCollectorClasses;
}

From source file:cross.applicationContext.ReflectionApplicationContextGenerator.java

/**
 * Scans the given packages for bean definitions whose types are assignable to the given class.
 *
 * @param clazz the class or interface whose implementations should be found
 * @param packages the packages to scan
 * @return the matching bean definitions
 */
public static Set<BeanDefinition> getImplementationsOf(Class<?> clazz, String... packages) {
    ClassPathScanningCandidateComponentProvider provider = new ClassPathScanningCandidateComponentProvider(
            false);
    provider.addIncludeFilter(new AssignableTypeFilter(clazz));
    Set<BeanDefinition> beanDefinitions = new LinkedHashSet<>();
    for (String pkg : packages) {
        String pkgDeclaration = "";
        if (pkg.contains(".")) {
            pkgDeclaration = pkg.replaceAll("\\.", "/");
        }
        Set<BeanDefinition> components = provider.findCandidateComponents(pkgDeclaration);
        beanDefinitions.addAll(components);
    }
    return beanDefinitions;
}

From source file:ClassFinder.java

private static String[] addJarsInPath(String[] paths) {
    Set<String> fullList = new HashSet<String>();
    for (final String path : paths) {
        fullList.add(path); // Keep the unexpanded path
        // TODO - allow directories to end with .jar by removing this check?
        if (!path.endsWith(DOT_JAR)) {
            File dir = new File(path);
            if (dir.exists() && dir.isDirectory()) {
                String[] jars = dir.list(new FilenameFilter() {
                    public boolean accept(File f, String name) {
                        return name.endsWith(DOT_JAR);
                    }
                });
                fullList.addAll(Arrays.asList(jars));
            }
        }
    }
    return fullList.toArray(new String[0]);
}

From source file:com.aurel.track.admin.customize.category.filter.execute.loadItems.LoadItemLinksUtil.java

/**
 * Loads the ancestors, descendants and directly linked items of the base work items.
 *
 * @param baseWorkItemBeanList
 * @param archived
 * @param deleted
 * @param allItemIDSet gathers the IDs of all items found so far
 * @return the IDs of the linked items not already contained in allItemIDSet
 */
private static Set<Integer> loadAncestorDescendantAndDirectLinkedItems(List<TWorkItemBean> baseWorkItemBeanList,
        Integer archived, Integer deleted, Set<Integer> allItemIDSet) {
    Set<Integer> originalItemIDSet = GeneralUtils.createIntegerSetFromBeanList(baseWorkItemBeanList);
    LOGGER.debug("Number of items in filter " + originalItemIDSet.size());
    Set<Integer> ancestorWorkItemIDsSet = getParentHierarchy(baseWorkItemBeanList, archived, deleted);
    LOGGER.debug("Number of ascendent items " + ancestorWorkItemIDsSet.size());
    allItemIDSet.addAll(originalItemIDSet);
    allItemIDSet.addAll(ancestorWorkItemIDsSet);
    Set<Integer> descendantItemIDSet = ItemBL.getChildHierarchy(
            GeneralUtils.createIntArrFromIntegerCollection(allItemIDSet), PARENT_CHILD_EXPRESSION.ALL_CHILDREN,
            archived, deleted, null);
    LOGGER.debug("Total number of descendent items " + descendantItemIDSet.size());
    allItemIDSet.addAll(descendantItemIDSet);
    //gather the MSProject link types
    MsProjectLinkType msProjectLinkType = MsProjectLinkType.getInstance();
    List<Integer> msProjectLinkTypes = LinkTypeBL.getLinkTypesByPluginClass(msProjectLinkType);
    // although the MS Project link type is unidirectional, the predecessors also have to be loaded in order to avoid moving the successor items back
    Map<Integer, SortedSet<Integer>> linkDependencyMap = ItemLinkBL.loadByWorkItemsAndLinkType(
            GeneralUtils.createIntegerListFromCollection(allItemIDSet), msProjectLinkTypes,
            msProjectLinkType.getPossibleDirection(), true, archived, deleted);
    Set<Integer> linkedItemIDsSet = getFlatItems(linkDependencyMap);
    LOGGER.debug("Number of linked items from hierarchy " + linkedItemIDsSet.size());
    linkedItemIDsSet.removeAll(allItemIDSet);
    LOGGER.debug("Number of extended linked items " + linkedItemIDsSet.size());
    allItemIDSet.addAll(linkedItemIDsSet);
    return linkedItemIDsSet;
}