Example usage for java.util HashSet addAll

List of usage examples for java.util HashSet addAll

Introduction

On this page you can find example usages of java.util HashSet addAll.

Prototype

boolean addAll(Collection<? extends E> c);

Document

Adds all of the elements in the specified collection to this set if they're not already present (optional operation).
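
For reference, here is a minimal, self-contained sketch of that contract (the class name HashSetAddAllDemo is illustrative, not taken from any of the sources below): addAll adds only the elements that are not already present, and its boolean result reports whether the set changed.

import java.util.Arrays;
import java.util.HashSet;
import java.util.List;

public class HashSetAddAllDemo {
    public static void main(String[] args) {
        HashSet<String> set = new HashSet<>(Arrays.asList("a", "b"));
        List<String> more = Arrays.asList("b", "c");

        // true: "c" was not already present, so the set changed
        boolean changed = set.addAll(more);
        System.out.println(changed);    // true
        System.out.println(set.size()); // 3 ("b" is not added twice)

        // false: every element of the collection is already present
        System.out.println(set.addAll(more)); // false
    }
}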

Usage

From source file:org.apache.taverna.activities.dependencyactivity.AbstractAsynchronousDependencyActivity.java

/**
 * Finds either local jar or artifact dependencies' URLs for the given classloader
 * sharing policy (passed inside configuration bean) and a workflowRunID (used to
 * retrieve the workflow) that will be added to this activity classloader's list of URLs.
 */
private HashSet<URL> findDependencies(String dependencyType, JsonNode json, String workflowRunID) {
    ClassLoaderSharing classLoaderSharing;
    if (json.has("classLoaderSharing")) {
        classLoaderSharing = ClassLoaderSharing.fromString(json.get("classLoaderSharing").textValue());
    } else {
        classLoaderSharing = ClassLoaderSharing.workflow;
    }
    // Get the WorkflowInstanceFacade which contains the current workflow
    WeakReference<WorkflowInstanceFacade> wfFacadeRef = WorkflowInstanceFacade.workflowRunFacades
            .get(workflowRunID);
    WorkflowInstanceFacade wfFacade = null;
    if (wfFacadeRef != null) {
        wfFacade = wfFacadeRef.get();
    }
    Dataflow wf = null;
    if (wfFacade != null) {
        wf = wfFacade.getDataflow();
    }

    // Files of dependencies for all activities in the workflow that share the classloading policy
    HashSet<File> dependencies = new HashSet<File>();
    // Urls of all dependencies
    HashSet<URL> dependenciesURLs = new HashSet<URL>();

    if (wf != null) {
        // Merge in dependencies from all activities that have the same classloader-sharing
        // as this activity
        for (Processor proc : wf.getProcessors()) {
            // Nested workflow case
            if (!proc.getActivityList().isEmpty() && proc.getActivityList().get(0) instanceof NestedDataflow) {
                // Get the nested workflow
                Dataflow nestedWorkflow = ((NestedDataflow) proc.getActivityList().get(0)).getNestedDataflow();
                dependenciesURLs.addAll(findNestedDependencies(dependencyType, json, nestedWorkflow));
            } else { // Not nested - go through all of the processor's activities
                Activity<?> activity = proc.getActivityList().get(0);
                if (activity instanceof AbstractAsynchronousDependencyActivity) {
                    AbstractAsynchronousDependencyActivity dependencyActivity = (AbstractAsynchronousDependencyActivity) activity;
                    //                     if (dependencyType.equals(LOCAL_JARS)){
                    // Collect the files of all found local dependencies
                    if (dependencyActivity.getConfiguration().has("localDependency")) {
                        for (JsonNode jar : dependencyActivity.getConfiguration().get("localDependency")) {
                            try {
                                dependencies.add(new File(libDir, jar.textValue()));
                            } catch (Exception ex) {
                                logger.warn("Invalid URL for " + jar, ex);
                                continue;
                            }
                        }
                    }
                    //                     } else if (dependencyType.equals(ARTIFACTS) && this.getClass().getClassLoader() instanceof LocalArtifactClassLoader){
                    //                        LocalArtifactClassLoader cl = (LocalArtifactClassLoader) this.getClass().getClassLoader(); // this class is always loaded with LocalArtifactClassLoader
                    //                        // Get the LocalRepository capable of finding artifact jar files
                    //                        LocalRepository rep  = (LocalRepository) cl.getRepository();
                    //                        for (BasicArtifact art : ((DependencyActivityConfigurationBean) activity
                    //                                    .getConfiguration())
                    //                                    .getArtifactDependencies()){
                    //                           dependencies.add(rep.jarFile(art));
                    //                        }
                    //                     }
                }
            }
        }
    } else { // Just add dependencies for this activity since we can't get hold of the whole workflow
        //         if (dependencyType.equals(LOCAL_JARS)){
        if (json.has("localDependency")) {
            for (JsonNode jar : json.get("localDependency")) {
                try {
                    dependencies.add(new File(libDir, jar.textValue()));
                } catch (Exception ex) {
                    logger.warn("Invalid URL for " + jar, ex);
                    continue;
                }
            }
        }
        //         }
        //         else if (dependencyType.equals(ARTIFACTS)){
        //            if (this.getClass().getClassLoader() instanceof LocalArtifactClassLoader){ // This should normally be the case
        //               LocalArtifactClassLoader cl = (LocalArtifactClassLoader)this.getClass().getClassLoader();
        //               LocalRepository rep  = (LocalRepository)cl.getRepository();
        //               if (rep != null){
        //                  for (BasicArtifact art : configurationBean.getArtifactDependencies()){
        //                     dependencies.add(rep.jarFile(art));
        //                  }
        //               }
        //            }
        //            else{
        //               // Tests will not be loaded using the LocalArtifactClassLoader as they are loaded
        //               // outside Raven so there is nothing we can do about this - some tests
        //               // with dependencies will probably fail
        //            }
        //         }
    }

    // Collect the URLs of all found dependencies
    for (File file : dependencies) {
        try {
            dependenciesURLs.add(file.toURI().toURL());
        } catch (Exception ex) {
            logger.warn("Invalid URL for " + file.getAbsolutePath(), ex);
            continue;
        }
    }
    return dependenciesURLs;
}

From source file:org.alfresco.rest.workflow.api.impl.WorkflowRestImpl.java

/**
 * @param taskType type of the task
 * @return all types (and aspects) whose properties should not be used for form-model elements
 */
protected Set<QName> getTypesToExclude(TypeDefinition taskType) {
    HashSet<QName> typesToExclude = new HashSet<QName>();

    ClassDefinition parentClassDefinition = taskType.getParentClassDefinition();
    boolean contentClassFound = false;
    while (parentClassDefinition != null) {
        if (contentClassFound) {
            typesToExclude.add(parentClassDefinition.getName());
        } else if (ContentModel.TYPE_CONTENT.equals(parentClassDefinition.getName())) {
            // All parents of "cm:content" should be ignored as well for fetching start-properties 
            typesToExclude.add(ContentModel.TYPE_CONTENT);
            typesToExclude.addAll(parentClassDefinition.getDefaultAspectNames());
            contentClassFound = true;
        }
        parentClassDefinition = parentClassDefinition.getParentClassDefinition();
    }
    return typesToExclude;
}

From source file:LineageSimulator.java

public static void simulateLineageTrees(Args args) {
    int totalNumNodes = 0;
    // --- grow lineage trees --- //
    for (int t = 0; t < Parameters.NUM_TREES; t++) {
        // create the directory to store the results for each generated tree 
        File treeDir = new File(args.simPath + "/tree" + "_" + t);
        treeDir.mkdirs();
        // initial tree (only contains the root)
        SimulatedTree lineageTree = new SimulatedTree();
        // -- expand the tree --
        int iter = 0;
        while (iter < Parameters.NUM_ITERATIONS || /* there must be a min number of undead nodes */
                lineageTree.getNumNodes() < lineageTree.getNumDeadNodes() + Parameters.MIN_NUM_NODES + 1) {
            if (lineageTree.getNumNodes() >= lineageTree.getNumDeadNodes() + Parameters.MAX_NUM_NODES + 1) {
                break;
            }
            lineageTree.grow();
            iter++;
        }
        writeOutputFile(treeDir.getAbsolutePath() + "/TREE_plain.txt", lineageTree.toString());
        if (args.generateDOT) {
            writeOutputFile(treeDir.getAbsolutePath() + "/TREE.dot", lineageTree.toDOT());
        }
        logger.fine("Generated tree " + t + " with " + lineageTree.getNumNodes() + " nodes.");
        totalNumNodes += lineageTree.getNumNodes();

        // --- sampling --- //
        for (int s = 0; s < Parameters.NUM_SAMPLES_ARRAY.length; s++) {
            int numSamples = Parameters.NUM_SAMPLES_ARRAY[s];
            ArrayList<TumorSample> samples = new ArrayList<TumorSample>();
            HashSet<CellPopulation> subclones = new HashSet<CellPopulation>();
            HashMap<Mutation.SNV, double[]> multiSampleFrequencies = new HashMap<Mutation.SNV, double[]>();

            // --- collect the samples from the tree ---
            if (Parameters.LOCALIZED_SAMPLING) {
                samples = lineageTree.getKLocalizedSamples(numSamples - 1);
            } else { // randomized
                for (int i = 1; i < numSamples; i++) {
                    samples.add(lineageTree.getSample());
                }
            }
            if (args.generateSampledDOT) {
                writeOutputFile(treeDir.getAbsolutePath() + "/TREE_s" + numSamples + ".dot",
                        lineageTree.toColoredDOT(samples));
            }
            lineageTree.resetColors();

            // --- populate the SNV VAFs for each sample ---
            for (int i = 1; i < numSamples; i++) { // + default normal sample 0
                TumorSample sample = samples.get(i - 1);
                HashMap<Mutation.SNV, Double> freqMap = sample.getSNVFrequencies();
                for (Mutation.SNV snv : freqMap.keySet()) {
                    if (multiSampleFrequencies.containsKey(snv)) {
                        multiSampleFrequencies.get(snv)[i] = freqMap.get(snv);
                    } else {
                        multiSampleFrequencies.put(snv, new double[numSamples]);
                        multiSampleFrequencies.get(snv)[i] = freqMap.get(snv);
                    }
                }
                subclones.addAll(sample.cellPopulationCounts.keySet());
            }
            HashMap<Mutation.SNV, String> binaryProfiles = null;
            if (args.outputSampleProfile) {
                binaryProfiles = getBinaryProfile(multiSampleFrequencies, numSamples);
            }
            // --- store true VAFs --- 
            String VAFFileName = treeDir.getAbsolutePath() + "/VAF_s" + numSamples + "_true.txt";
            writeVAFsToFile(VAFFileName, multiSampleFrequencies, binaryProfiles, numSamples);

            // --- generate VAFs with simulated coverage and sequencing error ---
            for (int c = 0; c < Parameters.COVERAGE_ARRAY.length; c++) {
                int coverage = Parameters.COVERAGE_ARRAY[c];
                VAFFileName = treeDir.getAbsolutePath() + "/VAF_s" + numSamples + "_" + coverage + "X.txt";
                HashMap<Mutation.SNV, double[]> noisyMultiSampleFrequencies = addNoise(multiSampleFrequencies,
                        coverage, numSamples);
                writeVAFsToFile(VAFFileName, noisyMultiSampleFrequencies, binaryProfiles, numSamples);
            }
            // --- store subclone information for evaluation ---
            String lineageFileName = treeDir.getAbsolutePath() + "/SUBCLONES_s" + numSamples + ".txt";
            writeSubclonesToFile(lineageFileName, subclones);
        }
        if ((t + 1) % 1 == 0)
            logger.info("[PROGRESS] Simulated " + (t + 1) + " trees.");
    }
    logger.info("[SUMMARY] Simulated " + Parameters.NUM_TREES + " trees. Average number of nodes / tree = "
            + (double) totalNumNodes / (Parameters.NUM_TREES));
}

From source file:amie.keys.CSAKey.java

/**
 *
 * @param ruleToExtendWith
 * @param ruleToGraphFirstLevel
 * @param ruleToGraphLastLevel
 * @param kb
 */
private void discoverConditionalKeysPerLevel(HashMap<Rule, HashSet<String>> ruleToExtendWith,
        HashMap<Rule, Graph> ruleToGraphFirstLevel, HashMap<Rule, Graph> ruleToGraphLastLevel,
        Set<Rule> output) {
    //System.out.println("discoverConditionalKeysPerLevel()");
    HashMap<Rule, Graph> ruleToGraphThisLevel = new HashMap<>();
    for (Rule currentRule : ruleToExtendWith.keySet()) {
        Graph graph = ruleToGraphLastLevel.get(currentRule);
        //System.out.println("Current rule: " + currentRule+ " Graph:"+graph);
        for (String conditionProperty : ruleToExtendWith.get(currentRule)) {
            if (Utilities.getRelationIds(currentRule, property2Id).last() > property2Id
                    .get(conditionProperty)) {
                Graph currentGraphNew = (Graph) graph.clone();
                Integer propertyId = property2Id.get(conditionProperty);
                HashSet<Integer> propertiesSet = new HashSet<>();
                propertiesSet.add(propertyId);
                Node node = currentGraphNew.createOrGetNode(propertiesSet); //Before it was createNode
                node.toExplore = false;
                Iterable<Rule> conditions = Utilities.getConditions(currentRule, conditionProperty,
                        (int) support, kb);
                for (Rule conditionRule : conditions) {
                    Rule complementaryRule = getComplementaryRule(conditionRule);
                    if (!ruleToGraphFirstLevel.containsKey(complementaryRule)) {
                        // We should never reach this case
                        for (Rule r : ruleToGraphFirstLevel.keySet()) {
                            System.out.println(r.getDatalogBasicRuleString());
                        }
                        System.out.println(complementaryRule.getDatalogBasicRuleString());
                        System.out.println(complementaryRule + " not found in the first level graph");
                    }

                    Graph complementaryGraphNew = ruleToGraphFirstLevel.get(complementaryRule);
                    //System.out.println("Complementary rule: " + complementaryRule + "\tThread " + Thread.currentThread().getId() + "\t" + complementaryGraphNew);
                    Graph newGraphNew = (Graph) currentGraphNew.clone();
                    HashSet<Integer> conditionProperties = new HashSet<>();
                    conditionProperties.addAll(getRelations(conditionRule, property2Id));
                    conditionProperties.addAll(getRelations(currentRule, property2Id));
                    //System.out.println("currentGraph:"+currentGraphNew);
                    //System.out.println("clone of currentGraph:"+newGraphNew);
                    newGraphNew = mergeGraphs(newGraphNew, complementaryGraphNew, newGraphNew.topGraphNodes(),
                            conditionProperties);
                    //System.out.println("newMergeGraph:"+newGraphNew);
                    discoverConditionalKeysForComplexConditions(newGraphNew, newGraphNew.topGraphNodes(),
                            conditionRule, output);
                    ruleToGraphThisLevel.put(conditionRule, newGraphNew);
                }
            }
        }
    }
    HashMap<Rule, HashSet<String>> newRuleToExtendWith = new HashMap<>();
    for (Rule conRule : ruleToGraphThisLevel.keySet()) {
        Graph newGraphNew = ruleToGraphThisLevel.get(conRule);
        for (Node node : newGraphNew.topGraphNodes()) {
            HashSet<String> properties = new HashSet<>();
            if (node.toExplore) {
                Iterator<Integer> it = node.set.iterator();
                int prop = it.next();
                String propertyStr = id2Property.get(prop);
                properties.add(propertyStr);
            }
            if (properties.size() != 0) {
                newRuleToExtendWith.put(conRule, properties);
            }
        }
    }

    if (newRuleToExtendWith.size() != 0) {
        discoverConditionalKeysPerLevel(newRuleToExtendWith, ruleToGraphFirstLevel, ruleToGraphThisLevel,
                output);
    }

    //System.out.println("discoverConditionalKeysPerLevel()");

}

From source file:org.codehaus.mojo.CppCheckMojo.java

protected String getCommandArgs() {
    String params = commandArgs + " ";
    HashSet<String> excludedSet = new HashSet<String>();

    Iterator it = includeDirs.iterator();
    while (it.hasNext()) {
        FileSet afileSet = new FileSet();

        String dir = it.next().toString();
        params += "-I\"" + dir + "\" ";

        if (StringUtils.isNotEmpty(excludes)) {
            afileSet.setDirectory(new File(dir).getAbsolutePath());
            afileSet.setUseDefaultExcludes(false); // $FB to avoid having TOO MANY (useless) excluded files in the for loop below
            afileSet.setExcludes(Arrays.asList(excludes.split(",")));
            getLog().debug("cppcheck excludes are :" + Arrays.toString(afileSet.getExcludes().toArray()));

            FileSetManager aFileSetManager = new FileSetManager();
            String[] found = aFileSetManager.getExcludedFiles(afileSet);
            excludedSet.addAll(Arrays.asList(found));
        }
    }
    it = sourceDirs.iterator();
    while (it.hasNext()) {
        FileSet afileSet = new FileSet();

        String dir = it.next().toString();
        params += "-I\"" + dir + "\" ";

        if (StringUtils.isNotEmpty(excludes)) {
            afileSet.setDirectory(new File(dir).getAbsolutePath());
            afileSet.setUseDefaultExcludes(false); // $FB to avoid having TOO MANY (useless) excluded files in the for loop below
            afileSet.setExcludes(Arrays.asList(excludes.split(",")));
            getLog().debug("cppcheck excludes are :" + Arrays.toString(afileSet.getExcludes().toArray()));

            FileSetManager aFileSetManager = new FileSetManager();
            String[] found = aFileSetManager.getExcludedFiles(afileSet);
            excludedSet.addAll(Arrays.asList(found));
        }
    }
    for (Iterator<String> iter = excludedSet.iterator(); iter.hasNext();) {
        String s = iter.next();
        //cppcheck only checks *.cpp, *.cxx, *.cc, *.c++, *.c, *.tpp, and *.txx files
        // so remove unneeded exclusions
        if (s.matches("(.+\\.cpp)|(.+\\.cxx)|(.+\\.cc)|(.+\\.c\\+\\+)|(.+\\.c)|(.+\\.tpp)|(.+\\.txx)")) {
            params += "-i\"" + s + "\" ";
        }
    }

    it = sourceDirs.iterator();
    while (it.hasNext()) {
        params += "\"" + it.next() + "\" ";
    }

    return params;
}

From source file:aml.match.Alignment.java

/**
 * @param id the index of the class to check in the Alignment
 * @return the set of all classes mapped to the given class
 */
public Set<Integer> getMappingsBidirectional(int id) {
    HashSet<Integer> mappings = new HashSet<Integer>();
    if (sourceMaps.contains(id))
        mappings.addAll(sourceMaps.keySet(id));
    if (targetMaps.contains(id))
        mappings.addAll(targetMaps.keySet(id));
    return mappings;
}

From source file:com.aimluck.eip.msgboard.MsgboardTopicSelectData.java

@Override
protected SelectQuery<EipTMsgboardTopic> buildSelectQueryForFilter(SelectQuery<EipTMsgboardTopic> query,
        RunData rundata, Context context) {
    if (current_filterMap.containsKey("category")) {
        // Filter topics by the selected category
        List<String> categoryIds = current_filterMap.get("category");
        categoryId = categoryIds.get(0).toString();
        List<MsgboardCategoryResultData> categoryList = MsgboardUtils.loadCategoryList(rundata);
        boolean existCategory = false;
        if (categoryList != null && categoryList.size() > 0) {
            for (MsgboardCategoryResultData category : categoryList) {
                if (categoryId.equals(category.getCategoryId().toString())) {
                    existCategory = true;
                    break;
                }
            }

        }
        if (!existCategory) {
            categoryId = "";
            current_filterMap.remove("category");

        }

        updateCategoryName();
    }

    super.buildSelectQueryForFilter(query, rundata, context);

    if (current_filterMap.containsKey("post")) {
        // Filter topics by the selected post

        List<String> postIds = current_filterMap.get("post");
        boolean existPost = false;
        for (int i = 0; i < postList.size(); i++) {
            String pid = postList.get(i).getName().toString();
            if (pid.equals(postIds.get(0).toString())) {
                existPost = true;
                break;
            }
        }
        Map<Integer, ALEipPost> map = ALEipManager.getInstance().getPostMap();
        if (postIds != null && !postIds.isEmpty()) {
            for (Map.Entry<Integer, ALEipPost> item : map.entrySet()) {
                String pid = item.getValue().getGroupName().toString();
                if (pid.equals(postIds.get(0).toString())) {
                    existPost = true;
                    break;
                }
            }
        }
        if (existPost) {
            HashSet<Integer> userIds = new HashSet<Integer>();
            for (String post : postIds) {
                List<Integer> userId = ALEipUtils.getUserIds(post);
                userIds.addAll(userId);
            }
            if (userIds.isEmpty()) {
                userIds.add(-1);
            }
            Expression exp = ExpressionFactory.inExp(EipTMsgboardTopic.OWNER_ID_PROPERTY, userIds);
            query.andQualifier(exp);

            postId = postIds.get(0).toString();
            updatePostName();
        } else {
            postId = "";
            updatePostName();
            current_filterMap.remove("post");
        }
    }

    String search = ALEipUtils.getTemp(rundata, context, LIST_SEARCH_STR);

    if (search != null && !"".equals(search)) {
        current_search = search;
        Expression ex1 = ExpressionFactory.likeExp(EipTMsgboardTopic.NOTE_PROPERTY, "%" + search + "%");
        Expression ex2 = ExpressionFactory.likeExp(EipTMsgboardTopic.TOPIC_NAME_PROPERTY, "%" + search + "%");
        SelectQuery<EipTMsgboardTopic> q = Database.query(EipTMsgboardTopic.class);
        q.andQualifier(ex1.orExp(ex2));
        List<EipTMsgboardTopic> queryList = q.fetchList();
        List<Integer> resultid = new ArrayList<Integer>();
        for (EipTMsgboardTopic item : queryList) {
            if (item.getParentId() != 0 && !resultid.contains(item.getParentId())) {
                resultid.add(item.getParentId());
            } else if (!resultid.contains(item.getTopicId())) {
                resultid.add(item.getTopicId());
            }
        }
        if (resultid.size() == 0) {
            // No topics matched the search, so add -1 to make the IN expression match nothing
            resultid.add(-1);
        }
        Expression ex = ExpressionFactory.inDbExp(EipTMsgboardTopic.TOPIC_ID_PK_COLUMN, resultid);
        query.andQualifier(ex);
    }
    return query;
}

From source file:org.apache.jcs.engine.control.CompositeCache.java

/**
 * Gets the set of keys of objects currently in the group.
 * <p>
 * @param group
 * @return A Set of keys, or null.
 */
public Set getGroupKeys(String group) {
    HashSet allKeys = new HashSet();
    allKeys.addAll(memCache.getGroupKeys(group));
    for (int i = 0; i < auxCaches.length; i++) {
        AuxiliaryCache aux = auxCaches[i];
        if (aux != null) {
            try {
                allKeys.addAll(aux.getGroupKeys(group));
            } catch (IOException e) {
                // ignore
            }
        }
    }
    return allKeys;
}
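
A pattern that recurs in these examples (as in getGroupKeys above) is using addAll to union elements from several sources into one duplicate-free set. A minimal sketch of that pattern, with illustrative names (UnionDemo, union) not taken from any source file:

import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class UnionDemo {
    // Union all source collections; duplicates across sources collapse automatically
    static Set<String> union(List<? extends Collection<String>> sources) {
        HashSet<String> all = new HashSet<>();
        for (Collection<String> source : sources) {
            all.addAll(source);
        }
        return all;
    }

    public static void main(String[] args) {
        Set<String> keys = union(Arrays.asList(
                Arrays.asList("a", "b"),
                Arrays.asList("b", "c")));
        System.out.println(keys.size()); // 3: a, b, c
    }
}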

From source file:com.haulmont.cuba.core.app.RdbmsStore.java

protected Set<Class> collectEntityClasses(View view, Set<View> visited) {
    if (visited.contains(view)) {
        return Collections.emptySet();
    } else {
        visited.add(view);
    }

    HashSet<Class> classes = new HashSet<>();
    classes.add(view.getEntityClass());
    for (ViewProperty viewProperty : view.getProperties()) {
        if (viewProperty.getView() != null) {
            classes.addAll(collectEntityClasses(viewProperty.getView(), visited));
        }
    }
    return classes;
}

From source file:org.biopax.ols.impl.OBO2FormatParser.java

/**
 * Returns the root terms of an ontology.
 *
 * @param useGreedy whether to use the GREEDY root algorithm (otherwise STRICT is used)
 * @return a collection of OBOClass terms
 * @throws IllegalStateException if the session is not initialized
 */
public Set<OBOObject> getRootTerms(boolean useGreedy) {

    HashSet<OBOObject> roots = new HashSet<OBOObject>();
    /*
     * {@link RootAlgorithm#GREEDY GREEDY} root algorithm.
     *
     * @param linkDatabase
     *            the linkDatabase to check
     * @param includeTerms
     *            whether to include root terms
     * @param includeProperties
     *            whether to include root properties
     * @param includeObsoletes
     *            whether to include obsolete terms & properties
     * @param includeInstances
     *            whether to include instances
     */

    if (useGreedy) {

        //use greedy root detection
        /**
         * Returns all non-obsolete root terms in a given {@link LinkDatabase}.
         * Equivalent to {@link #getRoots(session.getLinkDatabase(), true, false, false, false)}.
         */
        Collection<OBOClass> tmpRoots = TermUtil.getRoots(session);
        roots.addAll(tmpRoots);

    } else {

        //use strict root detection
        Collection<LinkedObject> tmpRoots = TermUtil.getRoots(RootAlgorithm.STRICT, session.getLinkDatabase());
        for (LinkedObject lnk : tmpRoots) {
            if (lnk instanceof OBOClass) {
                roots.add((OBOClass) lnk);
            }
        }

    }

    return roots;

}