Example usage for java.util HashSet addAll

List of usage examples for java.util HashSet addAll

Introduction

On this page you can find example usage for java.util HashSet addAll.

Prototype

boolean addAll(Collection<? extends E> c);

Document

Adds all of the elements in the specified collection to this set if they're not already present (optional operation).
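
A minimal, self-contained sketch of the call itself (the class and variable names here are illustrative only, not taken from the examples below): addAll returns true if the set changed as a result of the call, and elements that are already present are simply ignored.

import java.util.Arrays;
import java.util.HashSet;
import java.util.List;

public class AddAllDemo {
    public static void main(String[] args) {
        HashSet<String> colors = new HashSet<>(Arrays.asList("red", "green"));
        List<String> more = Arrays.asList("green", "blue");

        // Only "blue" is new, so the set changes and addAll returns true.
        boolean changed = colors.addAll(more);
        System.out.println(changed);       // true
        System.out.println(colors.size()); // 3

        // Adding the same collection again changes nothing, so addAll returns false.
        System.out.println(colors.addAll(more)); // false
    }
}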

Usage

From source file:net.sourceforge.fenixedu.presentationTier.Action.internationalRelatOffice.candidacy.erasmus.ErasmusIndividualCandidacyProcessDA.java

public ActionForward doEnrol(ActionMapping mapping, ActionForm form, HttpServletRequest request,
        HttpServletResponse response) throws FenixServiceException {
    ErasmusBolonhaStudentEnrollmentBean erasmusBolonhaStudentEnrollmentBean = (ErasmusBolonhaStudentEnrollmentBean) getRenderedObject();
    try {
        final RuleResult ruleResults = EnrolBolonhaStudent.run(
                erasmusBolonhaStudentEnrollmentBean.getStudentCurricularPlan(),
                erasmusBolonhaStudentEnrollmentBean.getExecutionPeriod(),
                erasmusBolonhaStudentEnrollmentBean.getDegreeModulesToEvaluate(),
                erasmusBolonhaStudentEnrollmentBean.getCurriculumModulesToRemove(),
                erasmusBolonhaStudentEnrollmentBean.getCurricularRuleLevel());

        if (!erasmusBolonhaStudentEnrollmentBean.getDegreeModulesToEvaluate().isEmpty()
                || !erasmusBolonhaStudentEnrollmentBean.getCurriculumModulesToRemove().isEmpty()) {
            addActionMessage("success", request, "label.save.success");
        }

        if (ruleResults.isWarning()) {
            addRuleResultMessagesToActionMessages("warning", request, ruleResults);
        }

    } catch (EnrollmentDomainException ex) {
        addRuleResultMessagesToActionMessages("error", request, ex.getFalseResult());

        return enrolStudent(mapping, form, request, response);

    } catch (DomainException ex) {
        addActionMessage("error", request, ex.getKey(), ex.getArgs());

        return enrolStudent(mapping, form, request, response);
    }

    StudentCurricularPlan studentCurricularPlan = erasmusBolonhaStudentEnrollmentBean
            .getStudentCurricularPlan();
    ExecutionSemester executionSemester = erasmusBolonhaStudentEnrollmentBean.getExecutionPeriod();
    NoCourseGroupCurriculumGroup group = studentCurricularPlan
            .getNoCourseGroupCurriculumGroup(NoCourseGroupCurriculumGroupType.STANDALONE);
    Set<CurricularCourse> remaining = new HashSet<CurricularCourse>();
    HashSet<CurricularCourse> set = new HashSet<CurricularCourse>();
    set.addAll(erasmusBolonhaStudentEnrollmentBean.getCandidacy().getCurricularCoursesSet());
    for (Enrolment enrolment : group.getEnrolments()) {
        set.add(enrolment.getCurricularCourse());
    }

    remaining.addAll(set);

    for (ErasmusExtraCurricularEnrolmentBean bean : erasmusBolonhaStudentEnrollmentBean
            .getExtraCurricularEnrolments()) {
        remaining.remove(bean.getCurricularCourse());
        if (group.hasEnrolmentWithEnroledState(bean.getCurricularCourse(),
                erasmusBolonhaStudentEnrollmentBean.getExecutionPeriod())) {
            continue;
        }

        MobilityExtraEnrolmentBean mobilityExtraEnrolmentBean = new MobilityExtraEnrolmentBean(
                studentCurricularPlan, executionSemester);

        mobilityExtraEnrolmentBean.setCurriculumGroup(studentCurricularPlan
                .getNoCourseGroupCurriculumGroup(NoCourseGroupCurriculumGroupType.STANDALONE));
        mobilityExtraEnrolmentBean.setDegree(bean.getCurricularCourse().getDegree());
        mobilityExtraEnrolmentBean.setDegreeType(bean.getCurricularCourse().getDegree().getDegreeType());
        mobilityExtraEnrolmentBean
                .setDegreeCurricularPlan(bean.getCurricularCourse().getDegreeCurricularPlan());
        mobilityExtraEnrolmentBean.setSelectedCurricularCourse(bean.getCurricularCourse());
        mobilityExtraEnrolmentBean.setCurricularRuleLevel(CurricularRuleLevel.EXTRA_ENROLMENT);

        try {
            final RuleResult ruleResult = CreateExtraEnrolment.run(mobilityExtraEnrolmentBean);

            if (ruleResult.isWarning()) {
                addRuleResultMessagesToActionMessages("warning", request, ruleResult);
            }

        } catch (final IllegalDataAccessException e) {
            addActionMessage("error", request, "error.notAuthorized");
            return enrolStudent(mapping, form, request, response);

        } catch (final EnrollmentDomainException ex) {
            addRuleResultMessagesToActionMessages("enrolmentError", request, ex.getFalseResult());
            return enrolStudent(mapping, form, request, response);

        } catch (final DomainException e) {
            addActionMessage("error", request, e.getMessage(), e.getArgs());
            return enrolStudent(mapping, form, request, response);
        }
    }

    // After adding everything selected above, any previously enrolled
    // courses that were not re-added are removed.
    for (Enrolment enrolment : group.getEnrolmentsBy(executionSemester)) {
        if (remaining.contains(enrolment.getCurricularCourse())) {
            studentCurricularPlan.removeCurriculumModulesFromNoCourseGroupCurriculumGroup(
                    Collections.<CurriculumModule>singletonList(enrolment), executionSemester,
                    NoCourseGroupCurriculumGroupType.STANDALONE);
        }
    }
    MobilityIndividualApplicationProcess process = getProcess(request);
    MobilityIndividualApplication candidacy = process.getCandidacy();
    ErasmusBolonhaStudentEnrollmentBean bean = new ErasmusBolonhaStudentEnrollmentBean(
            candidacy.getRegistration().getActiveStudentCurricularPlan(),
            erasmusBolonhaStudentEnrollmentBean.getExecutionPeriod(), null,
            CurricularRuleLevel.ENROLMENT_NO_RULES, candidacy);
    RenderUtils.invalidateViewState();
    return enrolStudent(mapping, request, getProcess(request), bean);
}
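
A side note on the pattern near the middle of this example (a general remark about java.util.HashSet, not a change to the code above): creating an empty HashSet and immediately calling addAll with a whole collection is equivalent to using the copy constructor.

import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class CopyIntoSet {
    public static void main(String[] args) {
        List<String> source = Arrays.asList("a", "b", "b", "c");

        // Pattern used above: create the set, then addAll.
        Set<String> viaAddAll = new HashSet<String>();
        viaAddAll.addAll(source);

        // Equivalent one-step form using the copy constructor.
        Set<String> viaConstructor = new HashSet<String>(source);

        System.out.println(viaAddAll.equals(viaConstructor)); // true
    }
}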

From source file:edu.uga.cs.fluxbuster.clustering.DomainCluster.java

/**
 * Adds the candidate flux domain to the cluster
 *
 * @param cfd the candidate flux domain to add
 */
public void addCandidateFluxDomain(CandidateFluxDomain cfd) {
    this.candidateDomains.add(cfd);
    this.domains.add(cfd.getDomainName());
    this.ips.addAll(cfd.getIps());

    // NOTE: diversity is based solely on IPv4 addresses
    this.setIpDiversity(IPDiversityCalculator.ipDiversity(IPDiversityCalculator.getV4Ips(ips)));

    this.queries += cfd.getNumQueries();
    this.avgTTLs.add(cfd.getAvgTTL());
    this.growthRatios.add((double) cfd.getNumIPs() / (double) cfd.getNumQueries());

    if (cfd.getLastGrowthRatioSingleEntry() != null) {
        this.lastGrowthRatioSingleEntries.add(cfd.getLastGrowthRatioSingleEntry());
    }

    if (cfd.getLastGrowthEntriesIPs().size() > 0) {
        this.lastGrowthEntriesIPs.add(cfd.getLastGrowthEntriesIPs());
        this.lastGrowthEntriesQueries.add(cfd.getLastGrowthEntriesQueries());
    }

    if (this.candidateDomains.size() > 1) {
        Collections.sort(this.candidateDomains, new Comparator<CandidateFluxDomain>() {
            @Override
            public int compare(CandidateFluxDomain arg0, CandidateFluxDomain arg1) {
                return arg0.getLastSeen().compareTo(arg1.getLastSeen());
            }
        });

        HashSet<InetAddress> prevIps = new HashSet<InetAddress>();
        for (int i = 0; i < this.candidateDomains.size() - 1; i++) {
            prevIps.addAll(this.candidateDomains.get(i).getIps());
        }
        CandidateFluxDomain lastCFD = this.candidateDomains.get(this.candidateDomains.size() - 1);
        HashSet<InetAddress> temp = new HashSet<InetAddress>();

        temp.addAll(lastCFD.getIps());
        temp.removeAll(prevIps);

        this.lastGrowthClusterIPs = temp;
        this.lastGrowthClusterQueries = lastCFD.getNumQueries();
    }
}

From source file:org.alfresco.repo.security.authentication.AbstractChainingAuthenticationService.java

/**
 * {@inheritDoc}
 */
@Override
public Set<String> getUsersWithTickets(boolean nonExpiredOnly) {
    HashSet<String> users = new HashSet<String>();
    for (AuthenticationService authService : getUsableAuthenticationServices()) {
        if (authService instanceof AbstractAuthenticationService) {
            users.addAll(((AbstractAuthenticationService) authService).getUsersWithTickets(nonExpiredOnly));
        }
    }
    return users;
}

From source file:io.viewserver.operators.group.GroupByOperator.java

@Override
protected IGroupByConfig mergePendingConfig(IGroupByConfig pendingConfig, IGroupByConfig newConfig) {
    if (!ObjectUtils.equals(pendingConfig.getCountColumnName(), newConfig.getCountColumnName())) {
        throw new IllegalStateException("Cannot merge configs with conflicting count column names");
    }
    if (!ObjectUtils.equals(pendingConfig.getGroupBy(), newConfig.getGroupBy())) {
        throw new IllegalStateException("Cannot merge configs with conflicting group by columns");
    }
    if (!ObjectUtils.equals(pendingConfig.getSubtotals(), newConfig.getSubtotals())) {
        throw new IllegalStateException("Cannot merge configs with conflicting subtotals setting");
    }
    return new IGroupByConfig() {
        @Override
        public List<String> getGroupBy() {
            return pendingConfig.getGroupBy();
        }

        @Override
        public List<Summary> getSummaries() {
            HashSet<Summary> summaries = new OrderedHashSet<>();
            summaries.addAll(pendingConfig.getSummaries());
            summaries.addAll(newConfig.getSummaries());
            return new ArrayList<>(summaries);
        }

        @Override
        public String getCountColumnName() {
            return pendingConfig.getCountColumnName();
        }

        @Override
        public List<String> getSubtotals() {
            return pendingConfig.getSubtotals();
        }
    };
}
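
The getSummaries implementation above unions two summary lists through a set so that duplicates are dropped, then hands back a list. A generic sketch of the same union-and-deduplicate idea using only java.util types (LinkedHashSet is used here as a stand-in for the viewserver OrderedHashSet, on the assumption that its purpose is to preserve insertion order):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;

public class MergeSummaryNames {
    public static void main(String[] args) {
        List<String> pending = Arrays.asList("sum(qty)", "avg(price)");
        List<String> added = Arrays.asList("avg(price)", "count(*)");

        // LinkedHashSet extends HashSet and keeps first-insertion order;
        // addAll drops the duplicate "avg(price)".
        HashSet<String> merged = new LinkedHashSet<>(pending);
        merged.addAll(added);

        List<String> result = new ArrayList<>(merged);
        System.out.println(result); // [sum(qty), avg(price), count(*)]
    }
}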

From source file:org.alfresco.repo.node.MLPropertyInterceptor.java

public Locale getClosestLocale(Collection<?> collection) {
    if (collection.size() == 0) {
        return null;
    }
    // Use the available keys as options
    HashSet<Locale> locales = new HashSet<Locale>();
    for (Object o : collection) {
        MLText mlText = (MLText) o;
        locales.addAll(mlText.keySet());
    }
    // Try the content locale
    Locale locale = I18NUtil.getContentLocale();
    Locale match = I18NUtil.getNearestLocale(locale, locales);
    if (match == null) {
        // Try just the content locale language
        locale = I18NUtil.getContentLocaleLang();
        match = I18NUtil.getNearestLocale(locale, locales);
        if (match == null) {
            // No close matches for the locale - go for the default locale
            locale = I18NUtil.getLocale();
            match = I18NUtil.getNearestLocale(locale, locales);
            if (match == null) {
                // just get any locale
                match = I18NUtil.getNearestLocale(null, locales);
            }
        }
    }
    return match;
}

From source file:com.datatorrent.lib.io.fs.AbstractFSDirectoryInputOperatorTest.java

@Test
public void testSinglePartiton() throws Exception {
    FileContext.getLocalFSFileContext().delete(new Path(new File(testMeta.dir).getAbsolutePath()), true);
    HashSet<String> allLines = Sets.newHashSet();
    for (int file = 0; file < 2; file++) {
        HashSet<String> lines = Sets.newHashSet();
        for (int line = 0; line < 2; line++) {
            lines.add("f" + file + "l" + line);
        }
        allLines.addAll(lines);
        FileUtils.write(new File(testMeta.dir, "file" + file), StringUtils.join(lines, '\n'));
    }

    TestFSDirectoryInputOperator oper = new TestFSDirectoryInputOperator();

    CollectorTestSink<String> queryResults = new CollectorTestSink<String>();
    @SuppressWarnings({ "unchecked", "rawtypes" })
    CollectorTestSink<Object> sink = (CollectorTestSink) queryResults;
    oper.output.setSink(sink);

    oper.setDirectory(testMeta.dir);
    oper.getScanner().setFilePatternRegexp(".*file[\\d]");

    oper.setup(null);
    for (long wid = 0; wid < 3; wid++) {
        oper.beginWindow(wid);
        oper.emitTuples();
        oper.endWindow();
    }
    oper.teardown();

    Assert.assertEquals("number tuples", 4, queryResults.collectedTuples.size());
    Assert.assertEquals("lines", allLines, new HashSet<String>(queryResults.collectedTuples));

}

From source file:org.apache.solr.handler.clustering.carrot2.CarrotClusteringEngine.java

@Override
protected Set<String> getFieldsToLoad(SolrQueryRequest sreq) {
    SolrParams solrParams = sreq.getParams();

    HashSet<String> fields = new HashSet<>(getFieldsForClustering(sreq));
    fields.add(idFieldName);
    fields.add(solrParams.get(CarrotParams.URL_FIELD_NAME, "url"));
    fields.addAll(getCustomFieldsMap(solrParams).keySet());

    String languageField = solrParams.get(CarrotParams.LANGUAGE_FIELD_NAME);
    if (StringUtils.isNotBlank(languageField)) {
        fields.add(languageField);
    }
    return fields;
}

From source file:de.dfki.km.perspecting.obie.experiments.ProperNameExperiment.java

/**
 * Test method for
 * {@link de.dfki.km.perspecting.obie.dixi.service.SimpleScobieService#extractInformationFromURL(java.lang.String, java.lang.String)}
 * .
 */
@Test
public void testExtractInformationFromURL() {
    try {
        StringBuffer b = new StringBuffer();

        for (int i = 0; i < 1; i++) {

            Document document = pipeline.createDocument(
                    FileUtils.toFile(new URL("http://en.wikipedia.org/wiki/Special:Random")),
                    new URI("http://en.wikipedia.org/wiki/Special:Random"), MediaType.HTML,
                    "SELECT * WHERE {?s ?p ?o}", Language.EN);

            Evaluator evaluator = new Evaluator(pipeline);

            for (int step = 0; pipeline.hasNext(step) && step <= 5; step = pipeline.execute(step, document)) {
                System.out.println(step);
            }

            HashSet<String> wordsOfPhrases = new HashSet<String>();
            HashSet<String> wordsOfDocument = new HashSet<String>();

            for (Token token : document.getTokens()) {
                wordsOfDocument.add(token.toString());
            }

            int count = 0;
            for (TokenSequence<String> np : document.getNounPhrases()) {
                String[] words = np.toString().split("[\\s]+");
                count += words.length;
                wordsOfPhrases.addAll(Arrays.asList(words));
            }

            b.append(document.getTokens().size() + "\t" + document.getNounPhrases().size() + "\t" + count + "\t"
                    + wordsOfPhrases.size() + "\t" + wordsOfDocument.size() + "\n");

        }
        System.out.println("tok in doc\tnp in doc\ttok in nps\tdistinct tok in nps\tdistinct tok in doc");
        System.out.println(b);
    } catch (Exception e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }

}

From source file:org.apache.hadoop.hbase.master.GroupAssignmentManager.java

/**
 * Balances the regions belonging to one table.
 * 
 * @param table
 *          the table you want to balance
 */
public static void balanceTable(String table) {
    String groups[] = getTableGroups(table);
    HashSet<HServerInfo> servers = new HashSet<HServerInfo>();

    for (String group : groups) {
        System.out.println("table belong to :" + group);
        if (groupServers.get(group) != null) {
            servers.addAll(groupServers.get(group));
        }
    }

    HashMap<HRegionInfo, HServerAddress> maps = CheckMeta.getRegionAddress(table);
    HashMap<HRegionInfo, HServerInfo> map = new HashMap<HRegionInfo, HServerInfo>();
    for (Entry<HRegionInfo, HServerAddress> e : maps.entrySet()) {
        for (HServerInfo info : servers) {
            if (CheckMeta.isThisAddress(info, e.getValue())) {
                map.put(e.getKey(), info);
            }
        }
    }
    doBalance(servers, map);

}

From source file:com.cloudant.sync.replication.BasicPullStrategy.java

public List<Callable<DocumentRevsList>> createTasks(List<String> ids,
        Map<String, Collection<String>> revisions) {

    List<Callable<DocumentRevsList>> tasks = new ArrayList<Callable<DocumentRevsList>>();
    for (String id : ids) {
        // get list for atts_since (these are possible ancestors we have, it's ok to be eager
        // and get all revision IDs higher up in the tree even if they're not our ancestors and
        // belong to a different subtree)
        HashSet<String> possibleAncestors = new HashSet<String>();
        for (String revId : revisions.get(id)) {
            List<String> thesePossibleAncestors = targetDb.getDbCore().getPossibleAncestorRevisionIDs(id, revId,
                    50);
            if (thesePossibleAncestors != null) {
                possibleAncestors.addAll(thesePossibleAncestors);
            }
        }
        tasks.add(GetRevisionTask.createGetRevisionTask(this.sourceDb, id, revisions.get(id), possibleAncestors,
                config.pullAttachmentsInline));
    }
    return tasks;
}