Example usage for java.util HashSet addAll

List of usage examples for java.util HashSet addAll

Introduction

On this page you can find example usage of java.util.HashSet.addAll.

Prototype

boolean addAll(Collection<? extends E> c);

Document

Adds all of the elements in the specified collection to this set if they're not already present (optional operation).
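
Before the project examples below, here is a minimal, self-contained sketch of the call (the class and variable names are illustrative only, not taken from any of the sources that follow). Note that addAll returns true only if the set actually changed as a result of the call:

import java.util.Arrays;
import java.util.HashSet;
import java.util.List;

public class HashSetAddAllExample {
    public static void main(String[] args) {
        HashSet<String> set = new HashSet<>(Arrays.asList("a", "b"));
        List<String> more = Arrays.asList("b", "c");

        // "b" is already present, so only "c" is added; the set changed, so addAll returns true
        boolean changed = set.addAll(more);

        System.out.println(changed);    // true
        System.out.println(set.size()); // 3
    }
}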

Usage

From source file:nl.b3p.wms.capabilities.ServiceProvider.java

public ServiceProvider shallowClone() {
    ServiceProvider sc = new ServiceProvider();
    sc.setAbbr(this.getAbbr());
    sc.setAbstracts(this.getAbstracts());
    sc.setAccessConstraints(this.getAccessConstraints());
    sc.setFees(this.getFees());
    sc.setGivenName(this.getGivenName());
    sc.setIsSynchronized(false);
    sc.setName(this.getName());
    sc.setTitle(this.getTitle());
    sc.setUrl(this.getUrl());
    sc.setWmsVersion(this.getWmsVersion());
    sc.setUserName(this.getUserName());
    sc.setPassword(this.getPassword());

    sc.setUpdatedDate(this.getUpdatedDate());
    //        sc.setContactInformation(this.getContactInformation());
    //        sc.setCode(this.getCode());
    sc.setExpireDate(this.getExpireDate());
    sc.setPersonalCode(this.getPersonalCode());
    sc.setStatus(this.getStatus());
    sc.setSldUrl(this.getSldUrl());
    sc.setIgnoreResource(this.getIgnoreResource());
    sc.setAllowed(this.getAllowed());
    sc.setUrlServiceProvideCode(this.getUrlServiceProvideCode());

    HashSet dr = new HashSet();
    dr.addAll(this.getDomainResource());
    sc.setDomainResource(dr);

    return sc;
}

From source file:org.apache.nutch.crawl.CrawlDb.java

public int run(String[] args) throws Exception {
    if (args.length < 2) {
        System.err.println(
                "Usage: CrawlDb <crawldb> (-dir <segments> | <seg1> <seg2> ...) [-force] [-normalize] [-filter] [-noAdditions]");
        System.err.println("\tcrawldb\tCrawlDb to update");
        System.err.println("\t-dir segments\tparent directory containing all segments to update from");
        System.err.println("\tseg1 seg2 ...\tlist of segment names to update from");
        System.err.println("\t-force\tforce update even if CrawlDb appears to be locked (CAUTION advised)");
        System.err
                .println("\t-normalize\tuse URLNormalizer on urls in CrawlDb and segment (usually not needed)");
        System.err.println("\t-filter\tuse URLFilters on urls in CrawlDb and segment");
        System.err.println(
                "\t-noAdditions\tonly update already existing URLs, don't add any newly discovered URLs");
        return -1;
    }
    boolean normalize = false;
    boolean filter = false;
    boolean force = false;
    final FileSystem fs = FileSystem.get(getConf());
    boolean additionsAllowed = getConf().getBoolean(CRAWLDB_ADDITIONS_ALLOWED, true);
    HashSet<Path> dirs = new HashSet<Path>();
    for (int i = 1; i < args.length; i++) {
        if (args[i].equals("-normalize")) {
            normalize = true;
        } else if (args[i].equals("-filter")) {
            filter = true;
        } else if (args[i].equals("-force")) {
            force = true;
        } else if (args[i].equals("-noAdditions")) {
            additionsAllowed = false;
        } else if (args[i].equals("-dir")) {
            FileStatus[] paths = fs.listStatus(new Path(args[++i]), HadoopFSUtil.getPassDirectoriesFilter(fs));
            dirs.addAll(Arrays.asList(HadoopFSUtil.getPaths(paths)));
        } else {
            dirs.add(new Path(args[i]));
        }
    }
    try {
        update(new Path(args[0]), dirs.toArray(new Path[dirs.size()]), normalize, filter, additionsAllowed,
                force);
        return 0;
    } catch (Exception e) {
        LOG.fatal("CrawlDb update: " + StringUtils.stringifyException(e));
        return -1;
    }
}

From source file:gov.nih.nci.cabig.caaers.domain.dto.EvaluationResultDTO.java

public Set<AdverseEvent> getAllSeriousAdverseEvents() {
    HashSet<AdverseEvent> aes = new HashSet<AdverseEvent>();
    for (Integer aeReportId : processedRulesEngineResultMap.keySet()) {
        aes.addAll(getSeriousAdverseEvents(aeReportId));
    }
    return aes;
}

From source file:org.openremote.modeler.service.impl.ResourceServiceImpl.java

/**
 * This implementation has been moved and delegates to {@link DesignerState#save}.
 */
@Override
@Deprecated
@Transactional
public LocalFileCache saveResourcesToBeehive(Collection<Panel> panels, long maxOid) {
    UserAccount currentUserAccount = userService.getCurrentUserAccount();
    LocalFileCache cache = createLocalFileCache(currentUserAccount);

    // Create a set of panels to eliminate potential duplicate instances...

    HashSet<Panel> panelSet = new HashSet<Panel>();
    panelSet.addAll(panels);

    // Delegate implementation to DesignerState...

    DesignerState state = createDesignerState(currentUserAccount, cache);
    state.save(panelSet, maxOid);

    return cache;
}

From source file:com.fstx.stdlib2.author.AuthorizationBeanBuilder.java

public AuthorizationBean buildAuthorizationBean(String user) throws DaoException {

    HashSet rightsSet = new HashSet();
    HashSet rightsStringSet = new HashSet();
    GroupRightDao grDao = GroupRightDao.factory.build();
    //Get user groups
    UserGroupDao dao = UserGroupDao.factory.build();

    List l = dao.searchUserGroups(UserGroupDao.SELECT_BY_USER, user);

    //for each group
    Iterator i = l.iterator();
    UserGroup g3 = null;
    Collection rightsList = null;
    while (i.hasNext()) {
        g3 = (UserGroup) i.next();
        //get list of rights for group
        rightsList = grDao.find(g3);

        // add the group's rights to the user's list
        rightsSet.addAll(rightsList);

        //log.info("\n\nI just queried this group for rights: "+
        // g3.getGroupname());
    }
    // We only want to deal with Strings, not GroupRight objects; this makes
    // searching the hash set easier.
    // Convert GroupRights to Strings.
    Iterator i2 = rightsSet.iterator();
    GroupRight grTemp = null;
    while (i2.hasNext()) {
        grTemp = (GroupRight) i2.next();
        //log.info("\n\nAdding the right" + grTemp.getRightCode());
        rightsStringSet.add(grTemp.getRightCode());
    }

    // add the composite to the bean.
    AuthorizationBean ab = new AuthorizationBean(rightsStringSet);

    // Return the bean.
    return ab;
}

From source file:edu.uga.cs.fluxbuster.clustering.DomainCluster.java

/**
 * Returns a string representing this DomainCluster.
 *
 * @return a string representing this DomainCluster.
 */
@Override
public String toString() {
    StringBuffer buf = new StringBuffer();

    ArrayList<String> domains = new ArrayList<String>();
    HashSet<InetAddress> ipset = new HashSet<InetAddress>();
    ArrayList<InetAddress> ips = new ArrayList<InetAddress>();
    for (CandidateFluxDomain cfd : this.getCandidateDomains()) {
        domains.add(cfd.getDomainName());
        ipset.addAll(cfd.getIps());
    }
    ips.addAll(ipset);
    Collections.sort(domains);
    Collections.sort(ips, new InetAddressComparator());

    buf.append("Domains: \n");
    for (String domain : domains) {
        buf.append("\t" + domain + "\n");
    }

    buf.append("IP's: \n");
    for (InetAddress ip : ips) {
        buf.append("\t" + ip.getHostAddress() + "\n");
    }

    buf.append("Query Volume: " + this.getQueries() + "\n");
    buf.append("Distinct IPs: " + this.getIps().size() + "\n");
    buf.append("IP Diversity: " + this.getIpDiversity() + "\n");

    double sumAvgTTL = 0.0;
    for (double avgTTL : this.getAvgTTLs()) {
        sumAvgTTL += avgTTL;
    }
    buf.append("Average TTL: " + sumAvgTTL / this.getAvgTTLs().size() + "\n");

    double sumGrowthRatios = 0.0;
    for (double growthRatio : this.getGrowthRatios()) {
        sumGrowthRatios += growthRatio;
    }
    buf.append("Average Growth Ratio: " + sumGrowthRatios / this.getGrowthRatios().size() + "\n");

    buf.append("Candidate Flux Domains:\n");
    for (CandidateFluxDomain d : this.getCandidateDomains()) {
        buf.append(d.toString());
    }

    return buf.toString();
}

From source file:org.jdbcluster.privilege.PrivilegeCheckerImpl.java

/**
 * Calculates the statically required privileges.
 *
 * @param calledMethod the method called
 * @param clusterObject the cluster object instance
 * @return the required privileges
 */
private HashSet<String> calcClusterPrivileges(Method calledMethod, PrivilegedCluster clusterObject) {
    HashSet<String> hs = new HashSet<String>();
    PrivilegesMethod pmAnno = calledMethod.getAnnotation(PrivilegesMethod.class);

    hs.addAll(calcStaticClusterPrivileges(clusterObject.getClass()));

    if (pmAnno != null) {
        for (String s : pmAnno.required()) {
            hs.add(s);
        }
    }
    return hs;
}

From source file:org.apache.axis.encoding.TypeMappingImpl.java

/**
 * Returns an array of all the classes contained within this mapping
 */
public Class[] getAllClasses(TypeMappingDelegate next) {
    java.util.HashSet temp = new java.util.HashSet();
    if (next != null) {
        temp.addAll(java.util.Arrays.asList(next.getAllClasses()));
    }
    temp.addAll(class2Pair.keySet());
    return (Class[]) temp.toArray(new Class[temp.size()]);
}

From source file:com.data2semantics.yasgui.mgwtlinker.linker.PermutationMapLinker.java

@Override
public ArtifactSet link(TreeLogger logger, LinkerContext context, ArtifactSet artifacts, boolean onePermutation)
        throws UnableToCompleteException {
    if (onePermutation) {
        Map<String, Set<BindingProperty>> permutationMap = buildPermutationMap(logger, context, artifacts);
        Set<Entry<String, Set<BindingProperty>>> entrySet = permutationMap.entrySet();

        // since we are in onePermutation there should be just one
        // strongName
        // better make sure..
        if (permutationMap.size() != 1) {
            logger.log(Type.ERROR, "There should be only one permutation right now, but there were: '"
                    + permutationMap.size() + "'");
            throw new UnableToCompleteException();
        }

        Entry<String, Set<BindingProperty>> next = entrySet.iterator().next();
        String strongName = next.getKey();
        Set<BindingProperty> bindingProperties = next.getValue();

        // all artifacts for this compilation
        Set<String> artifactsForCompilation = getArtifactsForCompilation(logger, context, artifacts);

        ArtifactSet toReturn = new ArtifactSet(artifacts);
        PermutationArtifact permutationArtifact = new PermutationArtifact(PermutationMapLinker.class,
                strongName, artifactsForCompilation, bindingProperties);

        toReturn.add(permutationArtifact);
        return toReturn;
    }

    ArtifactSet toReturn = new ArtifactSet(artifacts);
    Map<String, Set<BindingProperty>> map = buildPermutationMap(logger, context, artifacts);

    if (map.size() == 0) {
        // hosted mode
        return toReturn;
    }

    Map<String, PermutationArtifact> permutationArtifactAsMap = getPermutationArtifactAsMap(artifacts);

    //we need different file sets/manifests for our dev version (unminimized js), and our stable version
    List<String> stableExternalFiles = getStableExternalFiles(logger, context);
    List<String> devExternalFiles = getDevExternalFiles(logger, context);

    // build manifest html page for our stable version (included as iframe in our webapp)
    String appcacheService = "manifest.appcache";
    String manifestHtmlPage = buildManifestHtmlPage(appcacheService);
    toReturn.add(emitString(logger, manifestHtmlPage, appcacheService + ".html"));

    // build manifest html page for our dev version (included as iframe in our webapp)
    String devManifestHtmlPage = buildManifestHtmlPage(appcacheService + "?type=dev");
    toReturn.add(emitString(logger, devManifestHtmlPage, "manifest.dev.appcache.html"));

    Set<String> allPermutationFiles = getAllPermutationFiles(permutationArtifactAsMap);

    // get all artifacts
    Set<String> allArtifacts = getArtifactsForCompilation(logger, context, artifacts);

    for (Entry<String, PermutationArtifact> entry : permutationArtifactAsMap.entrySet()) {
        PermutationArtifact permutationArtifact = entry.getValue();
        // make a copy of all artifacts
        HashSet<String> filesForCurrentPermutation = new HashSet<String>(allArtifacts);
        // remove all permutations
        filesForCurrentPermutation.removeAll(allPermutationFiles);
        // add files of the one permutation we are interested in
        // leaving the common stuff for all permutations in...
        filesForCurrentPermutation.addAll(entry.getValue().getPermutationFiles());
        filesForCurrentPermutation = appendVersionIfNeeded(filesForCurrentPermutation);

        String permXml = buildPermXml(logger, permutationArtifact, filesForCurrentPermutation,
                stableExternalFiles);

        // emit permutation information file
        SyntheticArtifact emitString = emitString(logger, permXml,
                permutationArtifact.getPermutationName() + PERMUTATION_FILE_ENDING);
        toReturn.add(emitString);

        // build manifest for our stable version
        String manifestFile = entry.getKey() + PERMUTATION_MANIFEST_FILE_ENDING;
        @SuppressWarnings("serial")
        Map<String, String> fallbacks = new HashMap<String, String>() {
            {
                put("/", "../index.jsp");
            }
        };
        String maniFest = buildManiFest(entry.getKey(), stableExternalFiles, filesForCurrentPermutation,
                fallbacks);
        toReturn.add(emitString(logger, maniFest, manifestFile));

        // build manifest for our dev version
        String devManifestFile = entry.getKey() + ".dev" + PERMUTATION_MANIFEST_FILE_ENDING;
        String devManiFest = buildManiFest(entry.getKey(), devExternalFiles, filesForCurrentPermutation);
        toReturn.add(emitString(logger, devManiFest, devManifestFile));

    }

    toReturn.add(createPermutationMap(logger, map));
    return toReturn;

}

From source file:com.haulmont.timesheets.gui.weeklytimesheets.SimpleWeeklyTimesheets.java

public void submitAll() {
    Collection<WeeklyReportEntry> entries = weeklyEntriesDs.getItems();
    if (!entries.isEmpty()) {
        CommitContext commitContext = new CommitContext();
        List<String> validationAlerts = new ArrayList<>();
        for (WeeklyReportEntry weeklyReportEntry : entries) {
            ResultAndCause resultAndCause = validationTools.validateWeeklyReport(weeklyReportEntry);
            if (resultAndCause.isNegative) {
                showNotification(resultAndCause.cause, NotificationType.WARNING);
                return;
            }

            for (final DayOfWeek day : DayOfWeek.values()) {
                String dayOfWeekTime = weeklyReportEntry.getDayOfWeekTime(day);
                if (StringUtils.isNotBlank(dayOfWeekTime)) {
                    HoursAndMinutes hoursAndMinutes = timeParser.parseToHoursAndMinutes(dayOfWeekTime);
                    List<TimeEntry> existingEntries = weeklyReportEntry.getDayOfWeekTimeEntries(day);
                    Set<Tag> defaultTags = weeklyReportEntry.getTask().getDefaultTags();

                    TimeEntry timeEntry = existingEntries != null ? existingEntries.get(0)
                            : metadata.create(TimeEntry.class);
                    timeEntry.setUser(userSession.getCurrentOrSubstitutedUser());
                    timeEntry.setTask(weeklyReportEntry.getTask());
                    timeEntry.setTimeInMinutes(hoursAndMinutes.toMinutes());
                    if (timeEntry.getActivityType() == null) {
                        timeEntry.setActivityType(weeklyReportEntry.getActivityType());
                    }

                    if (CollectionUtils.isNotEmpty(timeEntry.getTags())) {
                        HashSet<Tag> tags = new HashSet<>(timeEntry.getTags());
                        tags.addAll(defaultTags);
                        timeEntry.setTags(tags);
                    } else {
                        timeEntry.setTags(defaultTags);
                    }
                    timeEntry.setDate(
                            DateTimeUtils.getSpecificDayOfWeek(firstDayOfWeek, day.getJavaCalendarDay()));

                    ResultAndCause validationResult = validationTools.validateTags(timeEntry);
                    if (validationResult.isNegative) {
                        validationAlerts.add(formatMessage("notification.timeEntryValidation",
                                validationResult.cause, timeEntry.getTask().getName(), timeEntry.getDate(),
                                HoursAndMinutes.fromTimeEntry(timeEntry)));
                    }

                    commitContext.getCommitInstances().add(timeEntry);
                }
            }
        }

        getDsContext().getDataSupplier().commit(commitContext);
        updateWeek();

        if (validationAlerts.size() > 0) {
            showMessageDialog(getMessage("caption.attention"), StringUtils.join(validationAlerts, "<br/>"),
                    MessageType.WARNING_HTML);
        }
    }
}