Example usage for java.util HashSet addAll

List of usage examples for java.util HashSet addAll

Introduction

On this page you can find usage examples for java.util.HashSet.addAll.

Prototype

boolean addAll(Collection<? extends E> c);

Document

Adds all of the elements in the specified collection to this set if they're not already present (optional operation).
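
As a quick illustrative sketch (the class name HashSetAddAllDemo is hypothetical and not taken from any of the projects below), addAll performs a set union and returns true only if the set actually changed as a result of the call:

import java.util.Arrays;
import java.util.HashSet;

// Hypothetical demo class, for illustration only.
public class HashSetAddAllDemo {
    public static void main(String[] args) {
        HashSet<String> set = new HashSet<>(Arrays.asList("a", "b"));

        // addAll behaves as a set union: elements already present are ignored,
        // and the method returns true only if the set changed.
        boolean changed = set.addAll(Arrays.asList("b", "c"));
        System.out.println(changed);    // true  ("c" was added)
        System.out.println(set.size()); // 3     (iteration order is unspecified)

        // Adding only elements that are already present leaves the set unchanged.
        changed = set.addAll(Arrays.asList("a", "c"));
        System.out.println(changed);    // false
    }
}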

Usage

From source file:org.apache.nutch.crawl.DbUpdaterAllJob.java

public Map<String, Object> run(Map<String, Object> args) throws Exception {
    String crawlId = (String) args.get(Nutch.ARG_CRAWL);
    numJobs = 1;
    currentJobNum = 0;
    currentJob = new NutchJob(getConf(), "updateAll-table:" + (this.getConf().get(NutchConstant.BATCH_ID_KEY)));
    if (crawlId != null) {
        currentJob.getConfiguration().set(Nutch.CRAWL_ID_KEY, crawlId);
    }
    // job.setBoolean(ALL, updateAll);
    ScoringFilters scoringFilters = new ScoringFilters(getConf());
    HashSet<WebPage.Field> fields = new HashSet<WebPage.Field>(FIELDS);
    fields.addAll(scoringFilters.getFields());

    // Partition by {url}, sort by {url,score} and group by {url}.
    // This ensures that the inlinks are sorted by score when they enter
    // the reducer.
    currentJob.getConfiguration().set(NutchConstant.STEPZKBATCHTIME, new Date().toLocaleString());
    currentJob.setPartitionerClass(UrlOnlyPartitioner.class);
    currentJob.setSortComparatorClass(UrlScoreComparator.class);
    currentJob.setGroupingComparatorClass(UrlOnlyComparator.class);
    String batchZKId = this.getConf().get(NutchConstant.BATCH_ID_KEY);
    NutchConstant.setUrlConfig(currentJob.getConfiguration(), 2);
    LOG.info("DbUpdaterAllJob: batchId: " + batchZKId + " batchTime:"
            + NutchConstant.getBatchTime(currentJob.getConfiguration()));
    LOG.info("DbUpdaterAllJob  batchId: " + batchZKId);
    StorageUtils.initMapperJob(currentJob, fields, WebPage.class, UrlWithScore.class, NutchWritable.class,
            DbUpdateAllMapper.class);
    StorageUtils.initReducerJob(currentJob, WebPage.class, DbUpdateAllReducer.class);
    currentJob.waitForCompletion(true);
    ToolUtil.recordJobStatus(null, currentJob, results);
    return results;
}

From source file:edu.umn.msi.tropix.proteomics.conversion.impl.MzXMLToDTAConverterStreamingImplTest.java

private void testStructure(final MzXMLToDTAOptions options) throws Exception {
    final MzXMLToDTAConverterStreamingImpl converter = new MzXMLToDTAConverterStreamingImpl();
    DTAList dtaList;
    InputStream mzxmlStream;
    mzxmlStream = ProteomicsTests.getResourceAsStream("validMzXML.mzxml");
    try {
        dtaList = converter.mzxmlToDTA(mzxmlStream, options);
        VerifyUtils.verifyDTAList(dtaList);
    } finally {
        IO_UTILS.closeQuietly(mzxmlStream);
    }

    mzxmlStream = ProteomicsTests.getResourceAsStream("validMzXML.mzxml");
    try {
        dtaList = converter.mzxmlToDTA(mzxmlStream, options);
        final HashSet<String> originalNames = new HashSet<String>();
        originalNames.addAll(Arrays.asList(new String[] { "mrr.103.106.1.dta", "mrr.1105.1106.2.dta",
                "mrr.1105.1106.3.dta", "mrr.2025.2026.2.dta", "mrr.2025.2026.3.dta", "mrr.3009.3011.1.dta" }));

        final HashSet<String> mzxmlNames = new HashSet<String>();
        for (final DTAList.Entry entry : dtaList) {
            mzxmlNames.add(entry.getName());
        }
        assert mzxmlNames.equals(originalNames);
    } finally {
        IO_UTILS.closeQuietly(mzxmlStream);
    }

}

From source file:org.hibersap.mapping.model.BapiMapping.java

public Set<ParameterMapping> getAllParameters() {
    HashSet<ParameterMapping> parameters = new HashSet<ParameterMapping>();
    parameters.addAll(importParams);
    parameters.addAll(exportParams);
    parameters.addAll(tableParams);
    return parameters;
}

From source file:com.salesmanager.core.service.tax.impl.dao.TaxRateDescriptionDao.java

public Set<TaxRateDescription> findByTaxRateId(long id) {
    try {
        List descriptions = super.getSession().createCriteria(TaxRateDescription.class)
                .add(Restrictions.eq("id.taxRateId", id)).list();
        HashSet set = new HashSet();
        set.addAll(descriptions);
        return set;
    } catch (RuntimeException re) {
        log.error("get failed", re);
        throw re;
    }
}

From source file:com.esri.geoevent.test.performance.report.AbstractFileRollOverReportWriter.java

@Override
public List<String> getReportColumnNames(List<String> reportColumns, List<String> additionalReportColumns) {
    List<String> columns = getDefaultColumnNames();
    if (reportColumns != null && reportColumns.size() > 0) {
        columns = reportColumns;
    }

    if (additionalReportColumns != null && additionalReportColumns.size() > 0) {
        HashSet<String> uniqueColumns = new HashSet<String>(columns);
        uniqueColumns.addAll(additionalReportColumns);
        columns = new ArrayList<String>(uniqueColumns);
    }

    return columns;
}

From source file:edu.anu.spice.SemanticConcept.java

public float similarity(Object o) {
    if (o == null) {
        return 0;
    }
    if (!(o instanceof SemanticConcept)) {
        return 0;
    }
    SemanticConcept otherConcept = (SemanticConcept) o;
    HashSet<String> concept_intersection = new HashSet<String>(this.concepts);
    concept_intersection.retainAll(otherConcept.concepts);
    if (!concept_intersection.isEmpty()) {
        return 1;
    }
    HashSet<Integer> synset_intersection = new HashSet<Integer>(this.synsets);
    synset_intersection.retainAll(otherConcept.synsets);
    HashSet<Integer> synset_union = new HashSet<Integer>(this.synsets);
    synset_union.addAll(otherConcept.synsets);
    return ((float) synset_intersection.size()) / ((float) synset_union.size());
}

From source file:org.apache.nutch.crawl.DbUpdaterJob.java

public Map<String, Object> run(Map<String, Object> args) throws Exception {
    String crawlId = (String) args.get(Nutch.ARG_CRAWL);
    numJobs = 1;
    Integer numTasks = (Integer) args.get(Nutch.ARG_NUMTASKS);
    currentJobNum = 0;
    String gids = NutchConstant.getGids(getConf(), "all");
    currentJob = new NutchJob(getConf(),
            "[" + (this.getConf().get(NutchConstant.BATCH_ID_KEY)) + "]updateTable[" + gids + "]");
    if (crawlId != null) {
        currentJob.getConfiguration().set(Nutch.CRAWL_ID_KEY, crawlId);
    }
    // job.setBoolean(ALL, updateAll);
    ScoringFilters scoringFilters = new ScoringFilters(getConf());
    HashSet<WebPage.Field> fields = new HashSet<WebPage.Field>(FIELDS);
    fields.addAll(scoringFilters.getFields());

    // Partition by {url}, sort by {url,score} and group by {url}.
    // This ensures that the inlinks are sorted by score when they enter
    // the reducer.

    currentJob.setPartitionerClass(UrlOnlyPartitioner.class);
    currentJob.setSortComparatorClass(UrlScoreComparator.class);
    currentJob.setGroupingComparatorClass(UrlOnlyComparator.class);
    if (numTasks == null || numTasks < 1) {
        currentJob.setNumReduceTasks(
                currentJob.getConfiguration().getInt("mapred.reduce.tasks", currentJob.getNumReduceTasks()));
    } else {
        currentJob.setNumReduceTasks(numTasks);
    }

    String batchZKId = this.getConf().get(NutchConstant.BATCH_ID_KEY);
    NutchConstant.setUrlConfig(currentJob.getConfiguration(), 2);
    if (NutchConstant.preparStartJob(currentJob.getConfiguration(), NutchConstant.BatchNode.dbUpdateNode,
            NutchConstant.BatchNode.parseNode, LOG, false) == 0)
        return null;
    LOG.info("DbUpdaterJob: batchId: " + batchZKId + " batchTime:"
            + NutchConstant.getBatchTime(currentJob.getConfiguration()));
    StorageUtils.initMapperJob(currentJob, fields, WebPageIndex.class, UrlWithScore.class, NutchWritable.class,
            DbUpdateMapper.class);
    StorageUtils.initReducerJob(currentJob, WebPage.class, DbUpdateReducer.class);
    currentJob.waitForCompletion(true);
    NutchConstant.preparEndJob(currentJob.getConfiguration(), NutchConstant.BatchNode.dbUpdateNode, LOG, false);
    ToolUtil.recordJobStatus(null, currentJob, results);
    return results;
}

From source file:com.linuxbox.enkive.permissions.SpringContextPermissionService.java

@Override
public boolean canReadMessage(String userId, MessageSummary message) throws CannotGetPermissionsException {
    if (isAdmin()) {
        return true;
    }

    Collection<String> canReadAddresses = canReadAddresses(userId);

    HashSet<String> originalMessageAddresses = new HashSet<String>();
    originalMessageAddresses.addAll(message.getTo());
    originalMessageAddresses.addAll(message.getCc());
    originalMessageAddresses.addAll(message.getBcc());
    originalMessageAddresses.addAll(message.getFrom());
    originalMessageAddresses.add(message.getMailFrom());
    originalMessageAddresses.addAll(message.getRcptTo());

    HashSet<String> normalizedMessageAddresses = new HashSet<String>(originalMessageAddresses.size());
    com.linuxbox.util.CollectionUtils.addAllMapped(normalizedMessageAddresses, originalMessageAddresses,
            emailAddressNormalizer);

    return CollectionUtils.containsAny(normalizedMessageAddresses, canReadAddresses);
}

From source file:com.cyclopsgroup.waterview.servlet.ServletRequestParameters.java

/**
 * Override method doGetAttributeNames in class ServletRequestValueParser
 *
 * @see com.cyclopsgroup.waterview.Attributes#doGetAttributeNames()
 */
protected String[] doGetAttributeNames() {
    HashSet names = new HashSet();
    CollectionUtils.addAll(names, httpServletRequest.getParameterNames());
    names.addAll(extra.keySet());
    return (String[]) names.toArray(ArrayUtils.EMPTY_STRING_ARRAY);
}

From source file:de.huberlin.wbi.cfjava.data.Amap.java

public Set<K> keys() {

    HashSet<K> s;

    s = new HashSet<>();
    s.addAll(content.keySet());

    return s;
}