Example usage for java.util HashSet addAll

Introduction

This page collects usage examples for java.util.HashSet.addAll, drawn from open-source projects.

Prototype

boolean addAll(Collection<? extends E> c);

Document

Adds all of the elements in the specified collection to this set if they're not already present (optional operation).
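
The call returns true if the set changed as a result. Before the project examples, here is a minimal, self-contained sketch of that behavior; the class name DemoHashSetAddAll and the sample values are illustrative only, not taken from the examples below.

import java.util.Arrays;
import java.util.HashSet;
import java.util.List;

public class DemoHashSetAddAll {
    public static void main(String[] args) {
        HashSet<String> set = new HashSet<>(Arrays.asList("a", "b"));
        List<String> more = Arrays.asList("b", "c");

        // addAll returns true because the set changed: "c" was added,
        // while the duplicate "b" was silently ignored.
        boolean changed = set.addAll(more);
        System.out.println(changed);    // true
        System.out.println(set.size()); // 3

        // A second call with the same elements changes nothing and returns false.
        System.out.println(set.addAll(more)); // false
    }
}

This same pattern, building a HashSet and merging another collection into it to deduplicate, recurs throughout the examples below.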

Usage

From source file:com.data2semantics.yasgui.mgwtlinker.linker.PermutationMapLinker.java

protected String buildPermXml(TreeLogger logger, PermutationArtifact permutationArtifact,
        Set<String> gwtCompiledFiles, List<String> otherResources) throws UnableToCompleteException {
    HashSet<String> namesForPermXml = new HashSet<String>(gwtCompiledFiles);
    namesForPermXml.addAll(otherResources);

    try {
        return xmlPermutationProvider.writePermutationInformation(permutationArtifact.getPermutationName(),
                permutationArtifact.getBindingProperties(), namesForPermXml);
    } catch (XMLPermutationProviderException e) {
        logger.log(Type.ERROR, "can not build xml for permutation file", e);
        throw new UnableToCompleteException();
    }

}

From source file:org.apache.directory.studio.schemaeditor.model.DependenciesComputer.java

/**
 * Gets the dependencies of the given schema.
 *
 * @param schema
 *      the schema
 * @return
 *      the dependencies of the schema
 */
@SuppressWarnings("unchecked")
public List<Schema> getDependencies(Schema schema) {
    List<Schema> dependencies = (List<Schema>) schemasDependencies.get(schema);

    HashSet<Schema> set = new HashSet<Schema>();

    if (dependencies != null) {
        set.addAll(dependencies);
    }

    return Arrays.asList(set.toArray(new Schema[0]));
}

From source file:com.aliyun.odps.graph.local.worker.Worker.java

@SuppressWarnings("unchecked")
public void processWorkerMutations(VertexResolver vertexResolver) throws IOException {
    HashSet<VERTEX_ID> mutationIDs = new HashSet<VERTEX_ID>();
    mutationIDs.addAll(mVertexMutations.keySet());

    for (WritableComparable<?> id : mMsgManager.getVertexIDList()) {
        if (vertices.get(id) == null) {
            mutationIDs.add((VERTEX_ID) id);
        }
    }

    for (VERTEX_ID id : mutationIDs) {
        processMutation(id, mVertexMutations.get(id), vertexResolver);
    }
    mVertexMutations = new HashMap<VERTEX_ID, LocalVertexMutations>();
}

From source file:edu.cornell.mannlib.vitro.webapp.utils.dataGetter.ExecuteDataRetrieval.java

public List<DataGetter> retrieveDataGetters() {
    //Using a hashset to prevent duplicates
    //Would this work with interfaces? In this case, all of them would be interfaces?
    HashSet<DataGetter> dataGetters = new HashSet<DataGetter>();
    List<VClass> vclasses = this.individual.getVClasses();
    //For any of the vclasses that apply to this individual, check whether
    //there are any data getters assigned for that class
    try {
        for (VClass v : vclasses) {
            String classURI = v.getURI();
            //How to handle duplicates?
            dataGetters.addAll(DataGetterUtils.getDataGettersForClass(vreq, displayModel, classURI));
        }
    } catch (Exception ex) {
        log.error("Error occurred in retrieving datagetters for vclasses", ex);
    }
    List<DataGetter> dgList = new ArrayList<DataGetter>(dataGetters);
    return dgList;
}

From source file:edu.internet2.middleware.shibboleth.common.attribute.provider.ShibbolethSAML1AttributeAuthority.java

/** {@inheritDoc} */
public Map<String, BaseAttribute> getAttributes(
        SAMLProfileRequestContext<? extends SAMLObject, ? extends ResponseAbstractType, NameIdentifier, ? extends AbstractSAML1ProfileConfiguration> requestContext)
        throws AttributeRequestException {
    HashSet<String> requestedAttributes = new HashSet<String>();

    // get attributes from the message
    Set<String> queryAttributeIds = getAttributeIds(requestContext.getInboundSAMLMessage());
    requestedAttributes.addAll(queryAttributeIds);

    // get attributes from metadata
    Set<String> metadataAttributeIds = getAttribtueIds(requestContext.getPeerEntityMetadata());
    requestedAttributes.addAll(metadataAttributeIds);

    requestContext.setRequestedAttributes(requestedAttributes);

    Map<String, BaseAttribute> attributes = attributeResolver.resolveAttributes(requestContext);

    if (filteringEngine != null) {
        attributes = filteringEngine.filterAttributes(attributes, requestContext);
    }

    return attributes;
}

From source file:org.apache.hadoop.corona.ClusterNode.java

public Set<GrantId> getGrants() {
    HashSet<GrantId> ret = new HashSet<GrantId>();
    ret.addAll(grants.keySet());
    return ret;
}

From source file:org.apache.samza.execution.JobNodeConfigurationGenerator.java

/**
 * Serializes the {@link Serde} instances for operators, adds them to the provided config, and
 * sets the serde configuration for the input/output/intermediate streams appropriately.
 *
 * We try to preserve the number of Serde instances before and after serialization. However, we don't
 * guarantee that references shared between these serde instances (e.g. a Jackson ObjectMapper shared
 * between two JSON serdes) are shared after deserialization too.
 *
 * Ideally, all the user-defined objects in the application should be serialized and deserialized in one pass
 * from the same output/input stream so that we can maintain reference-sharing relationships.
 *
 * @param configs the configs to add serialized serde instances and stream serde configs to
 */
private void configureSerdes(Map<String, String> configs, Map<String, StreamEdge> inEdges,
        Map<String, StreamEdge> outEdges, List<StoreDescriptor> stores, Collection<String> tables,
        JobNode jobNode) {
    // collect all key and msg serde instances for streams
    Map<String, Serde> streamKeySerdes = new HashMap<>();
    Map<String, Serde> streamMsgSerdes = new HashMap<>();
    inEdges.keySet().forEach(streamId -> addSerdes(jobNode.getInputSerdes(streamId), streamId, streamKeySerdes,
            streamMsgSerdes));
    outEdges.keySet().forEach(streamId -> addSerdes(jobNode.getOutputSerde(streamId), streamId, streamKeySerdes,
            streamMsgSerdes));

    Map<String, Serde> storeKeySerdes = new HashMap<>();
    Map<String, Serde> storeMsgSerdes = new HashMap<>();
    stores.forEach(storeDescriptor -> {
        storeKeySerdes.put(storeDescriptor.getStoreName(), storeDescriptor.getKeySerde());
        storeMsgSerdes.put(storeDescriptor.getStoreName(), storeDescriptor.getMsgSerde());
    });

    Map<String, Serde> tableKeySerdes = new HashMap<>();
    Map<String, Serde> tableMsgSerdes = new HashMap<>();
    tables.forEach(tableId -> {
        addSerdes(jobNode.getTableSerdes(tableId), tableId, tableKeySerdes, tableMsgSerdes);
    });

    // for each unique stream or store serde instance, generate a unique name and serialize to config
    HashSet<Serde> serdes = new HashSet<>(streamKeySerdes.values());
    serdes.addAll(streamMsgSerdes.values());
    serdes.addAll(storeKeySerdes.values());
    serdes.addAll(storeMsgSerdes.values());
    serdes.addAll(tableKeySerdes.values());
    serdes.addAll(tableMsgSerdes.values());
    SerializableSerde<Serde> serializableSerde = new SerializableSerde<>();
    Base64.Encoder base64Encoder = Base64.getEncoder();
    Map<Serde, String> serdeUUIDs = new HashMap<>();
    serdes.forEach(serde -> {
        String serdeName = serdeUUIDs.computeIfAbsent(serde,
                s -> serde.getClass().getSimpleName() + "-" + UUID.randomUUID().toString());
        configs.putIfAbsent(String.format(SerializerConfig.SERDE_SERIALIZED_INSTANCE(), serdeName),
                base64Encoder.encodeToString(serializableSerde.toBytes(serde)));
    });

    // set key and msg serdes for streams to the serde names generated above
    streamKeySerdes.forEach((streamId, serde) -> {
        String streamIdPrefix = String.format(StreamConfig.STREAM_ID_PREFIX(), streamId);
        String keySerdeConfigKey = streamIdPrefix + StreamConfig.KEY_SERDE();
        configs.put(keySerdeConfigKey, serdeUUIDs.get(serde));
    });

    streamMsgSerdes.forEach((streamId, serde) -> {
        String streamIdPrefix = String.format(StreamConfig.STREAM_ID_PREFIX(), streamId);
        String valueSerdeConfigKey = streamIdPrefix + StreamConfig.MSG_SERDE();
        configs.put(valueSerdeConfigKey, serdeUUIDs.get(serde));
    });

    // set key and msg serdes for stores to the serde names generated above
    storeKeySerdes.forEach((storeName, serde) -> {
        String keySerdeConfigKey = String.format(StorageConfig.KEY_SERDE, storeName);
        configs.put(keySerdeConfigKey, serdeUUIDs.get(serde));
    });

    storeMsgSerdes.forEach((storeName, serde) -> {
        String msgSerdeConfigKey = String.format(StorageConfig.MSG_SERDE, storeName);
        configs.put(msgSerdeConfigKey, serdeUUIDs.get(serde));
    });

    // set key and msg serdes for tables to the serde names generated above
    tableKeySerdes.forEach((tableId, serde) -> {
        String keySerdeConfigKey = String.format(JavaTableConfig.STORE_KEY_SERDE, tableId);
        configs.put(keySerdeConfigKey, serdeUUIDs.get(serde));
    });

    tableMsgSerdes.forEach((tableId, serde) -> {
        String valueSerdeConfigKey = String.format(JavaTableConfig.STORE_MSG_SERDE, tableId);
        configs.put(valueSerdeConfigKey, serdeUUIDs.get(serde));
    });
}

From source file:com.predic8.membrane.core.cloud.etcd.EtcdBasedConfigurator.java

private void cleanUpNotRunningNodes(HashSet<EtcdNodeInformation> newRunningNodes) {
    HashSet<EtcdNodeInformation> currentlyRunningNodes = new HashSet<EtcdNodeInformation>();
    for (String module : runningNodesForModule.keySet()) {
        currentlyRunningNodes.addAll(runningNodesForModule.get(module));
    }
    for (EtcdNodeInformation node : newRunningNodes) {
        currentlyRunningNodes.remove(node);
    }
    for (EtcdNodeInformation node : currentlyRunningNodes) {
        shutdownRunningClusterNode(node);
    }

    HashSet<String> modules = new HashSet<String>();
    for (String module : runningNodesForModule.keySet()) {
        modules.add(module);
    }
    for (String module : modules) {
        if (runningNodesForModule.get(module).size() == 0) {
            runningNodesForModule.remove(module);
            shutDownRunningModuleServiceProxy(module);
        }
    }
}

From source file:org.jactr.io.antlr3.parser.AbstractModelParser.java

synchronized public Collection<ITreeTracker> getTreeTrackers() {
    if (_treeTrackers == null)
        return Collections.emptyList();

    HashSet<ITreeTracker> trackers = new HashSet<ITreeTracker>();
    for (Collection<ITreeTracker> tracker : _treeTrackers.values())
        trackers.addAll(tracker);

    return trackers;
}

From source file:de.ks.flatadocdb.metamodel.EntityDescriptor.java

public EntityDescriptor(Builder b) {
    this.entityClass = b.entityClass;
    this.persister = b.persister;
    this.idGetterAccess = b.idGetterAccess;
    this.idSetterAccess = b.idSetterAccess;
    this.pathInRepoGetterAccess = b.pathInRepoGetterAccess;
    this.pathInRepoSetterAccess = b.pathInRepoSetterAccess;
    this.naturalIdFieldAccess = b.naturalIdFieldAccess;
    this.versionGetterAccess = b.versionGetterAccess;
    this.versionSetterAccess = b.versionSetterAccess;
    this.lifecycleMethods = Collections.unmodifiableMap(b.lifecycleMethods);
    this.propertyPersisters = Collections.unmodifiableMap(b.propertyPersisters);
    this.toOneRelations = Collections.unmodifiableSet(b.toOneRelations);
    this.toManyRelations = Collections.unmodifiableSet(b.toManyRelations);
    this.toOneChildRelations = Collections.unmodifiableSet(b.toOneChildRelations);
    this.toManyChildRelations = Collections.unmodifiableSet(b.toManyChildRelations);
    this.folderGenerator = b.folderGenerator;
    this.fileGenerator = b.fileGenerator;
    this.luceneExtractor = b.extractor;
    this.queries = Collections.unmodifiableSet(b.queries);

    HashSet<Relation> allRels = new HashSet<>();
    allRels.addAll(toManyChildRelations);
    allRels.addAll(toManyRelations);
    allRels.addAll(toOneChildRelations);
    allRels.addAll(toOneRelations);
    this.allRelations = Collections.unmodifiableSet(allRels);

    HashSet<Relation> childRelations = new HashSet<>();
    childRelations.addAll(toManyChildRelations);
    childRelations.addAll(toOneChildRelations);
    this.childRelations = Collections.unmodifiableSet(childRelations);

    HashSet<Relation> normalRelations = new HashSet<>();
    normalRelations.addAll(toManyRelations);
    normalRelations.addAll(toOneRelations);
    this.normalRelations = Collections.unmodifiableSet(normalRelations);
}