Example usage for java.util HashSet addAll

List of usage examples for java.util HashSet addAll

Introduction

On this page you can find an example usage for java.util HashSet addAll.

Prototype

boolean addAll(Collection<? extends E> c);

Source Link

Document

Adds all of the elements in the specified collection to this set if they're not already present (optional operation).

Usage

From source file:org.apache.zeppelin.rest.NotebookRestApi.java

/**
 * set note authorization information/*w  w  w  . jav  a2  s . co m*/
 */
@PUT
@Path("{noteId}/permissions")
@ZeppelinApi
public Response putNotePermissions(@PathParam("noteId") String noteId, String req) throws IOException {
    String principal = SecurityUtils.getPrincipal();
    HashSet<String> roles = SecurityUtils.getRoles();
    HashSet<String> userAndRoles = new HashSet<>();
    userAndRoles.add(principal);
    userAndRoles.addAll(roles);

    checkIfUserIsAnon(getBlockNotAuthenticatedUserErrorMsg());
    checkIfUserIsOwner(noteId, ownerPermissionError(userAndRoles, notebookAuthorization.getOwners(noteId)));

    HashMap<String, HashSet<String>> permMap = gson.fromJson(req,
            new TypeToken<HashMap<String, HashSet<String>>>() {
            }.getType());
    Note note = notebook.getNote(noteId);

    LOG.info("Set permissions {} {} {} {} {}", noteId, principal, permMap.get("owners"), permMap.get("readers"),
            permMap.get("writers"));

    HashSet<String> readers = permMap.get("readers");
    HashSet<String> owners = permMap.get("owners");
    HashSet<String> writers = permMap.get("writers");
    // Set readers, if writers and owners is empty -> set to user requesting the change
    if (readers != null && !readers.isEmpty()) {
        if (writers.isEmpty()) {
            writers = Sets.newHashSet(SecurityUtils.getPrincipal());
        }
        if (owners.isEmpty()) {
            owners = Sets.newHashSet(SecurityUtils.getPrincipal());
        }
    }
    // Set writers, if owners is empty -> set to user requesting the change
    if (writers != null && !writers.isEmpty()) {
        if (owners.isEmpty()) {
            owners = Sets.newHashSet(SecurityUtils.getPrincipal());
        }
    }

    notebookAuthorization.setReaders(noteId, readers);
    notebookAuthorization.setWriters(noteId, writers);
    notebookAuthorization.setOwners(noteId, owners);
    LOG.debug("After set permissions {} {} {}", notebookAuthorization.getOwners(noteId),
            notebookAuthorization.getReaders(noteId), notebookAuthorization.getWriters(noteId));
    AuthenticationInfo subject = new AuthenticationInfo(SecurityUtils.getPrincipal());
    note.persist(subject);
    notebookServer.broadcastNote(note);
    notebookServer.broadcastNoteList(subject, userAndRoles);
    return new JsonResponse<>(Status.OK).build();
}

From source file:uk.ac.soton.itinnovation.sad.service.adapters.ProvEMClient.java

/**
 * Defines the SAD metric model for the given experiment: one metric generator
 * containing a single metric group, an entity for user actions, and two
 * NOMINAL attributes ("Actions taken" and "Video Quality") with their
 * measurement sets.
 *
 * @param experiment experiment the metric generator is attached to
 * @return the set of all metric generators known after setup
 */
private HashSet<MetricGenerator> createMetricModel(Experiment experiment) {

    // Reset the measurement-set map; it is used later when reporting measurement summaries.
    measurementSetMap.clear();

    //        MetricHelper.
    theMetricGenerator = new MetricGenerator();
    theMetricGenerator.setName("PROV Metric Generator");
    theMetricGenerator.setDescription("Metric generator for AVCC Sim");

    experiment.addMetricGenerator(theMetricGenerator);

    // Single metric group holding every metric of this generator.
    MetricGroup provMetricGroup = new MetricGroup();
    provMetricGroup.setName("PROV Metric Group");
    provMetricGroup.setDescription("Metric group for all AVCC Sim metircs");
    provMetricGroup.setMetricGeneratorUUID(theMetricGenerator.getUUID());
    theMetricGenerator.addMetricGroup(provMetricGroup);

    // Entity that all user-action attributes hang off.
    Entity userActionsEntity = new Entity();
    userActionsEntity.setName("User actions");
    userActionsEntity.setDescription("Entity for PROV user actions metrics");
    theMetricGenerator.addEntity(userActionsEntity);

    // Attribute: names of actions taken by the user.
    Attribute actionsAttribute = new Attribute();
    actionsAttribute.setName("Actions taken");
    actionsAttribute.setDescription("Names of actions by user at sample video page");
    actionsAttribute.setEntityUUID(userActionsEntity.getUUID());
    userActionsEntity.addAttribute(actionsAttribute);

    actionsTakenUuid = setupMeasurementForAttribute(actionsAttribute, provMetricGroup, MetricType.NOMINAL,
            new Unit(""));

    // Attribute: video resolution chosen by the user.
    Attribute qualityAttribute = new Attribute();
    qualityAttribute.setName("Video Quality");
    qualityAttribute.setDescription("Video resolution of the video player selected by user");
    qualityAttribute.setEntityUUID(userActionsEntity.getUUID());
    userActionsEntity.addAttribute(qualityAttribute);

    videoQualityUuid = setupMeasurementForAttribute(qualityAttribute, provMetricGroup, MetricType.NOMINAL,
            new Unit(""));

    metricGenerators.put(theMetricGenerator.getUUID(), theMetricGenerator);

    // Snapshot every known generator into the set that is reported to the ECC.
    HashSet<MetricGenerator> generatorSet = new HashSet<>(metricGenerators.values());

    logger.debug("Reporting the following metric generator set to ECC: ");
    int index = 0;
    for (MetricGenerator generator : generatorSet) {
        printMetricGenerator(generator, index);
        index++;
    }

    metricsModelSetup = true;

    return generatorSet;
}

From source file:com.eTilbudsavis.etasdk.ListManager.java

/**
 * Applies an edit to an existing shopping list: reconciles its shares against
 * the local database, updates sync state, repairs the previous-item links of
 * surrounding lists, and persists the list itself.
 *
 * @param sl   the shopping list carrying the edited state (including its shares)
 * @param user the user performing the edit
 * @return true if the edit was applied (or the user removed themself), false
 *         if the user lacks edit rights or the list does not exist in the DB
 */
private boolean editList(Shoppinglist sl, User user) {

    DbHelper db = DbHelper.getInstance();

    // Index the shares currently stored in the DB by e-mail for fast lookup.
    Map<String, Share> dbShares = new HashMap<String, Share>();
    for (Share s : db.getShares(sl, user, false)) {
        dbShares.put(s.getEmail(), s);
    }

    Map<String, Share> slShares = sl.getShares();

    /* User have remove it self. Then only set the DELETE state on the share,
     * ListSyncManager will delete from DB Once it's synced the changes to API
     */
    if (!slShares.containsKey(user.getEmail())) {
        // NOTE(review): assumes the user's own share exists in the DB; dbShare
        // would be null here otherwise — confirm against callers.
        Share dbShare = dbShares.get(user.getEmail());
        dbShare.setState(SyncState.DELETE);
        db.editShare(dbShare, user);
        mNotification.del(sl);
        sendNotification(mNotification);
        return true;
    }

    // Abort if the user's share does not grant edit rights on this list.
    if (!canEdit(sl, slShares.get(user.getEmail()))) {
        EtaLog.i(TAG, String.format("User [%s], doesn't have rights to edit this list", user.getEmail()));
        return false;
    }

    // Union of all share e-mails seen in either the DB or the edited list,
    // so additions, edits and removals are all visited exactly once below.
    HashSet<String> union = new HashSet<String>();
    union.addAll(slShares.keySet());
    union.addAll(dbShares.keySet());

    /* Variable for owner. If it has been removed from the sl-shares-list
     * then we need to re-add it from the DB
     */
    Share owner = null;

    for (String shareId : union) {

        if (dbShares.containsKey(shareId)) {

            Share dbShare = dbShares.get(shareId);

            if (slShares.containsKey(shareId)) {
                // Share exists on both sides: push an edit only if it changed.
                Share slShare = slShares.get(shareId);

                if (!dbShare.equals(slShare)) {
                    slShare.setState(SyncState.TO_SYNC);
                    db.editShare(slShare, user);
                    mNotification.edit(sl);
                }

            } else {
                // Share exists in DB but was removed from the edited list.
                if (dbShare.getAccess().equals(Share.ACCESS_OWNER)) {
                    owner = dbShare;
                    EtaLog.i(TAG, "Owner cannot be removed from lists, owner will be reattached");
                } else {
                    if (user.isLoggedIn()) {
                        // Mark for deletion; the sync manager removes it after syncing to the API.
                        dbShare.setState(SyncState.DELETE);
                        db.editShare(dbShare, user);
                    } else {
                        // Offline user: no API to sync with, delete locally right away.
                        db.deleteShare(dbShare, user);
                    }
                    mNotification.edit(sl);
                }
            }

        } else {
            // Share is new (only present in the edited list): insert it.
            Share slShare = slShares.get(shareId);
            db.insertShare(slShare, user);
            mNotification.edit(sl);
        }

    }

    // If owner was removed, then re-insert it.
    if (owner != null) {
        sl.putShare(owner);
    }

    Date now = new Date();

    sl.setModified(now);
    sl.setState(SyncState.TO_SYNC);

    // Check for changes in previous item, and update surrounding
    Shoppinglist oldList = db.getList(sl.getId(), user);
    if (oldList == null) {
        EtaLog.i(TAG, "No such list exists in the database. To add new items, use addList().");
        return false;
    }

    // The list moved in the linked order: repair both affected neighbours.
    if (oldList.getPreviousId() != null && !oldList.getPreviousId().equals(sl.getPreviousId())) {

        // If there is an item pointing at sl, it needs to point at the oldList.prev
        Shoppinglist slAfter = db.getListPrevious(sl.getId(), user);
        if (slAfter != null) {
            slAfter.setPreviousId(oldList.getPreviousId());
            slAfter.setModified(now);
            slAfter.setState(SyncState.TO_SYNC);
            db.editList(slAfter, user);
            mNotification.edit(slAfter);
        }

        // If some another sl was pointing at the same item, it should be pointing at sl
        Shoppinglist slSamePointer = db.getListPrevious(sl.getPreviousId(), user);
        if (slSamePointer != null) {
            slSamePointer.setPreviousId(sl.getId());
            slSamePointer.setModified(now);
            slSamePointer.setState(SyncState.TO_SYNC);
            db.editList(slSamePointer, user);
            mNotification.edit(slSamePointer);
        }

    }

    // Exactly one row should be updated; anything else counts as failure.
    int count = db.editList(sl, user);
    boolean success = count == 1;
    if (success) {
        mNotification.edit(sl);
    }
    sendNotification(mNotification);
    return success;
}

From source file:com.vmware.bdd.manager.ClusterManager.java

/**
 * Reads the comma-separated list of extra yum packages from configuration.
 *
 * @return the configured package names as a set; empty when nothing is configured
 */
private static HashSet<String> getExtraRequiredPackages() {
    HashSet<String> packages = new HashSet<String>();
    String configured = Configuration
            .getString(Constants.SERENGETI_YUM_EXTRA_PACKAGES_CONFIG, Constants.SERENGETI_YUM_EXTRA_PACKAGES)
            .trim();
    if (!configured.isEmpty()) {
        packages.addAll(Arrays.asList(configured.split(",")));
    }
    return packages;
}

From source file:hms.hwestra.interactionrebuttal.InteractionRebuttal.java

/**
 * Prepares expression data for cell-type-specific eQTL mapping: loads the set
 * of cell-type-specific probes, extracts their expression rows, builds a
 * probe-probe correlation matrix, runs a one-component PCA over it, orients
 * the resulting PC1 scores to correlate positively with the probes, and writes
 * the scores out as a cell-count proxy file.
 *
 * @param rawExpressionDataset      expression matrix (probes x samples)
 * @param inexpraw                  path of the raw expression data file
 * @param outdirectory              directory where output files are written
 * @param correlationThreshold      unused in the visible code — TODO confirm
 * @param celltypeSpecificProbeFile file listing cell-type-specific probe ids, one per line
 * @param mdsComponentFile          unused in the visible code — TODO confirm
 * @param cellCountFile             unused in the visible code — TODO confirm
 * @param gte                       unused in the visible code — TODO confirm
 * @param threads                   unused in the visible code — TODO confirm
 * @throws IOException if reading the probe file or writing output fails
 */
public void prepareDataForCelltypeSpecificEQTLMapping(DoubleMatrixDataset<String, String> rawExpressionDataset,
        String inexpraw, String outdirectory, Double correlationThreshold, String celltypeSpecificProbeFile,
        String mdsComponentFile, String cellCountFile, String gte, Integer threads) throws IOException {
    String rawExpressionDataFile = inexpraw;
    // 7. select Cell type specific probes
    System.out.println("Loading list of cell type specific probes from: " + celltypeSpecificProbeFile);
    HashSet<String> cellTypeSpecificProbeSet = new HashSet<String>();
    TextFile cellSpecificProbeTF = new TextFile(celltypeSpecificProbeFile, TextFile.R);
    cellTypeSpecificProbeSet.addAll(cellSpecificProbeTF.readAsArrayList());
    cellSpecificProbeTF.close();

    // Abort early: without any cell-type-specific probes nothing can be computed.
    if (cellTypeSpecificProbeSet.isEmpty()) {
        System.err.println("Error: " + celltypeSpecificProbeFile + " is empty!");
        System.exit(-1);
    } else {
        System.out.println(cellTypeSpecificProbeSet.size() + " cell type specific probes loaded.");
    }

    // 1. load gene expression data
    System.out.println("Loading gene expression data.");

    double[][] rawExpressionData = rawExpressionDataset.getRawData();

    // determine the number of cell type specific probes in this dataset
    int probeCounter = 0;
    List<String> probes = rawExpressionDataset.rowObjects;
    for (int i = 0; i < probes.size(); i++) {
        if (cellTypeSpecificProbeSet.contains(probes.get(i))) {
            probeCounter++;
        }
    }

    // Abort if the probe list and the dataset share no probes at all.
    if (probeCounter == 0) {
        System.err
                .println("Error: none of the cell type specific probes defined in " + celltypeSpecificProbeFile
                        + " are present in expression dataset: " + rawExpressionDataset.fileName);
        System.exit(-1);
    } else {
        System.out.println(probeCounter + " of the cell type specific probes are in your dataset.");
    }

    System.out.println("Now reloading the gene expression data for the samples that passed the QC.");
    // 6. Remove samples with r < 0.9 for PC1
    // reload expression file, include only samples that pass QC...
    //        rawExpressionDataset = new DoubleMatrixDataset<String, String>(rawExpressionDataFile);
    //        rawExpressionData = rawExpressionDataset.getRawData();

    //        // quantile normalize, log2 transform again, because the number of samples might have been changed..
    //        QuantileNormalization.quantilenormalize(rawExpressionData);
    //        Log2Transform.log2transform(rawExpressionData);
    rawExpressionData = rawExpressionDataset.rawData;

    // collect data for cell type specific probes
    double[][] probeData = new double[probeCounter][rawExpressionDataset.colObjects.size()];
    probeCounter = 0;
    ArrayList<String> cellTypeSpecificProbeDatasetRowNames = new ArrayList<String>();
    for (int i = 0; i < probes.size(); i++) {
        if (cellTypeSpecificProbeSet.contains(probes.get(i))) {
            probeData[probeCounter] = rawExpressionData[i];
            cellTypeSpecificProbeDatasetRowNames.add(probes.get(i));
            probeCounter++;
        }
    }

    // initiate cell type specific probe correlation matrix
    // Symmetric matrix; only the upper triangle is computed and mirrored.
    double[][] celltypeSpecificCorrelationMatrix = new double[probeCounter][probeCounter];
    for (int i = 0; i < probeCounter; i++) {
        for (int j = i + 1; j < probeCounter; j++) {
            double r = Correlation.correlate(probeData[i], probeData[j]);
            celltypeSpecificCorrelationMatrix[i][j] = r;
            celltypeSpecificCorrelationMatrix[j][i] = r;
        }
        celltypeSpecificCorrelationMatrix[i][i] = 1;
    }

    // save the correlation matrix
    DoubleMatrixDataset<String, String> probeCorrelationMatrixOut = new DoubleMatrixDataset<String, String>();
    probeCorrelationMatrixOut.colObjects = cellTypeSpecificProbeDatasetRowNames;
    probeCorrelationMatrixOut.rowObjects = cellTypeSpecificProbeDatasetRowNames;
    probeCorrelationMatrixOut.rawData = celltypeSpecificCorrelationMatrix;
    probeCorrelationMatrixOut.recalculateHashMaps();
    //        probeCorrelationMatrixOut.save(outdirectory + "CelltypeSpecificProbeCorrelationMatrix.txt.gz");

    // 9. PCA over cell specific probe correlation matrix
    DoubleMatrixDataset<String, String> cellTypeSpecificDataset = new DoubleMatrixDataset<String, String>(
            probeData);
    cellTypeSpecificDataset.colObjects = rawExpressionDataset.colObjects;
    cellTypeSpecificDataset.rowObjects = cellTypeSpecificProbeDatasetRowNames;
    //        cellTypeSpecificDataset.save(expressionOutputDirectory + "CellTypeSpecificProbeExpression.txt.gz");
    cellTypeSpecificDataset.transposeDataset();
    Normalizer n = new Normalizer();
    // calculate first Principal Component over the cell type specific probe matrix...
    Pair<DoubleMatrixDataset<String, String>, DoubleMatrixDataset<String, String>> PCAResults = n.calculatePCA(
            cellTypeSpecificDataset, celltypeSpecificCorrelationMatrix,
            outdirectory + "CellTypeSpecificProbePCA", 1);

    // 10. PC1 scores: cell specific proxy -- write to file for future use...
    DoubleMatrixDataset<String, String> cellSpecificPCScores = PCAResults.getLeft();

    //Ensure that the cellTypeSpecificPCScores correlate positively with the set of probes that we have used to determine this component:
    double[] pcScoresSamples = new double[cellSpecificPCScores.nrRows];
    for (int i = 0; i < cellSpecificPCScores.nrRows; i++) {
        pcScoresSamples[i] = cellSpecificPCScores.rawData[i][0];
    }
    cellTypeSpecificDataset.transposeDataset();
    // "Vote" per probe: +1 if it correlates positively with PC1, -1 otherwise.
    int nrProbesCorrelatingPositively = 0;
    for (int i = 0; i < cellTypeSpecificDataset.rawData.length; i++) {
        double corr = JSci.maths.ArrayMath.correlation(pcScoresSamples, cellTypeSpecificDataset.rawData[i]);
        if (corr >= 0) {
            nrProbesCorrelatingPositively++;
        } else {
            nrProbesCorrelatingPositively--;
        }
    }
    // If the majority voted negative, flip the sign of the PC1 scores.
    if (nrProbesCorrelatingPositively < 0) {
        for (int i = 0; i < cellSpecificPCScores.nrRows; i++) {
            cellSpecificPCScores.rawData[i][0] = -cellSpecificPCScores.rawData[i][0];
        }
    }

    // Write the oriented PC1 scores as the per-sample cell-count proxy file.
    TextFile tfOutCellSpecific = new TextFile(outdirectory + "CellTypeProxyFile.txt", TextFile.W);
    tfOutCellSpecific.writeln("Sample\tCellCountProxyValue");
    for (int i = 0; i < cellSpecificPCScores.nrRows; i++) {
        tfOutCellSpecific
                .writeln(cellSpecificPCScores.rowObjects.get(i) + "\t" + cellSpecificPCScores.rawData[i][0]);
    }
    tfOutCellSpecific.close();

    // Clean up intermediate PCA output files produced by calculatePCA.
    File f = new File(outdirectory + "CellTypeSpecificProbePCA.PCAOverSamplesEigenvalues.txt.gz");
    f.delete();
    f = new File(outdirectory + "CellTypeSpecificProbePCA.PCAOverSamplesEigenvectors.txt.gz");
    f.delete();
    f = new File(outdirectory + "CellTypeSpecificProbePCA.PCAOverSamplesEigenvectorsTransposed.txt.gz");
    f.delete();
    f = new File(outdirectory + "CellTypeSpecificProbePCA.PCAOverSamplesPrincipalComponents.txt.gz");
    f.delete();

}

From source file:amie.keys.CSAKey.java

/**
 * Builds, for the given property, the union of the power sets of every
 * non-key property set containing it (with the property itself removed
 * before the power set is taken).
 *
 * @param property the property id to build the graph for
 * @return all subsets of the non-key sets that contain {@code property}, excluding it
 */
public HashSet<HashSet<Integer>> buidPropertyGraph(int property) {
    HashSet<HashSet<Integer>> propertyPowerSets = new HashSet<>();
    for (HashSet<Integer> nonKeyInt : nonKeysInt) {
        if (nonKeyInt.contains(property)) {
            // The copy constructor already contains every element of nonKeyInt;
            // the original code redundantly called remainingSet.addAll(nonKeyInt)
            // right after copying. That dead call is removed here.
            HashSet<Integer> remainingSet = new HashSet<>(nonKeyInt);
            remainingSet.remove(property);
            propertyPowerSets.addAll(powerSet(remainingSet));
        }
    }
    return propertyPowerSets;
}

From source file:org.commonjava.cartographer.INTERNAL.graph.agg.DefaultGraphAggregator.java

/**
 * Iteratively discovers and connects the incomplete (unresolved) nodes of the
 * relationship graph, processing pending discovery TODOs in batches of at most
 * MAX_BATCHSIZE until none remain. Does nothing when the graph is null or
 * discovery is disabled in the options.
 *
 * @param graph  the relationship graph to complete; may be null
 * @param config aggregation options controlling whether discovery runs
 * @throws CartoDataException on discovery failure
 */
@Override
public void connectIncomplete(final RelationshipGraph graph, final AggregationOptions config)
        throws CartoDataException {
    if (graph != null && config.isDiscoveryEnabled()) {
        // GAVs that could not be resolved during discovery.
        final Set<ProjectVersionRef> missing = new HashSet<ProjectVersionRef>();

        logger.debug("Loading existing cycle participants...");
        //            final Set<ProjectVersionRef> cycleParticipants = loadExistingCycleParticipants( net );

        // Tracks which (GAV -> refs) have already been visited, to avoid rework.
        final Map<ProjectVersionRef, Set<ProjectRef>> seen = new HashMap<>();

        logger.debug("Loading initial set of GAVs to be resolved...");
        final List<DiscoveryTodo> pending = loadInitialPending(graph, seen);
        final HashSet<DiscoveryTodo> done = new HashSet<DiscoveryTodo>();

        int pass = 0;
        while (!pending.isEmpty()) {
            //                final HashSet<DiscoveryTodo> current = new HashSet<DiscoveryTodo>( pending );
            //                pending.clear();

            // Drain up to MAX_BATCHSIZE TODOs from the head of the pending queue.
            final HashSet<DiscoveryTodo> current = new HashSet<DiscoveryTodo>(MAX_BATCHSIZE);
            while (!pending.isEmpty() && current.size() < MAX_BATCHSIZE) {
                current.add(pending.remove(0));
            }

            // Mark the batch done up-front so newly discovered TODOs can be de-duplicated against it.
            done.addAll(current);

            logger.debug("{}. {} in next batch of TODOs:\n  {}", pass, current.size(),
                    new JoinString("\n  ", current));
            final Set<DiscoveryTodo> newTodos = discover(current, config, /*cycleParticipants,*/missing, seen,
                    pass);

            if (newTodos != null) {
                logger.debug("{}. Uncovered new batch of TODOs:\n  {}", pass, new JoinString("\n  ", newTodos));

                // Queue only TODOs that are neither completed nor already pending.
                for (final DiscoveryTodo todo : newTodos) {
                    if (!done.contains(todo) && !pending.contains(todo)) {
                        logger.debug("+= {}", todo);
                        pending.add(todo);
                    }
                }
            }

            pass++;
        }

        logger.info("Discovery complete. {} seen, {} missing in {} passes.", seen.size(), missing.size(), pass);
    }
}

From source file:eu.tango.energymodeller.EnergyModeller.java

/**
 * This gets the current overhead caused by other physical hosts that
 * provide utilities to the other physical hosts, such as Distributed file
 * systems./* ww  w.j  a v  a 2  s .  c  o m*/
 *
 * @return
 */
public CurrentUsageRecord getCurrentGeneralPowerConsumerOverhead() {
    double power = dataGatherer.getGeneralPurposeHostsPowerConsumption();
    Collection<GeneralPurposePowerConsumer> consumers = dataGatherer.getGeneralPurposeHostList().values();
    HashSet<EnergyUsageSource> usageSources = new HashSet<>();
    usageSources.addAll(consumers);
    CurrentUsageRecord answer = new CurrentUsageRecord(usageSources, power);
    return answer;
}

From source file:nl.umcg.westrah.binarymetaanalyzer.BinaryMetaAnalysis.java

/**
 * Loads the probe (trait) annotation for every platform defined in the
 * settings file, then builds the trait array and the trait-to-index map.
 *
 * @throws IOException if the probe translation file cannot be read
 */
private void loadProbeAnnotation() throws IOException {

    // Platforms are taken straight from the dataset annotations in the settings.
    HashSet<String> platforms = new HashSet<String>(settings.getDatasetannotations());
    System.out.println("Defined platforms in settings file: ");
    for (String platform : platforms) {
        System.out.println(platform);
    }
    probeAnnotation = new MetaQTL4TraitAnnotation(new File(settings.getProbetranslationfile()), platforms);

    traitList = new MetaQTL4MetaTrait[probeAnnotation.getMetatraits().size()];

    traitMap = new TObjectIntHashMap<MetaQTL4MetaTrait>();

    // Assign each trait a stable index and record it in both structures.
    int index = 0;
    for (MetaQTL4MetaTrait trait : probeAnnotation.getMetatraits()) {
        traitList[index] = trait;
        traitMap.put(trait, index);
        index++;
    }

    System.out.println(traitList.length + " trait annotations loaded");

}

From source file:net.lightbody.bmp.proxy.jetty.jetty.servlet.ServletHandler.java

/** Get Servlets.
 * @return Array of defined servlets/*from w  w  w  .  j a  va2s  . co m*/
 */
public ServletHolder[] getServlets() {
    // Sort and Initialize servlets
    HashSet holder_set = new HashSet(_nameMap.size());
    holder_set.addAll(_nameMap.values());
    ServletHolder holders[] = (ServletHolder[]) holder_set.toArray(new ServletHolder[holder_set.size()]);
    java.util.Arrays.sort(holders);
    return holders;
}