Example usage for java.util Set remove

List of usage examples for java.util Set remove

Introduction

On this page you can find example usage for java.util.Set.remove.

Prototype

boolean remove(Object o);

Document

Removes the specified element from this set if it is present (optional operation).
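
As the documentation notes, remove returns true only if the set actually contained the element, so a single call both mutates the set and acts as a membership test. A minimal standalone sketch (the class and variable names are illustrative, not taken from the examples below):

import java.util.HashSet;
import java.util.Set;

public class SetRemoveExample {
    public static void main(String[] args) {
        Set<String> tags = new HashSet<>();
        tags.add("prod");
        tags.add("beta");

        // "beta" is present, so remove mutates the set and returns true
        System.out.println(tags.remove("beta"));   // true
        System.out.println(tags.contains("beta")); // false

        // removing an absent element is a no-op and returns false
        System.out.println(tags.remove("beta"));   // false
    }
}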

Usage

From source file:com.netflix.genie.core.jpa.services.JpaClusterServiceImpl.java

/**
 * {@inheritDoc}
 */
@Override
public void removeTagForCluster(
        @NotBlank(message = "No cluster id entered. Unable to remove tag.") final String id,
        @NotBlank(message = "No tag entered. Unable to remove.") final String tag) throws GenieException {
    final ClusterEntity cluster = this.findCluster(id);
    final Set<String> tags = cluster.getTags();
    tags.remove(tag);
    cluster.setTags(tags);
}

From source file:com.almende.eve.ggdemo.HolonAgent.java

/**
 * Check merge.
 * 
 * @throws JSONRPCException
 *             the JSONRPC exception
 * @throws IOException
 *             Signals that an I/O exception has occurred.
 */
public void checkMerge() throws JSONRPCException, IOException {
    System.out.println(getId() + ": checkMerge()");
    if (getState().containsKey("parent")) {
        System.out.println(getId() + ": Already merged, not doubling");
        return;
    }
    if (neighbours == null) {
        neighbours = getNeighbours();
    }
    ArrayList<Sub> subs = getState().get("subs", type);
    if (subs == null) {
        subs = new ArrayList<Sub>(0);
    }
    System.out.println(getId() + ": n:" + neighbours.size() + " - s:" + subs.size());
    if (neighbours.size() - subs.size() == 1) {
        Set<String> nbs = new HashSet<String>(neighbours);
        int size = 1;
        for (Sub sub : subs) {
            nbs.remove(sub.getAddress());
            size += sub.getSize();
        }
        String neighbour = nbs.toArray(new String[0])[0];
        getState().put("parent", neighbour);

        ObjectNode params = JOM.createObjectNode();
        params.put("size", size);
        try {
            System.out.println(getId() + ": Merging with " + neighbour);
            Boolean res = send(URI.create(neighbour), "merge", params, Boolean.class);
            if (!res) {
                getState().remove("parent");
                getScheduler().createTask(new JSONRequest("checkMerge", null), (int) (Math.random() * 500));
            } else {
                System.out.println(getId() + ": Merged with " + neighbour);
            }
        } catch (Exception e) {
            getScheduler().createTask(new JSONRequest("checkMerge", null), (int) (Math.random() * 500));
        }
    }
}

From source file:eu.openanalytics.rsb.component.JobProcessor.java

public void process(final MultiFilesJob job) throws Exception {
    process(job, new JobRunner() {
        @Override
        public AbstractResult<File[]> runOn(final RServi rServi) throws Exception {
            final Set<String> filesUploadedToR = new HashSet<String>();

            // locate and upload the R script
            final File rScriptFile = getRScriptFile(job);

            uploadFileToR(rServi, rScriptFile, filesUploadedToR);

            // optionally uploads a Sweave file
            final String sweaveFileFromCatalog = (String) getUploadableJobMeta(job)
                    .get(Constants.SWEAVE_FILE_CONFIGURATION_KEY);

            if (sweaveFileFromCatalog != null) {
                final File sweaveFile = getCatalogManager().internalGetCatalogFile(CatalogSection.SWEAVE_FILES,
                        job.getApplicationName(), sweaveFileFromCatalog);

                if (!sweaveFile.isFile()) {
                    throw new IllegalArgumentException("Invalid catalog Sweave file reference in job: " + job);
                }

                uploadFileToR(rServi, sweaveFile, filesUploadedToR);
            }

            // upload the job files (except the R Script which has already been
            // taken care of)
            for (final File jobFile : job.getFiles()) {
                if (!jobFile.equals(rScriptFile)) {
                    uploadFileToR(rServi, jobFile, filesUploadedToR);
                }
            }

            // upload the configuration file to R
            uploadPropertiesToR(rServi, getUploadableJobMeta(job), filesUploadedToR);

            // hit R
            executeScriptOnR(rServi, rScriptFile.getName());

            final MultiFilesResult result = job.buildSuccessResult();

            // download the result files but not the uploaded ones nor the log
            // file
            final Set<String> filesToDownload = getFilesInRWorkspace(rServi);
            filesToDownload.removeAll(filesUploadedToR);
            filesToDownload.remove(Constants.DEFAULT_R_LOG_FILE);
            for (final String fileToDownload : filesToDownload) {
                final File resultFile = result.createNewResultFile(fileToDownload);
                final FileOutputStream fos = new FileOutputStream(resultFile);
                rServi.downloadFile(fos, fileToDownload, 0, null);
                IOUtils.closeQuietly(fos);
            }

            return result;
        }

        private Map<String, Serializable> getUploadableJobMeta(final Job job) {
            final Map<String, Serializable> meta = new HashMap<String, Serializable>(job.getMeta());

            if ((JobProcessor.this.getConfiguration().isPropagateSecurityContext())
                    && (StringUtils.isNotBlank(job.getUserName()))) {
                meta.put("rsbSecure", true);
                meta.put("rsbUserPrincipal", job.getUserName());
            }

            return meta;
        }

        private File getRScriptFile(final MultiFilesJob job) {
            final String rScriptFromCatalog = (String) getUploadableJobMeta(job)
                    .get(Constants.R_SCRIPT_CONFIGURATION_KEY);

            return rScriptFromCatalog != null ? getRScriptFileFromCatalog(rScriptFromCatalog, job)
                    : getRScriptFileFromJob(job);
        }

        private File getRScriptFileFromCatalog(final String rScriptFromCatalog, final MultiFilesJob job) {
            final File rScriptFile = getCatalogManager().internalGetCatalogFile(CatalogSection.R_SCRIPTS,
                    job.getApplicationName(), rScriptFromCatalog);

            if ((rScriptFile == null) || (!rScriptFile.isFile())) {
                throw new IllegalArgumentException("No R script has been found for job: " + job
                        + ", in the catalog under the name: " + rScriptFromCatalog);
            } else {
                return rScriptFile;
            }
        }

        private File getRScriptFileFromJob(final MultiFilesJob job) {
            if ((job.getRScriptFile() == null) || (!job.getRScriptFile().isFile())) {
                throw new IllegalArgumentException("No R script has been found for job: " + job);
            } else {
                return job.getRScriptFile();
            }
        }
    }, false);
}

From source file:backtype.storm.blobstore.BlobSynchronizerTest.java

@Test
public void testBlobSynchronizerForKeysToDownload() {
    BlobStore store = initLocalFs();
    BlobSynchronizer sync = new BlobSynchronizer(store, conf);
    // test for keylist to download
    Set<String> zkSet = new HashSet<String>();
    zkSet.add("key1");
    Set<String> blobStoreSet = new HashSet<String>();
    blobStoreSet.add("key1");
    Set<String> resultSet = sync.getKeySetToDownload(blobStoreSet, zkSet);
    assertTrue("Not Empty", resultSet.isEmpty());
    zkSet.add("key1");
    blobStoreSet.add("key2");
    resultSet = sync.getKeySetToDownload(blobStoreSet, zkSet);
    assertTrue("Not Empty", resultSet.isEmpty());
    blobStoreSet.remove("key1");
    blobStoreSet.remove("key2");
    zkSet.add("key1");
    resultSet = sync.getKeySetToDownload(blobStoreSet, zkSet);
    assertTrue("Unexpected keys to download", (resultSet.size() == 1) && (resultSet.contains("key1")));
}

From source file:org.cyberjos.jcconf2016.node.HazelcastHelper.java

/**
 * Creates and runs a new producer.
 *
 * @param cloudNode the cloud node which runs this new producer
 */
private void runProducer(final CloudNode cloudNode) {
    final Thread thread = new Thread(() -> {
        final String nodeName = cloudNode.getName();
        logger.info("[{}] Producer thread started.", nodeName);

        while (this.isMaster(cloudNode)) {
            try {
                final Set<String> nodes = new HashSet<>(HazelcastHelper.this.getActiveNodes());
                nodes.remove(cloudNode.getName());
                for (int i = 0; i < nodes.size(); i++) {
                    final int taskSize = RandomUtils.nextInt(MIN_SIZE, MAX_SIZE);
                    HazelcastHelper.this.getTaskQueue().put(taskSize);
                    logger.info("[{}] Added task size: {}", nodeName, taskSize);
                }
                Thread.sleep(WAIT_PRODUCER);
            } catch (final Exception ex) {
                logger.error(String.format("[%s] Exception occurred", nodeName), ex);
            }
        }
    });
    thread.start();
}

From source file:org.shredzone.cilla.ws.assembler.GallerySectionAssembler.java

private void mergePictures(GallerySectionDto dto, GallerySection entity) throws CillaServiceException {
    int sequence = 0;

    Set<Picture> removables = new HashSet<>(entity.getPictures());
    for (PictureDto picDto : dto.getPictures()) {
        Picture picture;

        if (picDto.isPersisted()) {
            picture = pictureDao.fetch(picDto.getId());
            pictureAssembler.merge(picDto, picture);
            picture.setSequence(sequence++);
            removables.remove(picture);
            pictureService.updatePicture(picture,
                    (picDto.getUploadFile() != null ? picDto.getUploadFile().getDataSource() : null));

        } else {
            DataHandler dh = picDto.getUploadFile();
            if (dh == null) {
                throw new CillaParameterException("new picture requires a file");
            }

            picture = new Picture();
            picture.setSequence(sequence++);
            pictureAssembler.merge(picDto, picture);
            pictureService.addPicture(entity, picture, dh.getDataSource());
            picDto.setId(picture.getId());
        }
    }

    for (Picture picture : removables) {
        pictureService.removePicture(picture);
    }
}

From source file:com.vecna.dbDiff.business.dbCompare.impl.RdbDiffEngine.java

/**
 * Tests two tables' foreign keys.
 * Any errors get added to the errors param list.
 * @param refT A reference table
 * @param testT A test table
 * @return foreign key differences.
 */
private List<RdbCompareError> compareForeignKeys(RelationalTable refT, RelationalTable testT) {
    List<RdbCompareError> errors = new ArrayList<>();
    Set<ForeignKey> refFks = new HashSet<>(refT.getFks());

    for (ForeignKey testFk : testT.getFks()) {
        if (!refFks.remove(testFk)) {
            ForeignKeyCompareError error = getUnexpectedFkError(testFk, testT, refT);
            if (error.getSimilarFk() != null) {
                refFks.remove(error.getSimilarFk());
            }
            errors.add(error);
        }
    }

    // Missing FKs: any test FK that partially matched a reference FK has already caused that reference FK to be removed above,
    // so any reference FKs still remaining are missing from the test table.
    for (ForeignKey fk : refFks) {
        errors.add(new RdbCompareError(RdbCompareErrorType.MISSING_FK,
                "Reference foreign key \"" + fk + "\" is missing!"));
    }
    return errors;
}

From source file:com.netflix.genie.server.services.impl.jpa.CommandConfigServiceJPAImpl.java

/**
 * {@inheritDoc}
 */
@Override
public Command deleteCommand(@NotBlank(message = "No id entered. Unable to delete.") final String id)
        throws GenieException {
    LOG.debug("Called to delete command config with id " + id);
    final Command command = this.commandRepo.findOne(id);
    if (command == null) {
        throw new GenieNotFoundException("No command with id " + id + " exists to delete.");
    }
    //Remove the command from the associated Application references
    final Application app = command.getApplication();
    if (app != null) {
        final Set<Command> commands = app.getCommands();
        if (commands != null) {
            commands.remove(command);
        }
    }
    this.commandRepo.delete(command);
    return command;
}

From source file:gov.nih.nci.caarray.plugins.illumina.IlluminaCsvDesignHandler.java

private void validateHeader(List<String> headers, FileValidationResult result) throws IOException {
    final Set<? extends Enum> requiredHeaders = this.helper.getRequiredColumns();
    final Set<Enum> tmp = new HashSet<Enum>(requiredHeaders);
    for (final String v : headers) {
        for (final Enum h : requiredHeaders) {
            if (h.name().equalsIgnoreCase(v)) {
                tmp.remove(h);
            }
        }
    }
    if (!tmp.isEmpty()) {
        result.addMessage(ValidationMessage.Type.ERROR,
                "Illumina CSV file didn't contain the expected columns " + tmp.toString());
    }
}

From source file:org.shredzone.cilla.ws.impl.PageWsImpl.java

/**
 * Commits all media of a page.
 *
 * @param dto
 *            {@link PageDto} containing the media
 * @param entity
 *            {@link Page} to commit the media to
 */
private void commitMedium(PageDto dto, Page entity) throws CillaServiceException {
    Set<Medium> deletableMedia = new HashSet<>(mediumDao.fetchAll(entity));
    for (MediumDto mediumDto : dto.getMedia()) {
        if (mediumDto.isPersisted()) {
            Medium medium = mediumDao.fetch(mediumDto.getId());
            deletableMedia.remove(medium);
            mediumAssembler.merge(mediumDto, medium);
            if (mediumDto.getUploadMediumFile() != null) {
                pageService.updateMedium(entity, medium, mediumDto.getUploadMediumFile().getDataSource());
            }

        } else {
            Medium medium = new Medium();
            mediumAssembler.merge(mediumDto, medium);
            pageService.addMedium(entity, medium, mediumDto.getUploadMediumFile().getDataSource());
            mediumDto.setId(medium.getId());
        }
    }
    for (Medium deletable : deletableMedia) {
        pageService.removeMedium(entity, deletable);
    }
}