Example usage for java.util TreeSet addAll

List of usage examples for java.util TreeSet addAll

Introduction

On this page you can find usage examples for java.util TreeSet addAll.

Prototype

public boolean addAll(Collection<? extends E> c) 

Document

Adds all of the elements in the specified collection to this set.
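
Before the real-world examples below, here is a minimal standalone sketch (the class name TreeSetAddAllExample and the sample values are illustrative only, not taken from any of the projects listed): addAll copies every element of the argument collection into this set, which keeps them sorted and discards duplicates, and it returns true if the set changed as a result.

import java.util.Arrays;
import java.util.List;
import java.util.TreeSet;

public class TreeSetAddAllExample {
    public static void main(String[] args) {
        List<String> labels = Arrays.asList("banana", "apple", "cherry", "apple");

        // addAll copies every element of the collection into the set;
        // the TreeSet keeps the elements sorted and silently drops duplicates
        TreeSet<String> sorted = new TreeSet<String>();
        boolean changed = sorted.addAll(labels);

        System.out.println(changed); // true  (the set was modified)
        System.out.println(sorted);  // [apple, banana, cherry]
    }
}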

Usage

From source file:com.salesforce.ide.upgrade.ui.wizards.UpgradeComponentConflictsComposite.java

protected void populateTree(Map<String, List<UpgradeConflict>> upgradeConflicts) {
    TreeSet<String> sortedComponentTypes = new TreeSet<String>();
    sortedComponentTypes.addAll(upgradeConflicts.keySet());

    // loop thru sorted list creating upgrade component tree
    for (String componentType : sortedComponentTypes) {
        // create root component type node
        TreeItem componentTypeTreeItem = new TreeItem(getTreeComponentConflicts(), SWT.NONE);
        componentTypeTreeItem.setText(componentType);
        FontRegistry registry = new FontRegistry();
        Font boldFont = registry.getBold(Display.getCurrent().getSystemFont().getFontData()[0].getName());

        componentTypeTreeItem.setFont(boldFont);
        componentTypeTreeItem.setExpanded(true);

        // loop thru each component instance and create named node
        List<UpgradeConflict> upgradeComponents = upgradeConflicts.get(componentType);
        for (UpgradeConflict upgradeConflict : upgradeComponents) {
            TreeItem componentTreeItem = new TreeItem(componentTypeTreeItem, SWT.NONE);
            componentTreeItem.setText(upgradeConflict.getLocalComponent().getFileName());
            componentTreeItem.setForeground(Display.getCurrent().getSystemColor(SWT.COLOR_BLUE));
            componentTreeItem.setData("local", upgradeConflict.getLocalComponent());
            componentTreeItem.setData("remote", upgradeConflict.getRemoteComponent());
            componentTreeItem.setExpanded(true);
        }
    }
}

From source file:org.openanzo.client.cli.RdfIOCommand.java

Collection<File> getFiles(File dir, boolean recurse) {
    TreeSet<File> files = new TreeSet<File>();
    for (File file : dir.listFiles()) {
        if (file.isDirectory()) {
            if (recurse) {
                files.addAll(getFiles(file, recurse));
            }
        } else {
            files.add(file);
        }
    }
    return files;
}

From source file:org.openmrs.module.clinicalsummary.rule.antenatal.grouped.DatetimeBasedConceptFilteredRule.java

/**
 * @see org.openmrs.logic.Rule#eval(org.openmrs.logic.LogicContext, Integer, java.util.Map)
 */
@Override
protected Result evaluate(final LogicContext context, final Integer patientId,
        final Map<String, Object> parameters) {
    Result result = new Result();

    Object conceptObjects = parameters.get(EvaluableConstants.OBS_CONCEPT);
    Map<Concept, Integer> conceptNamePositions = searchPositions(conceptObjects);

    ObsWithRestrictionRule obsWithRestrictionRule = new ObsWithStringRestrictionRule();
    Result obsResults = obsWithRestrictionRule.eval(context, patientId, parameters);

    Map<Date, Result[]> obsResultDates = new HashMap<Date, Result[]>();
    for (Result obsResult : obsResults) {
        Obs obs = (Obs) obsResult.getResultObject();
        Date obsDatetime = obs.getObsDatetime();
        // see if we already have obs array for this date
        Result[] obsResultDate = obsResultDates.get(obsDatetime);
        if (obsResultDate == null) {
            obsResultDate = new Result[CollectionUtils.size(conceptNamePositions)];
            obsResultDates.put(obsDatetime, obsResultDate);
        }
        // search the concept in the concept ordering map
        Integer position = conceptNamePositions.get(obs.getConcept());
        if (position != null)
            obsResultDate[position] = obsResult;
    }

    TreeSet<Date> keys = new TreeSet<Date>(new Comparator<Date>() {

        public int compare(final Date firstDate, final Date secondDate) {
            return firstDate.equals(secondDate) ? 0 : firstDate.after(secondDate) ? -1 : 1;
        }
    });
    keys.addAll(obsResultDates.keySet());

    // TODO: need to merge the two loops into one
    Integer counter = 0;
    Iterator<Date> iterator = keys.iterator();
    while (iterator.hasNext() && counter < 5) {
        Date date = iterator.next();
        // create the grouped results
        Result groupedResult = new Result();
        groupedResult.add(new Result(date));
        groupedResult.addAll(Arrays.asList(obsResultDates.get(date)));
        // add them to the main result of the rule
        result.add(groupedResult);
        // increase the counter as we only want last 5
        counter++;
    }

    Collections.reverse(result);

    return result;
}

From source file:net.spfbl.core.Peer.java

public synchronized static TreeSet<Peer> getSet() {
    TreeSet<Peer> peerSet = new TreeSet<Peer>();
    peerSet.addAll(MAP.values());
    return peerSet;
}

From source file:evaluation.Evaluator.java

/**
 * This function removes duplicates from an array of given labels. It is used while
 * reading the file with the predicted labels.
 *
 * @param labels    the array with the labels to be checked for duplicates
 */
public String[] removeDuplicates(String labels[]) {
    TreeSet aset = new TreeSet();
    aset.addAll(Arrays.asList(labels));

    int num_of_labels = aset.size();

    String finallabels[] = new String[num_of_labels];
    Iterator iterator = aset.iterator();
    int k = 0;
    while (iterator.hasNext()) {
        finallabels[k++] = (String) iterator.next();
    }

    return finallabels;
}

From source file:edu.mbl.jif.imaging.mmtiff.FileSet.java

/**
 * Completes the current time point of an aborted acquisition with blank images, so that it can
 * be opened correctly by ImageJ/BioFormats
 */
private void completeFrameWithBlankImages(int frame) throws JSONException, MMScriptException {

    int numFrames = MDUtils.getNumFrames(mpTiff_.summaryMetadata_);
    int numSlices = MDUtils.getNumSlices(mpTiff_.summaryMetadata_);
    int numChannels = MDUtils.getNumChannels(mpTiff_.summaryMetadata_);
    if (numFrames > frame + 1) {
        TreeSet<String> writtenImages = new TreeSet<String>();
        for (MultipageTiffWriter w : tiffWriters_) {
            writtenImages.addAll(w.getIndexMap().keySet());
            w.setAbortedNumFrames(frame + 1);
        }
        int positionIndex = MDUtils.getIndices(writtenImages.first())[3];
        if (mpTiff_.omeTiff_) {
            mpTiff_.omeMetadata_.setNumFrames(positionIndex, frame + 1);
        }
        TreeSet<String> lastFrameLabels = new TreeSet<String>();
        for (int c = 0; c < numChannels; c++) {
            for (int z = 0; z < numSlices; z++) {
                lastFrameLabels.add(MDUtils.generateLabel(c, z, frame, positionIndex));
            }
        }
        lastFrameLabels.removeAll(writtenImages);
        try {
            for (String label : lastFrameLabels) {
                tiffWriters_.getLast().writeBlankImage(label);
                if (mpTiff_.omeTiff_) {
                    JSONObject dummyTags = new JSONObject();
                    int channel = Integer.parseInt(label.split("_")[0]);
                    int slice = Integer.parseInt(label.split("_")[1]);
                    MDUtils.setChannelIndex(dummyTags, channel);
                    MDUtils.setFrameIndex(dummyTags, frame);
                    MDUtils.setSliceIndex(dummyTags, slice);
                    mpTiff_.omeMetadata_.addImageTagsToOME(dummyTags, ifdCount_, baseFilename_,
                            currentTiffFilename_);
                }
            }
        } catch (IOException ex) {
            ReportingUtils.logError("problem writing dummy image");
        }
    }
}

From source file:org.jactr.modules.pm.visual.AbstractVisualModule.java

/**
 * Return list of all parameters that can be set.
 *
 * @return The setableParameters value
 */
@Override
public Collection<String> getSetableParameters() {
    TreeSet<String> params = new TreeSet<String>(_parameterMap.keySet());
    params.addAll(SETABLE_PARAMS);
    params.addAll(super.getSetableParameters());
    return params;
}

From source file:ca.uhn.fhir.rest.server.provider.dev.ServerConformanceProvider.java

private void handleDynamicSearchMethodBinding(RestResource resource, RuntimeResourceDefinition def,
        TreeSet<String> includes, DynamicSearchMethodBinding searchMethodBinding) {
    includes.addAll(searchMethodBinding.getIncludes());

    List<RuntimeSearchParam> searchParameters = new ArrayList<RuntimeSearchParam>();
    searchParameters.addAll(searchMethodBinding.getSearchParams());
    sortRuntimeSearchParameters(searchParameters);

    if (!searchParameters.isEmpty()) {

        for (RuntimeSearchParam nextParameter : searchParameters) {

            String nextParamName = nextParameter.getName();

            // String chain = null;
            String nextParamUnchainedName = nextParamName;
            if (nextParamName.contains(".")) {
                // chain = nextParamName.substring(nextParamName.indexOf('.') + 1);
                nextParamUnchainedName = nextParamName.substring(0, nextParamName.indexOf('.'));
            }

            String nextParamDescription = nextParameter.getDescription();

            /*
             * If the parameter has no description, default to the one from the resource
             */
            if (StringUtils.isBlank(nextParamDescription)) {
                RuntimeSearchParam paramDef = def.getSearchParam(nextParamUnchainedName);
                if (paramDef != null) {
                    nextParamDescription = paramDef.getDescription();
                }
            }

            RestResourceSearchParam param;
            param = resource.addSearchParam();

            param.setName(nextParamName);
            // if (StringUtils.isNotBlank(chain)) {
            // param.addChain(chain);
            // }
            param.setDocumentation(nextParamDescription);
            //            param.setType(nextParameter.getParamType());
        }
    }
}

From source file:com.kelveden.rastajax.representation.flat.FlatRepresentationBuilder.java

@Override
public Set<FlatResource> buildRepresentationFor(final Set<ResourceClass> resourceClasses) {

    final TreeSet<FlatResource> result = new TreeSet<FlatResource>(RESOURCE_COMPARATOR);

    for (ResourceClass rawResource : resourceClasses) {
        result.addAll(buildRepresentationFor(rawResource));
    }

    LOGGER.info("Representation completed with {} resources.", result.size());

    return result;
}

From source file:org.broadinstitute.sting.gatk.walkers.variantrecalibration.ApplyRecalibration.java

public void initialize() {
    if (TS_FILTER_LEVEL != null) {
        for (final Tranche t : Tranche.readTranches(TRANCHES_FILE)) {
            if (t.ts >= TS_FILTER_LEVEL) {
                tranches.add(t);
            }
            logger.info(String.format("Read tranche " + t));
        }
        Collections.reverse(tranches); // this algorithm wants the tranches ordered from best (lowest truth sensitivity) to worst (highest truth sensitivity)
    }

    for (final RodBinding rod : input) {
        inputNames.add(rod.getName());
    }

    if (IGNORE_INPUT_FILTERS != null) {
        ignoreInputFilterSet.addAll(Arrays.asList(IGNORE_INPUT_FILTERS));
    }

    // setup the header fields
    final Set<VCFHeaderLine> hInfo = new HashSet<>();
    hInfo.addAll(GATKVCFUtils.getHeaderFields(getToolkit(), inputNames));
    addVQSRStandardHeaderLines(hInfo);
    final TreeSet<String> samples = new TreeSet<>();
    samples.addAll(SampleUtils.getUniqueSamplesFromRods(getToolkit(), inputNames));

    if (TS_FILTER_LEVEL != null) {
        // if the user specifies both ts_filter_level and lodCutoff then throw a user error
        if (VQSLOD_CUTOFF != null) {
            throw new UserException(
                    "Arguments --ts_filter_level and --lodCutoff are mutually exclusive. Please only specify one option.");
        }

        if (tranches.size() >= 2) {
            for (int iii = 0; iii < tranches.size() - 1; iii++) {
                final Tranche t = tranches.get(iii);
                hInfo.add(new VCFFilterHeaderLine(t.name,
                        String.format("Truth sensitivity tranche level for " + t.model.toString()
                                + " model at VQS Lod: " + t.minVQSLod + " <= x < "
                                + tranches.get(iii + 1).minVQSLod)));
            }
        }
        if (tranches.size() >= 1) {
            hInfo.add(new VCFFilterHeaderLine(tranches.get(0).name + "+",
                    String.format("Truth sensitivity tranche level for " + tranches.get(0).model.toString()
                            + " model at VQS Lod < " + tranches.get(0).minVQSLod)));
        } else {
            throw new UserException(
                    "No tranches were found in the file or were above the truth sensitivity filter level "
                            + TS_FILTER_LEVEL);
        }

        logger.info("Keeping all variants in tranche " + tranches.get(tranches.size() - 1));
    } else {
        if (VQSLOD_CUTOFF == null) {
            VQSLOD_CUTOFF = DEFAULT_VQSLOD_CUTOFF;
        }
        hInfo.add(new VCFFilterHeaderLine(LOW_VQSLOD_FILTER_NAME, "VQSLOD < " + VQSLOD_CUTOFF));
        logger.info("Keeping all variants with VQSLOD >= " + VQSLOD_CUTOFF);
    }

    final VCFHeader vcfHeader = new VCFHeader(hInfo, samples);
    vcfWriter.writeHeader(vcfHeader);
}