Example usage for com.google.common.collect Sets newTreeSet

List of usage examples for com.google.common.collect Sets newTreeSet

Introduction

In this page you can find the example usage for com.google.common.collect Sets newTreeSet.

Prototype

public static <E extends Comparable> TreeSet<E> newTreeSet() 

Source Link

Document

Creates a mutable, empty TreeSet instance sorted by the natural sort ordering of its elements.

Usage

From source file: org.jclouds.s3.domain.AccessControlList.java

/**
 * Collects every grantee that has been granted a permission in this ACL.
 *
 * @return an unmodifiable set of grantees who have been assigned permissions in this ACL.
 */
public Set<Grantee> getGrantees() {
    Set<Grantee> result = Sets.newTreeSet();
    for (Grant grant : getGrants()) {
        result.add(grant.getGrantee());
    }
    return Collections.unmodifiableSet(result);
}

From source file: org.gbif.refine.datasets.taibif.FishAssemblages.java

/**
 * Iterates over original source file and does the following:
 * i) cleans it (e.g. maps column header names to DwC term names, converts dates to ISO format, etc)
 * ii) augments it (e.g. adds new columns for sample size, higher taxonomy, etc)
 * iii) transforms it into star format (core file events.txt is list of unique sampling events, and extension file
 * occurrence.txt is a list of all observations derived from all sampling events)
 *
 * @param output directory to write files to
 *
 * @throws IOException if the source file cannot be read or the output files cannot be written
 */
public static void processFish(File output) throws IOException {
    // load the original source file to process (tab-delimited, UTF-8, bundled on the classpath)
    InputStream fis = FishAssemblages.class.getResourceAsStream("/datasets/taibif/1987-1990_UTF8.txt");

    // create an iterator on the file, skipping 1 header row
    CSVReader reader = CSVReaderFactory.build(fis, "UTF-8", "\t", null, 1);

    // get header row for the new event and occurrence files that this method will output
    String[] header = getHeader();

    // sampling events file
    Writer writerEvents = FileUtils.startEventsFile(output, header);

    // observations file
    Writer writerOccs = FileUtils.startOccurrencesFile(output, header);

    // to capture all unique eventIDs (so each event is written to events.txt only once)
    Set<String> events = Sets.newHashSet();

    // to capture bad names (TreeSet so the end-of-run report is sorted)
    Set<String> namesNotFound = Sets.newTreeSet();

    ClosableReportingIterator<String[]> iter = null;
    int line = 0;
    try {
        iter = reader.iterator();
        while (iter.hasNext()) {
            line++;
            String[] record = iter.next();
            if (record == null || record.length == 0) {
                continue;
            }

            // create new augmented record, padded with nulls up to the output header width
            String[] modifiedRecord = Arrays.copyOf(record, header.length);

            // convert year and month into ISO format
            String year = modifiedRecord[1];
            String month = modifiedRecord[2];

            if (year.length() == 4 && month.length() == 3) {
                String concatenatedDate = year + "-" + month;
                // NOTE(review): pattern "yy-MMM" is used with a 4-digit year; SimpleDateFormat
                // parses this leniently, but confirm the resulting year is the intended one
                DateFormat concatenatedDf = new SimpleDateFormat("yy-MMM", Locale.ENGLISH);
                Date concatenatedEventDate = concatenatedDf.parse(concatenatedDate);
                String concatenatedIsoDate = Constants.ISO_DF_SHORT.format(concatenatedEventDate);

                // quality control: ensure year and month are same as eventDate (if eventDate provided)
                String verbatimEventDate = modifiedRecord[3];
                if (!verbatimEventDate.isEmpty()) {

                    // convert event date (e.g. 1987/03/) into ISO format (e.g. 1987-03)
                    DateFormat df = new SimpleDateFormat("yy/MM/", Locale.ENGLISH);
                    Date eventDate = df.parse(verbatimEventDate);
                    String isoDate = Constants.ISO_DF_SHORT.format(eventDate);

                    if (!isoDate.equals(concatenatedIsoDate)) {
                        LOG.error("Skipping record: year " + year + " & month " + month
                                + " don't match eventDate " + isoDate);
                        continue;
                    }
                }
                modifiedRecord[3] = concatenatedIsoDate;
            } else {
                LOG.error("Skipping record: invalid year (" + year + ") and month (" + month + ")");
                continue;
            }

            // normalize locationID (nuclear power plant code, e.g. n1 -> N1)
            modifiedRecord[4] = modifiedRecord[4].toUpperCase();

            // occurrenceStatus (present vs absent)
            // TODO: confirm there are absence records! Indeed there are records missing individualCount
            if (modifiedRecord[10].isEmpty()) {
                modifiedRecord[16] = Constants.ABSENT;
            } else {
                modifiedRecord[16] = TermUtils.getOccurrenceStatus(Integer.valueOf(modifiedRecord[10]))
                        .toString().toLowerCase();
            }

            // add static values
            modifiedRecord[17] = "Taiwan"; // country
            modifiedRecord[18] = "TW"; // countryCode

            // static values, based on which nuclear power plant it is: N1 or N2
            if (modifiedRecord[4].equals("N1")) {
                modifiedRecord[19] = "Nuclear Power Plant at Shihmen"; // locality
                modifiedRecord[20] = "25 17 9 N, 121 35 10 E"; // verbatimCoordinates
                modifiedRecord[21] = "25.28583"; // decimalLatitude
                modifiedRecord[22] = "121.5861"; // decimalLongitude

            } else {
                modifiedRecord[19] = "Nuclear Power Plant at Yehliu"; // locality
                modifiedRecord[20] = "25 12 10 N, 121 39 45 E"; // verbatimCoordinates
                modifiedRecord[21] = "25.20278"; // decimalLatitude
                modifiedRecord[22] = "121.6625"; // decimalLongitude
            }

            modifiedRecord[23] = "fish samples were collected monthly from the intake screens at nuclear power plant for 24h (from 9 AM to 9 AM) on the date chosen by a systematic sampling method (Cochran, W. G. Sampling Techniques. 3rd ed. (John Wiley & Sons, 1977)"; // samplingProtocol
            modifiedRecord[24] = "24"; // sampleSizeValue
            modifiedRecord[25] = "hour"; // sampleSizeUnit
            modifiedRecord[26] = "24hr"; // samplingEffort
            modifiedRecord[27] = "http://creativecommons.org/publicdomain/zero/1.0/legalcode"; // license
            modifiedRecord[28] = "Event"; // type
            modifiedRecord[29] = "Chen H, Liao Y, Chen C, Tsai J, Chen L, Shao K"; // rightsHolder
            modifiedRecord[31] = "MaterialSample"; // basisOfRecord
            modifiedRecord[32] = "Dr. Kwang-Tsao Shao and the senior laboratory members"; // identifiedBy
            modifiedRecord[33] = "Identification done using plenty of handbooks of field guide and identification keys."; // NOTE(review): original comment also said identifiedBy; column 33 looks like identificationRemarks — confirm against getHeader()
            modifiedRecord[35] = "individuals"; // organismQuantityType
            modifiedRecord[36] = "Animalia"; // kingdom
            modifiedRecord[37] = "Chordata"; // phylum

            // store organismQuantity
            // NOTE(review): sourced from column 11, but the occurrenceStatus check above reads
            // individualCount from column 10 — confirm which column actually holds individualCount
            modifiedRecord[34] = modifiedRecord[11]; // same as individualCount

            // construct unique eventID for this sampling period
            // Format: "urn:[institutionID]:[eventDate]:[locationID]"
            // Example: "urn:taibif:1987-08:N2"
            modifiedRecord[0] = "urn:taibif:" + modifiedRecord[3] + ":" + modifiedRecord[4];

            // verify taxonomy
            String name = modifiedRecord[8].trim();

            // for more accurate match, we take higher taxonomy into consideration
            LinneanClassification cl = new NameUsage();
            cl.setFamily(modifiedRecord[6]);
            // only if binomial, set species
            if (name.split(" ").length == 2 && !name.endsWith("spp.")) {
                cl.setSpecies(name);

                // lowest rank specified
                Rank rank = TermUtils.lowestRank(cl);
                if (rank != null) {
                    modifiedRecord[43] = rank.toString().toLowerCase();
                }

                // verify name against the matching service, and add higher taxonomy on an exact match
                NameUsageMatch match = MATCHING_SERVICE.match(name, rank, cl, false, false);
                if (match.getMatchType().equals(NameUsageMatch.MatchType.EXACT)) {
                    modifiedRecord[36] = match.getKingdom();
                    modifiedRecord[37] = match.getPhylum();
                    modifiedRecord[38] = match.getClazz();
                    modifiedRecord[39] = match.getOrder();
                    modifiedRecord[40] = match.getFamily();
                    modifiedRecord[41] = match.getGenus();
                    modifiedRecord[42] = match.getScientificName();
                    modifiedRecord[43] = match.getRank().toString();
                    modifiedRecord[44] = match.getUsageKey().toString();
                    modifiedRecord[45] = match.getStatus().toString();
                } else {
                    // log each unmatched name only once
                    if (!namesNotFound.contains(name)) {
                        LOG.error(match.getMatchType().toString() + " match for: " + name + " (with rank "
                                + rank + ") to: " + match.getScientificName() + " (with rank " + match.getRank()
                                + ")" + ". See example record with eventDate: " + modifiedRecord[0]);
                        namesNotFound.add(name);
                    }
                }
            } else {
                // non-binomial names (e.g. "Genus spp.") are not matched at all
                namesNotFound.add(name);
            }

            // construct unique occurrenceID for this abundance record:
            // Format: "urn:[institutionCode]:[eventDate]:[locationID]:[taxonID]"
            // Example: "urn:taibif:1994-08:N2:1301"
            // NOTE(review): no ":" separator is inserted between the eventID and the taxonID here,
            // so the actual value is e.g. "urn:taibif:1994-08:N21301" — confirm intended format
            modifiedRecord[30] = modifiedRecord[0] + modifiedRecord[44];

            // always output line to new occurrences file
            String row = FileUtils.tabRow(modifiedRecord);
            writerOccs.write(row);

            // only output line to events file if event hasn't been included yet
            String eventID = modifiedRecord[0];
            if (!events.contains(eventID)) {
                writerEvents.write(row);
                events.add(eventID);
            }
        }

        LOG.info("Iterated over " + line + " rows.");
        LOG.info("Found " + events.size() + " unique events.");

        LOG.warn("***** " + namesNotFound.size() + " names not found in taxa list: ");
        for (String notFound : namesNotFound) {
            LOG.warn(notFound);
        }

    } catch (Exception e) {
        // some error validating this file, report
        LOG.error("Exception caught while iterating over file", e);
    } finally {
        if (iter != null) {
            iter.close();
        }
        reader.close();
        writerEvents.close();
        writerOccs.close();
    }
}

From source file: uk.ac.ebi.atlas.experimentpage.context.DifferentialRequestContextBuilder.java

/**
 * Resolves the contrast ids selected in the request preferences to {@link Contrast}s of the
 * given experiment. Returns an empty set when no query factor values were supplied; returns a
 * sorted set otherwise.
 */
Set<Contrast> getSelectedQueryContrasts(DifferentialExperiment experiment) {
    if (CollectionUtils.isEmpty(getRequestPreferences().getQueryFactorValues())) {
        return Sets.newHashSet();
    }

    SortedSet<Contrast> result = Sets.newTreeSet();
    for (String contrastId : getRequestPreferences().getQueryFactorValues()) {
        try {
            result.add(experiment.getContrast(contrastId));
        } catch (IllegalArgumentException e) {
            // an unknown contrast id in the request is surfaced as a 404
            throw new ResourceNotFoundException(e);
        }
    }
    return result;
}

From source file: symbolicexecutor.SymbolicExecutor.java

/**
 * Creates a symbolic executor; collaborators are stored as-is, and {@code arguments} becomes
 * the first entry of the sorted input set.
 * NOTE(review): parameter semantics inferred from names — confirm against the class docs.
 */
private SymbolicExecutor(String source, FileLoader loader, NewInputGenerator newInputGenerator, Input arguments,
        int maxNumInputs) {
    this.jsSource = source;
    this.loader = loader;
    this.newInputGenerator = newInputGenerator;
    this.maxNumInputs = maxNumInputs;
    // inputs is a sorted set; seed it with the initial arguments
    this.inputs = Sets.newTreeSet();
    this.inputs.add(arguments);
}

From source file: blackboard.plugin.hayabusa.provider.LanguagePackProvider.java

/**
 * Builds one language-switching {@link PostCommand} per installed locale, returned in the
 * sorted order of the backing tree set.
 */
@Override
public Iterable<Command> getCommands() {
    Set<Command> commands = Sets.newTreeSet();
    Context bbCtxt = ContextManagerFactory.getInstance().getContext();

    for (Iterator<LocalePickerOption> it = getLocaleList().iterator(); it.hasNext();) {
        LocalePickerOption option = it.next();

        // form parameters for the personal-settings save endpoint
        HashMap<String, String> params = new HashMap<String, String>();
        params.put("top_Submit", "Submit");
        params.put("target", "/webapps/portal/frameset.jsp");
        params.put("locale", option.getLocaleLocale());
        params.put("showInstructions", "true");
        // a fresh nonce is required for the POST to be accepted
        params.put(NonceUtil.NONCE_KEY, NonceUtil.create(bbCtxt.getSession(), NONCE_ID, NONCE_CONTEXT));

        commands.add(new PostCommand(option.getLocaleName(),
                "/webapps/blackboard/execute/personalSettings?command=save", Category.LANGUAGE_PACK, params));
    }
    return commands;
}

From source file: org.geogit.api.plumbing.merge.CheckMergeScenarioOp.java

/**
 * Checks whether merging the given commits could produce conflicts: returns {@code true} when
 * two changes to the same path are incompatible, or when a change touches a path whose parent
 * tree was removed by another commit; {@code false} otherwise (including when fewer than two
 * commits are involved).
 */
@Override
public Boolean call() {
    if (commits.size() < 2) {
        return Boolean.FALSE;
    }

    // fold the commit list into a single common ancestor, pairwise
    Optional<RevCommit> ancestor = command(FindCommonAncestor.class).setLeft(commits.get(0))
            .setRight(commits.get(1)).call();
    Preconditions.checkState(ancestor.isPresent(), "No ancestor commit could be found.");
    for (int i = 2; i < commits.size(); i++) {
        ancestor = command(FindCommonAncestor.class).setLeft(commits.get(i)).setRight(ancestor.get()).call();
        Preconditions.checkState(ancestor.isPresent(), "No ancestor commit could be found.");
    }

    Map<String, List<DiffEntry>> diffsByPath = Maps.newHashMap();
    Set<String> removedPaths = Sets.newTreeSet();

    // group every change made by any commit (relative to the ancestor) by the path it touches
    for (RevCommit commit : commits) {
        Iterator<DiffEntry> commitDiffs = command(DiffTree.class).setReportTrees(true)
                .setOldTree(ancestor.get().getId()).setNewTree(commit.getId()).call();
        while (commitDiffs.hasNext()) {
            DiffEntry diff = commitDiffs.next();
            String path = diff.oldPath() == null ? diff.newPath() : diff.oldPath();
            List<DiffEntry> entries = diffsByPath.get(path);
            if (entries == null) {
                diffsByPath.put(path, Lists.newArrayList(diff));
            } else {
                entries.add(diff);
            }
            if (ChangeType.REMOVED.equals(diff.changeType())) {
                removedPaths.add(path);
            }
        }
    }

    // any pair of changes to the same path must be compatible; additionally, a non-removal
    // change whose parent tree was removed elsewhere is a conflict
    for (List<DiffEntry> entries : diffsByPath.values()) {
        for (int i = 0; i < entries.size(); i++) {
            for (int j = i + 1; j < entries.size(); j++) {
                if (hasConflicts(entries.get(i), entries.get(j))) {
                    return true;
                }
            }
            if (!ChangeType.REMOVED.equals(entries.get(i).changeType())) {
                if (removedPaths.contains(entries.get(i).getNewObject().getParentPath())) {
                    return true;
                }
            }
        }
    }

    return false;

}

From source file: com.google.gerrit.testutil.InMemoryRepositoryManager.java

/** Returns the names of all in-memory repositories as an immutable sorted set. */
@Override
public SortedSet<Project.NameKey> list() {
    SortedSet<Project.NameKey> result = Sets.newTreeSet();
    for (DfsRepository repo : repos.values()) {
        result.add(new Project.NameKey(repo.getDescription().getRepositoryName()));
    }
    return ImmutableSortedSet.copyOf(result);
}

From source file: com.google.devtools.j2objc.gen.ObjectiveCImplementationGenerator.java

/**
 * Emits the implementation file's prologue: #include directives (sorted, deduplicated),
 * native implementation blocks, and forward declarations for private local types.
 */
private void printImports() {
    // collect headers into a tree set so emission order is deterministic and duplicates collapse
    Set<String> headers = Sets.newTreeSet();
    headers.add("J2ObjC_source.h");
    headers.add(getGenerationUnit().getOutputPath() + ".h");
    for (GeneratedType type : getOrderedTypes()) {
        for (Import imp : type.getImplementationIncludes()) {
            if (!isLocalType(imp.getTypeName())) {
                headers.add(imp.getImportFileName());
            }
        }
    }

    newline();
    for (String header : headers) {
        printf("#include \"%s\"\n", header);
    }

    for (String code : getGenerationUnit().getNativeImplementationBlocks()) {
        print(code);
    }

    // Only need to forward declare private local types that have not yet been emitted;
    // all else is handled by the includes above.
    Set<String> seenTypes = Sets.newHashSet();
    Set<Import> forwardDecls = Sets.newHashSet();
    for (GeneratedType type : getOrderedTypes()) {
        seenTypes.add(type.getTypeName());
        for (Import imp : type.getImplementationForwardDeclarations()) {
            GeneratedType localType = getLocalType(imp.getTypeName());
            if (!seenTypes.contains(imp.getTypeName()) && localType != null && localType.isPrivate()) {
                forwardDecls.add(imp);
            }
        }
    }

    printForwardDeclarations(forwardDecls);
}

From source file: org.codeqinvest.investment.QualityInvestmentPlanService.java

/**
 * Greedily selects the most profitable quality violations (under the given artefact base
 * package) that fit into the available remediation budget, and wraps the selection in a
 * {@link QualityInvestmentPlan}.
 *
 * Selection order: violations with higher profit first; among equal profit, cheaper
 * remediation first. A violation is included only if its full remediation cost still fits
 * in the remaining budget.
 */
public QualityInvestmentPlan computeInvestmentPlan(QualityAnalysis analysis, String basePackage,
        int investmentInMinutes) {
    // group candidate violations by their exact (double) profit; only violations whose
    // profit rounds to at least 1 are considered worth investing in
    Multimap<Double, QualityViolation> violationsByProfit = ArrayListMultimap.create();
    for (QualityViolation violation : filterViolationsByArtefactNameStartingWith(basePackage,
            analysis.getViolations())) {
        double profit = profitCalculator.calculateProfit(violation);
        if (Math.round(profit) > 0) {
            violationsByProfit.put(profit, violation);
        }
    }

    // flatten the profit keys, repeated once per violation, then sort descending so the
    // most profitable violations are offered the budget first
    List<Double> allProfits = new ArrayList<Double>();
    for (Double profit : violationsByProfit.keySet()) {
        int numberOfViolations = violationsByProfit.get(profit).size();
        for (int i = 0; i < numberOfViolations; i++) {
            allProfits.add(profit);
        }
    }
    Collections.sort(allProfits, new DescendingComparator<Double>());

    // NOTE(review): a tree set silently drops entries that compare equal — confirm
    // QualityInvestmentPlanEntry's ordering distinguishes all distinct entries
    Set<QualityInvestmentPlanEntry> investmentPlanEntries = Sets.newTreeSet();
    int toInvest = investmentInMinutes;
    int invested = 0;

    for (double profit : allProfits) {
        // NOTE(review): each profit value appears once per violation in allProfits, so this
        // inner loop re-scans the same violation list for duplicated profits — the budget
        // check below keeps the result correct, at some redundant work
        List<QualityViolation> violations = new ArrayList<QualityViolation>(violationsByProfit.get(profit));
        Collections.sort(violations, new ViolationByRemediationCostsComparator());

        for (QualityViolation violation : violations) {
            int remediationCost = violation.getRemediationCosts();
            if (remediationCost <= toInvest) {

                invested += remediationCost;
                toInvest -= remediationCost;

                QualityRequirement requirement = violation.getRequirement();
                investmentPlanEntries.add(new QualityInvestmentPlanEntry(requirement.getMetricIdentifier(),
                        requirement.getOperator() + " " + requirement.getThreshold(),
                        violation.getArtefact().getName(), violation.getArtefact().getShortClassName(),
                        (int) Math.round(profit), remediationCost));
            }
        }
    }

    final int overallProfit = calculateOverallProfit(investmentPlanEntries);
    return new QualityInvestmentPlan(basePackage, invested, overallProfit,
            calculateRoi(investmentPlanEntries, overallProfit), investmentPlanEntries);
}

From source file: org.incode.eurocommercial.contactapp.dom.number.ContactNumberRepository.java

/**
 * Returns, in sorted order, the union of the predefined contact-number type titles and every
 * type already used by a persisted contact number.
 */
@Programmatic
public Set<String> existingTypes() {
    final Set<String> result = Sets.newTreeSet();
    result.addAll(ContactNumberType.titles());
    result.addAll(FluentIterable.from(listAll()).transform(ContactNumber::getType).toSet());
    return result;
}