Example usage for com.google.common.collect Sets newTreeSet

List of usage examples for com.google.common.collect Sets newTreeSet

Introduction

On this page you can find example usages of com.google.common.collect Sets newTreeSet.

Prototype

public static <E> TreeSet<E> newTreeSet(Comparator<? super E> comparator) 

Document

Creates a mutable, empty TreeSet instance with the given comparator.

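Before the project examples, here is a minimal, self-contained sketch of the call (class name and sample values are illustrative, not taken from the projects below); it orders strings by length instead of their natural alphabetical order:

import java.util.Comparator;
import java.util.TreeSet;

import com.google.common.collect.Sets;

public class NewTreeSetExample {
    public static void main(String[] args) {
        // The set sorts by the supplied comparator: string length, shortest first.
        TreeSet<String> byLength = Sets.newTreeSet(Comparator.comparingInt(String::length));
        byLength.add("banana");
        byLength.add("fig");
        byLength.add("pear");
        System.out.println(byLength); // [fig, pear, banana]
    }
}

Note that elements the comparator considers equal (here, strings of the same length) are treated as duplicates by the resulting TreeSet.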
Usage

From source file:org.jclouds.samples.googleappengine.functions.BlobStoreContextToContainerResult.java

public ContainerResult apply(final String contextName) {
    final BlobStoreContext<?, ?> context = contexts.get(contextName);
    final String host = context.getEndPoint().getHost();
    try {
        ResourceMetadata md = Iterables.getLast(Sets
                .newTreeSet(Iterables.filter(context.getBlobStore().list(), new Predicate<ResourceMetadata>() {

                    public boolean apply(ResourceMetadata input) {
                        return input.getType() == ResourceType.CONTAINER;
                    }

                })));
        return new BuildContainerResult(host, context, contextName).apply(md);
    } catch (Exception e) {
        ContainerResult result = new ContainerResult(contextName, host, null, e.getMessage());
        logger.error(e, "Error listing service %s", contextName);
        return result;
    }
}

From source file:org.apache.druid.indexer.path.GranularUnprocessedPathSpec.java

@Override
public Job addInputPaths(HadoopDruidIndexerConfig config, Job job) throws IOException {
    // This PathSpec breaks so many abstractions that we might as well break some more
    Preconditions.checkState(config.getGranularitySpec() instanceof UniformGranularitySpec,
            StringUtils.format("Cannot use %s without %s", GranularUnprocessedPathSpec.class.getSimpleName(),
                    UniformGranularitySpec.class.getSimpleName()));

    final Path betaInput = new Path(getInputPath());
    final FileSystem fs = betaInput.getFileSystem(job.getConfiguration());
    final Granularity segmentGranularity = config.getGranularitySpec().getSegmentGranularity();

    Map<Long, Long> inputModifiedTimes = new TreeMap<>(Ordering.natural().reverse());

    for (FileStatus status : FSSpideringIterator.spiderIterable(fs, betaInput)) {
        final DateTime key = segmentGranularity.toDate(status.getPath().toString());
        final Long currVal = inputModifiedTimes.get(key.getMillis());
        final long mTime = status.getModificationTime();

        inputModifiedTimes.put(key.getMillis(), currVal == null ? mTime : Math.max(currVal, mTime));
    }

    Set<Interval> bucketsToRun = Sets.newTreeSet(Comparators.intervals());
    for (Map.Entry<Long, Long> entry : inputModifiedTimes.entrySet()) {
        DateTime timeBucket = DateTimes.utc(entry.getKey());
        long mTime = entry.getValue();

        String bucketOutput = StringUtils.format("%s/%s",
                config.getSchema().getIOConfig().getSegmentOutputPath(), segmentGranularity.toPath(timeBucket));
        for (FileStatus fileStatus : FSSpideringIterator.spiderIterable(fs, new Path(bucketOutput))) {
            if (fileStatus.getModificationTime() > mTime) {
                bucketsToRun.add(new Interval(timeBucket, segmentGranularity.increment(timeBucket)));
                break;
            }
        }

        if (bucketsToRun.size() >= maxBuckets) {
            break;
        }
    }

    config.setGranularitySpec(
            new UniformGranularitySpec(segmentGranularity, config.getGranularitySpec().getQueryGranularity(),
                    config.getGranularitySpec().isRollup(), Lists.newArrayList(bucketsToRun)));

    return super.addInputPaths(config, job);
}

From source file:uk.ac.ebi.atlas.utils.ExperimentInfo.java

public void setArrayDesigns(Set<String> arrayDesigns) {
    this.arrayDesigns = Sets.newTreeSet(arrayDesigns);
}

From source file:edu.harvard.med.screensaver.model.libraries.PlateRange.java

public String getAdminLibraryWarning() {
    List<String> warnings = Lists.newArrayList();
    Library library = getLibrary();
    if (library.getScreeningStatus() != LibraryScreeningStatus.ALLOWED) {
        // TODO: do not show warning if Screen.getLibrariesPermitted() contains this library
        warnings.add("Library status is " + library.getScreeningStatus().getValue());
    }

    Predicate<Plate> plateStatusIsOtherThanAvailable = new Predicate<Plate>() {
        @Override
        public boolean apply(Plate p) {
            return p.getStatus() != PlateStatus.AVAILABLE;
        }
    };
    Set<Plate> invalidPlates = Sets.newTreeSet(Iterables.filter(_plates, plateStatusIsOtherThanAvailable));
    if (!invalidPlates.isEmpty()) {
        Set<PlateStatus> invalidStatuses = Sets.newTreeSet(Iterables.transform(invalidPlates, Plate.ToStatus));
        warnings.add("Plate(s) have invalid status(es): " + Joiner.on(", ").join(invalidStatuses));
    }

    return Joiner.on(". ").join(warnings);
}

From source file:org.fenixedu.qubdocs.academic.documentRequests.providers.CurriculumEntriesDataProvider.java

protected Set<CurriculumEntry> getCurriculumEntries() {
    if (curriculumEntries == null) {
        RegistrationConclusionBean conclusionBean = new RegistrationConclusionBean(this.registration);

        final Set<ICurriculumEntry> curricularYearEntries = Sets
                .newHashSet(conclusionBean.getCurriculumForConclusion().getCurriculumEntries());

        curriculumEntries = Sets.newTreeSet(new Comparator<CurriculumEntry>() {

            @Override
            public int compare(final CurriculumEntry left, final CurriculumEntry right) {
                final String leftContent = left.getName().getContent(locale) != null
                        ? left.getName().getContent(locale)
                        : left.getName().getContent();
                final String rightContent = right.getName().getContent(locale) != null
                        ? right.getName().getContent(locale)
                        : right.getName().getContent();

                return leftContent.compareTo(rightContent);
            }

        });

        curriculumEntries
                .addAll(CurriculumEntry.transform(registration, curricularYearEntries, remarksDataProvider));
    }

    return curriculumEntries;
}

From source file:org.apache.hadoop.hbase.ZKNamespaceManager.java

public NavigableSet<NamespaceDescriptor> list() throws IOException {
    NavigableSet<NamespaceDescriptor> ret = Sets
            .newTreeSet(NamespaceDescriptor.NAMESPACE_DESCRIPTOR_COMPARATOR);
    for (NamespaceDescriptor ns : cache.values()) {
        ret.add(ns);
    }
    return ret;
}

From source file:de.tu_berlin.dima.oligos.stat.distribution.histogram.QuantileHistogram.java

public SortedSet<T> getLowerBounds() {
    SortedSet<T> lBounds = Sets.newTreeSet(getOperator());
    for (int i = 0; i < getNumberOfBuckets(); i++) {
        lBounds.add(getLowerBoundAt(i));
    }
    return lBounds;
}

From source file:de.cosmocode.palava.maven.ipcstub.GeneratorModule.java

/**
 * Generates stub files for all found IpcCommands in the classpath.
 *
 * {@inheritDoc}
 */
@Override
public void execute() throws MojoExecutionException, MojoFailureException {
    final File targetDirectory = new File(project.getBuild().getOutputDirectory(), "ipcstub");

    // check configurations and aggregate all required packages
    final Set<String> allPackages = Sets.newHashSet();
    for (Generator generator : generators) {
        generator.check();
        allPackages.addAll(generator.getPackages());
    }

    log.info("Searching for IpcCommands in:");
    for (String pkg : allPackages) {
        log.info("    " + pkg);
    }

    // search for IpcCommands in all required packages
    final Set<Class<? extends IpcCommand>> foundClasses = Sets.newTreeSet(Reflection.orderByName());
    Iterables.addAll(foundClasses, generateCommandList(allPackages));

    log.info("Found " + foundClasses.size() + " IpcCommands; generating stubs...");

    // filter classes and let the generators do their work
    for (Generator generator : generators) {
        final Set<Class<? extends IpcCommand>> filteredClasses = Sets.newLinkedHashSet();
        for (Class<? extends IpcCommand> foundClass : foundClasses) {
            for (String requiredPackage : generator.getPackages()) {
                if (foundClass.getName().startsWith(requiredPackage + ".")) {
                    filteredClasses.add(foundClass);
                    break;
                }
            }
        }

        // what's the target directory?
        final File stubTargetDirectory = new File(targetDirectory, generator.getName());

        // now call the generator
        generator.generate(log, filteredClasses, stubTargetDirectory);
    }
}

From source file:org.apache.druid.server.http.MetadataResource.java

@GET
@Path("/datasources")
@Produces(MediaType.APPLICATION_JSON)
public Response getDatabaseDataSources(@QueryParam("full") final String full,
        @QueryParam("includeDisabled") final String includeDisabled, @Context final HttpServletRequest req) {
    final Collection<ImmutableDruidDataSource> druidDataSources = metadataSegmentManager.getInventory();
    final Set<String> dataSourceNamesPreAuth;
    if (includeDisabled != null) {
        dataSourceNamesPreAuth = Sets.newTreeSet(metadataSegmentManager.getAllDatasourceNames());
    } else {
        dataSourceNamesPreAuth = Sets
                .newTreeSet(Iterables.transform(druidDataSources, ImmutableDruidDataSource::getName));
    }

    final Set<String> dataSourceNamesPostAuth = Sets.newTreeSet();
    Function<String, Iterable<ResourceAction>> raGenerator = datasourceName -> {
        return Collections.singletonList(AuthorizationUtils.DATASOURCE_READ_RA_GENERATOR.apply(datasourceName));
    };

    Iterables.addAll(dataSourceNamesPostAuth, AuthorizationUtils.filterAuthorizedResources(req,
            dataSourceNamesPreAuth, raGenerator, authorizerMapper));

    // Cannot do both includeDisabled and full, let includeDisabled take priority
    // Always use dataSourceNamesPostAuth to determine the set of returned dataSources
    if (full != null && includeDisabled == null) {
        return Response.ok().entity(Collections2.filter(druidDataSources,
                dataSource -> dataSourceNamesPostAuth.contains(dataSource.getName()))).build();
    } else {
        return Response.ok().entity(dataSourceNamesPostAuth).build();
    }
}

From source file:org.geogit.api.RevTreeImpl.java

public static RevTreeImpl createLeafTree(ObjectId id, long size, Collection<Node> features,
        Collection<Node> trees) {
    Preconditions.checkNotNull(id);
    Preconditions.checkNotNull(features);

    ImmutableList<Node> featuresList = ImmutableList.of();
    ImmutableList<Node> treesList = ImmutableList.of();

    if (!features.isEmpty()) {
        TreeSet<Node> featureSet = Sets.newTreeSet(new NodeStorageOrder());
        featureSet.addAll(features);
        featuresList = ImmutableList.copyOf(featureSet);
    }
    if (!trees.isEmpty()) {
        TreeSet<Node> treeSet = Sets.newTreeSet(new NodeStorageOrder());
        treeSet.addAll(trees);
        treesList = ImmutableList.copyOf(treeSet);
    }
    return createLeafTree(id, size, featuresList, treesList);
}