Example usage for com.google.common.collect Sets newTreeSet

List of usage examples for com.google.common.collect Sets newTreeSet

Introduction

On this page you can find example usage of com.google.common.collect.Sets#newTreeSet.

Prototype

public static <E> TreeSet<E> newTreeSet(Comparator<? super E> comparator) 

Source Link

Document

Creates a mutable, empty TreeSet instance with the given comparator.

Usage

From source file:com.opengamma.web.analytics.formatting.VolatilitySurfaceDataFormatter.java

@SuppressWarnings("unchecked")
private <X, Y> Map<String, Object> formatExpanded(VolatilitySurfaceData<X, Y> surface) {
    // The x and y values won't necessarily be unique and won't necessarily map to a rectangular
    // grid; this projects them onto a grid and inserts nulls where there's no data available.
    Set<X> xVals = surface.getUniqueXValues();
    Y[] yValues = surface.getYs();
    Set<Y> yVals;
    if (yValues.length > 0 && yValues[0] instanceof Pair) {
        //TODO emcleod This nastiness is here because ObjectsPair is now (2013/5/13) no longer Comparable
        Pair<Object, Object> pair = (Pair) yValues[0];
        if (pair.getFirst() instanceof Integer && pair.getSecond() instanceof FXVolQuoteType) {
            // Sort (tenor, quote type) pairs with an explicit comparator since the pair type
            // itself is no longer Comparable.
            FirstThenSecondPairComparator<Integer, FXVolQuoteType> comparator = new FirstThenSecondPairComparator<>();
            Set<Y> sortedSet = (Set<Y>) Sets.newTreeSet(comparator);
            sortedSet.addAll(Arrays.asList(surface.getYs()));
            yVals = sortedSet;
        } else {
            // Include the element types, not just the pair's value, so the failure is diagnosable.
            throw new UnsupportedOperationException("Cannot handle pair " + pair + " of types ("
                    + (pair.getFirst() == null ? "null" : pair.getFirst().getClass().getSimpleName()) + ", "
                    + (pair.getSecond() == null ? "null" : pair.getSecond().getClass().getSimpleName()) + ")");
        }
    } else {
        // Y values are naturally Comparable here; sort them directly.
        yVals = Sets.newTreeSet((Iterable) Arrays.asList(surface.getYs()));
    }
    Map<String, Object> results = Maps.newHashMap();
    results.put(SurfaceFormatterUtils.X_LABELS, getAxisLabels(xVals));
    results.put(SurfaceFormatterUtils.Y_LABELS, getAxisLabels(yVals));
    if (isPlottable(surface)) {
        return formatForPlotting(surface, xVals, yVals, results);
    } else {
        return formatForGrid(surface, xVals, yVals, results);
    }
}

From source file:com.metamx.druid.indexer.path.GranularUnprocessedPathSpec.java

@Override
public Job addInputPaths(HadoopDruidIndexerConfig config, Job job) throws IOException {
    // Scans the raw input tree and the segment output tree, and schedules indexing only for the
    // time buckets that appear to need (re)processing, capped at maxBuckets.
    // This PathSpec breaks so many abstractions that we might as well break some more
    Preconditions.checkState(config.getGranularitySpec() instanceof UniformGranularitySpec,
            String.format("Cannot use %s without %s", GranularUnprocessedPathSpec.class.getSimpleName(),
                    UniformGranularitySpec.class.getSimpleName()));

    final Path betaInput = new Path(getInputPath());
    final FileSystem fs = betaInput.getFileSystem(job.getConfiguration());
    final Granularity segmentGranularity = ((UniformGranularitySpec) config.getGranularitySpec())
            .getGranularity();

    // Newest modification time of the input files in each time bucket, newest bucket first
    // (reverse-ordered TreeMap).
    Map<DateTime, Long> inputModifiedTimes = new TreeMap<DateTime, Long>(
            Comparators.inverse(Comparators.<Comparable>comparable()));

    for (FileStatus status : FSSpideringIterator.spiderIterable(fs, betaInput)) {
        // Bucket each input file by the date encoded in its path.
        final DateTime key = segmentGranularity.toDate(status.getPath().toString());
        final Long currVal = inputModifiedTimes.get(key);
        final long mTime = status.getModificationTime();

        // Keep the newest modification time seen for this bucket.
        inputModifiedTimes.put(key, currVal == null ? mTime : Math.max(currVal, mTime));
    }

    Set<Interval> bucketsToRun = Sets.newTreeSet(Comparators.intervals());
    for (Map.Entry<DateTime, Long> entry : inputModifiedTimes.entrySet()) {
        DateTime timeBucket = entry.getKey();
        long mTime = entry.getValue();

        String bucketOutput = String.format("%s/%s", config.getSegmentOutputDir(),
                segmentGranularity.toPath(timeBucket));
        // NOTE(review): this schedules the bucket when an existing OUTPUT file is newer than the
        // newest input — that looks inverted for "unprocessed" semantics; confirm intent.
        for (FileStatus fileStatus : FSSpideringIterator.spiderIterable(fs, new Path(bucketOutput))) {
            if (fileStatus.getModificationTime() > mTime) {
                bucketsToRun.add(new Interval(timeBucket, segmentGranularity.increment(timeBucket)));
                break;
            }
        }

        // Cap the amount of work scheduled in one run; buckets are visited newest-first.
        if (bucketsToRun.size() >= maxBuckets) {
            break;
        }
    }

    config.setGranularitySpec(new UniformGranularitySpec(segmentGranularity, Lists.newArrayList(bucketsToRun)));

    return super.addInputPaths(config, job);
}

From source file:co.mitro.core.server.data.DBHistoricalUserState.java

public DBHistoricalUserState(ListMySecretsAndGroupKeysResponse resp, Map<Integer, GroupInfo> orgIdToOrg,
        long timestampMs) {
    // Snapshot of a user's state (visible users, groups, organizations, secrets and sharing
    // counts) at timestampMs, derived from a ListMySecretsAndGroupKeysResponse.
    this.userId = resp.myUserId;
    this.timestampMs = timestampMs;
    // sort the users
    this.visibleUsers = Sets.newTreeSet(resp.autocompleteUsers);
    numVisibleUsersInSameDomain = 0;
    // NOTE(review): assumes userId and every visible user contain '@' (email-like); otherwise
    // split("@")[1] throws ArrayIndexOutOfBoundsException — confirm upstream validation.
    String myDomain = userId.split("@")[1];
    for (String u : visibleUsers) {
        if (myDomain.equals(u.split("@")[1])) {
            ++numVisibleUsersInSameDomain;
        }
    }

    this.organizations = Lists.newArrayList();

    this.secrets = resp.secretToPath.values();
    this.groups = resp.groups.values();
    numGroups = groups.size();
    // Private groups are excluded below when deciding whether a secret is shared.
    Set<Integer> myPrivateGroups = Sets.newHashSet();
    // Guards against adding the same organization to the list twice.
    Set<Integer> seenOrgs = Sets.newHashSet();
    for (GroupInfo gi : groups) {
        if (gi.isNonOrgPrivateGroup || gi.isOrgPrivateGroup) {
            myPrivateGroups.add(gi.groupId);
        }
        if (gi.owningOrgId != null && seenOrgs.add(gi.owningOrgId)) {
            organizations.add(orgIdToOrg.get(gi.owningOrgId));
        }
        if (gi.isTopLevelOrg && seenOrgs.add(gi.groupId)) {
            organizations.add(orgIdToOrg.get(gi.groupId));
        }
    }
    numOrganizations = organizations.size();
    numSecrets = secrets.size();
    numVisibleUsers = visibleUsers.size();
    Set<Integer> sharedSecrets = new HashSet<Integer>();
    for (Secret s : secrets) {
        // the user should be excluded from this list.
        Set<String> usersExcludingMe = Sets.difference(Sets.newHashSet(s.users), ImmutableSet.of(userId));
        Set<Integer> groupsExcludingMe = Sets.difference(Sets.newHashSet(s.groups), myPrivateGroups);
        // A secret counts as shared when anyone besides this user — directly or via a
        // non-private group — can access it.
        if (!(usersExcludingMe.isEmpty() && groupsExcludingMe.isEmpty())) {
            sharedSecrets.add(s.secretId);
        }
    }
    numSharedSecrets = sharedSecrets.size();
}

From source file:io.druid.indexer.path.GranularityPathSpec.java

@Override
public Job addInputPaths(HadoopDruidIndexerConfig config, Job job) throws IOException {
    // Expands the configured segment intervals into dataGranularity buckets, lists the files
    // under each bucket's directory that match filePattern, and registers them as job inputs.
    final Set<Interval> intervals = Sets.newTreeSet(Comparators.intervals());
    Optional<Set<Interval>> optionalIntervals = config.getSegmentGranularIntervals();
    if (optionalIntervals.isPresent()) {
        for (Interval segmentInterval : optionalIntervals.get()) {
            // A segment interval may span several data-granularity buckets.
            for (Interval dataInterval : dataGranularity.getIterable(segmentInterval)) {
                intervals.add(dataInterval);
            }
        }
    }

    Path betaInput = new Path(inputPath);
    FileSystem fs = betaInput.getFileSystem(job.getConfiguration());
    // Sorted so paths are appended to the job in a deterministic order.
    Set<String> paths = Sets.newTreeSet();
    Pattern fileMatcher = Pattern.compile(filePattern);

    // An explicit pathFormat overrides the granularity's default directory layout.
    DateTimeFormatter customFormatter = null;
    if (pathFormat != null) {
        customFormatter = DateTimeFormat.forPattern(pathFormat);
    }

    for (Interval interval : intervals) {
        DateTime t = interval.getStart();
        String intervalPath = null;
        if (customFormatter != null) {
            intervalPath = customFormatter.print(t);
        } else {
            intervalPath = dataGranularity.toPath(t);
        }

        Path granularPath = new Path(betaInput, intervalPath);
        log.info("Checking path[%s]", granularPath);
        // Walk the bucket directory, keeping only files whose full path matches filePattern.
        for (FileStatus status : FSSpideringIterator.spiderIterable(fs, granularPath)) {
            final Path filePath = status.getPath();
            if (fileMatcher.matcher(filePath.toString()).matches()) {
                paths.add(filePath.toString());
            }
        }
    }

    for (String path : paths) {
        log.info("Appending path[%s]", path);
        StaticPathSpec.addToMultipleInputs(config, job, path, inputFormat);
    }

    return job;
}

From source file:com.google.api.tools.framework.importers.swagger.aspects.auth.AuthBuilder.java

@Override
public void addFromSwagger(Service.Builder serviceBuilder, Swagger swagger) {
    // Nothing to register when the swagger document declares no security schemes.
    if (swagger.getSecurityDefinitions() == null) {
        return;
    }
    // Visit the security definitions in sorted name order so the output is deterministic.
    for (String definitionName : Sets.newTreeSet(swagger.getSecurityDefinitions().keySet())) {
        addAuthProvider(serviceBuilder, definitionName, swagger.getSecurityDefinitions().get(definitionName));
    }
    addSecurityRequirementForEntireService(serviceBuilder, swagger.getSecurity());
    addSecurityRequirementExtensionForEntireService(serviceBuilder, swagger);
}

From source file:edu.washington.cs.cupid.internal.CapabilityRegistry.java

@Override
public synchronized SortedSet<ICapability> getCapabilities(final TypeToken<?> inputType,
        final TypeToken<?> outputType) {
    // Capabilities that accept inputType and can produce something compatible with outputType,
    // ordered by capability name.
    final SortedSet<ICapability> matching = Sets.newTreeSet(CapabilityUtil.COMPARE_NAME);

    for (final ICapability candidate : getCapabilities(inputType)) {
        if (producesCompatibleOutput(candidate, outputType)) {
            matching.add(candidate);
        }
    }
    return matching;
}

// Returns true when any of the capability's outputs is Java-compatible with outputType.
private static boolean producesCompatibleOutput(final ICapability capability, final TypeToken<?> outputType) {
    for (final ICapability.IOutput<?> output : capability.getOutputs()) {
        if (TypeManager.isJavaCompatible(outputType, output.getType())) {
            return true;
        }
    }
    return false;
}

From source file:org.fenixedu.academic.ui.renderers.providers.enrollment.bolonha.ExecutionPeriodsForEnrolmentProvider.java

static private ExecutionYear getLastExecutionYear(final StudentCurricularPlan plan) {
    // Gather candidate "last" years from the plan, its DCP executions and its DCP definition,
    // then pick the latest according to COMPARATOR_BY_YEAR.
    final SortedSet<ExecutionYear> candidates = Sets.newTreeSet(ExecutionYear.COMPARATOR_BY_YEAR);

    // whatever the case, the SCP lines must be able to be accessible
    final ExecutionYear lastScpYear = plan.getLastExecutionYear();
    if (lastScpYear != null) {
        candidates.add(lastScpYear);
    }

    final DegreeCurricularPlan dcp = plan.getDegreeCurricularPlan();

    // inspect DCP executions
    if (!dcp.getExecutionDegreesSet().isEmpty()) {
        candidates.add(dcp.getLastExecutionYear());
    }

    // inspect DCP definition
    candidates.addAll(getEndContextExecutionYears(dcp.getRoot()));

    // NOTE(review): last() throws NoSuchElementException when every source above was empty —
    // presumably at least one always contributes; confirm.
    return candidates.last();
}

From source file:com.cloudera.science.ml.parallel.summary.InternalStats.java

public void merge(InternalStats other, int maxLevels) {
    // Folds another InternalStats into this one. Numeric stats delegate to the numeric merge;
    // categorical stats merge the two histograms, capping the result at maxLevels distinct keys
    // and recording that the histogram was trimmed when the cap is hit.
    if (other.internalNumeric != null) {
        internalNumeric().merge(other.internalNumeric);
    } else {
        Map<String, Entry> entries = histogram();
        Map<String, Entry> merged = Maps.newTreeMap();
        // Union of both key sets in sorted order so the subset retained under the cap is
        // deterministic.
        Set<String> keys = Sets.newTreeSet(Sets.union(entries.keySet(), other.histogram().keySet()));
        for (String key : keys) {
            Entry e = entries.get(key);
            Entry entry = other.histogram().get(key);
            Entry newEntry = new Entry();
            if (e != null) {
                newEntry.inc(e.getCount());
            }
            if (entry != null) {
                newEntry.inc(entry.getCount());
            }
            merged.put(key, newEntry);
            // Use >= rather than == so a non-positive maxLevels still stops the merge instead of
            // silently ignoring the cap.
            if (merged.size() >= maxLevels) {
                this.trimmed = true;
                break;
            }
        }
        entries.clear();
        entries.putAll(merged);
        // Trimming is sticky: if the other side already dropped levels, so have we.
        if (other.trimmed) {
            this.trimmed = true;
        }
    }
}

From source file:com.shopzilla.hadoop.repl.HadoopREPL.java

protected Map<Call, Command> buildCommandMappings() {
    // Builds the REPL's command table: session commands, FS shell commands, and a "help"
    // command that prints usage information for any other registered command.
    final Map<Call, Command> commands = ImmutableMap.<Call, Command>builder()
            .putAll(new SessionCommandProvider().apply(sessionState))
            .putAll(new FSShellCommandProvider().apply(sessionState)).build();
    return ImmutableMap.<Call, Command>builder().putAll(commands)
            // Completion candidates for "help" are computed lazily so they reflect the
            // commandMappings field at use time.
            // NOTE(review): the completer and executor read the commandMappings field, not the
            // local commands map — presumably the field is assigned from this method's result;
            // confirm.
            .put(call("help", new DeferredStringsCompleter<Map<Call, Command>>(commandMappings,
                    new Function<Map<Call, Command>, TreeSet<String>>() {
                        @Override
                        public TreeSet<String> apply(final Map<Call, Command> calls) {
                            // Sorted set of all registered command names.
                            return Sets.newTreeSet(Maps.transformEntries(commandMappings,
                                    new Maps.EntryTransformer<Call, Command, String>() {
                                        @Override
                                        public String transformEntry(final Call key, final Command value) {
                                            return key.commandName();
                                        }
                                    }).values());
                        }
                    })), new Command() {
                        @Override
                        public void execute(final CommandInvocation call, final SessionState sessionState)
                                throws REPL.ExitSignal {
                            // "help <command>": show that command's usage, or an error if unknown.
                            if (call.args().length != 1) {
                                sessionState.output("Usage: help [command]");
                            } else {
                                final String command = call.args()[0];
                                if (commandMappings.containsKey(call(command))) {
                                    final Usage usage = commandMappings.get(call(command)).usage(sessionState);
                                    sessionState.output("Displaying help for \"%s\":\n", command);
                                    sessionState.outputUsage(usage);
                                } else {
                                    sessionState.error("Unknown command \"%s\"", command);
                                }
                            }
                        }

                        @Override
                        public Usage usage(final SessionState sessionState) {
                            return new Usage("help", "Displays help / usage information for the given command ",
                                    "<command>");
                        }
                    })
            .build();
}

From source file:org.gradoop.flink.algorithms.fsm.transactional.tle.functions.JoinEmbeddings.java

@Override
public void join(SE embeddings, G graph, Collector<SE> out) throws Exception {
    // Grows each parent embedding by one adjacent graph edge, de-duplicating children by their
    // edge-id set and grouping the grown embeddings by canonical subgraph label.

    // Edge-id sets already produced; prevents emitting the same child via different parents.
    Set<TreeSet<Integer>> edgeSets = Sets.newHashSet();
    Map<String, List<Embedding>> subgraphEmbeddings = Maps.newHashMap();

    for (Embedding parent : embeddings.getEmbeddings()) {
        for (Map.Entry<Integer, FSMEdge> entry : graph.getEdges().entrySet()) {

            int edgeId = entry.getKey();

            // Only consider edges not already part of the parent embedding.
            if (!parent.getEdges().containsKey(edgeId)) {
                FSMEdge edge = entry.getValue();

                int sourceId = edge.getSourceId();
                boolean containsSourceId = parent.getVertices().containsKey(sourceId);

                int targetId = edge.getTargetId();
                boolean containsTargetId = parent.getVertices().containsKey(targetId);

                // The new edge must touch the parent so the embedding stays connected.
                if (containsSourceId || containsTargetId) {
                    TreeSet<Integer> edgeSet = Sets.newTreeSet(parent.getEdgeIds());
                    edgeSet.add(edgeId);

                    // Skip children whose exact edge set was already generated.
                    if (!edgeSets.contains(edgeSet)) {
                        edgeSets.add(edgeSet);

                        Embedding child = parent.deepCopy();
                        child.getEdges().put(edgeId, edge);

                        // Pull in whichever endpoint the parent did not already contain.
                        if (!containsSourceId) {
                            child.getVertices().put(sourceId, graph.getVertices().get(sourceId));
                        }
                        if (!containsTargetId) {
                            child.getVertices().put(targetId, graph.getVertices().get(targetId));
                        }

                        String canonicalLabel = canonicalLabeler.label(child);

                        List<Embedding> siblings = subgraphEmbeddings.get(canonicalLabel);

                        // Group the child with other embeddings of the same canonical subgraph.
                        if (siblings == null) {
                            siblings = Lists.newArrayList(child);
                            subgraphEmbeddings.put(canonicalLabel, siblings);
                        } else {
                            siblings.add(child);
                        }
                    }
                }
            }
        }
    }

    collect(embeddings, out, subgraphEmbeddings);
}