Example usage for java.util Set size

List of usage examples for java.util Set size

Introduction

On this page you can find example usages of java.util.Set.size().

Prototype

int size();

Source Link

Document

Returns the number of elements in this set (its cardinality).

Usage

From source file:com.facebook.presto.execution.resourceGroups.TestResourceGroups.java

/**
 * Submits queries to {@code group} until it holds {@code count} queries in
 * total, returning the union of the pre-existing and newly created queries.
 *
 * @param group           the resource group to fill
 * @param existingQueries queries already running in the group
 * @param count           target total number of queries in the group
 * @param queryPriority   if true, give each new query an increasing priority
 *                        (i + 1); otherwise every new query gets priority 1
 * @return all queries after filling (existing plus newly created)
 */
private static Set<MockQueryExecution> fillGroupTo(InternalResourceGroup group,
        Set<MockQueryExecution> existingQueries, int count, boolean queryPriority) {
    int existingCount = existingQueries.size();
    Set<MockQueryExecution> queries = new HashSet<>(existingQueries);
    for (int i = 0; i < count - existingCount; i++) {
        // Query id: the group id with dots stripped, followed by the loop index.
        // (String concatenation already converts i; Integer.toString was redundant.)
        MockQueryExecution query = new MockQueryExecution(0,
                group.getId().toString().replace(".", "") + i, queryPriority ? i + 1 : 1);
        queries.add(query);
        group.run(query);
    }
    return queries;
}

From source file:com.github.abel533.mapperhelper.EntityHelper.java

/**
 * ?/*from www.  ja  v  a  2s .c  om*/
 *
 * @param entityClass
 * @return
 */
public static Map<String, String> getColumnAlias(Class<?> entityClass) {
    EntityTable entityTable = getEntityTable(entityClass);
    if (entityTable.aliasMap != null) {
        return entityTable.aliasMap;
    }
    Set<EntityColumn> columnList = entityTable.getEntityClassColumns();
    entityTable.aliasMap = new HashMap<String, String>(columnList.size());
    for (EntityColumn column : columnList) {
        entityTable.aliasMap.put(column.getColumn(), column.getProperty());
    }
    return entityTable.aliasMap;
}

From source file:com.apporiented.hermesftp.PluginManager.java

/**
 * Collects all JAR files found in the given directories.
 *
 * @param paths directory paths to scan; non-existent paths are logged and skipped
 * @return the JAR files found, de-duplicated, in no particular order
 */
private static File[] collectJars(String[] paths) {
    Set<File> jarList = new HashSet<File>();
    for (String path : paths) {
        File dir = new File(path);
        if (!dir.exists()) {
            if (log.isWarnEnabled()) {
                log.warn("JAR folder not found: " + dir);
            }
            continue;
        }
        if (dir.isDirectory()) {
            File[] files = dir.listFiles(new JarFileFilter());
            // listFiles() can return null on an I/O error even for an existing
            // directory; guard against the resulting NPE in addAll.
            if (files != null) {
                Collections.addAll(jarList, files);
            }
        }
    }
    return jarList.toArray(new File[jarList.size()]);
}

From source file:com.diversityarrays.kdxplore.trials.PlotCellRenderer.java

/**
 * Appends annotation symbols for a plot to {@code sb}: one symbol each for
 * attachment count (parenthesised form), specimen count (positive form) and
 * tag-label count (negative form). Counts at or above the symbol-table limit
 * use the generic {@code N_nn} symbol instead.
 *
 * @param plot      the plot whose counts are rendered
 * @param tagLabels tag labels for the plot; may be null (treated as empty)
 * @param sb        the builder the symbols are appended to
 */
static public void addAnnotations(Plot plot, Set<String> tagLabels, StringBuilder sb) {

    int attachmentCount = plot.getMediaFileCount();
    int specimenCount = plot.getSpecimenCount(false);
    int labelCount = (tagLabels == null) ? 0 : tagLabels.size();

    // Nothing to render at all.
    if (attachmentCount <= 0 && specimenCount <= 0 && labelCount <= 0) {
        return;
    }

    Number[] symbols = UnicodeChars.Number.values();
    // Counts at or beyond this index fall back to the generic N_nn symbol.
    int overflowAt = symbols.length - 1;

    if (attachmentCount > 0) {
        sb.append(' ');
        sb.append(attachmentCount < overflowAt
                ? symbols[attachmentCount].parenthesis
                : Number.N_nn.parenthesis);
    }

    if (specimenCount > 0) {
        sb.append(' ');
        sb.append(specimenCount < overflowAt
                ? symbols[specimenCount].positive
                : Number.N_nn.positive);
    }

    if (labelCount > 0) {
        sb.append(' ');
        sb.append(labelCount < overflowAt
                ? symbols[labelCount].negative
                : Number.N_nn.negative);
    }
}

From source file:mailbox.EmailHandler.java

/**
 * Finds resources by the given message id.
 *
 * If there are resources created via an email which matches the message
 * id, returns the resources, else finds and returns resources which matches
 * the resource path taken from the id-left part of the message id.
 *
 * The format of message-id is defined by RFC 5322 as follows:
 *
 *     msg-id  =   [CFWS] "<" id-left "@" id-right ">" [CFWS]
 *
 * @param  messageId/*from  w w  w.  ja  v a2  s. c  o m*/
 * @return the set of resources
 */
@Nonnull
public static Set<Resource> findResourcesByMessageId(String messageId) {
    Set<Resource> resources = new HashSet<>();
    Set<OriginalEmail> originalEmails = OriginalEmail.finder.where().eq("messageId", messageId).findSet();

    if (originalEmails.size() > 0) {
        for (OriginalEmail originalEmail : originalEmails) {
            resources.add(Resource.get(originalEmail.resourceType, originalEmail.resourceId));
        }
        return resources;
    }

    try {
        String resourcePath = IMAPMessageUtil.getIdLeftFromMessageId(messageId);
        Resource resource = Resource.findByPath(resourcePath);
        if (resource != null) {
            resources.add(resource);
        }
    } catch (Exception e) {
        Logger.info("Error while finding a resource by message-id '" + messageId + "'", e);
    }

    return resources;
}

From source file:msi.gama.util.GAML.java

/**
 * Builds a plain-text documentation table for {@code query}, probing every
 * GAML category that could define the keyword: statements, statement and
 * variable facets, operators, skills (and their attributes/primitives),
 * types, built-in species, and unit constants.
 *
 * @param query the raw keyword; surrounding whitespace is trimmed, a leading
 *              "#" and a trailing ":" are stripped before lookup
 * @return the formatted documentation, or "No result found" when no category
 *         matches
 */
public static String getDocumentationOn2(final String query) {
    final String keyword = StringUtils.removeEnd(StringUtils.removeStart(query.trim(), "#"), ":");
    // Accumulates "category label" -> documentation text for every match.
    final THashMap<String, String> results = new THashMap<>();
    // Statements
    final SymbolProto p = DescriptionFactory.getStatementProto(keyword);
    if (p != null) {
        results.put("Statement", p.getDocumentation());
    }
    DescriptionFactory.visitStatementProtos((name, proto) -> {
        if (proto.getFacet(keyword) != null) {
            results.put("Facet of statement " + name, proto.getFacet(keyword).getDocumentation());
        }
    });
    // Variable facets: collect at most 4 declaring types to keep the label short.
    final Set<String> types = new HashSet<>();
    final String[] facetDoc = { "" };
    DescriptionFactory.visitVarProtos((name, proto) -> {
        if (proto.getFacet(keyword) != null && types.size() < 4) {
            // Skip agent types, except the generic 'agent' type itself.
            if (!Types.get(name).isAgentType() || name.equals(IKeyword.AGENT)) {
                types.add(name);
            }
            facetDoc[0] = proto.getFacet(keyword).getDocumentation();
        }
    });
    if (!types.isEmpty()) {
        results.put("Facet of attribute declarations with types " + types + (types.size() == 4 ? " ..." : ""),
                facetDoc[0]);
    }
    // Operators
    final THashMap<Signature, OperatorProto> ops = IExpressionCompiler.OPERATORS.get(keyword);
    if (ops != null) {
        ops.forEachEntry((sig, proto) -> {
            results.put("Operator on " + sig.toString(), proto.getDocumentation());
            return true;
        });
    }
    // Built-in skills
    final SkillDescription sd = GamaSkillRegistry.INSTANCE.get(keyword);
    if (sd != null) {
        results.put("Skill", sd.getDocumentation());
    }
    GamaSkillRegistry.INSTANCE.visitSkills(desc -> {
        final SkillDescription sd1 = (SkillDescription) desc;
        final VariableDescription var = sd1.getAttribute(keyword);
        if (var != null) {
            results.put("Attribute of skill " + desc.getName(), var.getDocumentation());
        }
        final ActionDescription action = sd1.getAction(keyword);
        if (action != null) {
            results.put("Primitive of skill " + desc.getName(),
                    action.getDocumentation().isEmpty() ? "" : ":" + action.getDocumentation());
        }
        return true;
    });
    // Types
    final IType<?> t = Types.builtInTypes.containsType(keyword) ? Types.get(keyword) : null;
    if (t != null) {
        String tt = t.getDocumentation();
        if (tt == null) {
            tt = "type " + keyword;
        }
        results.put("Type", tt);
    }
    // Built-in species
    for (final TypeDescription td : Types.getBuiltInSpecies()) {
        if (td.getName().equals(keyword)) {
            results.put("Built-in species", ((SpeciesDescription) td).getDocumentationWithoutMeta());
        }
        final IDescription var = td.getOwnAttribute(keyword);
        if (var != null) {
            results.put("Attribute of built-in species " + td.getName(), var.getDocumentation());
        }
        final ActionDescription action = td.getOwnAction(keyword);
        if (action != null) {
            results.put("Primitive of built-in species " + td.getName(),
                    action.getDocumentation().isEmpty() ? "" : ":" + action.getDocumentation());
        }
    }
    // Constants
    final UnitConstantExpression exp = IUnits.UNITS_EXPR.get(keyword);
    if (exp != null) {
        results.put("Constant", exp.getDocumentation());
    }
    if (results.isEmpty()) {
        return "No result found";
    }
    // Render the results as a table; width is the longest category label.
    final StringBuilder sb = new StringBuilder();
    // Safe: results is non-empty here, so getAsInt() cannot throw.
    final int max = results.keySet().stream().mapToInt(each -> each.length()).max().getAsInt();
    // NOTE(review): StringUtils.repeat("", n) always yields "" — a separator
    // character (e.g. "=") was probably lost in transit; confirm against the
    // upstream source before relying on this output format.
    final String separator = StringUtils.repeat("", max + 6).concat(Strings.LN);
    results.forEachEntry((sig, doc) -> {
        // NOTE(review): append("") is a no-op — possibly another lost character.
        sb.append("").append(separator).append("|| ");
        sb.append(StringUtils.rightPad(sig, max));
        sb.append(" ||").append(Strings.LN).append(separator);
        sb.append(toText(doc)).append(Strings.LN);
        return true;
    });

    return sb.toString();
}

From source file:com.vmware.photon.controller.common.dcp.ServiceHostUtils.java

/**
 * Deletes every document on the given host by broadcasting a wildcard
 * self-link query and issuing a joined DELETE for each returned link.
 *
 * @param host     the service host to purge
 * @param referrer value used as the referer of the issued operations
 * @param timeout  how long to wait for all deletions to complete
 * @param timeUnit unit of {@code timeout}
 * @throws TimeoutException if the joined deletions do not finish in time
 * @throws Throwable        if the broadcast query itself fails
 */
public static <H extends ServiceHost> void deleteAllDocuments(H host, String referrer, long timeout,
        TimeUnit timeUnit) throws Throwable {
    // Match every document via a wildcard on the self-link field.
    QueryTask.Query selfLinkClause = new QueryTask.Query()
            .setTermPropertyName(ServiceDocument.FIELD_NAME_SELF_LINK).setTermMatchValue("*")
            .setTermMatchType(QueryTask.QueryTerm.MatchType.WILDCARD);

    QueryTask.QuerySpecification querySpecification = new QueryTask.QuerySpecification();
    querySpecification.query.addBooleanClause(selfLinkClause);
    QueryTask queryTask = QueryTask.create(querySpecification).setDirect(true);

    NodeGroupBroadcastResponse queryResponse = ServiceHostUtils.sendBroadcastQueryAndWait(host, referrer,
            queryTask);

    Set<String> documentLinks = QueryTaskUtils.getBroadcastQueryResults(queryResponse);

    // Nothing to delete.
    if (documentLinks == null || documentLinks.isEmpty()) {
        return;
    }

    CountDownLatch latch = new CountDownLatch(1);

    // The joined completion fires once for the whole batch; log any failures
    // but always release the latch so the caller cannot hang forever.
    OperationJoin.JoinedCompletionHandler handler = new OperationJoin.JoinedCompletionHandler() {
        @Override
        public void handle(Map<Long, Operation> ops, Map<Long, Throwable> failures) {
            if (failures != null && !failures.isEmpty()) {
                for (Throwable e : failures.values()) {
                    logger.error("deleteAllDocuments failed", e);
                }
            }
            latch.countDown();
        }
    };

    Collection<Operation> deletes = new LinkedList<>();
    for (String documentLink : documentLinks) {
        Operation deleteOperation = Operation.createDelete(UriUtils.buildUri(host, documentLink)).setBody("{}")
                .setReferer(UriUtils.buildUri(host, referrer));
        deletes.add(deleteOperation);
    }

    OperationJoin join = OperationJoin.create(deletes);
    join.setCompletion(handler);
    join.sendWith(host);
    if (!latch.await(timeout, timeUnit)) {
        throw new TimeoutException(String
                .format("Deletion of all documents timed out. Timeout:{%s}, TimeUnit:{%s}", timeout, timeUnit));
    }
}

From source file:it.infn.ct.downtime.Downtime.java

/**
 * Returns the set intersection of two string arrays: the distinct elements
 * present in both, with duplicates removed and no guaranteed order.
 *
 * @param arr1 the first array
 * @param arr2 the second array
 * @return the elements common to both arrays
 */
public static String[] getIntersection(String[] arr1, String[] arr2) {
    Set<String> common = new HashSet<String>(Arrays.asList(arr1));
    common.retainAll(new HashSet<String>(Arrays.asList(arr2)));
    return common.toArray(new String[common.size()]);
}

From source file:com.streamsets.pipeline.lib.jdbc.multithread.TableRuntimeContext.java

/**
 * Creates the partition context that immediately follows {@code lastPartition}.
 * Each offset column's next starting offset is derived from its current
 * starting offset, and the next max offset is one further step beyond that.
 *
 * @param lastPartition the partition to advance from; must be partitioned
 * @return the next partition context, or null when not every offset column
 *         has a captured starting value yet
 * @throws IllegalStateException if {@code lastPartition} is not partitioned
 */
public static TableRuntimeContext createNextPartition(final TableRuntimeContext lastPartition) {
    if (!lastPartition.isPartitioned()) {
        throw new IllegalStateException("lastPartition TableRuntimeContext was not partitioned");
    }

    final Set<String> offsetColumns = lastPartition.getSourceTableContext().getOffsetColumns();
    final Map<String, String> startingOffsets = lastPartition.getStartingPartitionOffsets();

    if (startingOffsets.size() < offsetColumns.size()) {
        // We have not yet captured an offset for every offset column, so a
        // next partition cannot be constructed.
        if (LOG.isTraceEnabled()) {
            LOG.trace("Cannot create next partition after {} since we are missing values for offset columns {}",
                    lastPartition.getPartitionSequence(),
                    Sets.difference(offsetColumns, startingOffsets.keySet()));
        }
        return null;
    }

    // Non-positive sequences restart the numbering at 1.
    final int nextSequence = lastPartition.partitionSequence > 0 ? lastPartition.partitionSequence + 1 : 1;

    final Map<String, String> nextStartingOffsets = new HashMap<>();
    for (Map.Entry<String, String> entry : lastPartition.startingPartitionOffsets.entrySet()) {
        nextStartingOffsets.put(entry.getKey(),
                lastPartition.generateNextPartitionOffset(entry.getKey(), entry.getValue()));
    }

    final Map<String, String> nextMaxOffsets = new HashMap<>();
    for (Map.Entry<String, String> entry : nextStartingOffsets.entrySet()) {
        nextMaxOffsets.put(entry.getKey(),
                lastPartition.generateNextPartitionOffset(entry.getKey(), entry.getValue()));
    }

    return new TableRuntimeContext(lastPartition.sourceTableContext, lastPartition.usingNonIncrementalLoad,
            lastPartition.partitioned, nextSequence, nextStartingOffsets, nextMaxOffsets);
}

From source file:edu.stanford.muse.groups.SimilarGroupMethods.java

/**
 * The alternative group-finding algorithm, described in the IUI-2011 paper.
 * Pipeline: strip hyper-connected elements, convert to similar groups,
 * filter by minimum size and frequency, compute utilities, remove subsumed
 * groups, manufacture super-groups from the hierarchy roots, and rebuild the
 * final hierarchy. Per-stage statistics are recorded into {@code stats}.
 *
 * @param input                 the raw groups to process; element sets are mutated in place
 * @param MINCOUNT              minimum frequency a candidate group must have
 * @param MIN_GROUP_SIZE        minimum number of elements a group must have
 * @param MAX_SUBSUMPTION_ERROR error tolerance used for subsumption removal
 * @param MIN_MERGE_GROUP_SIM   minimum similarity for merging into super-groups
 * @param utilityType           "linear", "square", or any other value for exponential utility
 * @param UTILITY_MULTIPLIER    multiplier used by the exponential utility
 * @param stats                 out-parameter populated with per-stage group statistics
 * @return the final group hierarchy
 */
public static <T extends Comparable<? super T>> GroupHierarchy<T> findContactGroupsIUI(List<Group<T>> input,
        int MINCOUNT, int MIN_GROUP_SIZE, float MAX_SUBSUMPTION_ERROR, float MIN_MERGE_GROUP_SIM,
        String utilityType, float UTILITY_MULTIPLIER, GroupAlgorithmStats<T> stats) {
    log.info(
            "-----------------------------------------------   GROUPER  -----------------------------------------------\n");

    long startTimeMillis = System.currentTimeMillis();
    // copy over the alg. parameters so everything is in one place
    stats.MIN_GROUP_SIZE = MIN_GROUP_SIZE;
    stats.MIN_FREQ = MINCOUNT;
    stats.MAX_SUBSUMPTION_ERROR = MAX_SUBSUMPTION_ERROR;
    stats.MIN_MERGE_GROUP_SIM = MIN_MERGE_GROUP_SIM;
    int MAX_EDGES = 1000;
    // Remove elements connected to too many others; they add noise to grouping.
    Set<T> hypers = Grouper.findHyperConnectedElementsRaw(input, MAX_EDGES);

    for (Group<T> g : input) {
        for (Iterator<T> it = g.elements.iterator(); it.hasNext();) {
            T t = it.next();
            if (hypers.contains(t))
                it.remove();
            // NOTE(review): this 'continue' targets the inner iterator loop and
            // has no effect; it may have been meant to skip the emptied group
            // in the OUTER loop — confirm against the original intent.
            if (g.elements.size() == 0)
                continue;
        }
    }

    List<SimilarGroup<T>> exactGroups = Grouper.convertToSimilarGroups(input);

    //int nUniqueGroups = exactGroups.size();
    stats.startingGroups = new GroupStats<T>(exactGroups);
    //   dumpGroupsForDebug("Starting Groups", exactGroups);

    // Stage 1: drop groups smaller than MIN_GROUP_SIZE.
    Set<SimilarGroup<T>> candidates = selectGroupsWithMinSize(exactGroups, MIN_GROUP_SIZE);
    stats.groupsWithMinSize = new GroupStats<T>(candidates);
    //   dumpGroupsForDebug("Groups with min size " + MIN_GROUP_SIZE, candidates);

    // Stage 2 (intersections) is deliberately disabled — see warning below.
    log.warn("Intersections are disabled because taking too long for Ken Lay!!");
    //candidates // returns (log, result)
    //= SimilarGroupMethods.intersectGroups(candidates, MIN_GROUP_SIZE, stats);
    stats.groupsAfterIntersections = new GroupStats<T>(candidates);
    //   dumpGroupsForDebug("Groups after intersections ", candidates);

    // verify
    for (SimilarGroup<T> sg : candidates) {
        Util.softAssert(candidates.contains(sg));
        Util.softAssert(sg.size() >= MIN_GROUP_SIZE);
    }

    // now filter based on min freq
    computeGroupFrequencies(input, candidates);
    candidates = SimilarGroupMethods.selectGroupsWithMinFreq(candidates, MINCOUNT);
    stats.groupsWithMinFreqAndMinSize = new GroupStats<T>(candidates);
    //   dumpGroupsForDebug("Groups with min. freq. " + MINCOUNT, candidates);

    // compute utilities; any utilityType other than "linear"/"square" falls
    // through to the exponential utility.
    // Map<T, Integer> indivFreqs =
    // SimilarGroupMethods.computeIndivFreqs(input);
    for (SimilarGroup<T> sg : candidates) {
        if ("linear".equals(utilityType))
            sg.computeLinearUtility();
        else if ("square".equals(utilityType))
            sg.computeSquareUtility();
        else
            sg.computeExpUtility(UTILITY_MULTIPLIER);

        // sg.computeZScore(indivFreqs, input.size());
    }

    // convert candidates from set to list now, because we need sorting etc

    List<SimilarGroup<T>> candidateList = new ArrayList<SimilarGroup<T>>();
    candidateList.addAll(candidates);

    // remove subsumed groups
    List<SimilarGroup<T>> selectedGroups = SimilarGroupMethods.selectGroupsNotSubsumed(candidateList,
            MAX_SUBSUMPTION_ERROR);
    stats.groupsAfterSubsumption = new GroupStats<T>(selectedGroups);
    //   dumpGroupsForDebug("Groups after subsumption with error "
    //                      + MAX_SUBSUMPTION_ERROR, selectedGroups);

    // now compute hierarchy, just to identify the root groups
    GroupHierarchy<T> hierarchy = new GroupHierarchy<T>(selectedGroups);
    // Map<SimilarGroup<T>, List<SimilarGroup<T>>> parentToChildGroupMap
    // = hierarchy.parentToChildrenMap;
    Set<SimilarGroup<T>> rootGroups = hierarchy.rootGroups;
    log.info("hierarchy: #root groups = " + rootGroups.size());

    // add supergroups. supergroups never subsume other subgroups
    Set<SimilarSuperGroup<T>> manufacturedGroups = SimilarGroupMethods.manufactureSuperGroups(input, rootGroups,
            MAX_SUBSUMPTION_ERROR, MIN_MERGE_GROUP_SIM);
    stats.manufacturedGroups = new GroupStats(manufacturedGroups);
    selectedGroups.addAll(manufacturedGroups);
    stats.finalGroups = new GroupStats<T>(selectedGroups);
    //   dumpGroupsForDebug("Final groups " + MAX_SUBSUMPTION_ERROR, selectedGroups);

    // recompute hierarchy. its easier than
    // trying to update the existing hierarchy
    hierarchy = new GroupHierarchy<T>(selectedGroups);
    rootGroups = hierarchy.rootGroups;
    stats.finalRootGroups = new GroupStats(rootGroups);
    long endTimeMillis = System.currentTimeMillis();
    stats.executionTimeMillis = endTimeMillis - startTimeMillis;
    return hierarchy;
}