Example usage for java.util.SortedSet.size()

A list of usage examples for java.util.SortedSet.size().

Introduction

On this page you can find example usage for java.util.SortedSet.size().

Prototype

int size();

Document

Returns the number of elements in this set (its cardinality).
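
Before the project examples, here is a minimal, self-contained sketch (class and variable names are illustrative only) showing what size() reports for a SortedSet:

import java.util.SortedSet;
import java.util.TreeSet;

public class SortedSetSizeDemo {
    public static void main(String[] args) {
        // TreeSet is the standard SortedSet implementation; duplicate adds are ignored.
        SortedSet<String> names = new TreeSet<>();
        names.add("alice");
        names.add("bob");
        names.add("alice"); // duplicate, not counted again

        // size() returns the set's cardinality, here 2.
        System.out.println("size = " + names.size());
    }
}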

Usage

From source file:net.sourceforge.fenixedu.domain.Shift.java

public String getShiftTypesPrettyPrint() {
    StringBuilder builder = new StringBuilder();
    int index = 0;
    SortedSet<ShiftType> sortedTypes = getSortedTypes();
    for (ShiftType shiftType : sortedTypes) {
        builder.append(BundleUtil.getString(Bundle.ENUMERATION, shiftType.getName()));
        index++;
        if (index < sortedTypes.size()) {
            builder.append(", ");
        }
    }
    return builder.toString();
}
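
The index/size() bookkeeping above is one way to avoid a trailing separator. As a side note (a sketch with illustrative data, not code from the Fenix project), the same comma-separated output can be produced with String.join, which only inserts the separator between elements:

import java.util.SortedSet;
import java.util.TreeSet;

public class ShiftTypesJoinSketch {
    public static void main(String[] args) {
        SortedSet<String> sortedTypes = new TreeSet<>();
        sortedTypes.add("PRACTICAL");
        sortedTypes.add("THEORETICAL");

        // No size()-based check for the last element is needed here.
        System.out.println(String.join(", ", sortedTypes));
    }
}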

From source file:nl.b3p.viewer.config.services.WMSService.java

/**
 * Set feature types for layers in the WMSService from the given WFS according
 * to the DescribeLayer response. When errors occur these are logged but no
 * exception is thrown. Note: DescribeLayer may return multiple type names
 * for a layer; this is not supported, and only the first one is used.
 * @param wfsUrl the WFS URL
 * @param layerDescriptions description of which feature types of the WFS are
 *   used in layers of this service according to DescribeLayer
 */
public void loadLayerFeatureTypes(String wfsUrl, List<LayerDescription> layerDescriptions) {
    Map p = new HashMap();
    p.put(WFSDataStoreFactory.URL.key, wfsUrl);
    p.put(WFSDataStoreFactory.USERNAME.key, getUsername());
    p.put(WFSDataStoreFactory.PASSWORD.key, getPassword());

    try {
        WFSFeatureSource wfsFs = new WFSFeatureSource(p);
        wfsFs.loadFeatureTypes();

        boolean used = false;
        for (LayerDescription ld : layerDescriptions) {
            Layer l = getLayer(ld.getName());
            if (l != null) {
                // Prevent warning when multiple queries for all the same type name
                // by removing duplicates, but keeping sort order to pick the first
                SortedSet<String> uniqueQueries = new TreeSet<>(Arrays.asList(ld.getQueries()));
                if (uniqueQueries.size() != 1) {
                    // Allowed by spec but not handled by this application
                    log.warn("Cannot handle multiple typeNames for layer " + l.getName()
                            + ", only using the first. Type names: " + Arrays.toString(ld.getQueries()));
                }
                // Queries is not empty, checked before this method is called
                SimpleFeatureType sft = wfsFs.getFeatureType(uniqueQueries.first());
                if (sft != null) {
                    // Type name may not exist in the referenced WFS
                    l.setFeatureType(sft);
                    log.debug("Feature type for layer " + l.getName() + " set to feature type "
                            + sft.getTypeName());
                    used = true;
                } else {
                    log.warn("Type name " + uniqueQueries.first() + " in WFS for described layer " + l.getName()
                            + " does not exist!");
                }
            }
        }
        if (used) {
            log.debug("Type from WFSFeatureSource with url " + wfsUrl + " used by layer of WMS");

            wfsFs.setLinkedService(this);
        } else {
            log.debug("No type from WFSFeatureSource with url " + wfsUrl + " used!");
        }
    } catch (Exception e) {
        log.error("Error loading WFS from url " + wfsUrl, e);
    }
}

From source file:org.jclouds.aws.s3.xml.S3ParserTest.java

@Test
public void testCanParseListAllMyBuckets() throws HttpException {
    SortedSet<BucketMetadata> s3Buckets = runParseListAllMyBuckets();
    BucketMetadata container1 = s3Buckets.first();
    assert container1.getName().equals("adrianjbosstest");
    DateTime expectedDate1 = new DateTime("2009-03-12T02:00:07.000Z");
    DateTime date1 = container1.getCreationDate();
    assert date1.equals(expectedDate1);
    BucketMetadata container2 = (BucketMetadata) s3Buckets.toArray()[1];
    assert container2.getName().equals("adrianjbosstest2");
    DateTime expectedDate2 = new DateTime("2009-03-12T02:00:09.000Z");
    DateTime date2 = container2.getCreationDate();
    assert date2.equals(expectedDate2);
    assert s3Buckets.size() == 2;
    CanonicalUser owner = new CanonicalUser("e1a5f66a480ca99a4fdfe8e318c3020446c9989d7004e7778029fbcc5d990fa0");
    assert container1.getOwner().equals(owner);
    assert container2.getOwner().equals(owner);
}
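
A note on this test: plain Java assert statements only run when the JVM is started with -ea, so the size() check above is silently skipped otherwise. A minimal sketch (illustrative data, not from the jclouds sources) of the same cardinality check written with a JUnit assertion, which fails regardless of JVM flags:

import static org.junit.Assert.assertEquals;

import java.util.SortedSet;
import java.util.TreeSet;

import org.junit.Test;

public class BucketCountTest {
    @Test
    public void bucketCountIsTwo() {
        SortedSet<String> s3Buckets = new TreeSet<>();
        s3Buckets.add("adrianjbosstest");
        s3Buckets.add("adrianjbosstest2");

        // Unlike the assert keyword, assertEquals always runs.
        assertEquals(2, s3Buckets.size());
    }
}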

From source file:org.codehaus.mojo.license.AbstractAddThirdPartyMojo.java

protected boolean checkForbiddenLicenses() {
    List<String> whiteLicenses = getIncludedLicenses();
    List<String> blackLicenses = getExcludedLicenses();
    Set<String> unsafeLicenses = new HashSet<String>();
    if (CollectionUtils.isNotEmpty(blackLicenses)) {
        Set<String> licenses = getLicenseMap().keySet();
        getLog().info("Excluded licenses (blacklist): " + blackLicenses);

        for (String excludeLicense : blackLicenses) {
            if (licenses.contains(excludeLicense)) {
                //bad license found
                unsafeLicenses.add(excludeLicense);
            }
        }
    }

    if (CollectionUtils.isNotEmpty(whiteLicenses)) {
        Set<String> licenses = getLicenseMap().keySet();
        getLog().info("Included licenses (whitelist): " + whiteLicenses);

        for (String license : licenses) {
            if (!whiteLicenses.contains(license)) {
                //bad license found
                unsafeLicenses.add(license);
            }
        }
    }

    boolean safe = CollectionUtils.isEmpty(unsafeLicenses);

    if (!safe) {
        Log log = getLog();
        log.warn("There are " + unsafeLicenses.size() + " forbidden licenses used:");
        for (String unsafeLicense : unsafeLicenses) {

            SortedSet<MavenProject> deps = getLicenseMap().get(unsafeLicense);
            StringBuilder sb = new StringBuilder();
            sb.append("License ").append(unsafeLicense).append(" used by ").append(deps.size())
                    .append(" dependencies:");
            for (MavenProject dep : deps) {
                sb.append("\n -").append(MojoHelper.getArtifactName(dep));
            }

            log.warn(sb.toString());
        }
    }
    return safe;
}

From source file:net.sourceforge.seqware.webservice.resources.tables.WorkflowRunIDResource.java

/**
 * <p>updateWorkflowRun.</p>
 *
 * @param newWR a {@link net.sourceforge.seqware.common.model.WorkflowRun} object.
 * @return a {@link net.sourceforge.seqware.common.model.WorkflowRun} object.
 * @throws org.restlet.resource.ResourceException if any.
 * @throws java.sql.SQLException if any.
 */
public WorkflowRun updateWorkflowRun(WorkflowRun newWR) throws ResourceException, SQLException {
    authenticate();
    WorkflowRunService wrs = BeanFactory.getWorkflowRunServiceBean();
    WorkflowRun wr = (WorkflowRun) testIfNull(wrs.findBySWAccession(newWR.getSwAccession()));
    wr.givesPermission(registration);
    //ius_workflow_runs
    if (newWR.getIus() != null) {
        SortedSet<IUS> iuses = newWR.getIus();
        if (iuses != null) {
            SortedSet<IUS> set = new TreeSet<>();
            for (IUS ius : iuses) {
                IUSService is = BeanFactory.getIUSServiceBean();
                IUS newI = is.findBySWAccession(ius.getSwAccession());
                newI.givesPermission(registration);
                set.add(newI);
            }
            wr.setIus(set);
        } else {
            Log.info("Could not be found: iuses");
        }
    }
    //lane_workflow_runs
    if (newWR.getLanes() != null) {
        SortedSet<Lane> lanes = newWR.getLanes();
        if (lanes != null) {
            SortedSet<Lane> set = new TreeSet<>();
            for (Lane lane : lanes) {
                LaneService ls = BeanFactory.getLaneServiceBean();
                Lane newL = ls.findBySWAccession(lane.getSwAccession());
                newL.givesPermission(registration);
                set.add(newL);
            }
            wr.setLanes(set);
        } else {
            Log.info("Could not be found: lanes");
        }
    }

    wr.setCommand(newWR.getCommand());
    wr.setCurrentWorkingDir(newWR.getCurrentWorkingDir());
    wr.setDax(newWR.getDax());
    wr.setHost(newWR.getHost());
    wr.setIniFile(newWR.getIniFile());
    wr.setName(newWR.getName());
    wr.setStatus(newWR.getStatus());
    wr.setStatusCmd(newWR.getStatusCmd());
    wr.setTemplate(newWR.getTemplate());
    wr.setSeqwareRevision(newWR.getSeqwareRevision());
    wr.setUserName(newWR.getUserName());
    wr.setUpdateTimestamp(new Date());
    wr.setStdErr(newWR.getStdErr());
    wr.setStdOut(newWR.getStdOut());
    wr.setWorkflowEngine(newWR.getWorkflowEngine());
    if (newWR.getInputFileAccessions() != null) {
        Log.debug("Saving " + wr.getInputFileAccessions().size() + " input files");
        wr.getInputFileAccessions().addAll(newWR.getInputFileAccessions());
    }

    if (newWR.getWorkflow() != null) {
        WorkflowService ws = BeanFactory.getWorkflowServiceBean();
        Workflow w = ws.findByID(newWR.getWorkflow().getWorkflowId());
        if (w != null) {
            wr.setWorkflow(w);
        } else {
            Log.info("Could not be found: workflow " + newWR.getWorkflow());
        }
    }
    if (newWR.getOwner() != null) {
        Registration reg = BeanFactory.getRegistrationServiceBean()
                .findByEmailAddress(newWR.getOwner().getEmailAddress());
        if (reg != null) {
            wr.setOwner(reg);
        } else {
            Log.info("Could not be found: " + newWR.getOwner());
        }
    } else if (wr.getOwner() == null) {
        wr.setOwner(registration);
    }

    if (newWR.getWorkflowRunAttributes() != null) {
        this.mergeAttributes(wr.getWorkflowRunAttributes(), newWR.getWorkflowRunAttributes(), wr);
    }
    // SEQWARE-1778 - try to properly create parameters in the workflow_run_param table as well
    //convert ini file parameters into expected format
    HashMap<String, String> map = new HashMap<>();
    if (wr.getIniFile() != null && !wr.getIniFile().isEmpty()) {
        // just skip if previous ini file params detected
        if (wr.getWorkflowRunParams().size() > 0) {
            Log.debug("Skipping since params: " + wr.getWorkflowRunParams().size());
        } else {
            String[] splitByWholeSeparator = StringUtils.splitByWholeSeparator(wr.getIniFile(), "\n");
            for (String line : splitByWholeSeparator) {
                String[] lineSplit = StringUtils.splitByWholeSeparator(line, "=");
                if (lineSplit.length == 0) {
                    continue;
                }
                map.put(lineSplit[0], lineSplit.length > 1 ? lineSplit[1] : "");
            }
            SortedSet<WorkflowRunParam> createWorkflowRunParameters = WorkflowManager
                    .createWorkflowRunParameters(map);
            // looks like the WorkflowManager code does not set workflow run
            for (WorkflowRunParam p : createWorkflowRunParameters) {
                p.setWorkflowRun(wr);
            }
            Log.debug("Setting params: " + createWorkflowRunParameters.size());
            wr.getWorkflowRunParams().addAll(createWorkflowRunParameters);
        }
    }
    wrs.update(registration, wr);

    //direct DB calls
    if (newWR.getIus() != null) {
        addNewIUSes(newWR, wr);
    }
    if (newWR.getLanes() != null) {
        addNewLanes(newWR, wr);
    }

    return wr;

}

From source file:com.spotify.heroic.filter.AndFilter.java

static Filter optimize(final SortedSet<Filter> filters) {
    final SortedSet<Filter> result = new TreeSet<>();

    for (final Filter f : filters) {
        if (f instanceof NotFilter) {
            // Optimize away expressions which are always false.
            // Example: foo = bar and !(foo = bar)

            if (filters.contains(((NotFilter) f).getFilter())) {
                return FalseFilter.get();
            }
        } else if (f instanceof StartsWithFilter) {
            // Optimize away prefixes which encompass each other.
            // Example: foo ^ hello and foo ^ helloworld -> foo ^ helloworld

            if (FilterUtils.containsPrefixedWith(filters, (StartsWithFilter) f,
                    (inner, outer) -> FilterUtils.prefixedWith(inner.getValue(), outer.getValue()))) {
                continue;
            }
        } else if (f instanceof MatchTagFilter) {
            // Optimize matchTag expressions which are always false.
            // Example: foo = bar and foo = baz

            if (FilterUtils.containsConflictingMatchTag(filters, (MatchTagFilter) f)) {
                return FalseFilter.get();
            }
        } else if (f instanceof MatchKeyFilter) {
            // Optimize matchKey expressions which are always false.
            // Example: $key = bar and $key = baz

            if (FilterUtils.containsConflictingMatchKey(filters, (MatchKeyFilter) f)) {
                return FalseFilter.get();
            }
        }

        result.add(f);
    }

    if (result.isEmpty()) {
        return FalseFilter.get();
    }

    if (result.size() == 1) {
        return result.iterator().next();
    }

    return new AndFilter(ImmutableList.copyOf(result));
}

From source file:org.deegree.tools.feature.gml.SchemaAnalyzer.java

public void printSimpleTypesSummary(String namespace) {
    XSNamedMap elementMap = schema.getComponentsByNamespace(XSConstants.TYPE_DEFINITION, namespace);
    SortedSet<String> simpleTypeNames = new TreeSet<String>();
    for (int i = 0; i < elementMap.getLength(); i++) {
        XSTypeDefinition typeDef = (XSTypeDefinition) elementMap.item(i);
        if (typeDef.getTypeCategory() == XSTypeDefinition.SIMPLE_TYPE) {
            System.out.println(toString(((XSSimpleTypeDefinition) typeDef)));
            simpleTypeNames.add(typeDef.getName());
        }
    }
    System.out.println(simpleTypeNames.size() + " simple types in namespace: '" + namespace + "':\n");
    for (String typeName : simpleTypeNames) {
        System.out.println(typeName);
    }
}

From source file:relationalFramework.agentObservations.NonRedundantBackgroundKnowledge.java

/**
 * Adds background knowledge to the current knowledge set if it represents a
 * unique, non-redundant rule. If the knowledge can be added, it may
 * result in other knowledge being removed.
 *
 * @param bckKnow
 *            The knowledge to add.
 * @return True if the knowledge was added, false otherwise.
 */
public boolean addBackgroundKnowledge(BackgroundKnowledge bckKnow) {
    try {
        SortedSet<RelationalPredicate> nonPreferredFacts = new TreeSet<RelationalPredicate>(
                bckKnow.getNonPreferredFacts());
        SortedSet<RelationalPredicate> preferredFacts = new TreeSet<RelationalPredicate>(
                bckKnow.getPreferredFacts());
        String[] factStrings = formFactsKeys(preferredFacts, nonPreferredFacts);
        // If an implication rule
        if (!bckKnow.isEquivalence()) {
            for (String equivPostString : equivalencePostConds_) {
                // If any equivalent post conditions are in this implication
                // rule, return false
                if (factStrings[0].contains(equivPostString) || factStrings[1].contains(equivPostString))
                    return false;
            }

            // Rule isn't present, can add freely
            addRule(bckKnow, preferredFacts, nonPreferredFacts, factStrings[1]);
            return true;
        } else {
            // Equivalence rule
            if (currentKnowledge_.containsKey(factStrings[0])) {
                // If the background knowledge rule is an equivalence rule,
                // it may be redundant
                SortedSet<BackgroundKnowledge> existingRules = currentKnowledge_.getSortedSet(factStrings[0]);
                // If the existing rules are only an equivalence rule, this
                // rule is redundant
                if (existingRules.size() == 1 && existingRules.first().isEquivalence()) {
                    return false;
                }
            }
            if (currentKnowledge_.containsKey(factStrings[1])) {
                // Fact already exists in another rule - it may be redundant
                SortedSet<BackgroundKnowledge> existingRules = currentKnowledge_.getSortedSet(factStrings[1]);
                if (existingRules.size() > 1 || !existingRules.first().isEquivalence()) {
                    // If the existing rules are inference rules, this rule
                    // trumps them all
                    removeRules(factStrings[1]);
                    addRule(bckKnow, preferredFacts, nonPreferredFacts, factStrings[1]);
                    return true;
                } else {
                    // Check if this rule's preconditions are more general
                    // than the existing equivalence rule's
                    if (bckKnow.compareTo(existingRules.first()) == -1) {
                        removeRules(factStrings[1]);
                        addRule(bckKnow, preferredFacts, nonPreferredFacts, factStrings[1]);
                        return true;
                    }
                }

                return false;
            }
            // Rule isn't present, can add freely
            addRule(bckKnow, preferredFacts, nonPreferredFacts, factStrings[1]);
            return true;
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
    return false;
}

From source file:org.deegree.tools.feature.gml.SchemaAnalyzer.java

public void printComplexTypesSummary(String namespace) {
    XSNamedMap elementMap = schema.getComponentsByNamespace(XSConstants.TYPE_DEFINITION, namespace);
    SortedSet<String> complexTypeNames = new TreeSet<String>();
    for (int i = 0; i < elementMap.getLength(); i++) {
        XSTypeDefinition typeDef = (XSTypeDefinition) elementMap.item(i);
        if (typeDef.getTypeCategory() == XSTypeDefinition.COMPLEX_TYPE) {
            System.out.println(toString(((XSComplexTypeDefinition) typeDef)));
            complexTypeNames.add(typeDef.getName());
        }
    }
    System.out.println(complexTypeNames.size() + " complex types in namespace: '" + namespace + "':\n");
    for (String typeName : complexTypeNames) {
        System.out.println(typeName);
    }
}

From source file:kmi.taa.core.Crawler.java

public void crawlAll(TreeMap<Integer, String> targetUrls, String service, String proxy, String otfile) {
    SortedSet<Integer> results = Collections.synchronizedSortedSet(new TreeSet<Integer>());
    ExecutorService pool = Executors.newFixedThreadPool(100);

    int howManyUrls = targetUrls.size();
    System.out.println("total " + howManyUrls + " to be processed");

    List<String> output = Collections.synchronizedList(new ArrayList<String>());
    for (Integer targetId : targetUrls.navigableKeySet()) {
        String uri = targetUrls.get(targetId);
        pool.execute(new Explorer(targetId, uri, service, proxy, results, otfile, output));
    }
    pool.shutdown();

    while (results.size() < howManyUrls) {
        System.out.println("already processed " + results.size() + " subject links");
        try {
            Thread.sleep(1000);
        } catch (InterruptedException e) {
            log.error("crawlAll error", e);
        }

    }

    resultToFile(output, otfile);
    System.out.println("already processed " + results.size() + " subject links");

}
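
The loop above polls results.size() until every URL has been processed. An alternative (shown as a sketch with illustrative task bodies, not code from the kmi.taa project) is to wait for the executor itself to finish:

import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;

public class AwaitTerminationSketch {
    public static void main(String[] args) throws InterruptedException {
        ExecutorService pool = Executors.newFixedThreadPool(4);
        for (int i = 0; i < 10; i++) {
            final int id = i;
            pool.execute(() -> System.out.println("processed " + id));
        }
        pool.shutdown();
        // Blocks until all submitted tasks complete (or the timeout elapses),
        // removing the need to poll a shared counter such as results.size().
        if (!pool.awaitTermination(1, TimeUnit.MINUTES)) {
            System.err.println("tasks did not finish in time");
        }
    }
}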