Example usage for java.util SortedSet size

List of usage examples for java.util SortedSet size

Introduction

This page lists usage examples for java.util.SortedSet.size().

Prototype

int size();

Document

Returns the number of elements in this set (its cardinality).
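
Before the project examples below, the following minimal, self-contained sketch (not taken from any of the listed sources; the class and variable names are purely illustrative) shows size() reporting the cardinality of a TreeSet, the most common SortedSet implementation:

import java.util.SortedSet;
import java.util.TreeSet;

public class SortedSetSizeExample {
    public static void main(String[] args) {
        SortedSet<String> names = new TreeSet<>();
        names.add("alpha");
        names.add("bravo");
        names.add("alpha"); // duplicate: the set is unchanged
        // size() returns the number of distinct elements currently in the set
        System.out.println(names.size()); // prints 2
    }
}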

Usage

From source file:org.geoserver.security.iride.IrideRoleServiceTest.java

public void testGetRolesForBaseUser() throws IOException {
    //config.setServerURL("http://localhost:8085/iride2simApplIridepepWsfad/services/iride2simIridepep");
    IrideRoleService roleService = wrapRoleService(createRoleService(), "base");
    SortedSet<GeoServerRole> roles = roleService.getRolesForUser(BASE_SAMPLE_USER);
    assertNotNull(roles);
    assertEquals(1, roles.size());
    assertEquals("BASEUSER_SIIG", roles.iterator().next().toString());
}

From source file:org.geoserver.security.iride.IrideRoleServiceTest.java

public void testGetRolesForMajorUser() throws IOException {
    IrideRoleService roleService = wrapRoleService(createRoleService(), "major");
    SortedSet<GeoServerRole> roles = roleService.getRolesForUser(MAJOR_SAMPLE_USER);
    assertNotNull(roles);
    assertEquals(1, roles.size());
    assertEquals("MAJORUSER_SIIG", roles.iterator().next().toString());
}

From source file:org.eclipse.gyrex.jobs.internal.storage.CloudPreferncesJobHistoryStorage.java

private void shrinkToSizeLimit(final SortedSet<JobHistoryEntryStorable> entries) {
    // remove the last entries if over size
    while (entries.size() > MAX_HISTORY_SIZE) {
        entries.remove(entries.last());
    }
}

From source file:org.commoncrawl.mapred.ec2.postprocess.linkCollector.LinkGraphDataEmitterJob.java

public LinkGraphDataEmitterJob(Configuration conf) throws Exception {
    FileSystem fs = FileSystem.get(new URI("s3n://aws-publicdatasets"), conf);
    LOG.info("FileSystem is:" + fs.getUri() + " Scanning for valid segments");
    SortedSet<Long> validSegments = scanForValidSegments(fs);
    LOG.info("There are: " + validSegments.size() + " valid segments. Scanning for Merged Segments");
    SortedSet<Long> mergedSegments = scanForMergedSegments(fs);
    LOG.info("There are: " + mergedSegments.size() + " merged Segments");
    // calculate difference 
    Set<Long> segmentsToProcess = Sets.difference(validSegments, mergedSegments);
    LOG.info("There are: " + segmentsToProcess.size() + " Segments that need to be merged");
    // ok we are ready to go .. 
    int iteration = 0;
    for (long segmentId : segmentsToProcess) {
        LOG.info("Queueing Segment:" + segmentId + " for Merge");
        queue(fs, conf, segmentId);
    }
    // queue shutdown items 
    for (int i = 0; i < MAX_SIMULTANEOUS_JOBS; ++i) {
        _queue.put(new QueueItem());
    }
}

From source file:org.commoncrawl.mapred.ec2.postprocess.crawldb.LinkGraphDataEmitterJob.java

public LinkGraphDataEmitterJob(Configuration conf) throws Exception {
    FileSystem fs = FileSystem.get(new URI("s3n://aws-publicdatasets"), conf);
    LOG.info("FileSystem is:" + fs.getUri() + " Scanning for valid segments");
    SortedSet<Long> validSegments = scanForValidSegments(fs);
    LOG.info("There are: " + validSegments.size() + " valid segments. Scanning for Merged Segments");
    SortedSet<Long> mergedSegments = scanForMergedSegments(fs);
    LOG.info("There are: " + mergedSegments.size() + " merged Segments");
    // calculate difference 
    Set<Long> segmentsToProcess = Sets.difference(validSegments, mergedSegments);
    LOG.info("There are: " + segmentsToProcess.size() + " Segments that need to be merged");
    // ok we are ready to go .. 
    //int iteration = 0;
    for (long segmentId : segmentsToProcess) {
        LOG.info("Queueing Segment:" + segmentId + " for Merge");
        queue(fs, conf, segmentId);
    }
    // queue shutdown items 
    for (int i = 0; i < MAX_SIMULTANEOUS_JOBS; ++i) {
        _queue.put(new QueueItem());
    }
}

From source file:org.geoserver.security.iride.IrideRoleServiceTest.java

public void testGetRolesForSuperUser() throws IOException {
    IrideRoleService roleService = wrapRoleService(createRoleService(), "super");
    SortedSet<GeoServerRole> roles = roleService.getRolesForUser(SUPER_SAMPLE_USER);
    assertNotNull(roles);
    assertEquals(1, roles.size());
    assertEquals("SUPERUSER_SIIG", roles.iterator().next().toString());
    //assertEquals(GeoServerRole.ADMIN_ROLE, roles.iterator().next());

    RoleCalculator roleCalc = new RoleCalculator(roleService);
    roles = roleCalc.calculateRoles(SUPER_SAMPLE_USER);
    assertNotNull(roles);
    assertEquals(3, roles.size());
    boolean foundAdmin = false;
    for (GeoServerRole role : roles) {
        if (role.equals(GeoServerRole.ADMIN_ROLE)) {
            foundAdmin = true;
        }
    }
    assertTrue(foundAdmin);
}

From source file:pl.edu.agh.samm.metrics.SuggestedMetricsComputationEngineImpl.java

private void addCorrelation(IMetric metricOne, IMetric metricTwo, double correlation) {
    if (!metricsWithCorrelation.containsKey(metricOne)) {
        metricsWithCorrelation.put(metricOne, new TreeSet<MetricWithCorrelation>());
    }
    SortedSet<MetricWithCorrelation> set = metricsWithCorrelation.get(metricOne);
    set.add(new MetricWithCorrelation(metricOne, metricTwo, correlation));
    if (set.size() > SUGGESTED_METRICS_COUNT) {
        set.remove(set.first());
    }
}

From source file:org.commoncrawl.mapred.ec2.postprocess.linkCollector.LinkCollectorJob.java

public LinkCollectorJob(Configuration conf) throws Exception {
    FileSystem fs = FileSystem.get(new URI("s3n://aws-publicdatasets"), conf);
    LOG.info("FileSystem is:" + fs.getUri() + " Scanning for valid segments");
    SortedSet<Long> validSegments = scanForValidSegments(fs);
    LOG.info("There are: " + validSegments.size() + " valid segments. Scanning for Merged Segments");
    SortedSet<Long> mergedSegments = scanForMergedSegments(fs);
    LOG.info("There are: " + mergedSegments.size() + " merged Segments");
    // calculate difference 
    Set<Long> segmentsToProcess = Sets.difference(validSegments, mergedSegments);
    LOG.info("There are: " + segmentsToProcess.size() + " Segments that need to be merged");
    // ok we are ready to go .. 
    int iteration = 0;
    for (long segmentId : segmentsToProcess) {
        LOG.info("Queueing Segment:" + segmentId + " for Merge");
        queue(fs, conf, segmentId);
    }
    // queue shutdown items 
    for (int i = 0; i < MAX_SIMULTANEOUS_JOBS; ++i) {
        _queue.put(new QueueItem());
    }
}

From source file:com.liveramp.hank.storage.curly.AbstractCurlyPartitionUpdater.java

@Override
protected Integer detectCurrentVersionNumber() throws IOException {
    SortedSet<CueballFilePath> localCueballBases = Cueball.getBases(localPartitionRoot);
    SortedSet<CurlyFilePath> localCurlyBases = Curly.getBases(localPartitionRoot);
    if (localCueballBases.size() > 0 && localCurlyBases.size() > 0) {
        if (localCueballBases.last().getVersion() == localCurlyBases.last().getVersion()) {
            return localCurlyBases.last().getVersion();
        } else {
            return null;
        }
    } else {
        return null;
    }
}

From source file:org.stockwatcher.web.StockController.java

@RequestMapping(value = "/{symbol}", method = RequestMethod.GET)
public String displayStockDetail(@PathVariable String symbol, Model model, HttpServletRequest request) {
    model.addAttribute("stock", dao.getStockBySymbol(symbol));
    Date tradeDate = applicationProps.getLastTradeDate();
    SortedSet<Trade> trades = dao.getTradesBySymbolAndDate(symbol, tradeDate);
    model.addAttribute("trades", getUniqueTrades(trades));
    long elapsedTime = trades.size() == 0 ? 0
            : getElapsedTime(trades.first().getTimestamp(), trades.last().getTimestamp());
    User user = (User) request.getSession().getAttribute("user");
    model.addAttribute("watchLists",
            user == null ? Collections.EMPTY_SET : watchListDao.getWatchListsByUserId(user.getId()));
    model.addAttribute("lastClosePrice", dao.getLastClosePriceForSymbol(symbol));
    model.addAttribute("elapsedTime", elapsedTime);
    model.addAttribute("liveTrading", applicationProps.isTradingLive());
    model.addAttribute("watchCount", watchListDao.getWatchCount(symbol));
    dao.incrementStockViewCount(symbol);
    return "stock";
}