Example usage for java.util ArrayList subList

List of usage examples for java.util ArrayList subList

Introduction

On this page you can find example usages of java.util.ArrayList.subList.

Prototype

public List<E> subList(int fromIndex, int toIndex) 

Source Link

Document

Returns a view of the portion of this list between the specified fromIndex , inclusive, and toIndex , exclusive.

Usage

From source file:org.aotorrent.tracker.TrackerConnectionHandler.java

@Override
protected void channelRead0(ChannelHandlerContext ctx, FullHttpRequest request) throws Exception {
    // Parse the announce parameters out of the request URI.
    final Map<String, String> parameters = splitQuery(new URI(request.getUri()));

    final String infoHash = parameters.get("info_hash");

    // Reject announces that lack a valid info_hash or a port parameter.
    if (infoHash == null || infoHash.length() != Torrent.INFO_HASH_LENGTH || !parameters.containsKey("port")) {
        FullHttpResponse response = new DefaultFullHttpResponse(HttpVersion.HTTP_1_1,
                HttpResponseStatus.BAD_REQUEST);
        ctx.writeAndFlush(response);
        ctx.close();
        return;
    }

    final InetAddress address = ((InetSocketAddress) ctx.channel().remoteAddress()).getAddress();
    final int port = Integer.parseInt(parameters.get("port"));

    final InetSocketAddress peerAddress = new InetSocketAddress(address, port);

    // One peer cache per torrent, keyed by the hex-encoded info hash;
    // entries expire after two announce intervals.
    // NOTE(review): getBytes() uses the platform default charset; confirm a
    // fixed charset (e.g. ISO-8859-1) is intended for the raw hash bytes.
    final String hexHash = Hex.encodeHexString(infoHash.getBytes());
    Cache<InetSocketAddress, Integer> peersCache = torrents.get(hexHash);
    if (peersCache == null) {
        peersCache = CacheBuilder.newBuilder().maximumSize(10000)
                .expireAfterWrite(HTTPTrackerResponse.DEFAULT_INTERVAL * 2, TimeUnit.SECONDS).build();
        torrents.put(hexHash, peersCache);
    }

    final Collection<InetSocketAddress> filteredPeers;
    if (peersCache.asMap().size() > 0) {
        final ArrayList<InetSocketAddress> peers = Lists.newArrayList(peersCache.asMap().keySet());
        Collections.shuffle(peers);
        // "numwant" is how many peers the client asked for; default to 50.
        int numWant = parameters.containsKey("numwant") ? Integer.parseInt(parameters.get("numwant")) : 50;
        if (numWant < 1) {
            numWant = 50;
        }
        // Clamp to the number of known peers. Previously this was set to
        // peers.size() - 1, silently dropping one peer from a full response.
        if (numWant > peers.size()) {
            numWant = peers.size();
        }

        filteredPeers = peers.subList(0, numWant);
    } else {
        filteredPeers = Collections.emptyList();
    }

    HTTPTrackerResponse response = new HTTPTrackerResponse(filteredPeers);

    HttpResponse httpResponse = new DefaultFullHttpResponse(HttpVersion.HTTP_1_1, HttpResponseStatus.OK,
            response.toTransmit());

    ctx.writeAndFlush(httpResponse);
    ctx.close();

    // Register/refresh the announcing peer only after building the response,
    // so a peer is not returned to itself in this same announce.
    peersCache.put(peerAddress, 0);
}

From source file:com.jzboy.couchdb.DatabaseDocReadTest.java

@Test
public void testGetFromView() throws Exception {
    // Query parameters: start at offset 0 and have CouchDB inline the full
    // documents. Built as a plain list — the original double-brace
    // initialization created an anonymous ArrayList subclass that captures a
    // reference to the enclosing test instance (a known leak/serialization
    // anti-pattern).
    List<NameValuePair> params = new ArrayList<NameValuePair>();
    params.add(new BasicNameValuePair("offset", "0"));
    params.add(new BasicNameValuePair("include_docs", "true"));

    // Collect the ids of all fixture docs the view is expected to emit
    // (those whose JSON contains "show":true).
    ArrayList<String> allViewDocIds = new ArrayList<String>();
    for (Map.Entry<String, String> entry : docs.entrySet()) {
        if (entry.getValue().contains("\"show\":true")) {
            allViewDocIds.add(entry.getKey());
        }
    }

    // Request only the first matching id through the view.
    List<String> keys = allViewDocIds.subList(0, 1);
    ArrayList<Document> results = instance.getFromView(designDocName, viewName, keys, params);
    assertEquals("getFromView did not return all the requested docs", keys.size(), results.size());
    for (Document doc : results) {
        assertTrue("getFromView returned a doc that wasn't requested", keys.contains(doc.getId()));
        String str = docs.get(doc.getId());
        JsonNode exp = JsonUtils.createParser(str).readValueAsTree();
        assertEquals("JSON contents of doc returned by getFromView didn't match the original", exp,
                doc.getJson());
    }
}

From source file:c5db.tablet.TabletService.java

/**
 * Picks a random cohort of peers and relays a tablet-launch command to each.
 * With fewer than 3 peers every peer is used; otherwise the cohort size is
 * capped at {@code maximumNumberOfCohorts} (and at the number of peers —
 * previously a maximum larger than {@code peers.size()} made the subList
 * call throw {@link IndexOutOfBoundsException}).
 */
private void notifyCohortsForTabletCreation(final List<Long> peers, final HTableDescriptor hTableDescriptor,
        final HRegionInfo hRegionInfo, final int maximumNumberOfCohorts) {
    int numberOfCohorts = peers.size() < 3 ? peers.size()
            : Math.min(peers.size(), maximumNumberOfCohorts);
    ArrayList<Long> shuffledPeers = new ArrayList<>(peers);
    Collections.shuffle(shuffledPeers);
    List<Long> subList = shuffledPeers.subList(0, numberOfCohorts);
    // The launch command is identical for every peer in the cohort, so
    // build it once instead of once per iteration.
    ModuleSubCommand moduleSubCommand = prepareTabletModuleSubCommand(
            prepareLaunchTabletString(hTableDescriptor, hRegionInfo, subList));
    for (long peer : subList) {
        relayRequest(prepareRequest(peer, moduleSubCommand));
    }
}

From source file:com.datatorrent.demos.wordcount.FileWordCount.java

/**
 * Converts the word-frequency map into a list of {"word": w, "count": f}
 * maps sorted by descending frequency, keeping at most the topN entries.
 * Results are written into {@code result} (cleared first).
 */
private void getTopNMap(final Map<String, WCPair> map, List<Map<String, Object>> result) {
    final ArrayList<WCPair> list = new ArrayList<>(map.values());

    // sort entries in descending order of frequency
    Collections.sort(list, new Comparator<WCPair>() {
        @Override
        public int compare(WCPair o1, WCPair o2) {
            // NOTE(review): subtraction comparators can overflow/truncate for
            // large frequency gaps — confirm freq's type and range, or switch
            // to the appropriate compare() helper.
            return (int) (o2.freq - o1.freq);
        }
    });

    // Retain only the first topN entries. Guarding on list.size() > topN:
    // previously subList(topN, map.size()) threw IndexOutOfBoundsException
    // whenever topN exceeded the number of distinct words.
    if (topN > 0 && list.size() > topN) {
        list.subList(topN, list.size()).clear();
    }

    // convert each pair (word, freq) of list to a map with 2 elements
    // {("word": <word>, "count": freq)} and append to list
    //
    result.clear();
    for (WCPair pair : list) {
        Map<String, Object> wmap = new HashMap<>(2);
        wmap.put("word", pair.word);
        wmap.put("count", pair.freq);
        result.add(wmap);
    }
    LOG.info("FileWordCount:getTopNMap: result.size = {}", result.size());
    list.clear();
}

From source file:org.apache.hadoop.hbase.regionserver.compactions.SortedCompactionPolicy.java

/**
 * Drops the leading run of files that exceed the configured maximum compact
 * size. The scan stops at the first reference file or the first file small
 * enough to compact — references are always kept, as they MUST be compacted.
 *
 * @param candidates pre-filtered candidate files
 * @param mayUseOffpeak whether the off-peak size limit may apply
 * @return the same list with oversized leading files removed
 */
protected ArrayList<StoreFile> skipLargeFiles(ArrayList<StoreFile> candidates, boolean mayUseOffpeak) {
    int skipped = 0;
    for (StoreFile file : candidates) {
        if (file.isReference()
                || file.getReader().length() <= comConf.getMaxCompactSize(mayUseOffpeak)) {
            break;
        }
        skipped++;
    }
    if (skipped > 0) {
        LOG.debug("Some files are too large. Excluding " + skipped + " files from compaction candidates");
        candidates.subList(0, skipped).clear();
    }
    return candidates;
}

From source file:jp.go.nict.langrid.serviceexecutor.google.GoogleTranslation.java

/**
 * Splits the given sources into batches that respect the translation API's
 * limits on total query length and query count per batch.
 *
 * @param sources the strings to batch, in order
 * @return batches of consecutive sources; concatenation preserves the input
 */
private ArrayList<String[]> separateSourcesConsideredBatchAPILimitation(String[] sources) {
    ArrayList<String[]> result = new ArrayList<String[]>();
    ArrayList<String> sourceArray = new ArrayList<String>(Arrays.asList(sources));

    int totalLength = 0; // accumulated length of the current batch (incl. current source)
    int index = 0; // start of the current batch
    int count = 0; // position of the current source
    for (String source : sourceArray) {
        totalLength += source.length();
        if (totalLength >= maxTotalQueryLength || (count - index) >= maxQueryCount) {
            // Guard against empty batches: previously, when a single source
            // alone reached the length limit (count == index), a zero-length
            // array was appended to the result.
            if (count > index) {
                result.add(sourceArray.subList(index, count).toArray(new String[] {}));
                index = count;
            }
            // The current source starts the next batch.
            totalLength = source.length();
        }
        count++;
    }

    // Flush the trailing batch.
    if (index < sourceArray.size()) {
        result.add(sourceArray.subList(index, sourceArray.size()).toArray(new String[] {}));
    }

    return result;
}

From source file:org.eurekastreams.server.action.execution.GetStreamDiscoverListsDTOExecution.java

/**
 * Get the suggested streams for the current user, and populate them in the input StreamDiscoverListsDTO.
 *
 * @param inPersonId/* w w w.j a v  a2 s  . c  o  m*/
 *            the person id to fetch suggested streams for
 * @param inStreamDiscoverLists
 *            the StreamDiscoverListsDTO to add the results to
 */
private void getSuggestionsForPerson(final Long inPersonId,
        final StreamDiscoverListsDTO inStreamDiscoverLists) {
    SuggestedStreamsRequest mapperRequest = new SuggestedStreamsRequest(inPersonId, suggestionCount);
    ArrayList<StreamDTO> suggestions = new ArrayList<StreamDTO>();

    suggestions.addAll(suggestedPersonMapper.execute(mapperRequest));
    suggestions.addAll(suggestedGroupMapper.execute(mapperRequest));

    // sort the list
    Collections.sort(suggestions, new StreamDTOFollowerCountDescendingComparator());

    // return those requested
    if (suggestions.size() > suggestionCount) {
        suggestions = new ArrayList<StreamDTO>(suggestions.subList(0, suggestionCount));
    }
    inStreamDiscoverLists.setSuggestedStreams(suggestions);
}

From source file:com.kaytat.simpleprotocolplayer.MainActivity.java

/**
 * Returns the stored address history with the currently entered address
 * promoted to the front, truncated to at most four entries.
 */
private ArrayList<String> getUpdatedArrayList(SharedPreferences prefs, AutoCompleteTextView view,
        String keyJson, String keySingle) {
    ArrayList<String> history = getListFromPrefs(prefs, keyJson, keySingle);

    // Move the address currently in the text field to the top, dropping any
    // earlier occurrence so it appears exactly once.
    final String current = view.getText().toString();
    history.remove(current);
    history.add(0, current);

    // Cap the history at four entries.
    if (history.size() >= 4) {
        history.subList(4, history.size()).clear();
    }

    return history;
}

From source file:org.apache.hadoop.hbase.regionserver.compactions.RatioBasedCompactionPolicy.java

/**
 * Removes the leading files that are larger than the maximum compact size.
 * The scan stops at the first reference file or the first sufficiently
 * small file — references are always retained, as they MUST be compacted.
 *
 * @param candidates pre-filtered candidate files
 * @return the same list, with oversized leading files removed
 */
private ArrayList<StoreFile> skipLargeFiles(ArrayList<StoreFile> candidates) {
    int firstKept = 0;
    while (firstKept < candidates.size()) {
        StoreFile current = candidates.get(firstKept);
        if (current.isReference() || current.getReader().length() <= comConf.getMaxCompactSize()) {
            break;
        }
        ++firstKept;
    }
    if (firstKept > 0) {
        LOG.debug("Some files are too large. Excluding " + firstKept + " files from compaction candidates");
        candidates.subList(0, firstKept).clear();
    }
    return candidates;
}

From source file:org.mmadsen.sim.transmissionlab.analysis.IndividualTraitFrequencyAnalyzer.java

/**
 * Projects the given trait counts onto their trait ids, preserving order,
 * and truncates the result to the top-N window when it is longer than
 * topNListSize.
 */
private List<Integer> getTopNTraits(List<TraitCount> traitCounts) {
    final List<Integer> traits = new ArrayList<Integer>();
    for (TraitCount tc : traitCounts) {
        traits.add(tc.getTrait());
    }
    // Whole list when it already fits within "top N"; otherwise a prefix view.
    return traits.size() > topNListSize ? traits.subList(0, topNListSize) : traits;
}