Example usage for java.util List subList

List of usage examples for java.util List subList

Introduction

On this page you can find example usages of java.util.List#subList.

Prototype

List<E> subList(int fromIndex, int toIndex);

Document

Returns a view of the portion of this list between the specified fromIndex, inclusive, and toIndex, exclusive.
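
The view semantics matter in practice: the returned sublist is backed by the original list, and the bounds are half-open. A minimal sketch of this behavior (the class name and sample values are illustrative, not taken from any of the sources below):

import java.util.ArrayList;
import java.util.List;

public class SubListDemo {
    public static void main(String[] args) {
        List<String> letters = new ArrayList<>(List.of("a", "b", "c", "d", "e"));

        // fromIndex is inclusive, toIndex is exclusive: elements at indices 1, 2 and 3.
        List<String> middle = letters.subList(1, 4);
        System.out.println(middle); // [b, c, d]

        // The sublist is a view, not a copy: writes pass through to the backing list.
        middle.set(0, "B");
        System.out.println(letters); // [a, B, c, d, e]

        // Clearing the view removes that range from the backing list.
        middle.clear();
        System.out.println(letters); // [a, e]
    }
}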

Usage

From source file:com.bluexml.side.framework.alfresco.webscriptExtension.CMISWebScriptExtension.java

public List<TemplateNode> getChildren(NodeRef node, String types, Page page) {
    List<TemplateNode> ret = getChildren(node, types);
    int startAt = page.getNumber();
    int max = page.getSize();
    if (max > 0) {
        return ret.subList(startAt, startAt + max);
    } else {
        return ret;
    }
}
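
Note that this example does not clamp the upper bound, so subList throws IndexOutOfBoundsException when startAt + max runs past the end of the list. A defensive variant might look like the following sketch (the page helper and its parameters are hypothetical, not part of the original source):

static <T> List<T> page(List<T> items, int startAt, int max) {
    // Clamp the window to the list bounds so an out-of-range page yields a
    // short or empty slice instead of an IndexOutOfBoundsException.
    if (max <= 0) {
        return items;
    }
    int from = Math.min(Math.max(startAt, 0), items.size());
    int to = Math.min(from + max, items.size());
    return items.subList(from, to);
}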

From source file:mulavito.algorithms.shortestpath.ksp.Yen.java

/**
 * Blocks all incident edges of the vertices in head as well as the edge
 * connecting head to the next node by creating a new filtered graph.
 *
 * @param head
 *            The current head, from source to deviation node
 * @param deviation
 *            The deviation node at which alternative edges are blocked
 * @param curShortestPathNodes
 *            The nodes of the current shortest path being deviated from
 * @param foundPaths
 *            The solutions already found and to check against
 * @return The filtered graph without the blocked edges.
 */
private Graph<V, E> blockFilter(List<E> head, V deviation, List<V> curShortestPathNodes,
        List<List<E>> foundPaths) {
    final Set<E> blocked = new HashSet<E>();

    // Block incident edges to make all vertices in head unreachable.
    for (V v : curShortestPathNodes) {
        if (v.equals(deviation))
            break;
        for (E e2 : graph.getIncidentEdges(v))
            blocked.add(e2);
    }
    /*for (E e : head)
       for (E e2 : graph.getIncidentEdges(graph.getEndpoints(e).getFirst()))
    blocked.add(e2);*/

    // Block all outgoing edges that have been used at deviation vertex
    for (List<E> path : foundPaths)
        if (path.size() > head.size() && ListUtils.isEqualList(path.subList(0, head.size()), head))
            for (E e : path)

                if (graph.getEndpoints(e).contains(deviation)) {
                    blocked.add(e);
                    //break; // Continue with next path.
                }

    EdgePredicateFilter<V, E> filter = new EdgePredicateFilter<V, E>(new Predicate<E>() {
        @Override
        public boolean evaluate(E e) {
            return !blocked.contains(e);
        }
    });

    return filter.transform(graph);
}

From source file:com.alibaba.otter.node.etl.load.loader.db.interceptor.log.LogLoadInterceptor.java

/**
 * Logs the given event data in slices of at most batchSize entries per log call.
 */
private void logEventDatas(List<EventData> eventDatas) {
    int size = eventDatas.size();
    // Walk the list in steps of batchSize and log each slice.
    int index = 0;
    do {
        if (index + batchSize >= size) {
            logger.info(DbLoadDumper.dumpEventDatas(eventDatas.subList(index, size)));
        } else {
            logger.info(DbLoadDumper.dumpEventDatas(eventDatas.subList(index, index + batchSize)));
        }
        index += batchSize;
    } while (index < size);
}
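
The do/while above walks the list in batchSize slices. An equivalent formulation with a clamped upper bound is sketched below (the method name is hypothetical; batchSize, logger and DbLoadDumper are taken from the snippet above; unlike the original, an empty list produces no log line at all):

private void logEventDatasInSlices(List<EventData> eventDatas) {
    // Log each slice of at most batchSize entries; Math.min keeps the last
    // slice inside the list bounds.
    for (int index = 0; index < eventDatas.size(); index += batchSize) {
        int to = Math.min(index + batchSize, eventDatas.size());
        logger.info(DbLoadDumper.dumpEventDatas(eventDatas.subList(index, to)));
    }
}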

From source file:com.fujitsu.dc.core.odata.DcExpressionParser.java

private static List<Token> trimWhitespace(List<Token> tokens) {
    int start = 0;
    while (tokens.get(start).type == TokenType.WHITESPACE) {
        start++;
    }
    int end = tokens.size() - 1;
    while (tokens.get(end).type == TokenType.WHITESPACE) {
        end--;
    }
    return tokens.subList(start, end + 1);
}
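
A small caveat, not stated in the source: if tokens is empty or contains only whitespace tokens, the while loops run off the end of the list and tokens.get throws IndexOutOfBoundsException. A bounded variant is sketched below (the method name is hypothetical; Token and TokenType are the types used in the snippet above):

private static List<Token> trimWhitespaceSafe(List<Token> tokens) {
    // Stop scanning at the list boundaries so an empty or all-whitespace
    // input yields an empty sublist instead of throwing.
    int start = 0;
    while (start < tokens.size() && tokens.get(start).type == TokenType.WHITESPACE) {
        start++;
    }
    int end = tokens.size();
    while (end > start && tokens.get(end - 1).type == TokenType.WHITESPACE) {
        end--;
    }
    return tokens.subList(start, end);
}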

From source file:com.nebhale.gpxconverter.GoogleMapsAugmenter.java

@Override
public List<Point> augment(List<Point> points) {
    List<Future<List<Point>>> futures = new ArrayList<>();

    for (int i = 0, delay = 0; i < points.size(); i += CHUNK_SIZE, delay++) {
        int max = CHUNK_SIZE + i;
        List<Point> slice = points.subList(i, max < points.size() ? max : points.size());

        futures.add(this.scheduledExecutorService.schedule(
                new PointAugmenter(delay, this.encoder, this.restOperations, slice), delay * 10 * CHUNK_SIZE,
                TimeUnit.MILLISECONDS));
    }

    List<Point> augmented = new ArrayList<>(points.size());
    for (Future<List<Point>> future : futures) {
        try {
            augmented.addAll(future.get());
        } catch (ExecutionException | InterruptedException e) {
            throw new RuntimeException(e);
        }
    }
    return augmented;
}

From source file:org.pivotal.sqlfire.CommonTransactionalBatchInsertsTest.java

@Test
public void testBatchInserts() {
    assertEquals(0, getUserService().getNumberOfUsers());

    List<User> userList = Arrays.asList(USERS);

    assertNotNull(userList);
    assertEquals(USERS.length, userList.size());
    assertTrue(userList.containsAll(Arrays.asList(USERS)));

    List<User> userBatchOne = userList.subList(0, userList.size() / 2);
    List<User> userBatchTwo = userList.subList(userList.size() / 2, userList.size());

    assertNotNull(userBatchOne);
    assertNotNull(userBatchTwo);
    assertEquals(userList.size(), userBatchOne.size() + userBatchTwo.size());
    assertTrue(userBatchOne.contains(createUser("jondoe")));
    assertFalse(userBatchOne.contains(createUser("imapigg")));
    assertTrue(userBatchTwo.contains(createUser("jondoe")));
    assertFalse(userBatchTwo.contains(createUser("piedoe")));

    getUserService().addAll(userBatchOne);

    assertEquals(userBatchOne.size(), getUserService().getNumberOfUsers());

    try {
        getUserService().addAll(userBatchTwo);
    }
    // TODO refactor!
    // NOTE the following assertions are very fragile and naive but used temporarily only for testing
    // and experimentation purposes
    catch (ConstraintViolationException expected) {
        assertTrue(expected.getCause() instanceof SQLException);
        System.err.printf("%1$s%n", expected.getCause().getMessage());
    } catch (DataIntegrityViolationException expected) {
        System.err.printf("%1$s%n", expected);
    } catch (DataAccessException expected) {
        assertTrue(expected.getCause() instanceof PersistenceException);
        assertTrue(expected.getCause().getCause() instanceof SQLIntegrityConstraintViolationException
                || expected.getCause() instanceof SQLException);
        System.err.printf("%1$s%n", expected.getCause().getMessage());
    } catch (PersistenceException expected) {
        assertTrue(expected.getCause() instanceof ConstraintViolationException);
        assertTrue(expected.getCause().getCause() instanceof SQLException);
        System.err.printf("%1$s%n", expected.getCause().getCause().getMessage());
        //assertTrue(expected.getCause().getCause().getMessage().contains(
        //  "duplicate value(s) for column(s) USERNAME in statement"));
    }

    assertEquals(userBatchOne.size(), getUserService().getNumberOfUsers());

    List<User> users = getUserService().list();

    System.out.printf("%1$s%n", users);
    System.out.printf("%1$s%n", toString(users.get(0)));
}

From source file:com.hpe.application.automation.tools.octane.executor.UftTestDiscoveryDispatcher.java

private static boolean postTests(MqmRestClient client, List<AutomatedTest> tests, String workspaceId,
        String scmRepositoryId) {

    if (!tests.isEmpty()) {
        try {
            completeTestProperties(client, Long.parseLong(workspaceId), tests, scmRepositoryId);
        } catch (RequestErrorException e) {
            logger.error("Failed to completeTestProperties : " + e.getMessage());
            return false;
        }

        for (int i = 0; i < tests.size(); i += POST_BULK_SIZE) {
            try {
                AutomatedTests data = AutomatedTests
                        .createWithTests(tests.subList(i, Math.min(i + POST_BULK_SIZE, tests.size())));
                String uftTestJson = convertToJsonString(data);
                client.postEntities(Long.parseLong(workspaceId), OctaneConstants.Tests.COLLECTION_NAME,
                        uftTestJson);
            } catch (RequestErrorException e) {
                return checkIfExceptionCanBeIgnoredInPOST(e, "Failed to post tests");
            }
        }
    }
    return true;
}

From source file:com.xpn.xwiki.plugin.chronopolys.Utils.java

public List intelliSubList(int limit, int start, List list) {
    int min = start;
    if (start < 0 || start > list.size()) {
        min = 0;
    }
    int max = start + limit;
    if (max > list.size()) {
        max = list.size();
    }
    return list.subList(min, max);
}
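
A possible call site for this helper, assuming a Utils instance named utils and a small sample list (both illustrative, not taken from the source):

List<String> names = Arrays.asList("ann", "bob", "carl", "dee", "ed");

List page1 = utils.intelliSubList(2, 0, names);      // [ann, bob]
List lastPage = utils.intelliSubList(2, 4, names);   // [ed]   (upper bound clamped to the list size)
List outOfRange = utils.intelliSubList(2, 9, names); // whole list   (an out-of-range start falls back to index 0)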

From source file:de.micromata.genome.gwiki.page.impl.wiki.parser.GWikiWikiParser.java

public static List<GWikiFragment> addWrappedP(List<GWikiFragment> l) {
    List<GWikiFragment> ret = new ArrayList<GWikiFragment>();
    int ls = 0;
    for (int i = 0; i < l.size(); ++i) {
        GWikiFragment f = l.get(i);
        if (isParagraphLike(f) == true) {
            if (i > ls) {
                List<GWikiFragment> lp = l.subList(ls, i);
                lp = trimBrs(lp);
                // lp = trimEndP(lp);
                ret.add(new GWikiFragmentP(new ArrayList<>(lp)));
            }
            if ((f instanceof GWikiFragmentP) == false) {
                ret.add(f);
            }
            ls = i + 1;
        }
    }
    if (ls < l.size()) {
        List<GWikiFragment> lp = l.subList(ls, l.size());
        lp = trimBrs(lp);
        if (lp.isEmpty() == false) {
            ret.add(new GWikiFragmentP(new ArrayList<>(lp)));
        }
    }
    return ret;
}

From source file:software.uncharted.service.ClusterService.java

public ClusterResponse getTopClusters(Integer from_) {
    final Integer from = from_ != null ? from_ : 0;

    final String AGGREGATION_NAME = "clusters";
    TermsBuilder termsBuilder = AggregationBuilders.terms(AGGREGATION_NAME).field("lsh")
            .size(from + CLUSTERS_PER_PAGE);

    SearchRequestBuilder searchRequestBuilder = client.prepareSearch().setQuery(QueryBuilders.matchAllQuery())
            .setSize(0).setPreference(preference).addAggregation(termsBuilder);

    SearchResponse searchResponse = client.executeSearch(searchRequestBuilder);

    List<String> topBuckets = ((Terms) searchResponse.getAggregations().get(AGGREGATION_NAME)).getBuckets()
            .stream().map(MultiBucketsAggregation.Bucket::getKeyAsString) // pull out the term as a string
            .collect(Collectors.toList());

    topBuckets = topBuckets.subList(from, Math.min(topBuckets.size(), from + CLUSTERS_PER_PAGE));

    Set<Cluster> clusters = topBuckets.stream().map(lsh -> getCluster(lsh)).collect(Collectors.toSet());

    final boolean hasMore = ((Terms) searchResponse.getAggregations().get(AGGREGATION_NAME))
            .getSumOfOtherDocCounts() > 0L;

    return new ClusterResponse().setClusters(clusters).setHasMore(hasMore);
}