Example usage for java.util NavigableSet pollLast

Introduction

This page collects example usages of the java.util.NavigableSet.pollLast() method, drawn from open-source projects.

Prototype

E pollLast();

Document

Retrieves and removes the last (highest) element, or returns null if this set is empty.
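
As a minimal, self-contained sketch of that contract (the class name PollLastDemo is illustrative):

import java.util.NavigableSet;
import java.util.TreeSet;

public class PollLastDemo {
    public static void main(String[] args) {
        NavigableSet<String> set = new TreeSet<>();
        set.add("a");
        set.add("b");
        System.out.println(set.pollLast()); // b    -- removes and returns the highest element
        System.out.println(set.pollLast()); // a
        System.out.println(set.pollLast()); // null -- the set is now empty
    }
}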

Usage

From source file:Main.java

public static void main(String[] args) {
    NavigableSet<Integer> ns = new TreeSet<>();
    ns.add(0);
    ns.add(1);
    ns.add(2);
    ns.add(3);
    ns.add(4);
    ns.add(5);
    ns.add(6);

    // Get a reverse view of the navigable set
    NavigableSet<Integer> reverseNs = ns.descendingSet();

    // Print the normal and reverse views
    System.out.println("Normal order: " + ns);
    System.out.println("Reverse order: " + reverseNs);

    NavigableSet<Integer> threeOrMore = ns.tailSet(3, true);
    System.out.println("3 or  more:  " + threeOrMore);
    System.out.println("lower(3): " + ns.lower(3));
    System.out.println("floor(3): " + ns.floor(3));
    System.out.println("higher(3): " + ns.higher(3));
    System.out.println("ceiling(3): " + ns.ceiling(3));

    System.out.println("pollFirst(): " + ns.pollFirst());
    System.out.println("Navigable Set:  " + ns);

    System.out.println("pollLast(): " + ns.pollLast());
    System.out.println("Navigable Set:  " + ns);

    System.out.println("pollFirst(): " + ns.pollFirst());
    System.out.println("Navigable Set:  " + ns);

    System.out.println("pollFirst(): " + ns.pollFirst());
    System.out.println("Navigable Set:  " + ns);

    System.out.println("pollFirst(): " + ns.pollFirst());
    System.out.println("Navigable Set:  " + ns);

    System.out.println("pollFirst(): " + ns.pollFirst());
    System.out.println("pollLast(): " + ns.pollLast());
}
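
TreeSet iteration order is deterministic, so the output of this program is fixed. Running it prints:

Normal order: [0, 1, 2, 3, 4, 5, 6]
Reverse order: [6, 5, 4, 3, 2, 1, 0]
3 or more: [3, 4, 5, 6]
lower(3): 2
floor(3): 3
higher(3): 4
ceiling(3): 3
pollFirst(): 0
Navigable Set:  [1, 2, 3, 4, 5, 6]
pollLast(): 6
Navigable Set:  [1, 2, 3, 4, 5]
pollFirst(): 1
Navigable Set:  [2, 3, 4, 5]
pollFirst(): 2
Navigable Set:  [3, 4, 5]
pollFirst(): 3
Navigable Set:  [4, 5]
pollFirst(): 4
pollLast(): 5

Each pollFirst() removes the lowest remaining element and each pollLast() removes the highest, so the set shrinks from both ends until it is empty.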

From source file:com.google.gwt.emultest.java.util.TreeSetTest.java

public void testPollLast() {
    NavigableSet<E> set = createNavigableSet();

    assertNull(set.pollLast());
    assertEquals(0, set.size());

    set.add(getKeys()[0]);
    assertEquals(getKeys()[0], set.pollLast());
    assertEquals(0, set.size());

    set.add(getKeys()[0]);
    set.add(getKeys()[1]);
    assertEquals(getKeys()[1], set.pollLast());
    assertEquals(1, set.size());
    assertEquals(getKeys()[0], set.pollLast());
    assertEquals(0, set.size());
    assertNull(set.pollLast());
}
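
Here createNavigableSet() and getKeys() are fixtures supplied by the surrounding GWT test hierarchy. A self-contained equivalent of the same assertions over a concrete TreeSet<String> (JUnit assumed on the classpath) might look like this:

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;

import java.util.NavigableSet;
import java.util.TreeSet;
import org.junit.Test;

public class PollLastStandaloneTest {
    @Test
    public void testPollLast() {
        NavigableSet<String> set = new TreeSet<>();

        assertNull(set.pollLast());        // empty set -> null, no exception
        assertEquals(0, set.size());

        set.add("a");
        assertEquals("a", set.pollLast()); // the sole element is also the last
        assertEquals(0, set.size());

        set.add("a");
        set.add("b");
        assertEquals("b", set.pollLast()); // highest element comes out first
        assertEquals(1, set.size());
        assertEquals("a", set.pollLast());
        assertNull(set.pollLast());
    }
}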

From source file:org.apache.hadoop.hbase.client.coprocessor.AggregationClient.java

/**
 * This is the client-side interface/handler for calling the median method for a
 * given cf-cq combination. This method collects the necessary parameters
 * to compute the median and returns the median.
 * @param table the table to scan
 * @param ci the column interpreter providing arithmetic over the column type
 * @param scan scan defining the row range and the value (and optional weight) qualifiers
 * @return R the median
 * @throws Throwable
 */
public <R, S, P extends Message, Q extends Message, T extends Message> R median(final HTable table,
        ColumnInterpreter<R, S, P, Q, T> ci, Scan scan) throws Throwable {
    Pair<NavigableMap<byte[], List<S>>, List<S>> p = getMedianArgs(table, ci, scan);
    byte[] startRow = null;
    byte[] colFamily = scan.getFamilies()[0];
    NavigableSet<byte[]> quals = scan.getFamilyMap().get(colFamily);
    NavigableMap<byte[], List<S>> map = p.getFirst();
    S sumVal = p.getSecond().get(0);
    S sumWeights = p.getSecond().get(1);
    double halfSumVal = ci.divideForAvg(sumVal, 2L);
    double movingSumVal = 0;
    boolean weighted = false;
    if (quals.size() > 1) {
        weighted = true;
        halfSumVal = ci.divideForAvg(sumWeights, 2L);
    }

    for (Map.Entry<byte[], List<S>> entry : map.entrySet()) {
        S s = weighted ? entry.getValue().get(1) : entry.getValue().get(0);
        double newSumVal = movingSumVal + ci.divideForAvg(s, 1L);
        if (newSumVal > halfSumVal)
            break; // we found the region with the median
        movingSumVal = newSumVal;
        startRow = entry.getKey();
    }
    // scan the region with median and find it
    Scan scan2 = new Scan(scan);
    // inherit stop row from method parameter
    if (startRow != null)
        scan2.setStartRow(startRow);
    ResultScanner scanner = null;
    try {
        int cacheSize = scan2.getCaching();
        if (!scan2.getCacheBlocks() || scan2.getCaching() < 2) {
            scan2.setCacheBlocks(true);
            cacheSize = 5;
            scan2.setCaching(cacheSize);
        }
        scanner = table.getScanner(scan2);
        Result[] results = null;
        byte[] qualifier = quals.pollFirst();
        // qualifier for the weight column
        byte[] weightQualifier = weighted ? quals.pollLast() : qualifier;
        R value = null;
        do {
            results = scanner.next(cacheSize);
            if (results != null && results.length > 0) {
                for (int i = 0; i < results.length; i++) {
                    Result r = results[i];
                    // retrieve weight
                    Cell kv = r.getColumnLatest(colFamily, weightQualifier);
                    R newValue = ci.getValue(colFamily, weightQualifier, kv);
                    S s = ci.castToReturnType(newValue);
                    double newSumVal = movingSumVal + ci.divideForAvg(s, 1L);
                    // see if we have moved past the median
                    if (newSumVal > halfSumVal) {
                        return value;
                    }
                    movingSumVal = newSumVal;
                    kv = r.getColumnLatest(colFamily, qualifier);
                    value = ci.getValue(colFamily, qualifier, kv);
                }
            }
        } while (results != null && results.length > 0);
    } finally {
        if (scanner != null) {
            scanner.close();
        }
    }
    return null;
}
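
The pollFirst()/pollLast() pair above peels the value and weight qualifiers off the scan's qualifier set; note that polling mutates the set held in the scan's family map. In isolation the pattern looks like this (a sketch assuming HBase's Bytes utility; byte[] has no natural ordering, so the set needs an explicit comparator):

import java.util.NavigableSet;
import java.util.TreeSet;

import org.apache.hadoop.hbase.util.Bytes;

public class QualifierPollSketch {
    public static void main(String[] args) {
        NavigableSet<byte[]> quals = new TreeSet<>(Bytes.BYTES_COMPARATOR);
        quals.add(Bytes.toBytes("value"));
        quals.add(Bytes.toBytes("weight"));

        byte[] qualifier = quals.pollFirst();      // lexicographically first qualifier: "value"
        byte[] weightQualifier = quals.pollLast(); // last remaining qualifier: "weight"
        System.out.println(Bytes.toString(qualifier) + " / " + Bytes.toString(weightQualifier));
    }
}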

From source file:org.apache.hadoop.hbase.client.transactional.TransactionalAggregationClient.java

/**
 * This is the client-side interface/handler for calling the median method for a
 * given cf-cq combination. This method collects the necessary parameters
 * to compute the median and returns the median.
 * @param transactionId id of the transaction under which to scan
 * @param table the table to scan
 * @param ci the column interpreter providing arithmetic over the column type
 * @param scan scan defining the row range and the value (and optional weight) qualifiers
 * @return R the median
 * @throws Throwable
 */
public <R, S, P extends Message, Q extends Message, T extends Message> R median(final long transactionId,
        final TransactionalTable table, ColumnInterpreter<R, S, P, Q, T> ci, Scan scan) throws Throwable {
    Pair<NavigableMap<byte[], List<S>>, List<S>> p = getMedianArgs(transactionId, table, ci, scan);
    byte[] startRow = null;
    byte[] colFamily = scan.getFamilies()[0];
    NavigableSet<byte[]> quals = scan.getFamilyMap().get(colFamily);
    NavigableMap<byte[], List<S>> map = p.getFirst();
    S sumVal = p.getSecond().get(0);
    S sumWeights = p.getSecond().get(1);
    double halfSumVal = ci.divideForAvg(sumVal, 2L);
    double movingSumVal = 0;
    boolean weighted = false;
    if (quals.size() > 1) {
        weighted = true;
        halfSumVal = ci.divideForAvg(sumWeights, 2L);
    }

    for (Map.Entry<byte[], List<S>> entry : map.entrySet()) {
        S s = weighted ? entry.getValue().get(1) : entry.getValue().get(0);
        double newSumVal = movingSumVal + ci.divideForAvg(s, 1L);
        if (newSumVal > halfSumVal)
            break; // we found the region with the median
        movingSumVal = newSumVal;
        startRow = entry.getKey();
    }
    // scan the region with median and find it
    Scan scan2 = new Scan(scan);
    // inherit stop row from method parameter
    if (startRow != null)
        scan2.setStartRow(startRow);
    ResultScanner scanner = null;
    try {
        int cacheSize = scan2.getCaching();
        if (!scan2.getCacheBlocks() || scan2.getCaching() < 2) {
            scan2.setCacheBlocks(true);
            cacheSize = 5;
            scan2.setCaching(cacheSize);
        }
        scanner = table.getScanner(scan2);
        Result[] results = null;
        byte[] qualifier = quals.pollFirst();
        // qualifier for the weight column
        byte[] weightQualifier = weighted ? quals.pollLast() : qualifier;
        R value = null;
        do {
            results = scanner.next(cacheSize);
            if (results != null && results.length > 0) {
                for (int i = 0; i < results.length; i++) {
                    Result r = results[i];
                    // retrieve weight
                    Cell kv = r.getColumnLatest(colFamily, weightQualifier);
                    R newValue = ci.getValue(colFamily, weightQualifier, kv);
                    S s = ci.castToReturnType(newValue);
                    double newSumVal = movingSumVal + ci.divideForAvg(s, 1L);
                    // see if we have moved past the median
                    if (newSumVal > halfSumVal) {
                        return value;
                    }
                    movingSumVal = newSumVal;
                    kv = r.getColumnLatest(colFamily, qualifier);
                    value = ci.getValue(colFamily, qualifier, kv);
                }
            }
        } while (results != null && results.length > 0);
    } finally {
        if (scanner != null) {
            scanner.close();
        }
    }
    return null;
}

From source file:org.apache.hadoop.hbase.coprocessor.AggregateImplementation.java

/**
 * Gives a List containing the sum of values and the sum of weights.
 * It is computed for the combination of column
 * family and column qualifier(s) in the given row range as defined in the
 * Scan object. In its current implementation, it takes one column family and
 * two column qualifiers: the first qualifier is for the values column and
 * the second (optional) qualifier is for the weight column.
 */
@Override
public void getMedian(RpcController controller, AggregateRequest request, RpcCallback<AggregateResponse> done) {
    AggregateResponse response = null;
    InternalScanner scanner = null;
    try {
        ColumnInterpreter<T, S, P, Q, R> ci = constructColumnInterpreterFromRequest(request);
        S sumVal = null, sumWeights = null, tempVal = null, tempWeight = null;
        Scan scan = ProtobufUtil.toScan(request.getScan());
        scanner = env.getRegion().getScanner(scan);
        byte[] colFamily = scan.getFamilies()[0];
        NavigableSet<byte[]> qualifiers = scan.getFamilyMap().get(colFamily);
        byte[] valQualifier = null, weightQualifier = null;
        if (qualifiers != null && !qualifiers.isEmpty()) {
            valQualifier = qualifiers.pollFirst();
            // if weighted median is requested, get qualifier for the weight column
            weightQualifier = qualifiers.pollLast();
        }
        List<Cell> results = new ArrayList<Cell>();

        boolean hasMoreRows = false;

        do {
            tempVal = null;
            tempWeight = null;
            hasMoreRows = scanner.next(results);
            for (Cell kv : results) {
                tempVal = ci.add(tempVal, ci.castToReturnType(ci.getValue(colFamily, valQualifier, kv)));
                if (weightQualifier != null) {
                    tempWeight = ci.add(tempWeight,
                            ci.castToReturnType(ci.getValue(colFamily, weightQualifier, kv)));
                }
            }
            results.clear();
            sumVal = ci.add(sumVal, tempVal);
            sumWeights = ci.add(sumWeights, tempWeight);
        } while (hasMoreRows);
        ByteString first_sumVal = ci.getProtoForPromotedType(sumVal).toByteString();
        S s = sumWeights == null ? ci.castToReturnType(ci.getMinValue()) : sumWeights;
        ByteString first_sumWeights = ci.getProtoForPromotedType(s).toByteString();
        AggregateResponse.Builder pair = AggregateResponse.newBuilder();
        pair.addFirstPart(first_sumVal);
        pair.addFirstPart(first_sumWeights);
        response = pair.build();
    } catch (IOException e) {
        ResponseConverter.setControllerException(controller, e);
    } finally {
        if (scanner != null) {
            try {
                scanner.close();
            } catch (IOException ignored) {
            }
        }
    }
    done.run(response);
}
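
One subtlety: when only one qualifier is requested, pollFirst() empties the set, so the subsequent pollLast() returns null, and that null is exactly what the weightQualifier != null check above keys on to skip the weight sum. A minimal sketch of the behavior:

import java.util.NavigableSet;
import java.util.TreeSet;

public class SingleQualifierSketch {
    public static void main(String[] args) {
        NavigableSet<String> qualifiers = new TreeSet<>();
        qualifiers.add("value"); // only one qualifier: an unweighted median

        String valQualifier = qualifiers.pollFirst();   // "value" -- the set is now empty
        String weightQualifier = qualifiers.pollLast(); // null -- signals "no weight column"
        System.out.println(valQualifier + " / " + weightQualifier); // prints: value / null
    }
}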

From source file:org.apache.hadoop.hbase.coprocessor.transactional.SsccRegionEndpoint.java

/**
 * Gives a List containing the sum of values and the sum of weights.
 * It is computed for the combination of column
 * family and column qualifier(s) in the given row range as defined in the
 * Scan object. In its current implementation, it takes one column family and
 * two column qualifiers: the first qualifier is for the values column and
 * the second (optional) qualifier is for the weight column.
 */
@Override
public void getMedian(RpcController controller, SsccTransactionalAggregateRequest request,
        RpcCallback<SsccTransactionalAggregateResponse> done) {
    if (LOG.isTraceEnabled())
        LOG.trace("SsccRegionEndpoint coprocessor: getMedian entry");
    SsccTransactionalAggregateResponse response = null;
    RegionScanner scanner = null;
    try {
        ColumnInterpreter<T, S, P, Q, R> ci = constructColumnInterpreterFromRequest(request);
        S sumVal = null, sumWeights = null, tempVal = null, tempWeight = null;
        Scan scan = ProtobufUtil.toScan(request.getScan());
        long transactionId = request.getTransactionId();
        long startId = request.getStartId();
        scanner = getScanner(transactionId, startId, scan);
        byte[] colFamily = scan.getFamilies()[0];
        NavigableSet<byte[]> qualifiers = scan.getFamilyMap().get(colFamily);
        byte[] valQualifier = null, weightQualifier = null;
        if (qualifiers != null && !qualifiers.isEmpty()) {
            valQualifier = qualifiers.pollFirst();
            // if weighted median is requested, get qualifier for the weight column
            weightQualifier = qualifiers.pollLast();
        }
        List<Cell> results = new ArrayList<Cell>();

        boolean hasMoreRows = false;

        do {
            tempVal = null;
            tempWeight = null;
            hasMoreRows = scanner.next(results);
            for (Cell kv : results) {
                tempVal = ci.add(tempVal, ci.castToReturnType(ci.getValue(colFamily, valQualifier, kv)));
                if (weightQualifier != null) {
                    tempWeight = ci.add(tempWeight,
                            ci.castToReturnType(ci.getValue(colFamily, weightQualifier, kv)));
                }
            }
            results.clear();
            sumVal = ci.add(sumVal, tempVal);
            sumWeights = ci.add(sumWeights, tempWeight);
        } while (hasMoreRows);
        ByteString first_sumVal = ci.getProtoForPromotedType(sumVal).toByteString();
        S s = sumWeights == null ? ci.castToReturnType(ci.getMinValue()) : sumWeights;
        ByteString first_sumWeights = ci.getProtoForPromotedType(s).toByteString();
        SsccTransactionalAggregateResponse.Builder pair = SsccTransactionalAggregateResponse.newBuilder();
        pair.addFirstPart(first_sumVal);
        pair.addFirstPart(first_sumWeights);
        response = pair.build();
    } catch (IOException e) {
        ResponseConverter.setControllerException(controller, e);
    } finally {
        if (scanner != null) {
            try {
                scanner.close();
            } catch (IOException ignored) {
            }
        }
    }
    done.run(response);
}

From source file:org.apache.hadoop.hbase.coprocessor.transactional.TrxRegionEndpoint.java

/**
 * Gives a List containing the sum of values and the sum of weights.
 * It is computed for the combination of column
 * family and column qualifier(s) in the given row range as defined in the
 * Scan object. In its current implementation, it takes one column family and
 * two column qualifiers: the first qualifier is for the values column and
 * the second (optional) qualifier is for the weight column.
 */
@Override
public void getMedian(RpcController controller, TransactionalAggregateRequest request,
        RpcCallback<TransactionalAggregateResponse> done) {
    TransactionalAggregateResponse response = null;
    RegionScanner scanner = null;
    try {
        ColumnInterpreter<T, S, P, Q, R> ci = constructColumnInterpreterFromRequest(request);
        S sumVal = null, sumWeights = null, tempVal = null, tempWeight = null;
        Scan scan = ProtobufUtil.toScan(request.getScan());
        long transactionId = request.getTransactionId();
        scanner = getScanner(transactionId, scan);
        byte[] colFamily = scan.getFamilies()[0];
        NavigableSet<byte[]> qualifiers = scan.getFamilyMap().get(colFamily);
        byte[] valQualifier = null, weightQualifier = null;
        if (qualifiers != null && !qualifiers.isEmpty()) {
            valQualifier = qualifiers.pollFirst();
            // if weighted median is requested, get qualifier for the weight column
            weightQualifier = qualifiers.pollLast();
        }
        List<Cell> results = new ArrayList<Cell>();

        boolean hasMoreRows = false;

        do {
            tempVal = null;
            tempWeight = null;
            hasMoreRows = scanner.next(results);
            for (Cell kv : results) {
                tempVal = ci.add(tempVal, ci.castToReturnType(ci.getValue(colFamily, valQualifier, kv)));
                if (weightQualifier != null) {
                    tempWeight = ci.add(tempWeight,
                            ci.castToReturnType(ci.getValue(colFamily, weightQualifier, kv)));
                }
            }
            results.clear();
            sumVal = ci.add(sumVal, tempVal);
            sumWeights = ci.add(sumWeights, tempWeight);
        } while (hasMoreRows);
        ByteString first_sumVal = ci.getProtoForPromotedType(sumVal).toByteString();
        S s = sumWeights == null ? ci.castToReturnType(ci.getMinValue()) : sumWeights;
        ByteString first_sumWeights = ci.getProtoForPromotedType(s).toByteString();
        TransactionalAggregateResponse.Builder pair = TransactionalAggregateResponse.newBuilder();
        pair.addFirstPart(first_sumVal);
        pair.addFirstPart(first_sumWeights);
        response = pair.build();
    } catch (IOException e) {
        ResponseConverter.setControllerException(controller, e);
    } finally {
        if (scanner != null) {
            try {
                scanner.close();
            } catch (IOException ignored) {
            }
        }
    }
    done.run(response);
}

From source file:org.apache.nifi.cluster.manager.impl.WebClusterManager.java

/**
 * Merges the listing requests in the specified map into the specified listing request
 *
 * @param listingRequest the target listing request
 * @param listingRequestMap the mapping of all responses being merged
 */
private void mergeListingRequests(final ListingRequestDTO listingRequest,
        final Map<NodeIdentifier, ListingRequestDTO> listingRequestMap) {
    final Comparator<FlowFileSummaryDTO> comparator = new Comparator<FlowFileSummaryDTO>() {
        @Override
        public int compare(final FlowFileSummaryDTO dto1, final FlowFileSummaryDTO dto2) {
            int positionCompare = dto1.getPosition().compareTo(dto2.getPosition());
            if (positionCompare != 0) {
                return positionCompare;
            }

            final String address1 = dto1.getClusterNodeAddress();
            final String address2 = dto2.getClusterNodeAddress();
            if (address1 == null && address2 == null) {
                return 0;
            }
            if (address1 == null) {
                return 1;
            }
            if (address2 == null) {
                return -1;
            }
            return address1.compareTo(address2);
        }
    };

    final NavigableSet<FlowFileSummaryDTO> flowFileSummaries = new TreeSet<>(comparator);

    ListFlowFileState state = null;
    int numStepsCompleted = 0;
    int numStepsTotal = 0;
    int objectCount = 0;
    long byteCount = 0;
    boolean finished = true;
    for (final Map.Entry<NodeIdentifier, ListingRequestDTO> entry : listingRequestMap.entrySet()) {
        final NodeIdentifier nodeIdentifier = entry.getKey();
        final String nodeAddress = nodeIdentifier.getApiAddress() + ":" + nodeIdentifier.getApiPort();

        final ListingRequestDTO nodeRequest = entry.getValue();

        numStepsTotal++;
        if (Boolean.TRUE.equals(nodeRequest.getFinished())) {
            numStepsCompleted++;
        }

        final QueueSizeDTO nodeQueueSize = nodeRequest.getQueueSize();
        objectCount += nodeQueueSize.getObjectCount();
        byteCount += nodeQueueSize.getByteCount();

        // null-safe check: treat an absent finished flag as "not finished"
        if (!Boolean.TRUE.equals(nodeRequest.getFinished())) {
            finished = false;
        }

        if (nodeRequest.getLastUpdated().after(listingRequest.getLastUpdated())) {
            listingRequest.setLastUpdated(nodeRequest.getLastUpdated());
        }

        // Keep the state with the lowest ordinal value (the "least completed").
        final ListFlowFileState nodeState = ListFlowFileState.valueOfDescription(nodeRequest.getState());
        if (state == null || state.compareTo(nodeState) > 0) {
            state = nodeState;
        }

        if (nodeRequest.getFlowFileSummaries() != null) {
            for (final FlowFileSummaryDTO summaryDTO : nodeRequest.getFlowFileSummaries()) {
                summaryDTO.setClusterNodeId(nodeIdentifier.getId());
                summaryDTO.setClusterNodeAddress(nodeAddress);

                flowFileSummaries.add(summaryDTO);

                // Keep the set from growing beyond our max
                if (flowFileSummaries.size() > listingRequest.getMaxResults()) {
                    flowFileSummaries.pollLast();
                }
            }
        }

        if (nodeRequest.getFailureReason() != null) {
            listingRequest.setFailureReason(nodeRequest.getFailureReason());
        }
    }

    final List<FlowFileSummaryDTO> summaryDTOs = new ArrayList<>(flowFileSummaries);
    listingRequest.setFlowFileSummaries(summaryDTOs);

    // multiply before dividing: plain numStepsCompleted / numStepsTotal truncates to 0 or 1
    final int percentCompleted = (numStepsCompleted * 100) / numStepsTotal;
    listingRequest.setPercentCompleted(percentCompleted);
    listingRequest.setFinished(finished);

    listingRequest.getQueueSize().setByteCount(byteCount);
    listingRequest.getQueueSize().setObjectCount(objectCount);
}
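
The add-then-pollLast idiom above is a standard way to maintain a bounded "best N" set: insert every candidate, then evict the greatest element whenever the cap is exceeded, so only the N smallest (by the comparator) survive. A stripped-down sketch of the idiom (maxResults and the Integer payload are illustrative, standing in for getMaxResults() and FlowFileSummaryDTO):

import java.util.NavigableSet;
import java.util.TreeSet;

public class BoundedTopN {
    public static void main(String[] args) {
        final int maxResults = 3; // illustrative cap
        NavigableSet<Integer> best = new TreeSet<>();

        for (int candidate : new int[] { 7, 2, 9, 4, 1, 8 }) {
            best.add(candidate);
            // keep the set from growing beyond the cap by evicting the highest element
            if (best.size() > maxResults) {
                best.pollLast();
            }
        }
        System.out.println(best); // [1, 2, 4] -- the three smallest candidates
    }
}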