Example usage for java.util TreeSet floor

List of usage examples for java.util TreeSet floor

Introduction

On this page you can find usage examples for java.util TreeSet floor.

Prototype

public E floor(E e)

Returns the greatest element in this set less than or equal to e, or null if there is no such element.
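
Before the project examples, a minimal self-contained sketch (not taken from any of the source files below) showing the three possible outcomes of floor: an exact match, the next-lower element, and null when nothing qualifies.

import java.util.TreeSet;

public class FloorDemo {
    public static void main(String[] args) {
        TreeSet<Integer> set = new TreeSet<>();
        set.add(10);
        set.add(20);
        set.add(30);

        System.out.println(set.floor(20)); // 20   (an equal element is returned)
        System.out.println(set.floor(25)); // 20   (greatest element <= 25)
        System.out.println(set.floor(5));  // null (no element <= 5)
    }
}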

Usage

From source file:MyRange.java

public static void main(String[] args) {
    TreeSet<MyRange> set = new TreeSet<>();
    set.add(new MyRange(1, 0));
    set.add(new MyRange(2, 100));
    set.add(new MyRange(3, 500));
    set.add(new MyRange(4, 250));
    System.out.println(set.floor(new MyRange(50)));
    System.out.println(set.floor(new MyRange(300)));
    System.out.println(set.floor(new MyRange(600)));
}
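
The MyRange class itself is not part of this listing. Below is a hypothetical sketch that would make the example compile, assuming a range is identified by an id and ordered by its second (value) field, which is what the floor probes 50, 300 and 600 appear to compare against:

// Hypothetical companion class; the real MyRange is not shown in the listing.
class MyRange implements Comparable<MyRange> {
    final int id;
    final int value;

    MyRange(int id, int value) {
        this.id = id;
        this.value = value;
    }

    // probe-only constructor used by the floor() calls above (assumed)
    MyRange(int value) {
        this(-1, value);
    }

    @Override
    public int compareTo(MyRange other) {
        return Integer.compare(this.value, other.value);
    }

    @Override
    public String toString() {
        return "MyRange(" + id + ", " + value + ")";
    }
}

Under this assumption the three calls print MyRange(1, 0), MyRange(4, 250) and MyRange(3, 500).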

From source file:Main.java

public static void main(String[] args) {

    TreeSet<Integer> treeadd = new TreeSet<Integer>();

    treeadd.add(12);
    treeadd.add(11);
    treeadd.add(16);
    treeadd.add(15);

    // getting the floor value for 13
    System.out.println("Floor value for 13: " + treeadd.floor(13));
}
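
With the elements 11, 12, 15 and 16 in the set, floor(13) prints 12: the greatest element less than or equal to 13. A query below the minimum, such as treeadd.floor(10), would return null.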

From source file:juicebox.data.HiCFileTools.java

private static int closestValue(int val, TreeSet<Integer> valSet) {
    int floorVal = valSet.floor(val);
    int ceilVal = valSet.ceiling(val);

    if (Math.abs(ceilVal - val) < Math.abs(val - floorVal))
        return ceilVal;

    return floorVal;
}
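
floor and ceiling both return null when no qualifying element exists, so the auto-unboxing above throws a NullPointerException if val lies outside the range of valSet. A null-safe variant, sketched here rather than taken from the juicebox sources, could look like this (it still assumes a non-empty set):

private static int closestValue(int val, TreeSet<Integer> valSet) {
    Integer floorVal = valSet.floor(val);   // greatest element <= val, or null
    Integer ceilVal = valSet.ceiling(val);  // smallest element >= val, or null

    if (floorVal == null) {
        return ceilVal;  // val is below the smallest element (set assumed non-empty)
    }
    if (ceilVal == null) {
        return floorVal; // val is above the largest element
    }
    return Math.abs(ceilVal - val) < Math.abs(val - floorVal) ? ceilVal : floorVal;
}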

From source file:cpsControllers.ConversionController.java

public ArrayList<Double> kwantyzacjaZobcieciem(ArrayList<Double> sygSprobkowanyY) {
    //List<Double[]> results = signal;
    //List<Double[]> results2 = new ArrayList<Double[]>();
    double max = sygSprobkowanyY.get(0);
    double min = sygSprobkowanyY.get(0);
    for (int i = 0; i < sygSprobkowanyY.size(); i++) {
        if (max < sygSprobkowanyY.get(i)) {
            max = sygSprobkowanyY.get(i);
        }
        if (min > sygSprobkowanyY.get(i)) {
            min = sygSprobkowanyY.get(i);
        }
    }
    double sub = max - min;
    TreeSet<Double> treeset = new TreeSet<Double>();
    for (int i = 0; i < iloscPoziomowKwantyzacji; i++) {
        treeset.add(min + ((sub / (iloscPoziomowKwantyzacji)) * i));
    }
    for (int i = 0; i < sygSprobkowanyY.size(); i++) {
        Double tempX, tempY;
        //            tempX = sygSprobkowanyX.get(i);
        tempY = treeset.floor(sygSprobkowanyY.get(i));
        //            sygSkwantowanyX.add(tempX);
        sygSkwantowanyY.add(tempY);
    }
    return sygSkwantowanyY;
}
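
The Polish names translate roughly as follows: kwantyzacjaZobcieciem is "quantization with truncation", sygSprobkowanyY is the sampled signal, iloscPoziomowKwantyzacji is the number of quantization levels, and sygSkwantowanyY is the quantized output. The method builds evenly spaced levels in a TreeSet and maps each sample down to the nearest level at or below it via floor. The same idea in isolation, as a self-contained sketch rather than the project's code:

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.TreeSet;

public class TruncatingQuantizer {

    // Maps every sample down to the nearest of `levels` evenly spaced values
    // between the signal's minimum and maximum.
    static List<Double> quantize(List<Double> samples, int levels) {
        double min = Collections.min(samples);
        double max = Collections.max(samples);

        TreeSet<Double> grid = new TreeSet<>();
        for (int i = 0; i < levels; i++) {
            grid.add(min + (max - min) / levels * i);
        }

        List<Double> quantized = new ArrayList<>(samples.size());
        for (double y : samples) {
            quantized.add(grid.floor(y)); // greatest level <= y; never null, since min is in the grid
        }
        return quantized;
    }
}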

From source file:com.datatorrent.stram.StreamingContainerManager.java

/**
 * Compute checkpoints required for a given operator instance to be recovered.
 * This is done by looking at checkpoints available for downstream dependencies first,
 * and then selecting the most recent available checkpoint that is smaller than downstream.
 *
 * @param operator Operator instance for which to find recovery checkpoint
 * @param ctx      Context into which to collect traversal info
 */
public void updateRecoveryCheckpoints(PTOperator operator, UpdateCheckpointsContext ctx) {
    if (operator.getRecoveryCheckpoint().windowId < ctx.committedWindowId.longValue()) {
        ctx.committedWindowId.setValue(operator.getRecoveryCheckpoint().windowId);
    }

    if (operator.getState() == PTOperator.State.ACTIVE && (ctx.currentTms
            - operator.stats.lastWindowIdChangeTms) > operator.stats.windowProcessingTimeoutMillis) {
        // if the checkpoint is ahead, then it is not blocked but waiting for activation (state-less recovery, at-most-once)
        if (ctx.committedWindowId.longValue() >= operator.getRecoveryCheckpoint().windowId) {
            LOG.debug("Marking operator {} blocked committed window {}, recovery window {}", operator,
                    Codec.getStringWindowId(ctx.committedWindowId.longValue()),
                    Codec.getStringWindowId(operator.getRecoveryCheckpoint().windowId));
            ctx.blocked.add(operator);
        }
    }

    // the most recent checkpoint eligible for recovery based on downstream state
    Checkpoint maxCheckpoint = Checkpoint.INITIAL_CHECKPOINT;

    Set<OperatorMeta> checkpointGroup = ctx.checkpointGroups.get(operator.getOperatorMeta());
    if (checkpointGroup == null) {
        checkpointGroup = Collections.singleton(operator.getOperatorMeta());
    }
    // find intersection of checkpoints that group can collectively move to
    TreeSet<Checkpoint> commonCheckpoints = new TreeSet<>(new Checkpoint.CheckpointComparator());
    synchronized (operator.checkpoints) {
        commonCheckpoints.addAll(operator.checkpoints);
    }
    Set<PTOperator> groupOpers = new HashSet<>(checkpointGroup.size());
    boolean pendingDeploy = operator.getState() == PTOperator.State.PENDING_DEPLOY;
    if (checkpointGroup.size() > 1) {
        for (OperatorMeta om : checkpointGroup) {
            Collection<PTOperator> operators = plan.getAllOperators(om);
            for (PTOperator groupOper : operators) {
                synchronized (groupOper.checkpoints) {
                    commonCheckpoints.retainAll(groupOper.checkpoints);
                }
                // visit all downstream operators of the group
                ctx.visited.add(groupOper);
                groupOpers.add(groupOper);
                pendingDeploy |= operator.getState() == PTOperator.State.PENDING_DEPLOY;
            }
        }
        // highest common checkpoint
        if (!commonCheckpoints.isEmpty()) {
            maxCheckpoint = commonCheckpoints.last();
        }
    } else {
        // without logical grouping, treat partitions as independent
        // this is especially important for parallel partitioning
        ctx.visited.add(operator);
        groupOpers.add(operator);
        maxCheckpoint = operator.getRecentCheckpoint();
        if (ctx.recovery && maxCheckpoint.windowId == Stateless.WINDOW_ID && operator.isOperatorStateLess()) {
            long currentWindowId = WindowGenerator.getWindowId(ctx.currentTms, this.vars.windowStartMillis,
                    this.getLogicalPlan().getValue(LogicalPlan.STREAMING_WINDOW_SIZE_MILLIS));
            maxCheckpoint = new Checkpoint(currentWindowId, 0, 0);
        }
    }

    // DFS downstream operators
    for (PTOperator groupOper : groupOpers) {
        for (PTOperator.PTOutput out : groupOper.getOutputs()) {
            for (PTOperator.PTInput sink : out.sinks) {
                PTOperator sinkOperator = sink.target;
                if (groupOpers.contains(sinkOperator)) {
                    continue; // downstream operator within group
                }
                if (!ctx.visited.contains(sinkOperator)) {
                    // downstream traversal
                    updateRecoveryCheckpoints(sinkOperator, ctx);
                }
                // recovery window id cannot move backwards
                // when dynamically adding new operators
                if (sinkOperator.getRecoveryCheckpoint().windowId >= operator
                        .getRecoveryCheckpoint().windowId) {
                    maxCheckpoint = Checkpoint.min(maxCheckpoint, sinkOperator.getRecoveryCheckpoint());
                }

                if (ctx.blocked.contains(sinkOperator)) {
                    if (sinkOperator.stats.getCurrentWindowId() == operator.stats.getCurrentWindowId()) {
                        // downstream operator is blocked by this operator
                        ctx.blocked.remove(sinkOperator);
                    }
                }
            }
        }
    }

    // find the common checkpoint that is <= downstream recovery checkpoint
    if (!commonCheckpoints.contains(maxCheckpoint)) {
        if (!commonCheckpoints.isEmpty()) {
            maxCheckpoint = Objects.firstNonNull(commonCheckpoints.floor(maxCheckpoint), maxCheckpoint);
        }
    }

    for (PTOperator groupOper : groupOpers) {
        // checkpoint frozen during deployment
        if (!pendingDeploy || ctx.recovery) {
            // remove previous checkpoints
            Checkpoint c1 = Checkpoint.INITIAL_CHECKPOINT;
            LinkedList<Checkpoint> checkpoints = groupOper.checkpoints;
            synchronized (checkpoints) {
                if (!checkpoints.isEmpty() && (checkpoints.getFirst()).windowId <= maxCheckpoint.windowId) {
                    c1 = checkpoints.getFirst();
                    Checkpoint c2;
                    while (checkpoints.size() > 1
                            && ((c2 = checkpoints.get(1)).windowId) <= maxCheckpoint.windowId) {
                        checkpoints.removeFirst();
                        //LOG.debug("Checkpoint to delete: operator={} windowId={}", operator.getName(), c1);
                        this.purgeCheckpoints.add(new Pair<PTOperator, Long>(groupOper, c1.windowId));
                        c1 = c2;
                    }
                } else {
                    if (ctx.recovery && checkpoints.isEmpty() && groupOper.isOperatorStateLess()) {
                        LOG.debug("Adding checkpoint for stateless operator {} {}", groupOper,
                                Codec.getStringWindowId(maxCheckpoint.windowId));
                        c1 = groupOper.addCheckpoint(maxCheckpoint.windowId, this.vars.windowStartMillis);
                    }
                }
            }
            //LOG.debug("Operator {} checkpoints: commit {} recent {}", new Object[] {operator.getName(), c1, operator.checkpoints});
            groupOper.setRecoveryCheckpoint(c1);
        } else {
            LOG.debug("Skipping checkpoint update {} during {}", groupOper, groupOper.getState());
        }
    }

}
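
The floor call near the end, commonCheckpoints.floor(maxCheckpoint), selects the most recent checkpoint shared by the whole group that is not newer than the downstream recovery bound, falling back to the bound itself when none qualifies. Reduced to window ids only, the lookup step looks roughly like this (a sketch with made-up values, not the project's code):

import java.util.TreeSet;

public class RecoveryCheckpointSketch {
    public static void main(String[] args) {
        // Window ids of checkpoints every operator in the group still holds (hypothetical values).
        TreeSet<Long> commonCheckpointWindows = new TreeSet<>();
        commonCheckpointWindows.add(10L);
        commonCheckpointWindows.add(20L);
        commonCheckpointWindows.add(30L);

        long downstreamBound = 25L;
        // Most recent shared checkpoint that does not exceed what downstream can recover to.
        Long recovery = commonCheckpointWindows.floor(downstreamBound);
        System.out.println(recovery != null ? recovery : downstreamBound); // 20
    }
}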

From source file:org.apache.hyracks.storage.am.btree.OrderedIndexTestUtils.java

@SuppressWarnings("unchecked")
// Create a new TreeSet containing the elements satisfying the prefix search.
// Implementing prefix search by changing compareTo() in CheckTuple does not
// work.
public static SortedSet<CheckTuple> getPrefixExpectedSubset(TreeSet<CheckTuple> checkTuples, CheckTuple lowKey,
        CheckTuple highKey) {
    lowKey.setIsHighKey(false);
    highKey.setIsHighKey(true);
    CheckTuple low = checkTuples.ceiling(lowKey);
    CheckTuple high = checkTuples.floor(highKey);
    if (low == null || high == null) {
        // Must be empty.
        return new TreeSet<>();
    }
    if (high.compareTo(low) < 0) {
        // Must be empty.
        return new TreeSet<>();
    }
    return checkTuples.subSet(low, true, high, true);
}
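
The ceiling/floor pair above clamps the requested key range onto elements that actually exist before taking an inclusive subSet. The same pattern with plain integers, as a sketch:

import java.util.SortedSet;
import java.util.TreeSet;

public class RangeClampSketch {

    // Elements of `set` falling in [lowKey, highKey], inclusive on both ends.
    static SortedSet<Integer> between(TreeSet<Integer> set, int lowKey, int highKey) {
        Integer low = set.ceiling(lowKey);  // smallest element >= lowKey
        Integer high = set.floor(highKey);  // greatest element <= highKey
        if (low == null || high == null || high.compareTo(low) < 0) {
            return new TreeSet<>();         // range contains no elements
        }
        return set.subSet(low, true, high, true);
    }
}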

From source file:org.apache.hyracks.storage.am.btree.OrderedIndexTestUtils.java

@Override
protected boolean checkDiskOrderScanResult(ITupleReference tuple, CheckTuple checkTuple, IIndexTestContext ctx)
        throws HyracksDataException {
    @SuppressWarnings("unchecked")
    TreeSet<CheckTuple> checkTuples = (TreeSet<CheckTuple>) ctx.getCheckTuples();
    CheckTuple matchingCheckTuple = checkTuples.floor(checkTuple);
    if (matchingCheckTuple == null) {
        return false;
    }
    compareActualAndExpected(tuple, matchingCheckTuple, ctx.getFieldSerdes());
    return true;
}

From source file:org.jenkinsci.plugins.scriptsecurity.scripts.EntryApprovalTest.java

@WithoutJenkins
@Test
public void getPendingClasspathEntry() throws Exception {
    TreeSet<ScriptApproval.PendingClasspathEntry> pendingClasspathEntries = new TreeSet<ScriptApproval.PendingClasspathEntry>();
    for (int i = 1; i < 100; i++) {
        pendingClasspathEntries.add(new ScriptApproval.PendingClasspathEntry(hashOf(i),
                new URL("file:/x" + i + ".jar"), ApprovalContext.create()));
    }
    ScriptApproval.PendingClasspathEntry dummy = new ScriptApproval.PendingClasspathEntry(hashOf(77), null,
            null);
    ScriptApproval.PendingClasspathEntry real = pendingClasspathEntries.floor(dummy);
    assertEquals(real, dummy);
    assertEquals("file:/x77.jar", real.getURL().toString());
}
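
Here floor doubles as a lookup: the dummy entry carries only the hash, compares equal to the stored entry, and floor hands back the stored instance complete with its URL (contains would only confirm presence). The same trick in miniature, as a sketch unrelated to the plugin:

import java.util.TreeSet;

public class CanonicalLookupSketch {
    public static void main(String[] args) {
        // "Alice" and "alice" compare equal here, but the stored spelling is preserved.
        TreeSet<String> names = new TreeSet<>(String.CASE_INSENSITIVE_ORDER);
        names.add("Alice");
        names.add("Bob");

        // floor of a probe that compares equal returns the element actually stored in the set.
        String stored = names.floor("alice");
        System.out.println(stored); // Alice
    }
}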

From source file:org.yes.cart.shoppingcart.impl.DeliveryTimeEstimationVisitorImpl.java

protected void skipDatesExclusions(final CarrierSla sla, final Calendar date,
        final Map<Date, Date> exclusions) {

    if (!exclusions.isEmpty()) {

        final TreeSet<Date> startDates = new TreeSet<Date>(exclusions.keySet());

        while (true) {
            final Date thisDate = date.getTime();
            final Date beforeOfEqual = startDates.floor(thisDate);
            if (beforeOfEqual == null) {
                return; // no exclusions before
            } else if (beforeOfEqual.before(thisDate)) {
                final Date rangeEnd = exclusions.get(beforeOfEqual);
                // Two cases here:
                // 1) Single date - same as beforeOfEqual
                // 2) Range - need to make sure it is before this date
                if (thisDate.after(rangeEnd)) {
                    return; // This date is after the min in exclusions
                } else {
                    thisDate.setTime(rangeEnd.getTime());
                    date.add(Calendar.DAY_OF_YEAR, 1);
                    skipWeekdayExclusions(sla, date);
                }
            } else {
                // equal, so need to move next day and check weekdays
                date.add(Calendar.DAY_OF_YEAR, 1);
                skipWeekdayExclusions(sla, date);
            }
        }

    }

}
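
Here the TreeSet of exclusion start dates is probed with floor to find the latest exclusion that begins on or before the date under inspection; null means no exclusion can apply. The lookup step in isolation, with hypothetical dates:

import java.util.Calendar;
import java.util.Date;
import java.util.TreeSet;

public class ExclusionLookupSketch {
    public static void main(String[] args) {
        Calendar cal = Calendar.getInstance();
        TreeSet<Date> exclusionStarts = new TreeSet<>();

        cal.set(2017, Calendar.JANUARY, 1);
        exclusionStarts.add(cal.getTime());
        cal.set(2017, Calendar.MARCH, 1);
        exclusionStarts.add(cal.getTime());

        cal.set(2017, Calendar.FEBRUARY, 15);
        // Latest exclusion start on or before the date being checked, or null if none exists.
        Date beforeOrEqual = exclusionStarts.floor(cal.getTime());
        System.out.println(beforeOrEqual); // the January 1st start date
    }
}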