Example usage for java.util Comparator comparingDouble

Introduction

On this page you can find example usage for java.util Comparator comparingDouble.

Prototype

public static <T> Comparator<T> comparingDouble(ToDoubleFunction<? super T> keyExtractor) 

Document

Accepts a function that extracts a double sort key from a type T, and returns a Comparator<T> that compares by that sort key.
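
For orientation, here is a minimal, self-contained sketch of the method (the Point record is hypothetical, introduced only for this illustration; records require Java 16+). comparingDouble reads the key as a primitive double, avoiding the per-comparison boxing that the generic Comparator.comparing would incur.

import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;

public class ComparingDoubleSketch {
    // Hypothetical value type, used only for this illustration.
    record Point(double x, double y) {}

    public static void main(String[] args) {
        List<Point> points = new ArrayList<>(List.of(
                new Point(3.5, 1.0), new Point(-2.0, 4.0), new Point(0.5, 2.5)));
        // Sort ascending by the x coordinate, compared as a primitive double.
        points.sort(Comparator.comparingDouble(Point::x));
        System.out.println(points); // x order: -2.0, 0.5, 3.5
    }
}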

Usage

From source file: Main.java

public static void main(String[] args) {
    Optional<Employee> person = Employee.persons().stream()
            .max(Comparator.comparingDouble(Employee::getIncome));

    if (person.isPresent()) {
        System.out.println("Highest earner: " + person.get());
    } else {
        System.out.println("Could not get the highest earner.");
    }
}
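
The Employee type is external to the snippet above; here is a minimal sketch of what the example appears to assume (the persons() factory, the getIncome() accessor, and the sample data are guesses inferred from the call sites, not the actual class):

import java.util.Arrays;
import java.util.List;

class Employee {
    private final String name;
    private final double income;

    Employee(String name, double income) {
        this.name = name;
        this.income = income;
    }

    public double getIncome() {
        return income;
    }

    // Assumed factory returning the sample data the example streams over.
    public static List<Employee> persons() {
        return Arrays.asList(new Employee("Alice", 95000.0), new Employee("Bob", 72000.0));
    }

    @Override
    public String toString() {
        return name + " (" + income + ")";
    }
}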

From source file: com.rapidminer.operator.learner.tree.NonParallelBootstrappingTreeBuilder.java

/**
 * Create a start selection that is a random selection of rows. This has the same effect as doing a bootstrapping on
 * the column table.
 */
@Override
protected Map<Integer, int[]> createExampleStartSelection() {
    Map<Integer, int[]> selection = new HashMap<>();
    if (columnTable.getNumberOfRegularNumericalAttributes() == 0) {
        selection.put(0, createFullRandomArray(columnTable.getNumberOfExamples()));
    } else {
        Integer[] bigSelectionArray = createFullBigRandomArray(columnTable.getNumberOfExamples());
        for (int j = columnTable.getNumberOfRegularNominalAttributes(); j < columnTable
                .getTotalNumberOfRegularAttributes(); j++) {
            final double[] attributeColumn = columnTable.getNumericalAttributeColumn(j);
            Integer[] startSelection = Arrays.copyOf(bigSelectionArray, bigSelectionArray.length);
            Arrays.sort(startSelection, Comparator.comparingDouble(a -> attributeColumn[a]));
            selection.put(j, ArrayUtils.toPrimitive(startSelection));
        }
    }
    return selection;
}
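
The idiom worth noting here is the indirect sort: row indices are sorted by the values they point at, leaving the underlying column untouched. A stripped-down sketch of the same pattern, independent of the RapidMiner types (the comparator overload of Arrays.sort only accepts object arrays, hence the boxed Integer indices):

import java.util.Arrays;
import java.util.Comparator;

public class IndirectSortSketch {
    public static void main(String[] args) {
        double[] column = {0.7, -1.2, 3.4, 0.0};
        // Boxed indices, because Arrays.sort with a comparator
        // has no primitive-array overload.
        Integer[] order = {0, 1, 2, 3};
        Arrays.sort(order, Comparator.comparingDouble(i -> column[i]));
        System.out.println(Arrays.toString(order)); // [1, 3, 0, 2]
    }
}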

From source file: com.tussle.main.Utility.java

public static Collection<ProjectionVector> prunedProjections(Collection<ProjectionVector> vectors) {
    SortedSet<ProjectionVector> sortedVectors = new ConcurrentSkipListSet<>(
            Comparator.comparingDouble((ProjectionVector p) -> -p.magnitude()));
    sortedVectors.addAll(vectors);
    if (isPruned(sortedVectors))
        return sortedVectors;
    double reduceMagnitude = 0;
    Iterator<ProjectionVector> i = sortedVectors.iterator();
    ProjectionVector p0 = i.next();
    ProjectionVector p1 = i.next();
    double cos0 = p0.xNorm();
    double sin0 = p0.yNorm();
    double cos1 = p1.xNorm();
    double sin1 = p1.yNorm();
    //zeroth on the right, first on the left
    if (cos0 * sin1 < cos1 * sin0) {
        double tmpcos = cos1;
        double tmpsin = sin1;
        cos1 = cos0;
        sin1 = sin0;
        cos0 = tmpcos;
        sin0 = tmpsin;
    }
    while (i.hasNext()) {
        ProjectionVector next = i.next();
        double nextcos = next.xNorm();
        double nextsin = next.yNorm();
        if (nextcos * sin0 >= cos0 * nextsin && cos1 * nextsin >= nextcos * sin1) {
            //Case 0: Within cross product bounds
        } else if (nextcos * sin0 >= cos0 * nextsin) {
            //Case 1: Over the left, extend those bounds
            cos1 = nextcos;
            sin1 = nextsin;
        } else if (cos1 * nextsin >= nextcos * sin1) {
            //Case 2: Over the right, extend those bounds
            cos0 = nextcos;
            sin0 = nextsin;
        } else {
            //Case 3: Opposite side, immediately return false
            reduceMagnitude = next.magnitude();
            break;
        }
    }
    //Now given reduceMagnitude, remove elements with lesser magnitude and
    //reduce the magnitude of remaining elements
    if (Double.isFinite(reduceMagnitude)) {
        for (Iterator<ProjectionVector> j = sortedVectors.iterator(); j.hasNext();) {
            ProjectionVector vec = j.next();
            if (vec.magnitude() <= reduceMagnitude)
                j.remove();
            else
                // Note: this reassignment only rebinds the local variable 'vec';
                // the element stored in sortedVectors is left unchanged.
                vec = new ProjectionVector(vec.xNorm(), vec.yNorm(), vec.magnitude() - reduceMagnitude);
        }
    }
    return sortedVectors;
}
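
The comparator above negates the key to sort descending; an equivalent formulation that states the intent directly is reversed(). A tiny sketch with a hypothetical stand-in type:

import java.util.Comparator;

public class DescendingSketch {
    // Hypothetical stand-in for ProjectionVector.
    record Vec(double magnitude) {}

    public static void main(String[] args) {
        // Descending by magnitude, without hand-negating the key.
        Comparator<Vec> desc = Comparator.comparingDouble(Vec::magnitude).reversed();
        System.out.println(desc.compare(new Vec(1.0), new Vec(2.0))); // positive: 1.0 sorts after 2.0
    }
}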

From source file: io.pravega.controller.server.eventProcessor.AutoScaleRequestHandler.java

private CompletableFuture<Void> processScaleDown(final AutoScaleEvent request, final ScalingPolicy policy,
        final OperationContext context) {
    log.debug("scale down request received for stream {} segment {}", request.getStream(),
            request.getSegmentNumber());
    if (policy.getType().equals(ScalingPolicy.Type.FIXED_NUM_SEGMENTS)) {
        return CompletableFuture.completedFuture(null);
    }

    return streamMetadataStore
            .markCold(request.getScope(), request.getStream(), request.getSegmentNumber(),
                    request.isSilent() ? Long.MAX_VALUE : request.getTimestamp() + REQUEST_VALIDITY_PERIOD,
                    context, executor)
            .thenCompose(x -> streamMetadataStore.getActiveSegments(request.getScope(), request.getStream(),
                    context, executor))
            .thenApply(activeSegments -> {
                assert activeSegments != null;
                final Optional<Segment> currentOpt = activeSegments.stream()
                        .filter(y -> y.getNumber() == request.getSegmentNumber()).findAny();
                if (!currentOpt.isPresent() || activeSegments.size() == policy.getMinNumSegments()) {
                    // if we are already at the minimum number of segments, we can't scale down; we have put the marker,
                    // we should simply return and do nothing.
                    return null;
                } else {
                    final List<Segment> candidates = activeSegments.stream()
                            .filter(z -> z.getKeyEnd() == currentOpt.get().getKeyStart()
                                    || z.getKeyStart() == currentOpt.get().getKeyEnd()
                                    || z.getNumber() == request.getSegmentNumber())
                            .sorted(Comparator.comparingDouble(Segment::getKeyStart))
                            .collect(Collectors.toList());
                    return new ImmutablePair<>(candidates, activeSegments.size() - policy.getMinNumSegments());
                }
            }).thenCompose(input -> {
                if (input != null && input.getLeft().size() > 1) {
                    final List<Segment> candidates = input.getLeft();
                    final int maxScaleDownFactor = input.getRight();

                    // fetch their cold status for all candidates
                    return FutureHelpers
                            .filter(candidates,
                                    candidate -> streamMetadataStore.isCold(request.getScope(),
                                            request.getStream(), candidate.getNumber(), context, executor))
                            .thenApply(segments -> {
                                if (maxScaleDownFactor == 1 && segments.size() == 3) {
                                    // Note: sorted by keystart so just pick first two.
                                    return Lists.newArrayList(segments.get(0), segments.get(1));
                                } else {
                                    return segments;
                                }
                            });
                } else {
                    return CompletableFuture.completedFuture(null);
                }
            }).thenCompose(toMerge -> {
                if (toMerge != null && toMerge.size() > 1) {
                    toMerge.forEach(x -> {
                        log.debug("merging stream {}: segment {} ", request.getStream(), x.getNumber());
                    });

                    final ArrayList<AbstractMap.SimpleEntry<Double, Double>> simpleEntries = new ArrayList<>();
                    double min = toMerge.stream().mapToDouble(Segment::getKeyStart).min().getAsDouble();
                    double max = toMerge.stream().mapToDouble(Segment::getKeyEnd).max().getAsDouble();
                    simpleEntries.add(new AbstractMap.SimpleEntry<>(min, max));
                    final ArrayList<Integer> segments = new ArrayList<>();
                    toMerge.forEach(segment -> segments.add(segment.getNumber()));
                    return postScaleRequest(request, segments, simpleEntries);
                } else {
                    return CompletableFuture.completedFuture(null);
                }
            });
}
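
The candidate sort above benefits from the primitive specialization: the generic Comparator.comparing(Segment::getKeyStart) would also compile (each double key boxing to a Double), but comparingDouble avoids that allocation on every comparison. A minimal illustration with a hypothetical stand-in for Segment:

import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;

public class PrimitiveKeySketch {
    // Hypothetical stand-in for Pravega's Segment, for illustration only.
    record Seg(int number, double keyStart) {}

    public static void main(String[] args) {
        List<Seg> segs = new ArrayList<>(List.of(
                new Seg(2, 0.5), new Seg(1, 0.0), new Seg(3, 0.75)));
        // Primitive double keys: no Double boxing per comparison.
        segs.sort(Comparator.comparingDouble(Seg::keyStart));
        System.out.println(segs); // keyStart order: 0.0, 0.5, 0.75
    }
}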

From source file: io.pravega.controller.server.eventProcessor.requesthandlers.AutoScaleTask.java

private CompletableFuture<Void> processScaleDown(final AutoScaleEvent request, final ScalingPolicy policy,
        final OperationContext context) {
    log.info("scale down request received for stream {} segment {}", request.getStream(),
            request.getSegmentNumber());
    if (policy.getScaleType().equals(ScalingPolicy.ScaleType.FIXED_NUM_SEGMENTS)) {
        return CompletableFuture.completedFuture(null);
    }

    return streamMetadataStore
            .markCold(request.getScope(), request.getStream(), request.getSegmentNumber(),
                    request.isSilent() ? Long.MAX_VALUE : request.getTimestamp() + REQUEST_VALIDITY_PERIOD,
                    context, executor)
            .thenCompose(x -> streamMetadataStore.getActiveSegments(request.getScope(), request.getStream(),
                    context, executor))
            .thenApply(activeSegments -> {
                assert activeSegments != null;
                final Optional<Segment> currentOpt = activeSegments.stream()
                        .filter(y -> y.getNumber() == request.getSegmentNumber()).findAny();
                if (!currentOpt.isPresent() || activeSegments.size() == policy.getMinNumSegments()) {
                    // if we are already at the minimum number of segments, we can't scale down; we have put the marker,
                    // we should simply return and do nothing.
                    return null;
                } else {
                    final List<Segment> candidates = activeSegments.stream()
                            .filter(z -> z.getKeyEnd() == currentOpt.get().getKeyStart()
                                    || z.getKeyStart() == currentOpt.get().getKeyEnd()
                                    || z.getNumber() == request.getSegmentNumber())
                            .sorted(Comparator.comparingDouble(Segment::getKeyStart))
                            .collect(Collectors.toList());
                    return new ImmutablePair<>(candidates, activeSegments.size() - policy.getMinNumSegments());
                }
            }).thenCompose(input -> {
                if (input != null && input.getLeft().size() > 1) {
                    final List<Segment> candidates = input.getLeft();
                    final int maxScaleDownFactor = input.getRight();

                    // fetch their cold status for all candidates
                    return Futures
                            .filter(candidates,
                                    candidate -> streamMetadataStore.isCold(request.getScope(),
                                            request.getStream(), candidate.getNumber(), context, executor))
                            .thenApply(segments -> {
                                if (maxScaleDownFactor == 1 && segments.size() == 3) {
                                    // Note: sorted by keystart so just pick first two.
                                    return Lists.newArrayList(segments.get(0), segments.get(1));
                                } else {
                                    return segments;
                                }
                            });
                } else {
                    return CompletableFuture.completedFuture(null);
                }
            }).thenCompose(toMerge -> {
                if (toMerge != null && toMerge.size() > 1) {
                    toMerge.forEach(x -> {
                        log.debug("merging stream {}: segment {} ", request.getStream(), x.getNumber());
                    });

                    final ArrayList<AbstractMap.SimpleEntry<Double, Double>> simpleEntries = new ArrayList<>();
                    double min = toMerge.stream().mapToDouble(Segment::getKeyStart).min().getAsDouble();
                    double max = toMerge.stream().mapToDouble(Segment::getKeyEnd).max().getAsDouble();
                    simpleEntries.add(new AbstractMap.SimpleEntry<>(min, max));
                    final ArrayList<Integer> segments = new ArrayList<>();
                    toMerge.forEach(segment -> segments.add(segment.getNumber()));
                    return postScaleRequest(request, segments, simpleEntries);
                } else {
                    return CompletableFuture.completedFuture(null);
                }
            });
}

From source file: com.strider.datadefender.DatabaseDiscoverer.java

@SuppressWarnings("unchecked")
public List<MatchMetaData> discover(final IDBFactory factory, final Properties dataDiscoveryProperties,
        String vendor) throws ParseException, DatabaseDiscoveryException {
    log.info("Data discovery in process");

    // Get the probability threshold from property file
    final double probabilityThreshold = parseDouble(
            dataDiscoveryProperties.getProperty("probability_threshold"));
    String calculate_score = dataDiscoveryProperties.getProperty("score_calculation");

    if (CommonUtils.isEmptyString(calculate_score)) {
        calculate_score = "false";
    }

    log.info("Probability threshold [" + probabilityThreshold + "]");

    // Get list of models used in data discovery
    final String models = dataDiscoveryProperties.getProperty("models");

    modelList = models.split(",");
    log.info("Model list [" + Arrays.toString(modelList) + "]");

    List<MatchMetaData> finalList = new ArrayList<>();

    for (final String model : modelList) {
        log.info("********************************");
        log.info("Processing model " + model);
        log.info("********************************");

        final Model modelPerson = createModel(dataDiscoveryProperties, model);

        matches = discoverAgainstSingleModel(factory, dataDiscoveryProperties, modelPerson,
                probabilityThreshold, vendor);
        finalList = ListUtils.union(finalList, matches);
    }

    final DecimalFormat decimalFormat = new DecimalFormat("#.##");

    log.info("List of suspects:");
    log.info(String.format("%20s %20s %20s %20s", "Table*", "Column*", "Probability*", "Model*"));

    final Score score = new Score();
    int highRiskColumns = 0;
    int rowCount = 0;

    for (final MatchMetaData data : finalList) {

        // Row count
        if (YES.equals(calculate_score)) {
            log.debug("Skipping table rowcount...");
            rowCount = ReportUtil.rowCount(factory, data.getTableName(),
                    Integer.valueOf(dataDiscoveryProperties.getProperty("limit")));
        }

        // Getting 5 sample values
        final List<String> sampleDataList = ReportUtil.sampleData(factory, data.getTableName(),
                data.getColumnName());
        // Output
        log.info("Column                      : " + data.toString());
        log.info(CommonUtils.fixedLengthString('=', data.toString().length() + 30));
        log.info("Number of rows in the table: " + rowCount);
        log.info("Score                      : " + score.columnScore(rowCount));

        log.info("Model                       : " + data.getModel());

        if (YES.equals(calculate_score)) {
            log.info("Number of rows in the table : " + rowCount);
            log.info("Score                       : " + score.columnScore(rowCount));
        } else {
            log.info("Number of rows in the table : N/A");
            log.info("Score                       : N/A");
        }

        log.info("Sample data");
        log.info(CommonUtils.fixedLengthString('-', 11));

        for (final String sampleData : sampleDataList) {
            log.info(sampleData);
        }

        log.info("");

        final List<Probability> probabilityList = data.getProbabilityList();

        Collections.sort(probabilityList,
                Comparator.comparingDouble(Probability::getProbabilityValue).reversed());

        int y = 0;

        if (data.getProbabilityList().size() >= 5) {
            y = 5;
        } else {
            y = data.getProbabilityList().size();
        }

        for (int i = 0; i < y; i++) {
            final Probability p = data.getProbabilityList().get(i);

            log.info(p.getSentence() + ":" + p.getProbabilityValue());
        }

        log.info("");

        // Score calculation is evaluated with score_calculation parameter
        if (YES.equals(calculate_score) && score.columnScore(rowCount).equals("High")) {
            highRiskColumns++;
        }
    }

    // Only applicable when parameter table_rowcount=yes otherwise score calculation should not be done
    if (YES.equals(calculate_score)) {
        log.info("Overall score: " + score.dataStoreScore());
        log.info("");

        if ((finalList != null) && (finalList.size() > 0)) {
            log.info("============================================");

            final int threshold_count = Integer.valueOf(dataDiscoveryProperties.getProperty("threshold_count"));

            if (finalList.size() > threshold_count) {
                log.info("Number of PI [" + finalList.size() + "] columns is higher than defined threashold ["
                        + threshold_count + "]");
            } else {
                log.info("Number of PI [" + finalList.size()
                        + "] columns is lower or equal than defined threashold [" + threshold_count + "]");
            }

            final int threshold_highrisk = Integer
                    .valueOf(dataDiscoveryProperties.getProperty("threshold_highrisk"));

            if (highRiskColumns > threshold_highrisk) {
                log.info("Number of High risk PI [" + highRiskColumns
                        + "] columns is higher than defined threashold [" + threshold_highrisk + "]");
            } else {
                log.info("Number of High risk PI [" + highRiskColumns
                        + "] columns is lower or equal than defined threashold [" + threshold_highrisk + "]");
            }
        }
    } else {
        log.info("Overall score: N/A");
    }

    log.info("matches: " + matches.toString());

    return matches;
}
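
Sorting the probability list in reverse and then reading off at most five entries, as the loop above does, can also be expressed as a single stream pipeline. A hedged sketch with a hypothetical stand-in for Probability:

import java.util.Comparator;
import java.util.List;
import java.util.stream.Collectors;

public class TopNSketch {
    // Hypothetical stand-in for Probability.
    record Prob(String sentence, double value) {}

    public static void main(String[] args) {
        List<Prob> probs = List.of(new Prob("a", 0.2), new Prob("b", 0.9), new Prob("c", 0.5));
        // Highest probability first, capped at five entries.
        List<Prob> top5 = probs.stream()
                .sorted(Comparator.comparingDouble(Prob::value).reversed())
                .limit(5)
                .collect(Collectors.toList());
        System.out.println(top5); // b (0.9), c (0.5), a (0.2)
    }
}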

From source file: io.pravega.controller.server.eventProcessor.ScaleRequestHandler.java

private CompletableFuture<Void> processScaleDown(final ScaleEvent request, final ScalingPolicy policy,
        final OperationContext context) {
    log.debug("scale down request received for stream {} segment {}", request.getStream(),
            request.getSegmentNumber());
    if (policy.getType().equals(ScalingPolicy.Type.FIXED_NUM_SEGMENTS)) {
        return CompletableFuture.completedFuture(null);
    }

    return streamMetadataStore
            .markCold(request.getScope(), request.getStream(), request.getSegmentNumber(),
                    request.isSilent() ? Long.MAX_VALUE : request.getTimestamp() + REQUEST_VALIDITY_PERIOD,
                    context, executor)
            .thenCompose(x -> streamMetadataStore.getActiveSegments(request.getScope(), request.getStream(),
                    context, executor))
            .thenApply(activeSegments -> {
                assert activeSegments != null;
                final Optional<Segment> currentOpt = activeSegments.stream()
                        .filter(y -> y.getNumber() == request.getSegmentNumber()).findAny();
                if (!currentOpt.isPresent() || activeSegments.size() == policy.getMinNumSegments()) {
                    // if we are already at the minimum number of segments, we can't scale down; we have put the marker,
                    // we should simply return and do nothing.
                    return null;
                } else {
                    final List<Segment> candidates = activeSegments.stream()
                            .filter(z -> z.getKeyEnd() == currentOpt.get().getKeyStart()
                                    || z.getKeyStart() == currentOpt.get().getKeyEnd()
                                    || z.getNumber() == request.getSegmentNumber())
                            .sorted(Comparator.comparingDouble(Segment::getKeyStart))
                            .collect(Collectors.toList());
                    return new ImmutablePair<>(candidates, activeSegments.size() - policy.getMinNumSegments());
                }
            }).thenCompose(input -> {
                if (input != null && input.getLeft().size() > 1) {
                    final List<Segment> candidates = input.getLeft();
                    final int maxScaleDownFactor = input.getRight();

                    // fetch their cold status for all candidates
                    return FutureHelpers
                            .filter(candidates,
                                    candidate -> streamMetadataStore.isCold(request.getScope(),
                                            request.getStream(), candidate.getNumber(), context, executor))
                            .thenApply(segments -> {
                                if (maxScaleDownFactor == 1 && segments.size() == 3) {
                                    // Note: sorted by keystart so just pick first two.
                                    return Lists.newArrayList(segments.get(0), segments.get(1));
                                } else {
                                    return segments;
                                }
                            });
                } else {
                    return CompletableFuture.completedFuture(null);
                }
            }).thenCompose(toMerge -> {
                if (toMerge != null && toMerge.size() > 1) {
                    toMerge.forEach(x -> {
                        log.debug("merging stream {}: segment {} ", request.getStream(), x.getNumber());
                    });

                    final ArrayList<AbstractMap.SimpleEntry<Double, Double>> simpleEntries = new ArrayList<>();
                    double min = toMerge.stream().mapToDouble(Segment::getKeyStart).min().getAsDouble();
                    double max = toMerge.stream().mapToDouble(Segment::getKeyEnd).max().getAsDouble();
                    simpleEntries.add(new AbstractMap.SimpleEntry<>(min, max));
                    final ArrayList<Integer> segments = new ArrayList<>();
                    toMerge.forEach(segment -> segments.add(segment.getNumber()));
                    return executeScaleTask(request, segments, simpleEntries, context);
                } else {
                    return CompletableFuture.completedFuture(null);
                }
            });
}

From source file: io.pravega.controller.server.ControllerService.java

private List<SegmentRange> getSegmentRanges(List<Segment> activeSegments, String scope, String stream) {
    List<SegmentRange> listOfSegment = activeSegments.stream().map(segment -> convert(scope, stream, segment))
            .collect(Collectors.toList());
    listOfSegment.sort(Comparator.comparingDouble(SegmentRange::getMinKey));
    return listOfSegment;
}
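
When the primary double key can tie, comparingDouble chains like any other comparator via thenComparingDouble. A small sketch with a hypothetical Range record standing in for SegmentRange:

import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;

public class ChainedKeySketch {
    // Hypothetical stand-in for SegmentRange.
    record Range(double minKey, double maxKey) {}

    public static void main(String[] args) {
        List<Range> ranges = new ArrayList<>(List.of(
                new Range(0.0, 0.5), new Range(0.0, 0.25), new Range(0.5, 1.0)));
        // Primary key minKey, ties broken by maxKey, both compared as primitives.
        ranges.sort(Comparator.comparingDouble(Range::minKey).thenComparingDouble(Range::maxKey));
        System.out.println(ranges); // (0.0, 0.25), (0.0, 0.5), (0.5, 1.0)
    }
}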

From source file: edu.msViz.mzTree.MzTree.java

/**
 * Recursively divides the dataset into the mzTree, a depth first construction
 * starting with the head node.
 * @param sort_by_rt sorting flag, rt or mz
 * @param dataset The recursive call's data partition
 * @param head The recursive call's top level node
 * @param curHeight current height in the tree (root is 0)
 */
private void divide(Boolean sort_by_rt, List<MsDataPoint> dataset, MzTreeNode head, int curHeight) {
    // leaf flag
    boolean isLeaf = dataset.size() <= MzTree.NUM_POINTS_PER_NODE;

    // LEAF: save points, get mins/maxes
    if (isLeaf) {
        // leaf node submits its dataset to be written to data store
        try {
            this.dataStorage.savePoints(new StorageFacade.SavePointsTask(head, dataset), this.importState);
            this.pointCache.putAll(dataset);
        } catch (Exception e) {
            LOGGER.log(Level.WARNING, "Could not save points to datastorage for leaf node: " + head.toString(),
                    e);
        }

        // collect point IDs, mz/rt/intensity min/max
        head.initLeaf(dataset);

        dataset = null; // allow the partition to be garbage collected
    }

    // ROOT/INTERMEDIATE: summarize, partition and recurse
    else {

        // if sort_by_rt is null then don't sort, implies initial partition
        // on mzml sourced data which is already sorted by RT
        if (sort_by_rt != null) {
            if (sort_by_rt)
                Collections.sort(dataset, Comparator.comparingDouble((MsDataPoint dataPoint) -> dataPoint.rt));
            else
                Collections.sort(dataset, Comparator.comparingDouble((MsDataPoint dataPoint) -> dataPoint.mz));
        }

        // the partition size is the subset length divided by the numChildrenPerNode
        int partitionSize = (int) Math.ceil((double) dataset.size() / (double) this.branchingFactor);

        // split the dataset into partitions
        List<List<MsDataPoint>> partitions = new ArrayList<>();
        int i = 0;
        while (i < dataset.size()) {
            // populate partition
            final List<MsDataPoint> partition = dataset.subList(i, Math.min(i + partitionSize, dataset.size()));
            i += partition.size();

            // collect partition
            partitions.add(partition);
        }

        // free dataset for GC
        dataset = null;

        // distribute the partitions to child nodes
        for (List<MsDataPoint> partition : partitions) {
            // instantiate child node
            MzTreeNode child = new MzTreeNode(this.branchingFactor);

            // resolve sort_by_rt
            Boolean my_sort_by_rt = sort_by_rt;

            // if null (initial call mzML) set to true
            if (my_sort_by_rt == null)
                my_sort_by_rt = true;

            // recursively divide child node (depth first)
            this.divide(!my_sort_by_rt, partition, child, curHeight + 1);

            // collect child node
            head.addChildGetBounds(child);
        }

        // collect summary of points from child nodes (additionally saves pointIDs)
        head.summarizeFromChildren(MzTree.NUM_POINTS_PER_NODE, this.summarizer, this.pointCache);

    } // END ROOT/INTERMEDIATE NODE

}
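
The alternating rt/mz sort is the classic k-d-tree partitioning step. A compact sketch of the same idea, choosing the comparator by depth parity (the P record is a hypothetical stand-in for MsDataPoint):

import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;

public class AlternatingSortSketch {
    // Hypothetical stand-in for MsDataPoint.
    record P(double rt, double mz) {}

    // Pick the sort key by tree depth, as in a 2-d k-d tree.
    static Comparator<P> comparatorFor(int depth) {
        return (depth % 2 == 0)
                ? Comparator.comparingDouble(P::rt)
                : Comparator.comparingDouble(P::mz);
    }

    public static void main(String[] args) {
        List<P> points = new ArrayList<>(List.of(new P(2.0, 1.0), new P(1.0, 2.0)));
        points.sort(comparatorFor(1)); // odd depth: sort by mz
        System.out.println(points);    // (2.0, 1.0) first, since its mz is smaller
    }
}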

From source file: blusunrize.immersiveengineering.api.energy.wires.ImmersiveNetHandler.java

public Set<AbstractConnection> getIndirectEnergyConnections(BlockPos node, World world,
        boolean ignoreIsEnergyOutput) {
    int dimension = world.provider.getDimension();
    if (!ignoreIsEnergyOutput && indirectConnections.containsKey(dimension)
            && indirectConnections.get(dimension).containsKey(node))
        return indirectConnections.get(dimension).get(node);
    else if (ignoreIsEnergyOutput && indirectConnectionsIgnoreOut.containsKey(dimension)
            && indirectConnectionsIgnoreOut.get(dimension).containsKey(node))
        return indirectConnectionsIgnoreOut.get(dimension).get(node);

    PriorityQueue<Pair<IImmersiveConnectable, Float>> queue = new PriorityQueue<>(
            Comparator.comparingDouble(Pair::getRight));
    Set<AbstractConnection> closedList = newSetFromMap(new ConcurrentHashMap<AbstractConnection, Boolean>());
    List<BlockPos> checked = new ArrayList<>();
    HashMap<BlockPos, BlockPos> backtracker = new HashMap<>();

    checked.add(node);
    Set<Connection> conL = getConnections(world, node);
    if (conL != null)
        for (Connection con : conL) {
            IImmersiveConnectable end = toIIC(con.end, world);
            if (end != null) {
                queue.add(new ImmutablePair<>(end, con.getBaseLoss()));
                backtracker.put(con.end, node);
            }
        }

    IImmersiveConnectable next;
    final int closedListMax = 1200;

    while (closedList.size() < closedListMax && !queue.isEmpty()) {
        Pair<IImmersiveConnectable, Float> pair = queue.poll();
        next = pair.getLeft();
        float loss = pair.getRight();
        BlockPos nextPos = toBlockPos(next);
        if (!checked.contains(nextPos) && queue.stream().noneMatch((p) -> p.getLeft().equals(nextPos))) {
            boolean isOutput = next.isEnergyOutput();
            if (ignoreIsEnergyOutput || isOutput) {
                BlockPos last = toBlockPos(next);
                WireType minimumType = null;
                int distance = 0;
                List<Connection> connectionParts = new ArrayList<>();
                while (last != null) {
                    BlockPos prev = last;
                    last = backtracker.get(last);
                    if (last != null) {

                        Set<Connection> conLB = getConnections(world, last);
                        if (conLB != null)
                            for (Connection conB : conLB)
                                if (conB.end.equals(prev)) {
                                    connectionParts.add(0, conB);
                                    distance += conB.length;
                                    if (minimumType == null
                                            || conB.cableType.getTransferRate() < minimumType.getTransferRate())
                                        minimumType = conB.cableType;
                                    break;
                                }
                    }
                }
                closedList.add(new AbstractConnection(toBlockPos(node), toBlockPos(next), minimumType, distance,
                        isOutput, connectionParts.toArray(new Connection[connectionParts.size()])));
            }

            Set<Connection> conLN = getConnections(world, toBlockPos(next));
            if (conLN != null)
                for (Connection con : conLN)
                    if (next.allowEnergyToPass(con)) {
                        IImmersiveConnectable end = toIIC(con.end, world);

                        Optional<Pair<IImmersiveConnectable, Float>> existing = queue.stream()
                                .filter((p) -> p.getLeft() == end).findAny();
                        float newLoss = con.getBaseLoss() + loss;
                        if (end != null && !checked.contains(con.end)
                                && existing.map(Pair::getRight).orElse(Float.MAX_VALUE) > newLoss) {
                            existing.ifPresent(p1 -> queue.removeIf((p2) -> p1.getLeft() == p2.getLeft()));
                            queue.add(new ImmutablePair<>(end, newLoss));
                            backtracker.put(con.end, toBlockPos(next));
                        }
                    }
            checked.add(toBlockPos(next));
        }
    }
    if (FMLCommonHandler.instance().getEffectiveSide() == Side.SERVER) {
        if (ignoreIsEnergyOutput) {
            if (!indirectConnectionsIgnoreOut.containsKey(dimension))
                indirectConnectionsIgnoreOut.put(dimension, new ConcurrentHashMap<>());
            Map<BlockPos, Set<AbstractConnection>> conns = indirectConnectionsIgnoreOut.get(dimension);
            if (!conns.containsKey(node))
                conns.put(node, newSetFromMap(new ConcurrentHashMap<>()));
            conns.get(node).addAll(closedList);
        } else {
            if (!indirectConnections.containsKey(dimension))
                indirectConnections.put(dimension, new ConcurrentHashMap<>());
            Map<BlockPos, Set<AbstractConnection>> conns = indirectConnections.get(dimension);
            if (!conns.containsKey(node))
                conns.put(node, newSetFromMap(new ConcurrentHashMap<>()));
            conns.get(node).addAll(closedList);
        }
    }
    return closedList;
}
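
The PriorityQueue above is a min-heap keyed on each pair's accumulated loss, the usual shape for a Dijkstra-style search. A self-contained sketch of the same construction, using Map.Entry in place of Commons Lang's Pair:

import java.util.AbstractMap;
import java.util.Comparator;
import java.util.Map;
import java.util.PriorityQueue;

public class MinHeapSketch {
    public static void main(String[] args) {
        // Min-heap of (node, cost) entries; the cheapest entry is polled first.
        PriorityQueue<Map.Entry<String, Double>> queue = new PriorityQueue<>(
                Comparator.comparingDouble((Map.Entry<String, Double> e) -> e.getValue()));
        queue.add(new AbstractMap.SimpleEntry<>("far", 2.5));
        queue.add(new AbstractMap.SimpleEntry<>("near", 1.0));
        System.out.println(queue.poll().getKey()); // near
    }
}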