Example usage for java.lang Double MIN_VALUE

List of usage examples for java.lang Double MIN_VALUE

Introduction

This page lists example usages of java.lang.Double.MIN_VALUE.

Prototype

double MIN_VALUE

Document

A constant holding the smallest positive nonzero value of type double, 2^-1074.
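
Note that, unlike Integer.MIN_VALUE, this constant is a tiny positive number rather than the most negative double. A minimal illustrative sketch (not taken from the usage examples below; the class name is just for demonstration) that prints the constant:

    public class DoubleMinValueDemo {
        public static void main(String[] args) {
            // Smallest positive nonzero double: 4.9E-324, i.e. 2^-1074
            System.out.println(Double.MIN_VALUE);

            // The constant is positive, so code that needs a "lowest possible"
            // sentinel usually wants -Double.MAX_VALUE or Double.NEGATIVE_INFINITY.
            System.out.println(Double.MIN_VALUE > 0); // true
            System.out.println(-Double.MAX_VALUE);    // -1.7976931348623157E308
        }
    }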

Usage

From source file:tracing.ShollAnalysisDialog.java

public ShollResults getCurrentResults() {
    List<ShollPoint> pointsToUse;
    String description = "Sholl analysis ";
    String postDescription = " for " + originalImage.getTitle();
    boolean useAllPaths = !useSelectedPathsCheckbox.getState();
    if (useAllPaths) {
        pointsToUse = shollPointsAllPaths;
        description += "of all paths" + postDescription;
    } else {
        pointsToUse = shollPointsSelectedPaths;
        description += "of selected paths " + postDescription;
    }

    int axes = 0;
    if (normalAxes.getState())
        axes = AXES_NORMAL;
    else if (semiLogAxes.getState())
        axes = AXES_SEMI_LOG;
    else if (logLogAxes.getState())
        axes = AXES_LOG_LOG;
    else
        throw new RuntimeException("BUG: somehow no axis checkbox was selected");

    int normalization = 0;
    if (noNormalization.getState())
        normalization = NOT_NORMALIZED;
    else if (normalizationForSphereVolume.getState())
        normalization = NORMALIZED_FOR_SPHERE_VOLUME;
    else
        throw new RuntimeException("BUG: somehow no normalization checkbox was selected");

    String sphereSeparationString = sampleSeparation.getText();
    double sphereSeparation = Double.MIN_VALUE;

    try {
        sphereSeparation = Double.parseDouble(sphereSeparationString);
    } catch (NumberFormatException nfe) {
        return null;
    }

    ShollResults results = new ShollResults(pointsToUse, originalImage, useAllPaths,
            useAllPaths ? numberOfAllPaths : numberOfSelectedPaths, x_start, y_start, z_start, description,
            axes, normalization, sphereSeparation, twoDimensional);

    return results;
}

From source file:org.apache.myfaces.shared_impl.renderkit.RendererUtils.java

/**
 * See JSF Spec. 8.5 Table 8-1
 *
 * @param value
 * @return boolean
 */
public static boolean isDefaultAttributeValue(Object value) {
    if (value == null) {
        return true;
    } else if (value instanceof Boolean) {
        return !((Boolean) value).booleanValue();
    } else if (value instanceof Number) {
        if (value instanceof Integer) {
            return ((Number) value).intValue() == Integer.MIN_VALUE;
        } else if (value instanceof Double) {
            return ((Number) value).doubleValue() == Double.MIN_VALUE;
        } else if (value instanceof Long) {
            return ((Number) value).longValue() == Long.MIN_VALUE;
        } else if (value instanceof Byte) {
            return ((Number) value).byteValue() == Byte.MIN_VALUE;
        } else if (value instanceof Float) {
            return ((Number) value).floatValue() == Float.MIN_VALUE;
        } else if (value instanceof Short) {
            return ((Number) value).shortValue() == Short.MIN_VALUE;
        }
    }
    return false;
}

From source file:org.apache.hadoop.hbase.master.balancer.LocalityAwareLoadBalancer.java

/**
 * This implements the Locality Aware Load Balancer.
 * Information for the algorithm can be found here: https://issues.apache.org/jira/browse/HBASE-10075
 *
 * @param clusterMap Map of regionservers and their load/region information to
 *                   a list of their most loaded regions
 * @return a list of regions to be moved, including source and destination,
 *         or null if cluster is already balanced
 */
public List<RegionPlan> balanceCluster(Map<ServerName, List<HRegionInfo>> clusterMap) {
    long startTime = System.currentTimeMillis();

    ClusterLoadState cs = new ClusterLoadState(clusterMap);

    float average = cs.getLoadAverage(); // for logging
    int ceiling = (int) Math.ceil(average * (1 + slop));
    NavigableMap<ServerAndLoad, List<HRegionInfo>> serversByLoad = cs.getServersByLoad();

    if (!this.needsBalance(cs)) {
        /*LOG.info("Skipping load balancing because balanced cluster; " +
                "servers=" + cs.getNumServers() + " " +
                "regions=" + cs.getNumRegions() + " average=" + average + " " +
                "mostloaded=" + serversByLoad.lastKey().getLoad() +
                " leastloaded=" + serversByLoad.firstKey().getLoad());*/
        return null;
    }

    // Additional check for the locality-aware load balancer, since it only
    // considers the most loaded servers
    if (!(cs.getMaxLoad() > ceiling)) {
        return null;
    }

    Cluster cluster = new Cluster(clusterMap, new HashMap<String, Deque<RegionLoad>>(), regionLocationFinder);
    int numRegions = cs.getNumRegions();

    LOG.info(" ####################################################################################");
    LOG.info(" Before Locality-aware Balancing");
    LOG.info(" Average=" + average + " Ceiling=" + ceiling + " slop=" + slop);
    /* for (ServerAndLoad server : serversByLoad.keySet()) {
      LOG.info("---------------" + "Server Name: " + server.getServerName() + "---------------");
      List<HRegionInfo> hRegionInfos = serversByLoad.get(server);
      LOG.info("Number of Regions:" + hRegionInfos.size());
      for (HRegionInfo hRegionInfo : hRegionInfos){
        LOG.info(String.format("Name of Region: %s ", hRegionInfo.getRegionNameAsString()));
        //LOG.info(String.format("Size of Region in number of rows"+(Bytes.toInt(hRegionInfo.getStartKey())-Bytes.toInt(hRegionInfo.getEndKey()))));
        LOG.info("Start Key: " + Bytes.toString(hRegionInfo.getStartKey()));
        LOG.info("End Key: " + Bytes.toString(hRegionInfo.getEndKey()));
      }
      LOG.info("------------------------------------------------------------------------------");
    } */

    // calculate allTableRegionNumber = total number of regions per table.
    Map<Integer, Integer> allTableRegionNumberMap = new HashMap<Integer, Integer>();
    for (int i = 0; i < cluster.numServers; ++i) {
        for (int j = 0; j < cluster.numTables; ++j) {
            if (allTableRegionNumberMap.containsKey(j)) {
                Integer integer = allTableRegionNumberMap.get(j);
                integer = integer + cluster.numRegionsPerServerPerTable[i][j];
                allTableRegionNumberMap.put(j, integer);
            } else {
                allTableRegionNumberMap.put(j, cluster.numRegionsPerServerPerTable[i][j]);
            }
        }
    }

    List<RegionPlan> regionsToReturn = new ArrayList<RegionPlan>();

    for (ServerAndLoad server : serversByLoad.keySet()) {
        List<HRegionInfo> hRegionInfos = serversByLoad.get(server);
        // Skip this server unless its number of regions exceeds the ceiling.
        if (hRegionInfos.size() <= ceiling) {
            LOG.debug("Number of HRegions <= ceiling (" + hRegionInfos.size() + " <= " + ceiling + ")");
            continue;
        }
        PriorityQueue<RegionServerRegionAffinity> queue = new PriorityQueue<RegionServerRegionAffinity>();
        int numberOfRegionsToMove = hRegionInfos.size() - ceiling;
        double regionAffinityNumber = (1 - hRegionInfos.size() / numRegions) * SERVER_BALANCER_WEIGHT;
        double tableRegionAffinityNumber = 0;
        // Compute an affinity score for each region on this server
        for (HRegionInfo hRegionInfo : hRegionInfos) {
            // Do not move metaregion.
            if (hRegionInfo.isMetaRegion()) {
                continue;
            }
            TableName table = hRegionInfo.getTable();
            String tableName = table.getNameAsString();
            int tableIndex = cluster.tablesToIndex.get(tableName);
            int serverIndex = cluster.serversToIndex.get(server.getServerName().getHostAndPort());
            tableRegionAffinityNumber = (1 - cluster.numRegionsPerServerPerTable[serverIndex][tableIndex]
                    / allTableRegionNumberMap.get(tableIndex)) * TABLE_BALANCER_WEIGHT;
            float localityIndex = getLocalityIndex(hRegionInfo, server) * LOCALITY_WEIGHT;
            LOG.info("tableRegionaffinity: " + tableRegionAffinityNumber);
            LOG.info("regionAffinityNUmber: " + regionAffinityNumber);
            LOG.info("localityIndex: " + localityIndex);
            double finalAffinity = regionAffinityNumber + tableRegionAffinityNumber + localityIndex
                    + getStickinessWeight(hRegionInfo);
            queue.add(new RegionServerRegionAffinity(server, hRegionInfo, finalAffinity));
            LOG.info("Affinity between server=" + server.getServerName() + " and region="
                    + hRegionInfo.getRegionNameAsString() + " is " + finalAffinity);
        }

        LOG.info("Number of regions to move=" + numberOfRegionsToMove + " All server and region affinities: "
                + queue);

        // Get top numberOfRegionsToMove
        List<RegionServerRegionAffinity> listOfRegionsToMove = new ArrayList<RegionServerRegionAffinity>();
        for (int i = 0; i < numberOfRegionsToMove; ++i) {
            if (queue.isEmpty()) {
                continue;
            }
            listOfRegionsToMove.add(queue.poll());
        }

        // Search for the most affine servers to these listOfRegionsToMove
        for (RegionServerRegionAffinity regionServerRegionAffinity : listOfRegionsToMove) {
            HRegionInfo hRegionInfoToMove = regionServerRegionAffinity.getHRegionInfo();
            ServerAndLoad serverToMove = null;
            double maxAffinity = Double.MIN_VALUE;
            // Get the most affine server to hRegionInfoToMove
            for (ServerAndLoad activeServer : serversByLoad.keySet()) {
                hRegionInfos = serversByLoad.get(activeServer);
                if (activeServer.equals(regionServerRegionAffinity.getServer())) {
                    continue;
                }
                if (hRegionInfos.size() >= ceiling) {
                    LOG.debug("Number of HRegions >= ceiling (" + hRegionInfos.size() + " >= " + ceiling + ")");
                    continue;
                }
                regionAffinityNumber = (1 - hRegionInfos.size() / numRegions) * SERVER_BALANCER_WEIGHT;
                TableName table = hRegionInfoToMove.getTable();
                String tableNameAsString = table.getNameAsString();
                int serverIndex = cluster.serversToIndex.get(activeServer.getServerName().getHostAndPort());
                tableRegionAffinityNumber = 0;
                if (cluster.tablesToIndex.containsKey(tableNameAsString)) {
                    Integer tableIndex = cluster.tablesToIndex.get(tableNameAsString);
                    tableRegionAffinityNumber = (1
                            - cluster.numRegionsPerServerPerTable[serverIndex][tableIndex]
                                    / allTableRegionNumberMap.get(tableIndex))
                            * TABLE_BALANCER_WEIGHT;
                } else {
                    LOG.error("Table " + tableNameAsString + "not present in cluster.tablesToIndex");
                }
                double finalAffinity = regionAffinityNumber + tableRegionAffinityNumber
                        + getLocalityIndex(hRegionInfoToMove, activeServer) * LOCALITY_WEIGHT
                        + getStickinessWeight(hRegionInfoToMove);
                if (finalAffinity > maxAffinity) {
                    maxAffinity = finalAffinity;
                    serverToMove = activeServer;
                }
            }
            regionsToReturn.add(new RegionPlan(hRegionInfoToMove,
                    regionServerRegionAffinity.getServer().getServerName(), serverToMove.getServerName()));
        }
    }

    LOG.info("Returning plan: " + regionsToReturn);

    // Reset previously moved regions and add new regions
    previouslyMovedRegions.clear();
    for (RegionPlan regionPlan : regionsToReturn) {
        previouslyMovedRegions.add(regionPlan.getRegionInfo());
    }

    long endTime = System.currentTimeMillis();
    LOG.info("Calculated a load balance in " + (endTime - startTime) + "ms. " + "Moving "
            + regionsToReturn.size() + " regions");
    return regionsToReturn;
}

From source file:io.lavagna.service.StatisticsServiceTest.java

@Test
public void getAverageCardsPerUserOnBoardTest() {
    double averageUsers = statisticsService.getAverageCardsPerUserOnBoard(board.getId());
    Assert.assertEquals(0, averageUsers, Double.MIN_VALUE);

    labelService.addLabelValueToCard(assigned.getId(), card.getId(), labelValueToUser, user, new Date());

    averageUsers = statisticsService.getAverageCardsPerUserOnBoard(board.getId());
    Assert.assertEquals(1, averageUsers, Double.MIN_VALUE);

    createArchivedCard();

    averageUsers = statisticsService.getAverageCardsPerUserOnBoard(board.getId());
    Assert.assertEquals(1, averageUsers, Double.MIN_VALUE);
}

From source file:com.mobiperf.speedometer.measurements.PingTask.java

private double filterPingResults(final ArrayList<Double> rrts, double avg) {
    double rrtAvg = avg;
    // Our # of results should be less than the # of times we ping
    try {
        ArrayList<Double> filteredResults = Util.applyInnerBandFilter(rrts, Double.MIN_VALUE,
                rrtAvg * Config.PING_FILTER_THRES);
        // Now we compute the average again based on the filtered results
        if (filteredResults != null && filteredResults.size() > 0) {
            rrtAvg = Util.getSum(filteredResults) / filteredResults.size();
        }
    } catch (InvalidParameterException e) {
        // Log.wtf(SpeedometerApp.TAG,
        // "This should never happen because rrts is never empty");
    }
    return rrtAvg;
}

From source file:IK.G.java

public static DVector triangleIntersectTest(DVector R, DVector ta, DVector tb, DVector tc, double[] uvw) {

    /*println("------------"+ta); 
         println("------------"+tb+ "-------------------" + tc);*/
    DVector I = new DVector();
    DVector u = new DVector(tb.x, tb.y, tb.z);
    DVector v = new DVector(tc.x, tc.y, tc.z);
    DVector n;
    DVector dir = new DVector(R.x, R.y, R.z);
    DVector w0 = new DVector();
    double r, a, b;

    DVector.sub(u, ta, u);
    DVector.sub(v, ta, v);
    n = new DVector(); // cross product
    DVector.cross(u, v, n);

    if (n.mag() == 0) {
        return null;
    }

    w0 = new DVector(0, 0, 0);
    DVector.sub(w0, ta, w0);
    a = -(new DVector(n.x, n.y, n.z).dot(w0));
    b = new DVector(n.x, n.y, n.z).dot(dir);

    if ((double) Math.abs(b) < Double.MIN_VALUE) {
        return null;
    }

    r = a / b;
    if (r < 0.0) {
        return null;
    }

    I = new DVector(0, 0, 0);
    I.x += r * dir.x;
    I.y += r * dir.y;
    I.z += r * dir.z;
    double[] barycentric = new double[3];
    barycentric(ta, tb, tc, I, barycentric);

    if (barycentric[0] >= 0 && barycentric[1] >= 0 && barycentric[2] >= 0) {
        uvw[0] = barycentric[0];
        uvw[1] = barycentric[1];
        uvw[2] = barycentric[2];
        return I;
    } else {
        return null;
    }
}

From source file:MedArkRef.AnalysisUtilities.java

public arkref.parsestuff.AnalysisUtilities.ParseResult parseSentence(String sentence) {
    String result = "";
    //System.err.println(sentence);
    //see if a parser socket server is available
    int port = new Integer(GlobalProperties.getProperties().getProperty("parserServerPort", "5556"));
    String host = "127.0.0.1";
    Socket client;
    PrintWriter pw;
    BufferedReader br;
    String line;
    Tree parse = null;
    double parseScore = Double.MIN_VALUE;

    try {
        client = new Socket(host, port);

        pw = new PrintWriter(client.getOutputStream());
        br = new BufferedReader(new InputStreamReader(client.getInputStream()));
        pw.println(sentence);
        pw.flush(); //flush to complete the transmission

        while ((line = br.readLine()) != null) {
            //if(!line.matches(".*\\S.*")){
            //        System.out.println();
            //}
            if (br.ready()) {
                line = line.replaceAll("\n", "");
                line = line.replaceAll("\\s+", " ");
                result += line + " ";
            } else {
                parseScore = new Double(line);
            }
        }

        br.close();
        pw.close();
        client.close();

        if (parse == null) {
            parse = readTreeFromString("(ROOT (. .))");
            parseScore = -99999.0;
        }

        if (GlobalProperties.getDebug())
            System.err.println("result (parse):" + result);
        parse = readTreeFromString(result);
        return new arkref.parsestuff.AnalysisUtilities.ParseResult(true, parse, parseScore);

    } catch (Exception ex) {
        if (GlobalProperties.getDebug())
            System.err.println("Could not connect to parser server.");
        //ex.printStackTrace();
    }

    System.err.println("parsing:" + sentence);

    //if socket server not available, then use a local parser object
    if (parser == null) {
        try {
            Options op = new Options();
            String serializedInputFileOrUrl = GlobalProperties.getProperties().getProperty("parserGrammarFile",
                    "config" + File.separator + "englishFactored.ser.gz");
            parser = new LexicalizedParser(serializedInputFileOrUrl, op);
            int maxLength = new Integer(GlobalProperties.getProperties().getProperty("parserMaxLength", "40"))
                    .intValue();
            parser.setMaxLength(maxLength);
            parser.setOptionFlags("-outputFormat", "oneline");
        } catch (Exception e) {
            e.printStackTrace();
        }
    }

    try {
        if (parser.parse(sentence)) {
            parse = parser.getBestParse();

            //remove all the parent annotations (this is a hacky way to do it)
            String ps = parse.toString().replaceAll("\\[[^\\]]+/[^\\]]+\\]", "");
            parse = AnalysisUtilities.getInstance().readTreeFromString(ps);

            parseScore = parser.getPCFGScore();
            return new arkref.parsestuff.AnalysisUtilities.ParseResult(true, parse, parseScore);
        }
    } catch (Exception e) {
    }

    parse = readTreeFromString("(ROOT (. .))");
    parseScore = -99999.0;
    return new arkref.parsestuff.AnalysisUtilities.ParseResult(false, parse, parseScore);
}

From source file:au.org.ala.delta.intkey.model.ReportUtils.java

/**
 * Generates summary information for the supplied real character and list of
 * taxa
 * 
 * @param ch
 *            The character
 * @param attrs
 *            All attributes for the character
 * @param taxa
 *            The taxa
 * @param outputToDeltaFormat
 *            True if the summary is being output to delta format. The
 *            output is slightly different in this situation.
 * @return A list of objects:
 *         <ol>
 *         <li>The number of unknown attributes for the supplied character
 *         and taxa (int)</li>
 *         <li>The number of inapplicable attributes for the supplied
 *         character and taxa (int)</li>
 *         <li>The number of recorded attributes for the supplied character
 *         and taxa (int)</li>
 *         <li>Minimum value for the attributes for the supplied character
 *         and taxa (Double)</li>
 *         <li>Maximum value for the attributes for the supplied character
 *         and taxa (Double)</li>
 *         <li>The number of the taxon whose attribute for the supplied
 *         character has the minimum value (int)</li>
 *         <li>The number of the taxon whose attribute for the supplied
 *         character has the maximum value (int)</li>
 *         <li>Mean of the values for the attributes for the supplied
 *         character and taxa (Double)</li>
 *         <li>Standard deviation of the values for the attributes for the
 *         supplied character and taxa (Double)</li>
 *         </ol>
 */
public static List<Object> generateRealSummaryInformation(RealCharacter ch, List<Attribute> attrs,
        List<Item> taxa, boolean outputToDeltaFormat) {
    int numUnknown = 0;
    int numInapplicable = 0;
    int numRecorded = 0;

    double minValue = Double.MAX_VALUE;
    double maxValue = Double.MIN_VALUE;
    int minValueTaxonIndex = 0;
    int maxValueTaxonIndex = 0;

    // Collect data points to use to calculate mean and standard deviation
    List<Double> valuesForMeanAndStdDev = new ArrayList<Double>();

    for (Item taxon : taxa) {
        RealAttribute attr = (RealAttribute) attrs.get(taxon.getItemNumber() - 1);
        if (attr.isUnknown() && !attr.isInapplicable()) {
            numUnknown++;
            continue;
        } else if (attr.isInapplicable()) {
            if (outputToDeltaFormat && attr.getCharacter().getControllingCharacters().isEmpty()) {
                numInapplicable++;
            } else if (!outputToDeltaFormat && attr.isUnknown()) {
                numInapplicable++;
            }

            if (attr.isUnknown()) {
                continue;
            }
        }

        numRecorded++;

        FloatRange presentRange = attr.getPresentRange();

        if (presentRange.getMinimumDouble() < minValue) {
            minValue = presentRange.getMinimumDouble();
            minValueTaxonIndex = taxon.getItemNumber();
        }

        if (presentRange.getMaximumDouble() > maxValue) {
            maxValue = presentRange.getMaximumDouble();
            maxValueTaxonIndex = taxon.getItemNumber();
        }

        // for calculating the mean and standard deviation, use the average
        // the two numbers that
        // specify the range.
        valuesForMeanAndStdDev.add((presentRange.getMinimumDouble() + presentRange.getMaximumDouble()) / 2);
    }

    Pair<Double, Double> pairMeanStdDev = calcuateMeanAndStandardDeviation(valuesForMeanAndStdDev);
    double mean = pairMeanStdDev.getFirst();
    double stdDev = pairMeanStdDev.getSecond();

    return Arrays.asList(new Object[] { numUnknown, numInapplicable, numRecorded, minValue, maxValue,
            minValueTaxonIndex, maxValueTaxonIndex, mean, stdDev });
}

From source file:org.dcache.util.collector.pools.PoolInfoCollectorUtils.java

/**
 * <p>Combines the pool last access data into an aggregate for the group.</p>
 *
 * @param pools the pools of the group
 * @return aggregated histogram model
 */
public static CountingHistogram mergeLastAccess(List<PoolInfoWrapper> pools) {
    List<CountingHistogram> allHistograms = pools.stream().map(PoolInfoWrapper::getInfo)
            .map(PoolData::getSweeperData).map(SweeperData::getLastAccessHistogram)
            .collect(Collectors.toList());

    CountingHistogram groupHistogram = SweeperData.createLastAccessHistogram();

    if (allHistograms.isEmpty()) {
        groupHistogram.setData(Collections.EMPTY_LIST);
        groupHistogram.configure();
        return groupHistogram;
    }

    /*
     *  Find the histogram with the highest last bin (and consequently
     *  the widest bins).
     *
     *  Merge the statistics.
     */
    double maxBinValue = Double.MIN_VALUE;
    CountingHistogram standard = null;
    HistogramMetadata metadata = new HistogramMetadata();

    for (CountingHistogram h : allHistograms) {
        double currentMaxBin = h.getHighestBin();
        if (currentMaxBin > maxBinValue) {
            standard = h;
            maxBinValue = currentMaxBin;
        }
        metadata.mergeStatistics(h.getMetadata());
    }

    int binCount = standard.getBinCount();
    double binSize = standard.getBinSize();

    groupHistogram.setBinCount(binCount);
    groupHistogram.setBinSize(binSize);
    groupHistogram.setBinUnit(standard.getBinUnit());
    groupHistogram.setBinWidth(standard.getBinWidth());
    groupHistogram.setHighestBin(standard.getHighestBin());
    groupHistogram.setLowestBin(standard.getLowestBin());
    groupHistogram.setMetadata(metadata);

    /*
     *  Configuration of counting histogram assumes raw unordered
     *  data.  To merge counting histograms, we just need to sum the
     *  already configured data to the correct bin.
     */
    double[] dataArray = new double[binCount];
    for (CountingHistogram h : allHistograms) {
        List<Double> currentData = h.getData();
        double currentBinSize = h.getBinSize();
        int numBins = currentData.size();
        for (int bin = 0; bin < numBins; ++bin) {
            int groupBin = (int) FastMath.floor((bin * currentBinSize) / binSize);
            dataArray[groupBin] += currentData.get(bin);
        }
    }

    List<Double> groupData = new ArrayList<>();
    for (double d : dataArray) {
        groupData.add(d);
    }

    groupHistogram.setData(groupData);

    return groupHistogram;
}

From source file:org.asoem.greyfish.core.space.WalledPointSpace.java

/**
 * Checks if the line {@code xo, yo, xd, yd} crosses an edge of the {@code tile} or any adjacent tile in the
 * direction of movement which has a wall present. If such a crossing is found, then the point closest to this
 * crossing is returned; otherwise {@code null} is returned.
 *
 * @param tile the tile to check for a collision
 * @param xo   Movement line x origin
 * @param yo   Movement line y origin
 * @param xd   Movement line x destination
 * @param yd   Movement line y destination
 * @return the location on the line closest to the point of a collision with a wall or {@code null} if none could be
 * found
 */
@Nullable
private Point2D collision(final WalledTile tile, final double xo, final double yo, final double xd,
        final double yd) {
    assert tile != null;

    if (tile.covers(xd, yd)) {
        return null;
    }

    TileDirection follow1 = null;
    TileDirection follow2 = null;

    if (yd < yo) { // north
        final ImmutablePoint2D intersection = intersection(tile.getX(), tile.getY(),
                Math.nextAfter(tile.getX() + 1.0, -Double.MIN_VALUE), tile.getY(), xo, yo, xd, yd);

        if (intersection != null) {
            if (tile.hasWall(TileDirection.NORTH)) {
                return intersection;
            } else {
                follow1 = TileDirection.NORTH;
            }
        }
    }

    if (xd > xo) { // east
        final ImmutablePoint2D intersection = intersection(Math.nextAfter(tile.getX() + 1.0, -Double.MIN_VALUE),
                tile.getY(), Math.nextAfter(tile.getX() + 1.0, -Double.MIN_VALUE),
                Math.nextAfter(tile.getY() + 1.0, -Double.MIN_VALUE), xo, yo, xd, yd);

        if (intersection != null) {
            if (tile.hasWall(TileDirection.EAST)) {
                return intersection;
            } else {
                if (follow1 == null) {
                    follow1 = TileDirection.EAST;
                } else {
                    follow2 = TileDirection.EAST;
                }
            }
        }
    }

    if (yd > yo) { // south
        final ImmutablePoint2D intersection = intersection(tile.getX(),
                Math.nextAfter(tile.getY() + 1.0, -Double.MIN_VALUE),
                Math.nextAfter(tile.getX() + 1.0, -Double.MIN_VALUE),
                Math.nextAfter(tile.getY() + 1.0, -Double.MIN_VALUE), xo, yo, xd, yd);

        if (intersection != null) {
            if (tile.hasWall(TileDirection.SOUTH)) {
                return intersection;
            } else {
                if (follow1 == null) {
                    follow1 = TileDirection.SOUTH;
                } else {
                    follow2 = TileDirection.SOUTH;
                }
            }
        }
    }

    if (xd < xo) { // west
        final ImmutablePoint2D intersection = intersection(tile.getX(),
                Math.nextAfter(tile.getY() + 1.0, -Double.MIN_VALUE), tile.getX(), tile.getY(), xo, yo, xd, yd);

        if (intersection != null) {
            if (tile.hasWall(TileDirection.WEST)) {
                return intersection;
            } else {
                if (follow1 == null) {
                    follow1 = TileDirection.WEST;
                } else {
                    follow2 = TileDirection.WEST;
                }
            }
        }
    }

    if (follow1 != null && hasAdjacentTile(tile, follow1)) {
        final Point2D collision = collision(getAdjacentTile(tile, follow1), xo, yo, xd, yd);
        if (collision != null) {
            return collision;
        } else if (follow2 != null && hasAdjacentTile(tile, follow2)) {
            final Point2D collision1 = collision(getAdjacentTile(tile, follow2), xo, yo, xd, yd);
            if (collision1 != null) {
                return collision1;
            }
        }
    }

    return null;
}