Example usage for java.lang Math ceil

List of usage examples for java.lang Math ceil

Introduction

On this page you can find example usage for java.lang Math.ceil.

Prototype

public static double ceil(double a) 

Document

Returns the smallest (closest to negative infinity) double value that is greater than or equal to the argument and is equal to a mathematical integer.
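A short standalone illustration of the rounding behavior described above:

    double a = Math.ceil(5.3);   // 6.0
    double b = Math.ceil(-5.3);  // -5.0 (rounds toward positive infinity)
    double c = Math.ceil(7.0);   // 7.0 (already equal to a mathematical integer)
    double d = Math.ceil(-0.4);  // -0.0 (negative zero, per the Math.ceil contract)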

Usage

From source file:Main.java

private static int computeInitialSampleSize(Options options, int minSideLength, int maxNumOfPixels) {
    double w = options.outWidth;
    double h = options.outHeight;

    int lowerBound = (maxNumOfPixels == UNCONSTRAINED) ? 1 : (int) Math.ceil(Math.sqrt(w * h / maxNumOfPixels));
    int upperBound = (minSideLength == UNCONSTRAINED) ? 128
            : (int) Math.min(Math.floor(w / minSideLength), Math.floor(h / minSideLength));

    if (upperBound < lowerBound) {
        // return the larger one when there is no overlapping zone.
        return lowerBound;
    }

    if ((maxNumOfPixels == UNCONSTRAINED) && (minSideLength == UNCONSTRAINED)) {
        return 1;
    } else if (minSideLength == UNCONSTRAINED) {
        return lowerBound;
    } else {
        return upperBound;
    }
}
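A quick arithmetic check with hypothetical numbers shows why the lower bound is rounded up: rounding up guarantees the subsampled image cannot exceed the pixel budget.

    double w = 4000, h = 3000;         // hypothetical image dimensions
    int maxNumOfPixels = 1_000_000;    // hypothetical pixel budget
    int lowerBound = (int) Math.ceil(Math.sqrt(w * h / maxNumOfPixels));
    // sqrt(12_000_000 / 1_000_000) = sqrt(12) ≈ 3.464, so lowerBound == 4
    // (4000 / 4) * (3000 / 4) = 750_000 pixels, which stays within the budget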

From source file:com.ms.commons.file.impl.FileServiceImpl.java

/**
 * Scales the image at the given URL to each requested width/height pair, preserving the
 * aspect ratio, and writes the results to the corresponding save paths.
 * 
 * @param url       location of the source image
 * @param quality   output image quality
 * @param widths    target widths; a non-positive value means the width is derived from the height
 * @param heights   target heights; a non-positive value means the height is derived from the width
 * @param savePaths destination path for each scaled image
 * @return true if all images were processed successfully, false on invalid arguments or error
 */
public boolean reduceAllPicture(String url, Double quality, int[] widths, int[] heights, String[] savePaths) {
    if (url == null || url.trim().length() == 0 || widths == null || widths.length == 0 || heights == null
            || heights.length == 0 || savePaths == null || savePaths.length == 0
            || widths.length != savePaths.length || heights.length != savePaths.length) {
        return false;
    }
    try {
        Map<String, Integer> imageBasicInfo = ImageUtil.getImageWH(url);
        int originalImageWidth = imageBasicInfo.get(ImageUtil.IMAGE_WIDTH);
        int originalImageHeight = imageBasicInfo.get(ImageUtil.IMAGE_HEIGHT);
        for (int i = 0, len = widths.length; i < len; i++) {
            int width = widths[i];
            int height = heights[i];
            if (width <= 0 && height <= 0) {
                continue;
            }
            // width not specified: derive the width from the height ratio
            if (width <= 0) {
                double heightBo = (double) height / originalImageHeight;
                if (heightBo > 1) {
                    width = originalImageWidth;
                    height = originalImageHeight;
                } else {
                    width = (int) Math.ceil(heightBo * originalImageWidth);
                }
            }
            // width specified: derive the height from the width ratio
            else {
                double widthBo = (double) width / originalImageWidth;
                if (widthBo > 1) {
                    width = originalImageWidth;
                    height = originalImageHeight;
                } else {
                    height = (int) Math.ceil(widthBo * originalImageHeight);
                }
            }
            ImageUtil.scaleImage(url, width, height, savePaths[i], quality);
        }
        return true;
    } catch (Exception e) {
        logger.error("", e);
        return false;
    }
}
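For the width-driven branch above, a hypothetical 800x333 source scaled to a requested width of 400 gives widthBo = 0.5, and the computed height is rounded up rather than truncated:

    int originalImageWidth = 800, originalImageHeight = 333;     // hypothetical source size
    int width = 400;                                             // only the width is requested
    double widthBo = (double) width / originalImageWidth;        // 0.5
    int height = (int) Math.ceil(widthBo * originalImageHeight); // ceil(166.5) = 167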

From source file:io.vit.vitio.Instances.Attendance.java

public int getModifiedPercentage(int attended, int total) {
    if (total != 0) {
        return (int) Math.ceil(((double) attended / total) * 100f);
    }
    return 0;
}
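Because the ratio is rounded up, the reported attendance percentage never understates the true value. A quick check with hypothetical counts:

    int attended = 22, total = 30;                                  // hypothetical counts
    int pct = (int) Math.ceil(((double) attended / total) * 100f);  // ceil(73.33...) = 74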

From source file:edu.asu.ca.kaushik.algorithms.permvector.MTPermutationVector.java

private int permVecLLLBound(int t, int k, int v) {
    double vTotm1 = Math.pow(v, t - 1);
    double vTotm2 = Math.pow(v, t - 2);

    double nume1 = CombinatoricsUtils.factorialDouble(t);
    double nume2 = CombinatoricsUtils.binomialCoefficientDouble((int) vTotm1, t);
    double nume3 = v * (vTotm1 - 1) / (v - 1);
    double nume4 = CombinatoricsUtils.binomialCoefficientDouble((int) vTotm2, t);
    double nume = nume1 * (nume2 - nume3 * nume4);
    double dnom = Math.pow(vTotm1, t);
    double q = 1 - (nume / dnom);

    double d = CombinatoricsUtils.binomialCoefficientDouble(k, t)
            - CombinatoricsUtils.binomialCoefficientDouble(k - t, t) - 1;

    return (int) Math.ceil((1 + Math.log(d + 1)) / Math.log(1 / q));
}

From source file:de.unijena.bioinf.FragmentationTreeConstruction.computation.recalibration.MedianSlope.java

public double[][] getMedianSubsetFairDistributed(final Spectrum<Peak> measured,
        final Spectrum<Peak> reference) {

    // for each mass range of 100 Da choose the most intensive peaks
    final SimpleSpectrum massOrderedSpectrum = new SimpleSpectrum(measured);
    final double highestMass = massOrderedSpectrum.getMzAt(massOrderedSpectrum.size() - 1);
    final ArrayList<Integer>[] chosenPeaks = new ArrayList[(int) Math.ceil(highestMass / 100)];
    for (int k = 0; k < chosenPeaks.length; ++k)
        chosenPeaks[k] = new ArrayList<Integer>();
    for (int k = 0; k < massOrderedSpectrum.size(); ++k) {
        final int bin = (int) Math.floor(massOrderedSpectrum.getMzAt(k) / 100);
        chosenPeaks[bin].add(k);
    }
    for (int k = 0; k < chosenPeaks.length; ++k) {
        Collections.sort(chosenPeaks[k], new Comparator<Integer>() {
            @Override
            public int compare(Integer o1, Integer o2) {
                return Double.compare(measured.getIntensityAt(o2), measured.getIntensityAt(o1));
            }
        });
    }

    while (chosenPeaks[chosenPeaks.length - 1].size() < 4) {
        chosenPeaks[chosenPeaks.length - 1].add(massOrderedSpectrum.size() - 1);
    }

    // take median of bin size
    Arrays.sort(chosenPeaks, new Comparator<ArrayList<Integer>>() {
        @Override
        public int compare(ArrayList<Integer> o1, ArrayList<Integer> o2) {
            return o2.size() - o1.size();
        }
    });
    final int median = Math.max(5, chosenPeaks[chosenPeaks.length / 2].size());
    //System.err.println(median);

    final TIntArrayList allPeaks = new TIntArrayList();
    for (ArrayList<Integer> bin : chosenPeaks)
        allPeaks.addAll(bin.subList(0, Math.min(bin.size(), median)));

    // assuming that all peaks are correct, choose all peaks for recalibration
    final double[][] peaks = new double[2][allPeaks.size()];

    for (int k = 0; k < allPeaks.size(); ++k) {
        peaks[0][k] = measured.getMzAt(allPeaks.get(k));
        peaks[1][k] = reference.getMzAt(allPeaks.get(k));
    }
    return peaks;
}
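The Math.ceil call sizes the bin array so that the last, partially filled 100 Da range still gets its own bin. For a hypothetical highest m/z of 842.5:

    double highestMass = 842.5;                        // hypothetical highest m/z in the spectrum
    int numBins = (int) Math.ceil(highestMass / 100);  // ceil(8.425) = 9 bins covering 0-900 Da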

From source file:dao.SchemaHistoryDAO.java

public static ObjectNode getPagedSchemaDataset(String name, Long datasetId, int page, int size) {
    ObjectNode result = Json.newObject();

    javax.sql.DataSource ds = getJdbcTemplate().getDataSource();
    DataSourceTransactionManager tm = new DataSourceTransactionManager(ds);
    TransactionTemplate txTemplate = new TransactionTemplate(tm);

    result = txTemplate.execute(new TransactionCallback<ObjectNode>() {
        public ObjectNode doInTransaction(TransactionStatus status) {

            List<SchemaDataset> pagedScripts = null;
            if (StringUtils.isNotBlank(name)) {
                if (datasetId != null && datasetId > 0) {
                    pagedScripts = getJdbcTemplate().query(GET_SPECIFIED_SCHEMA_DATASET_WITH_FILTER,
                            new SchemaDatasetRowMapper(), datasetId, "%" + name + "%", (page - 1) * size, size);

                } else {
                    pagedScripts = getJdbcTemplate().query(GET_PAGED_SCHEMA_DATASET_WITH_FILTER,
                            new SchemaDatasetRowMapper(), "%" + name + "%", (page - 1) * size, size);
                }
            } else {
                if (datasetId != null && datasetId > 0) {
                    pagedScripts = getJdbcTemplate().query(GET_SPECIFIED_SCHEMA_DATASET,
                            new SchemaDatasetRowMapper(), datasetId, (page - 1) * size, size);
                } else {
                    pagedScripts = getJdbcTemplate().query(GET_PAGED_SCHEMA_DATASET,
                            new SchemaDatasetRowMapper(), (page - 1) * size, size);
                }
            }

            long count = 0;
            try {
                count = getJdbcTemplate().queryForObject("SELECT FOUND_ROWS()", Long.class);
            } catch (EmptyResultDataAccessException e) {
                Logger.error("Exception = " + e.getMessage());
            }

            ObjectNode resultNode = Json.newObject();
            resultNode.put("count", count);
            resultNode.put("page", page);
            resultNode.put("itemsPerPage", size);
            resultNode.put("totalPages", (int) Math.ceil(count / ((double) size)));
            resultNode.set("datasets", Json.toJson(pagedScripts));

            return resultNode;
        }
    });

    return result;
}
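The totalPages value above uses the standard ceiling-division idiom; note the cast to double before dividing, since plain integer division would silently drop a partial last page. A small example with hypothetical numbers:

    long count = 45;  // hypothetical total row count
    int size = 20;    // items per page
    int totalPages = (int) Math.ceil(count / ((double) size));  // ceil(2.25) = 3
    // with integer division, 45 / 20 would yield 2 and the last 5 rows would be lost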

From source file:mase.spec.BasicHybridExchanger.java

protected double[][] computeMetapopDistances(EvolutionState state) {
    // Retrieve agent behaviours, aggregated by MetaPopulation
    List<BehaviourResult>[] mpBehavs = new List[metaPops.size()];
    for (int i = 0; i < metaPops.size(); i++) {
        MetaPopulation mp = metaPops.get(i);
        Individual[] inds = getElitePortion(mp.inds, (int) Math.ceil(elitePortion * popSize));
        mpBehavs[i] = new ArrayList<>(mp.agents.size() * inds.length);
        for (Individual ind : inds) {
            for (Integer a : mp.agents) {
                mpBehavs[i].add(getAgentBR(ind, a));
            }
        }
    }

    // Compute distance matrix
    double[][] dm = distanceMatrix(mpBehavs, state);
    dm = normalisedDistanceMatrix(dm, state);
    return dm;
}
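Rounding the elite portion up guarantees that at least one individual is selected whenever elitePortion is non-zero. With hypothetical settings:

    double elitePortion = 0.1;  // hypothetical configuration
    int popSize = 25;
    int numElites = (int) Math.ceil(elitePortion * popSize);  // ceil(2.5) = 3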

From source file:com.gwac.action.UserList.java

@SuppressWarnings("unchecked")
public String execute() {
    log.debug("Page " + getPage() + " Rows " + getRows() + " Sorting Order " + getSord() + " Index Row :"
            + getSidx());
    log.debug("Search :" + searchField + " " + searchOper + " " + searchString);

    // Count all records (select count(*) from your_customers)
    int tn = userService.count();
    log.debug("number=" + tn);

    if (totalrows != null) {
        records = totalrows;
    }

    //    if(rows==-1){
    //      rows = records;
    //    }

    // Calculate the index of the last row to select
    int to = (rows * page);

    // Calculate the first row to read
    int from = to - rows;

    // Clamp 'to' to the total number of records
    if (to > records) {
        to = records;
    }

    //    setGridModel(dpmDao.findAll(page, rows));
    gridModel = userService.listUser(from, rows);
    //    gridModel = tspDao.findAll();
    log.debug("from=" + from);
    log.debug("to=" + to);
    log.debug("size=" + gridModel.size());
    //    for(Telescope dpm: gridModel){
    //      log.debug("name="+dpm.getName());
    //    }

    // Calculate total Pages
    total = (int) Math.ceil((double) records / (double) rows);
    return SUCCESS;
}

From source file:org.soyatec.windowsazure.blob.internal.PageBlob.java

void setContentsImpl(IBlobProperties blobProperties, IBlobContents blobContents,
        final NameValueCollection headerParameters) {
    //To create a new page blob, first initialize the blob by calling Put Blob and specify its maximum size, up to 1 TB.
    // When creating a page blob, do not include content in the request body. Once the blob has been created, call Put Page to add content to the blob or to modify it.
    BlobStream stream = blobContents.getStream();
    if (stream == null)
        throw new IllegalArgumentException("Stream is null.");
    try {
        int size = (int) stream.length();
        if (size % 512 != 0)
            size = (int) (Math.ceil(1.0 * size / 512) * 512);
        // create empty page
        container.createPageBlob(blobProperties, size, headerParameters);

        int numPages = (int) Math.ceil(1.0 * size / PageSize);
        int startOffset = 0;
        int endOffset = 0;
        for (int i = 0; i < numPages; i++) {
            endOffset = (int) Math.min(size, startOffset + PageSize);
            // pages must be aligned with 512-byte boundaries
            PageRange range = new PageRange(startOffset, endOffset - 1);
            writePages(stream, range, null);
            startOffset = endOffset;
        }
    } catch (IOException e) {
        throw HttpUtilities.translateWebException(e);
    }
}
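The first Math.ceil call pads the blob length up to the next 512-byte boundary, as page blobs require. A quick check with a hypothetical 700-byte stream:

    int size = 700;                                       // hypothetical stream length in bytes
    if (size % 512 != 0)
        size = (int) (Math.ceil(1.0 * size / 512) * 512); // ceil(1.367...) = 2, so size becomes 1024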

From source file:com.mycompany.flooringmvc.OrderSQLTest.java

@Test
public void tester() throws ParseException {
    Assert.assertTrue(order != null);

    Order testGet = dao.get(order.getId());
    Assert.assertTrue(testGet != null);

    List<Order> orders = dao.getOrders();
    Assert.assertTrue(orders != null);

    order.setName("Bennett");
    SimpleDateFormat fmt = new SimpleDateFormat("yyyy-MM-dd");
    Date date = fmt.parse("2013-05-06");
    order.setDate(date);
    //        order.setProduct(1);
    //        order.setState(3);

    //        String state = order.getState();
    //        state = state.toUpperCase();
    double tax = tdao.getTax("CA");
    String type = order.getProduct();

    double results[] = getCosts(1);

    double area = 100;
    double mcs = results[0];
    double lcs = results[1];
    double lc = Math.ceil(lcs * area);
    double mc = Math.ceil(mcs * area);
    double pretotal = Math.ceil(lc + mc);
    double ttax = tax / 100;
    double taxtotal = pretotal * ttax;
    double total = pretotal + taxtotal;

    order.setArea(area);
    order.setTotal(total);
    order.setStateId(3);
    order.setProductId(1);

    //        dao.update(order);

}