List of usage examples for java.lang.Math.ceil
public static double ceil(double a)
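Math.ceil returns the smallest double that is greater than or equal to the argument and equal to a mathematical integer. A minimal standalone sketch of the rounding behavior (a demo class of our own, not from any of the sources below):

public class CeilDemo {
    public static void main(String[] args) {
        System.out.println(Math.ceil(2.1));  // 3.0  (rounds toward positive infinity)
        System.out.println(Math.ceil(-2.1)); // -2.0 (toward positive infinity, not away from zero)
        System.out.println(Math.ceil(5.0));  // 5.0  (mathematical integers are returned unchanged)
        System.out.println(Math.ceil(-0.3)); // -0.0 (negative zero, per the Javadoc)
    }
}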
From source file:fi.arcusys.oulu.web.AjaxController.java
/**
 * Processes a task query and gets the task list.
 *
 * @param taskType   task type
 * @param page       page id
 * @param numPerPage number of tasks per page to be shown
 * @param keyword    keyword for searching/filtering
 * @param orderType  order type of tasks
 * @param token      user participant token
 * @param username   user name
 * @return task information in JSON format
 */
public JSONObject getJsonModel(int taskType, int page, int numPerPage, String keyword, String orderType,
        String token, String username) {
    JSONObject jsonModel = new JSONObject();
    if (token == null) {
        jsonModel.put("tokenStatus", "INVALID");
    } else {
        TaskHandle taskhandle = new TaskHandle(token, username);
        int totalTasksNum;
        int totalPages;
        List<Task> tasks;
        String first = String.valueOf((page - 1) * numPerPage);
        String max = String.valueOf(numPerPage);
        tasks = taskhandle.getTasksByParams(taskType, keyword, orderType, first, max);
        totalTasksNum = taskhandle.getTotalTasksNumber(taskType, keyword);
        // Ceiling division: a partially filled page still counts as a page.
        totalPages = (totalTasksNum == 0) ? 1 : (int) Math.ceil((double) totalTasksNum / numPerPage);
        jsonModel.put("totalItems", totalTasksNum);
        jsonModel.put("totalPages", totalPages);
        jsonModel.put("tasks", tasks);
        jsonModel.put("tokenStatus", "VALID");
    }
    return jsonModel;
}
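The totalPages line above is the standard ceiling-division idiom for pagination: cast one operand to double so the division is not truncated before Math.ceil runs. A sketch isolating the idiom (the helper names are illustrative, not part of AjaxController), with an integer-only equivalent for positive operands:

// Hypothetical helpers, not part of AjaxController.
static int pageCount(int totalItems, int numPerPage) {
    if (totalItems == 0) {
        return 1; // the controller above also reports at least one page
    }
    // Cast one operand to double, otherwise integer division truncates first.
    return (int) Math.ceil((double) totalItems / numPerPage);
}

// Integer-only equivalent for positive operands, with no floating-point round trip:
static int pageCountIntOnly(int totalItems, int numPerPage) {
    return totalItems == 0 ? 1 : (totalItems + numPerPage - 1) / numPerPage;
}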
From source file:org.geowebcache.diskquota.storage.PagePyramid.java
/**
 * @param coverage {@code [minx, miny, maxx, maxy, zoomlevel]} gridsubset coverage for a given
 *            zoom level
 * @return {@code [numTilesPerPageX, numTilesPerPageY, numPagesX, numPagesY]} number of pages in
 *            both directions for the given coverage
 */
public PageLevelInfo calculatePageInfo(final long[] coverage) {
    final int level = (int) coverage[4];
    final long coverageMinX = coverage[0];
    final long coverageMaxX = coverage[2];
    final long coverageMinY = coverage[1];
    final long coverageMaxY = coverage[3];

    final long coverageTilesWide = 1 + coverageMaxX - coverageMinX;
    final long coverageTilesHigh = 1 + coverageMaxY - coverageMinY;

    final int tilesPerPageX = calculateNumTilesPerPage(coverageTilesWide);
    final int tilesPerPageY = calculateNumTilesPerPage(coverageTilesHigh);
    final int numPagesX = (int) Math.ceil((double) coverageTilesWide / tilesPerPageX);
    final int numPagesY = (int) Math.ceil((double) coverageTilesHigh / tilesPerPageY);

    PageLevelInfo pli = new PageLevelInfo(numPagesX, numPagesY, tilesPerPageX, tilesPerPageY,
            coverageMinX, coverageMinY, coverageMaxX, coverageMaxY);

    // if (log.isDebugEnabled()) {
    //     log.debug("Coverage: " + Arrays.toString(coverage) + " (" + coverageTilesWide + "x"
    //             + coverageTilesHigh + ") tiles. Tiles per page: " + tilesPerPageX + " x "
    //             + tilesPerPageY + " for a total of " + numPagesX + " x " + numPagesY
    //             + " pages and " + (tilesPerPageX * (long) tilesPerPageY) + " tiles per page");
    // }
    return pli;
}
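The page counts above divide long tile counts after a cast to double, which is exact only up to 2^53. A sketch of an integer-only ceiling division that avoids the floating-point round trip (assuming positive operands, as tile counts are; the helper name is illustrative):

// Hypothetical helper; exact for all positive longs that do not overflow the sum.
static long numPages(long tiles, long tilesPerPage) {
    return (tiles + tilesPerPage - 1) / tilesPerPage;
    // JDK 18+ also offers Math.ceilDiv(tiles, tilesPerPage) for the same result.
}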
From source file:info.raack.appliancedetection.evaluation.model.Simulation.java
public Simulation(DateUtils dateUtils, Date startTime, long durationInSeconds, int numAppliances,
        int labelsPerOnOff, int onConcurrency, List<SimulatedAppliance> possibleAppliances,
        SimulationGroup group) {
    this.dateUtils = dateUtils;
    this.durationInSeconds = durationInSeconds;
    this.numAppliances = numAppliances;
    this.labelsPerOnOff = labelsPerOnOff;
    this.onConcurrency = onConcurrency;
    this.id = UUID.randomUUID().toString();
    this.group = group;

    // random number of unlabeled appliances (between 1 and 5)
    this.maxUnlabeledAppliances = (int) Math.ceil(Math.random() * 5);

    setStartTime(startTime.getTime());

    logger.debug("Starting simulation " + id + " at " + startTime);

    constructAppliances(possibleAppliances);
}
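The maxUnlabeledAppliances line uses Math.ceil to map a uniform draw in [0, 5) to an integer. The result is 1 to 5, except for the vanishingly rare draw of exactly 0.0, which yields 0. A sketch of the idiom next to a bounds-explicit alternative (ThreadLocalRandom is our suggestion, not used by the original class):

import java.util.concurrent.ThreadLocalRandom;

// Standalone demo, not part of the Simulation class above.
public class UnlabeledCountDemo {
    public static void main(String[] args) {
        // The idiom above: ceil maps a uniform draw in [0, 5) to {0, 1, ..., 5},
        // where 0 occurs only for the rare draw of exactly 0.0.
        int viaCeil = (int) Math.ceil(Math.random() * 5);

        // A bounds-explicit alternative that is always in [1, 5]:
        int viaRandom = ThreadLocalRandom.current().nextInt(1, 6);

        System.out.println(viaCeil + " " + viaRandom);
    }
}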
From source file:com.comphenix.xp.listeners.ExperienceEnhancementsListener.java
private int getCustomBookshelfCount(Block table, int maxBookshelfCount, int yOffset) {
    final int bookID = Material.BOOKSHELF.getId();
    final World world = table.getWorld();

    // Usually 15 bookshelves per vertical stack, so round the budget up to whole layers.
    int height = (int) Math.ceil(maxBookshelfCount / 15.0);

    int x = table.getX();
    int y = table.getY();
    int z = table.getZ();
    int count = 0;

    for (int i = yOffset; i < height; i++) {
        for (int j = -1; j <= 1; j++) {
            for (int k = -1; k <= 1; k++) {
                // We check every block in the 3x3 square around the table, except the
                // middle, for air blocks (seen from above):
                //   A A A
                //   A t A
                //   A A A
                //
                // t is at the origin, where j = 0 and k = 0.
                //
                // Legend: A = air, t = table, # = bookcase.
                if ((j != 0 || k != 0) && world.getBlockTypeIdAt(x + k, y + i, z + j) == 0) {

                    // Next, we count the bookcases in a star shape around the air blocks
                    // and the enchanting table:
                    //     # # #
                    //     A A A
                    //   # A t A #
                    //     A A A
                    //     # # #
                    if (world.getBlockTypeIdAt(x + k * 2, y + i, z + j * 2) == bookID) {
                        count++;
                    }

                    // Count the two leftover blocks in the corners:
                    //     #   #
                    //   # A x A #
                    //     x t x
                    //   # A x A #
                    //     #   #
                    // Legend: x = ignored blocks.
                    if (k != 0 && j != 0) {
                        if (world.getBlockTypeIdAt(x + k * 2, y + i, z + j) == bookID) {
                            count++;
                        }
                        if (world.getBlockTypeIdAt(x + k, y + i, z + j * 2) == bookID) {
                            count++;
                        }
                    }
                }
            }
        }
    }
    return count;
}
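The height computation above rounds a bookshelf budget up to whole vertical layers of 15. Just that step, as a sketch (helper name illustrative):

// Hypothetical helper: 15 bookshelves fit per layer, so n shelves span ceil(n / 15) layers.
static int layersToScan(int maxBookshelfCount) {
    return (int) Math.ceil(maxBookshelfCount / 15.0); // 15 -> 1, 16 -> 2, 30 -> 2, 31 -> 3
}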
From source file:com.insthub.O2OMobile.Model.MessageListModel.java
public void getListMore() {
    messagelistRequest request = new messagelistRequest();
    request.uid = SESSION.getInstance().uid;
    request.sid = SESSION.getInstance().sid;
    request.ver = O2OMobileAppConst.VERSION_CODE;
    // Next page to fetch: pages already loaded, rounded up, plus one.
    request.by_no = (int) Math.ceil(publicMessageList.size() * 1.0 / NUMPERPAGE) + 1;
    request.count = NUMPERPAGE;

    BeeCallback<JSONObject> cb = new BeeCallback<JSONObject>() {
        @Override
        public void callback(String url, JSONObject jo, AjaxStatus status) {
            try {
                MessageListModel.this.callback(this, url, jo, status);
                if (null != jo) {
                    messagelistResponse response = new messagelistResponse();
                    response.fromJson(jo);
                    publicMore = response.more;
                    if (response.succeed == 1) {
                        publicMessageList.addAll(response.messages);
                        MessageListModel.this.OnMessageResponse(url, jo, status);
                    } else {
                        MessageListModel.this.callback(url, response.error_code, response.error_desc);
                    }
                }
            } catch (JSONException e) {
                // malformed response is silently dropped
            }
        }
    };

    Map<String, Object> params = new HashMap<String, Object>();
    try {
        JSONObject requestJson = request.toJson();
        requestJson.remove("by_id");
        params.put("json", requestJson.toString());
    } catch (JSONException e) {
        // serialization failure leaves params empty
    }

    if (isSendingMessage(ApiInterface.MESSAGE_LIST)) {
        return;
    }
    cb.url(ApiInterface.MESSAGE_LIST).type(JSONObject.class).params(params);
    ajax(cb);
}
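The by_no line derives the next page to request from the number of messages already loaded. Isolated as a sketch (helper name illustrative):

// Hypothetical helper mirroring the by_no computation above.
static int nextPageToFetch(int messagesLoaded, int numPerPage) {
    // Round the loaded count up to whole pages, then ask for the page after that.
    return (int) Math.ceil(messagesLoaded * 1.0 / numPerPage) + 1; // 0 loaded -> page 1
}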
From source file:gdsc.smlm.ij.plugins.FIRE.java
private MemoryPeakResults cropToRoi(MemoryPeakResults results) {
    if (roiBounds == null)
        return results;

    // Adjust bounds relative to the input results image
    double xscale = roiImageWidth / results.getBounds().width;
    double yscale = roiImageHeight / results.getBounds().height;
    roiBounds.x /= xscale;
    roiBounds.width /= xscale;
    roiBounds.y /= yscale;
    roiBounds.height /= yscale;

    float minX = (int) (roiBounds.x);
    float maxX = (int) Math.ceil(roiBounds.x + roiBounds.width);
    float minY = (int) (roiBounds.y);
    float maxY = (int) Math.ceil(roiBounds.y + roiBounds.height);

    // Create a new set of results within the bounds
    MemoryPeakResults newResults = new MemoryPeakResults();
    newResults.begin();
    for (PeakResult peakResult : results.getResults()) {
        float x = peakResult.params[Gaussian2DFunction.X_POSITION];
        float y = peakResult.params[Gaussian2DFunction.Y_POSITION];
        if (x < minX || x > maxX || y < minY || y > maxY)
            continue;
        newResults.add(peakResult);
    }
    newResults.end();
    newResults.copySettings(results);
    newResults.setBounds(new Rectangle((int) minX, (int) minY, (int) (maxX - minX), (int) (maxY - minY)));
    return newResults;
}
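The min/max lines snap the fractional ROI outward to whole coordinates: the minimum is truncated down (the plain int cast floors here only because ROI coordinates are non-negative) and the maximum is rounded up with Math.ceil, so localisations on the boundary are never cropped away. The pattern in isolation, with an explicit floor (helper name illustrative):

// Hypothetical helper: expand a fractional rectangle outward to integer coordinates.
static int[] snapOutward(double x, double y, double width, double height) {
    int minX = (int) Math.floor(x);
    int minY = (int) Math.floor(y);
    int maxX = (int) Math.ceil(x + width);  // round up so edge points stay inside
    int maxY = (int) Math.ceil(y + height);
    return new int[] { minX, minY, maxX, maxY };
}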
From source file:com.github.aptd.simulation.elements.passenger.CPassengerSource.java
protected synchronized Instant determinenextstatechange() {
    if (m_passengersgenerated >= m_passengers)
        return Instant.MAX;
    return Instant.ofEpochMilli(m_startmillis + (long) Math.ceil(
            m_distribution.inverseCumulativeProbability(1.0 * (m_passengersgenerated + 1) / m_passengers)));
}
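Here Math.ceil rounds a fractional millisecond offset from the distribution's inverse CDF up to a whole millisecond before building the Instant. A runnable sketch of the same pattern, assuming a Commons Math ExponentialDistribution over millisecond offsets (the concrete distribution type is not shown in the excerpt):

import java.time.Instant;
import org.apache.commons.math3.distribution.ExponentialDistribution;

// Standalone demo of the quantile-to-Instant pattern above.
public class ArrivalDemo {
    public static void main(String[] args) {
        ExponentialDistribution dist = new ExponentialDistribution(60_000); // mean 60 s, in ms
        long startMillis = Instant.now().toEpochMilli();
        int generated = 3, total = 10;
        // Quantile of the (generated+1)-th arrival, rounded up to a whole millisecond.
        long offset = (long) Math.ceil(dist.inverseCumulativeProbability((generated + 1.0) / total));
        Instant next = Instant.ofEpochMilli(startMillis + offset);
        System.out.println(next);
    }
}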
From source file:com.turn.griffin.GriffinLibCacheUtil.java
public Optional<FileInfo> addFileToLocalLibCache(String blobName, long version, String dest, String filepath) {
    FileInfo fileInfo = FileInfo.newBuilder().setFilename(blobName).setVersion(version)
            .setHash(computeMD5(filepath).get()).setDest(dest).setCompression(FILE_COMPRESSION.name())
            .setBlockSize(FILE_BLOCK_SIZE).build();
    try {
        FileUtils.forceMkdir(new File(getTempCacheDirectory(fileInfo)));
        String tempCacheFilePath = getTempCacheFilePath(fileInfo);
        FileUtils.copyFile(new File(filepath), new File(tempCacheFilePath));

        /* The compression type is decided only once, at the time a file is pushed.
           This design ensures that a change to the default file compression will not
           affect any previously pushed file, as long as the new code supports the
           old compression. */
        compressFile(tempCacheFilePath, FILE_COMPRESSION);
        writeTempCacheMetaDataFile(fileInfo);

        /* Now move everything to the local libcache */
        moveFromTempCacheToLibCache(fileInfo);
    } catch (IOException ioe) {
        logger.error(String.format("Unable to create local repository for %s", blobName), ioe);
        return Optional.absent();
    }

    String libCacheCompressedFilePath = getLibCacheCompressedFilePath(fileInfo);
    /* NOTE: The block count is for the compressed file, not the original file. */
    int blockCount = (int) Math
            .ceil(FileUtils.sizeOf(new File(libCacheCompressedFilePath)) / (double) FILE_BLOCK_SIZE);
    FileInfo finalFileInfo = FileInfo.newBuilder().mergeFrom(fileInfo).setBlockCount(blockCount).build();
    return Optional.of(finalFileInfo);
}
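The blockCount line is the usual chunk-count idiom: a payload of n bytes split into fixed-size blocks occupies ceil(n / blockSize) blocks, the last one possibly partial. In isolation (helper name illustrative):

// Hypothetical helper mirroring the block-count computation above.
static int blockCount(long fileSizeBytes, int blockSize) {
    return (int) Math.ceil(fileSizeBytes / (double) blockSize); // empty file -> 0 blocks
}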
From source file:gr.iti.mklab.reveal.forensics.maps.dq.DQExtractor.java
public void detectDQDiscontinuities() {

    int imWidth = dcts.length;
    int imHeight = dcts[0].length;

    int[] p_h_avg = new int[maxCoeffs];
    int[] p_h_fft = new int[maxCoeffs];
    int[] p_final = new int[maxCoeffs];

    double[][] pTampered = new double[maxCoeffs][];
    double[][] pUntampered = new double[maxCoeffs][];

    for (int coeffIndex = 0; coeffIndex < maxCoeffs; coeffIndex++) {

        int coe = coeff[coeffIndex];
        int startY = coe % 8 - 1;
        if (startY == -1) {
            startY = 8;
        }
        int startX = (int) Math.floor((coe - 1) / 8);

        List<Integer> selectedCoeffs = new ArrayList<Integer>();
        for (int ii = startX; ii < imWidth; ii += 8) {
            for (int jj = startY; jj < imHeight; jj += 8) {
                selectedCoeffs.add(dcts[ii][jj]);
            }
        }

        int minCoeffValue = Collections.min(selectedCoeffs);
        int maxCoeffValue = Collections.max(selectedCoeffs);
        int s_0;
        Double[] coeffHist = new Double[0];

        if (maxCoeffValue - minCoeffValue > 0) {
            // Histogram length will be a power of 2 to allow for FFT (zero padded)
            int trueHistRange = maxCoeffValue - minCoeffValue + 1;
            //int histLength = trueHistRange;
            int histLength = (int) Math.pow(2, Math.ceil(Math.log(trueHistRange) / Math.log(2)));

            coeffHist = new Double[histLength];
            for (int ii = 0; ii < coeffHist.length; ii++) {
                coeffHist[ii] = 0.0;
            }
            for (Integer selectedCoeff : selectedCoeffs) {
                coeffHist[selectedCoeff - minCoeffValue] += 1;
            }

            List<Double> coeffHistList = Arrays.asList(coeffHist);
            s_0 = coeffHistList.indexOf(Collections.max(coeffHistList));

            List<Double> h = new ArrayList<>();
            DescriptiveStatistics vals;
            for (int coeffInd = 1; coeffInd < coeffHistList.size(); coeffInd++) {
                vals = new DescriptiveStatistics();
                for (int leapInd = s_0; leapInd < coeffHistList.size(); leapInd += coeffInd) {
                    vals.addValue(coeffHistList.get(leapInd));
                }
                for (int leapInd = s_0 - coeffInd; leapInd >= 0; leapInd -= coeffInd) {
                    vals.addValue(coeffHistList.get(leapInd));
                }
                h.add(vals.getMean());
            }
            p_h_avg[coeffIndex] = h.indexOf(Collections.max(h));

            FastFourierTransformer fastFourierTransformer = new FastFourierTransformer(
                    DftNormalization.STANDARD);
            Complex[] fft = fastFourierTransformer.transform(ArrayUtils.toPrimitive(coeffHist),
                    TransformType.FORWARD);

            double[] power = new double[fft.length];
            for (int ii = 0; ii < power.length; ii++) {
                power[ii] = fft[ii].abs();
            }

            // Find the first local minimum, to bypass the DC peak.
            // Short-circuit && guards the power[FreqValley + 1] access at the array end.
            double DC = power[0];
            int FreqValley = 1;
            while (FreqValley < power.length - 1 && power[FreqValley] >= power[FreqValley + 1]) {
                FreqValley++;
            }

            int maxFFTInd = 0;
            double maxFFTVal = 0;
            double minFFTVal = Double.MAX_VALUE;
            for (int ii = FreqValley; ii < power.length / 2; ii++) {
                if (power[ii] > maxFFTVal) {
                    maxFFTInd = ii;
                    maxFFTVal = power[ii];
                }
                if (power[ii] < minFFTVal) {
                    minFFTVal = power[ii];
                }
            }
            if (maxFFTInd == 0 || maxFFTVal < (DC / 5) || minFFTVal / maxFFTVal > 0.9) {
                p_h_fft[coeffIndex] = 1;
            } else {
                p_h_fft[coeffIndex] = Math.round(coeffHist.length / maxFFTInd);
            }
        } else {
            p_h_avg[coeffIndex] = 1;
            p_h_fft[coeffIndex] = 1;
            s_0 = 0;
        }

        if (p_h_avg[coeffIndex] < p_h_fft[coeffIndex]) {
            p_final[coeffIndex] = p_h_avg[coeffIndex];
        } else {
            p_final[coeffIndex] = p_h_fft[coeffIndex];
        }

        pTampered[coeffIndex] = new double[selectedCoeffs.size()];
        pUntampered[coeffIndex] = new double[selectedCoeffs.size()];
        int[] adjustedCoeffs = new int[selectedCoeffs.size()];
        int[] period_start = new int[selectedCoeffs.size()];
        int[] period;
        int[] num = new int[selectedCoeffs.size()];
        int[] denom = new int[selectedCoeffs.size()];
        double[] P_u = new double[selectedCoeffs.size()];
        double[] P_t = new double[selectedCoeffs.size()];

        if (p_final[coeffIndex] != 1) {
            for (int ii = 0; ii < adjustedCoeffs.length; ii++) {
                adjustedCoeffs[ii] = selectedCoeffs.get(ii) - minCoeffValue;
                period_start[ii] = adjustedCoeffs[ii] - rem(adjustedCoeffs[ii] - s_0, p_final[coeffIndex]);
            }
            for (int kk = 0; kk < selectedCoeffs.size(); kk++) {
                if (period_start[kk] > s_0) {
                    period = new int[p_final[coeffIndex]];
                    for (int ii = 0; ii < p_final[coeffIndex]; ii++) {
                        period[ii] = period_start[kk] + ii;
                        if (period[ii] >= coeffHist.length) {
                            period[ii] = period[ii] - p_final[coeffIndex];
                        }
                    }
                    num[kk] = (int) coeffHist[adjustedCoeffs[kk]].doubleValue();
                    denom[kk] = 0;
                    for (int ll = 0; ll < period.length; ll++) {
                        denom[kk] = denom[kk] + (int) coeffHist[period[ll]].doubleValue();
                    }
                } else {
                    period = new int[p_final[coeffIndex]];
                    for (int ii = 0; ii < p_final[coeffIndex]; ii++) {
                        period[ii] = period_start[kk] - ii;
                        if (period_start[kk] - p_final[coeffIndex] + 1 <= 0) {
                            if (period[ii] <= 0) {
                                period[ii] = period[ii] + p_final[coeffIndex];
                            }
                        }
                    }
                    num[kk] = (int) coeffHist[adjustedCoeffs[kk]].doubleValue();
                    denom[kk] = 0;
                    for (int ll = 0; ll < period.length; ll++) {
                        denom[kk] = denom[kk] + (int) coeffHist[period[ll]].doubleValue();
                    }
                }

                P_u[kk] = ((double) num[kk] / denom[kk]);
                P_t[kk] = (1.0 / p_final[coeffIndex]);
                if (P_u[kk] + P_t[kk] != 0) {
                    pTampered[coeffIndex][kk] = P_t[kk] / (P_u[kk] + P_t[kk]);
                    pUntampered[coeffIndex][kk] = P_u[kk] / (P_u[kk] + P_t[kk]);
                } else {
                    pTampered[coeffIndex][kk] = 0.5;
                    pUntampered[coeffIndex][kk] = 0.5;
                }
            }
        } else {
            for (int kk = 0; kk < selectedCoeffs.size(); kk++) {
                pTampered[coeffIndex][kk] = 0.5;
                pUntampered[coeffIndex][kk] = 0.5;
            }
        }
    }

    double[] pTamperedOverall = new double[pTampered[0].length];
    double pTamperedProd;
    double pUntamperedProd;

    for (int locationIndex = 0; locationIndex < pTampered[0].length; locationIndex++) {
        pTamperedProd = 1;
        pUntamperedProd = 1;
        for (int coeffIndex = 0; coeffIndex < pTampered.length; coeffIndex++) {
            pTamperedProd = pTamperedProd * pTampered[coeffIndex][locationIndex];
            pUntamperedProd = pUntamperedProd * pUntampered[coeffIndex][locationIndex];
        }
        if (pTamperedProd + pUntamperedProd != 0) {
            pTamperedOverall[locationIndex] = pTamperedProd / (pTamperedProd + pUntamperedProd);
        } else {
            pTamperedOverall[locationIndex] = 0;
        }
    }

    int blocksH = imWidth / 8;
    int blocksV = imHeight / 8;
    double[][] outputMap = new double[blocksV][blocksH];
    for (int kk = 0; kk < pTamperedOverall.length; kk++) {
        outputMap[kk % blocksV][(int) Math.floor(kk / blocksV)] = pTamperedOverall[kk];
        if (pTamperedOverall[kk] > maxProbValue) {
            maxProbValue = pTamperedOverall[kk];
        }
        if (pTamperedOverall[kk] < minProbValue) {
            minProbValue = pTamperedOverall[kk];
        }
    }

    probabilityMap = outputMap;
    BufferedImage outputIm = visualizeWithJet(outputMap);
    // output
    displaySurface = outputIm;
}
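The histLength computation in the example above rounds the histogram range up to the next power of two, since Commons Math's FastFourierTransformer only accepts power-of-two input lengths. A sketch of that step, with a bit-twiddling equivalent that sidesteps floating-point log (helper names illustrative):

// Hypothetical helpers, not part of DQExtractor.
static int nextPowerOfTwo(int n) {
    // As in the example: pow(2, ceil(log2(n))).  5 -> 8, 8 -> 8.
    return (int) Math.pow(2, Math.ceil(Math.log(n) / Math.log(2)));
}

// Equivalent for 1 <= n <= 2^30, avoiding the rounding hazards of log for large n:
static int nextPowerOfTwoBits(int n) {
    int highest = Integer.highestOneBit(n);
    return highest == n ? n : highest << 1;
}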