List of usage examples for java.lang.Math.ceil
public static double ceil(double a)
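Before the project listings below, a minimal self-contained sketch (not from any of the listed projects) of what Math.ceil does: it returns the smallest double value that is greater than or equal to the argument and is equal to a mathematical integer, which is why callers almost always cast the result back to int or long.

public class CeilDemo {
    public static void main(String[] args) {
        System.out.println(Math.ceil(2.1));   // 3.0
        System.out.println(Math.ceil(-2.1));  // -2.0 (rounds toward positive infinity, not away from zero)
        System.out.println(Math.ceil(5.0));   // 5.0 (integral values are unchanged)
        // common pattern: round a quotient up, then narrow to int
        int pages = (int) Math.ceil(17 / 4.0); // 5
        System.out.println(pages);
    }
}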
From source file:de.nx42.maps4cim.map.texture.osm.OsmHash.java
protected static String getQueryHashLocation(Area bounds) throws IOException {
    /*
     * - base64 encode, every char holds 6 bits (2^6 = 64)
     * - 3 chars per float = 18 bit precision = enough for 3 significant digits
     *   for numbers < 256 (which is the case in WGS84)
     * - 4 values -> 12 chars, 72 bit or 9 byte of information
     * - use URL safe encoding, or file name failures are expected!
     */

    // calculate size: 4 values, 8 bits per byte
    int bufSize = (int) Math.ceil(4 * locationPrecision / 8.0);
    ByteBuffer byteBuf = ByteBuffer.allocate(bufSize);
    BitOutput bitOut = BitOutput.newInstance(byteBuf); // direct

    storeCoordinate(bounds.getMinLat(), bitOut);
    storeCoordinate(bounds.getMaxLat(), bitOut);
    storeCoordinate(bounds.getMinLon(), bitOut);
    storeCoordinate(bounds.getMaxLon(), bitOut);

    // get array, return as Base64 (URL safe)
    byte[] ar = byteBuf.array();
    return Base64.encodeBase64URLSafeString(ar);
}
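The OsmHash example uses Math.ceil to round a bit count up to whole bytes. A reduced sketch of that idiom, with the constant invented to stand in for locationPrecision:

public class ByteSizing {
    static final int LOCATION_PRECISION_BITS = 18; // hypothetical, mirrors locationPrecision above

    public static void main(String[] args) {
        // 4 coordinates at 18 bits each = 72 bits; divide by 8.0 (not 8) so the
        // division happens in floating point before rounding up
        int bufSize = (int) Math.ceil(4 * LOCATION_PRECISION_BITS / 8.0);
        System.out.println(bufSize); // 9 bytes
    }
}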
From source file:com.linkedin.pinot.core.index.writer.impl.FixedByteSkipListSCMVWriter.java
public FixedByteSkipListSCMVWriter(File file, int numDocs, int totalNumValues, int columnSizeInBytes)
        throws Exception {
    // cast first: plain int division would truncate the average
    float averageValuesPerDoc = (float) totalNumValues / numDocs;
    this.docsPerChunk = (int) (Math.ceil(PREFERRED_NUM_VALUES_PER_CHUNK / averageValuesPerDoc));
    this.numChunks = (numDocs + docsPerChunk - 1) / docsPerChunk;
    chunkOffsetHeaderSize = numChunks * SIZE_OF_INT * NUM_COLS_IN_HEADER;
    bitsetSize = (totalNumValues + 7) / 8;
    rawDataSize = totalNumValues * columnSizeInBytes;
    totalSize = chunkOffsetHeaderSize + bitsetSize + rawDataSize;
    raf = new RandomAccessFile(file, "rw");
    chunkOffsetsBuffer = MmapUtils.mmapFile(raf, FileChannel.MapMode.READ_WRITE, 0, chunkOffsetHeaderSize,
            file, this.getClass().getSimpleName() + " chunkOffsetsBuffer");
    bitsetBuffer = MmapUtils.mmapFile(raf, FileChannel.MapMode.READ_WRITE, chunkOffsetHeaderSize, bitsetSize,
            file, this.getClass().getSimpleName() + " bitsetBuffer");
    rawDataBuffer = MmapUtils.mmapFile(raf, FileChannel.MapMode.READ_WRITE, chunkOffsetHeaderSize + bitsetSize,
            rawDataSize, file, this.getClass().getSimpleName() + " rawDataBuffer");
    chunkOffsetsWriter = new FixedByteWidthRowColDataFileWriter(chunkOffsetsBuffer, numDocs,
            NUM_COLS_IN_HEADER, new int[] { SIZE_OF_INT });
    customBitSet = CustomBitSet.withByteBuffer(bitsetSize, bitsetBuffer);
    rawDataWriter = new FixedByteWidthRowColDataFileWriter(rawDataBuffer, totalNumValues, 1,
            new int[] { columnSizeInBytes });
}
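This constructor mixes two ceiling idioms: Math.ceil on a floating-point quotient for docsPerChunk, and the pure-integer form (a + b - 1) / b for numChunks. A small sketch contrasting the two, with invented values:

public class CeilIdioms {
    public static void main(String[] args) {
        int totalNumValues = 1000, numDocs = 300;
        // integer ceiling without floating point, as used for numChunks above
        int chunks = (totalNumValues + numDocs - 1) / numDocs;
        // equivalent floating-point form; the cast is what makes the division non-integer
        int chunks2 = (int) Math.ceil((double) totalNumValues / numDocs);
        System.out.println(chunks + " " + chunks2); // 4 4
    }
}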
From source file:de.dakror.villagedefense.util.SaveHandler.java
public static void loadSave(File f) {
    try {
        JSONObject o = new JSONObject(Compressor.decompressFile(f));

        Game.world.init(o.getInt("width"), o.getInt("height"));
        Game.world.setData((int) Math.ceil(o.getInt("width") / (float) (Chunk.SIZE * Tile.SIZE)),
                (int) Math.ceil(o.getInt("height") / (float) (Chunk.SIZE * Tile.SIZE)),
                Compressor.decompressRow(new BASE64Decoder().decodeBuffer(o.getString("tile"))));
        Game.currentGame.resources = new Resources(o.getJSONObject("resources"));
        if (o.has("created"))
            Game.currentGame.worldCreated = o.getInt("created");

        JSONArray researches = o.getJSONArray("researches");
        Game.currentGame.researches = new ArrayList<>();
        for (int i = 0; i < researches.length(); i++)
            Game.currentGame.researches.add(Researches.valueOf(researches.getString(i)));

        WaveManager.wave = o.getInt("wave") - 1;
        WaveManager.nextWave = o.getInt("time");
        WaveManager.init();

        JSONArray entities = o.getJSONArray("entities");
        HashMap<Integer, Creature> creaturesWithCustomData = new HashMap<>();
        for (int i = 0; i < entities.length(); i++) {
            JSONObject e = entities.getJSONObject(i);
            Entity entity = (Entity) Class.forName(e.getString("class")).getConstructor(int.class, int.class)
                    .newInstance(e.getInt("x"), e.getInt("y"));
            entity.setAttributes(new Attributes(e.getJSONObject("attributes")));
            entity.setResources(new Resources(e.getJSONObject("resources")));
            if (entity instanceof Creature) {
                Creature c = (Creature) entity;
                c.alpha = (float) e.getDouble("alpha");
                c.setSpawnPoint(new Point(e.getInt("spawnX"), e.getInt("spawnY")));
                if (!e.isNull("targetX") || !e.isNull("targetEntity") || !e.isNull("origin")) {
                    creaturesWithCustomData.put(i, c);
                    continue;
                }
            } else if (entity instanceof Struct) {
                JSONArray researches2 = e.getJSONArray("researches");
                ((Struct) entity).clearResearches();
                for (int j = 0; j < researches2.length(); j++)
                    ((Struct) entity).add(Researches.valueOf(researches2.getString(j)));
                ((Struct) entity).tx = e.getInt("tx");
                ((Struct) entity).ty = e.getInt("ty");
            }
            Game.world.addEntity2(entity, true);
        }

        // -- set creatures' custom data
        for (Iterator<Integer> iterator = creaturesWithCustomData.keySet().iterator(); iterator.hasNext();) {
            int index = iterator.next();
            JSONObject e = entities.getJSONObject(index);
            Entity entity = creaturesWithCustomData.get(index);
            if (!e.isNull("targetEntity")) {
                JSONObject tE = e.getJSONObject("targetEntity");
                for (Entity e1 : Game.world.entities) {
                    int x = (int) (e1 instanceof Creature ? e1.getX() : e1.getX() / Tile.SIZE);
                    int y = (int) (e1 instanceof Creature ? e1.getY() : e1.getY() / Tile.SIZE);
                    if (e1.getClass().getName().equals(tE.getString("class")) && tE.getInt("x") == x
                            && tE.getInt("y") == y) {
                        ((Creature) entity).setTarget(e1, false);
                        continue;
                    }
                }
            }
            if (!e.isNull("targetX")) {
                ((Creature) entity).setTarget(e.getInt("targetX"), e.getInt("targetY"), false);
            }
            if (!e.isNull("origin")) {
                JSONObject tE = e.getJSONObject("origin");
                for (Entity e1 : Game.world.entities) {
                    int x = (int) (e1 instanceof Creature ? e1.getX() : e1.getX() / Tile.SIZE);
                    int y = (int) (e1 instanceof Creature ? e1.getY() : e1.getY() / Tile.SIZE);
                    if (e1.getClass().getName().equals(tE.getString("class")) && tE.getInt("x") == x
                            && tE.getInt("y") == y) {
                        ((Creature) entity).setOrigin(e1);
                        continue;
                    }
                }
            }
            Game.world.addEntity2(entity, true);
        }

        Game.currentGame.state = 3;
    } catch (Exception e) {
        e.printStackTrace();
    }
}
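The loader above divides the world's pixel dimensions by the chunk size in pixels and rounds up, so a partially covered chunk at the world's edge is still allocated. A standalone sketch of that calculation; the sizes are invented stand-ins for Chunk.SIZE and Tile.SIZE:

public class ChunkCount {
    public static void main(String[] args) {
        int chunkSize = 16, tileSize = 32; // hypothetical stand-ins
        int worldWidthPx = 1000;
        // cast the divisor to float so the remainder past 512 px is not truncated away
        int chunksX = (int) Math.ceil(worldWidthPx / (float) (chunkSize * tileSize));
        System.out.println(chunksX); // 2: one full 512 px chunk plus a partial one
    }
}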
From source file:ch.unine.vauchers.fuseerasure.codes.SimpleRegeneratingCode.java
private void init(int stripeSize, int paritySize) {
    this.stripeSize = stripeSize;
    this.paritySize = paritySize;
    this.paritySizeRS = paritySize - paritySizeSRC;

    assert (stripeSize + paritySizeRS < GF.getFieldSize());
    assert (paritySize >= paritySizeSRC);

    // The degree of a simple parity is the number of locations
    // combined into the single parity. The degree is a function
    // of the RS-stripe (stripe + RS parity) length.
    // (The number of SRC groups is paritySizeSRC + 1, because
    // one SRC parity is implied -- not stored.)
    simpleParityDegree = (int) Math.ceil((double) (stripeSize + paritySizeRS) / (double) (paritySizeSRC + 1));
    while (simpleParityDegree * paritySizeSRC >= stripeSize + paritySizeRS) {
        LOG.info("\nInvalid code parameters." + " Reducing SRC parities to " + (paritySizeSRC - 1)
                + " Increasing RS parities to " + (paritySizeRS + 1));
        this.paritySizeSRC--;
        this.paritySizeRS++;
        simpleParityDegree = (int) Math
                .ceil((double) (stripeSize + paritySizeRS) / (double) (paritySizeSRC + 1));
    }

    this.errSignature = new int[paritySizeRS];
    this.dataBuff = new int[paritySizeRS + stripeSize];
    this.primitivePower = new int[stripeSize + paritySizeRS];

    // compute powers of the primitive root
    for (int i = 0; i < stripeSize + paritySizeRS; i++) {
        primitivePower[i] = GF.power(PRIMITIVE_ROOT, i);
    }

    // compute generating polynomial
    int[] gen = { 1 };
    int[] poly = new int[2];
    for (int i = 0; i < paritySizeRS; i++) {
        poly[0] = primitivePower[i];
        poly[1] = 1;
        gen = GF.multiply(gen, poly);
    }
    // generating polynomial has all generating roots
    generatingPolynomial = gen;

    // groupsTable[loc]: the SRC group neighbors of location loc.
    groupsTable = new int[paritySize + stripeSize][];
    for (int i = 0; i < groupsTable.length; i++) {
        List<Integer> locationsInGroup = getSRCGroupNeighbors(i);
        groupsTable[i] = new int[locationsInGroup.size()];
        int k = 0;
        for (int loc : locationsInGroup)
            groupsTable[i][k++] = loc;
    }
}
From source file:io.hops.erasure_coding.SimpleRegeneratingCode.java
private void init(int stripeSize, int paritySize) {
    this.stripeSize = stripeSize;
    this.paritySize = paritySize;
    this.paritySizeRS = paritySize - paritySizeSRC;

    assert (stripeSize + paritySizeRS < GF.getFieldSize());
    assert (paritySize >= paritySizeSRC);

    // The degree of a simple parity is the number of locations
    // combined into the single parity. The degree is a function
    // of the RS-stripe (stripe + RS parity) length.
    // (The number of SRC groups is paritySizeSRC + 1, because
    // one SRC parity is implied -- not stored.)
    simpleParityDegree = (int) Math.ceil((double) (stripeSize + paritySizeRS) / (double) (paritySizeSRC + 1));
    while (simpleParityDegree * paritySizeSRC >= stripeSize + paritySizeRS) {
        LOG.info("\nInvalid code parameters." + " Reducing SRC parities to " + (paritySizeSRC - 1)
                + " Increasing RS parities to " + (paritySizeRS + 1));
        this.paritySizeSRC--;
        this.paritySizeRS++;
        simpleParityDegree = (int) Math
                .ceil((double) (stripeSize + paritySizeRS) / (double) (paritySizeSRC + 1));
    }

    this.errSignature = new int[paritySizeRS];
    this.dataBuff = new int[paritySizeRS + stripeSize];
    this.primitivePower = new int[stripeSize + paritySizeRS];

    // compute powers of the primitive root
    for (int i = 0; i < stripeSize + paritySizeRS; i++) {
        primitivePower[i] = GF.power(PRIMITIVE_ROOT, i);
    }

    // compute generating polynomial
    int[] gen = { 1 };
    int[] poly = new int[2];
    for (int i = 0; i < paritySizeRS; i++) {
        poly[0] = primitivePower[i];
        poly[1] = 1;
        gen = GF.multiply(gen, poly);
    }
    // generating polynomial has all generating roots
    generatingPolynomial = gen;

    // groupsTable[loc]: the SRC group neighbors of location loc.
    groupsTable = new int[paritySize + stripeSize][];
    for (int i = 0; i < groupsTable.length; i++) {
        List<Integer> locationsInGroup = getSRCGroupNeighbors(i);
        groupsTable[i] = new int[locationsInGroup.size()];
        int k = 0;
        for (int loc : locationsInGroup) {
            groupsTable[i][k++] = loc;
        }
    }
}
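The two SimpleRegeneratingCode listings above are the same method from two projects, so one worked example covers both. With stripeSize = 10, paritySizeRS = 4 and paritySizeSRC = 2 (giving paritySizeSRC + 1 = 3 groups), the degree is ceil(14 / 3) = 5, and the validity check in the while loop passes since 5 * 2 = 10 < 14. As a checkable snippet with those invented parameters:

public class ParityDegree {
    public static void main(String[] args) {
        int stripeSize = 10, paritySizeRS = 4, paritySizeSRC = 2;
        int degree = (int) Math.ceil((double) (stripeSize + paritySizeRS) / (paritySizeSRC + 1));
        System.out.println(degree); // 5 locations combined into each simple parity
        // init() rejects parameters where degree * paritySizeSRC reaches
        // stripeSize + paritySizeRS; here 5 * 2 = 10 < 14, so these are valid
    }
}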
From source file:com.linkedin.pinot.core.io.writer.impl.v1.FixedByteSkipListMultiValueWriter.java
public FixedByteSkipListMultiValueWriter(File file, int numDocs, int totalNumValues, int columnSizeInBytes)
        throws Exception {
    // cast first: plain int division would truncate the average
    float averageValuesPerDoc = (float) totalNumValues / numDocs;
    this.docsPerChunk = (int) (Math.ceil(PREFERRED_NUM_VALUES_PER_CHUNK / averageValuesPerDoc));
    this.numChunks = (numDocs + docsPerChunk - 1) / docsPerChunk;
    chunkOffsetHeaderSize = numChunks * SIZE_OF_INT * NUM_COLS_IN_HEADER;
    bitsetSize = (totalNumValues + 7) / 8;
    rawDataSize = totalNumValues * columnSizeInBytes;
    totalSize = chunkOffsetHeaderSize + bitsetSize + rawDataSize;
    raf = new RandomAccessFile(file, "rw");
    chunkOffsetsBuffer = MmapUtils.mmapFile(raf, FileChannel.MapMode.READ_WRITE, 0, chunkOffsetHeaderSize,
            file, this.getClass().getSimpleName() + " chunkOffsetsBuffer");
    bitsetBuffer = MmapUtils.mmapFile(raf, FileChannel.MapMode.READ_WRITE, chunkOffsetHeaderSize, bitsetSize,
            file, this.getClass().getSimpleName() + " bitsetBuffer");
    rawDataBuffer = MmapUtils.mmapFile(raf, FileChannel.MapMode.READ_WRITE, chunkOffsetHeaderSize + bitsetSize,
            rawDataSize, file, this.getClass().getSimpleName() + " rawDataBuffer");
    chunkOffsetsWriter = new FixedByteSingleValueMultiColWriter(chunkOffsetsBuffer, numDocs,
            NUM_COLS_IN_HEADER, new int[] { SIZE_OF_INT });
    customBitSet = CustomBitSet.withByteBuffer(bitsetSize, bitsetBuffer);
    rawDataWriter = new FixedByteSingleValueMultiColWriter(rawDataBuffer, totalNumValues, 1,
            new int[] { columnSizeInBytes });
}
From source file:mylife.web.userController.java
/**
 *
 * @param pageid
 * @param request
 * @return
 */
@RequestMapping("/user/viewuser/{pageid}")
public ModelAndView viewuser(@PathVariable int pageid, HttpServletRequest request) {
    int total = 10;
    int start = 1;
    if (pageid != 1) {
        start = (pageid - 1) * total + 1;
    }
    List<user> list = dao.getuserByPage(start, total);
    HashMap<String, Object> context = new HashMap<>();
    context.put("list", list);
    int count = dao.getuserCount();
    context.put("pages", Math.ceil((float) count / (float) total));
    context.put("page", pageid);
    Message msg = (Message) request.getSession().getAttribute("message");
    if (msg != null) {
        context.put("message", msg);
        request.getSession().removeAttribute("message");
    }
    return new ModelAndView("viewuser", context);
}
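The controller computes the page count with the classic ceil(count / pageSize) formula. A minimal check with invented numbers:

public class PageCount {
    public static void main(String[] args) {
        int count = 42, total = 10; // 42 users, 10 per page
        // both operands are cast to float, so 42/10 becomes 4.2 rather than 4
        double pages = Math.ceil((float) count / (float) total);
        System.out.println(pages); // 5.0: the last page holds the remaining 2 users
    }
}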
From source file:edu.umn.cs.spatialHadoop.nasa.SpatioAggregateQueries.java
/**
 * Performs a spatio-temporal aggregate query on an indexed directory
 * @param inFile
 * @param params
 * @throws ParseException
 * @throws IOException
 * @throws InterruptedException
 */
public static AggregateQuadTree.Node aggregateQuery(Path inFile, OperationsParams params)
        throws ParseException, IOException, InterruptedException {
    // 1- Find matching temporal partitions
    final FileSystem fs = inFile.getFileSystem(params);
    Vector<Path> matchingPartitions = selectTemporalPartitions(inFile, params);

    // 2- Find all matching files (AggregateQuadTrees) in matching partitions
    final Rectangle spatialRange = params.getShape("rect", new Rectangle()).getMBR();
    // Convert spatialRange from lat/lng space to Sinusoidal space
    double cosPhiRad = Math.cos(spatialRange.y1 * Math.PI / 180);
    double southWest = spatialRange.x1 * cosPhiRad;
    double southEast = spatialRange.x2 * cosPhiRad;
    cosPhiRad = Math.cos(spatialRange.y2 * Math.PI / 180);
    double northWest = spatialRange.x1 * cosPhiRad;
    double northEast = spatialRange.x2 * cosPhiRad;
    spatialRange.x1 = Math.min(northWest, southWest);
    spatialRange.x2 = Math.max(northEast, southEast);
    // Convert to the h v space used by MODIS
    spatialRange.x1 = (spatialRange.x1 + 180.0) / 10.0;
    spatialRange.x2 = (spatialRange.x2 + 180.0) / 10.0;
    spatialRange.y2 = (90.0 - spatialRange.y2) / 10.0;
    spatialRange.y1 = (90.0 - spatialRange.y1) / 10.0;
    // Vertically flip because the Sinusoidal space increases to the south
    double tmp = spatialRange.y2;
    spatialRange.y2 = spatialRange.y1;
    spatialRange.y1 = tmp;
    // Find the range of cells in MODIS Sinusoidal grid overlapping the range
    final int h1 = (int) Math.floor(spatialRange.x1);
    final int h2 = (int) Math.ceil(spatialRange.x2);
    final int v1 = (int) Math.floor(spatialRange.y1);
    final int v2 = (int) Math.ceil(spatialRange.y2);
    PathFilter rangeFilter = new PathFilter() {
        @Override
        public boolean accept(Path p) {
            Matcher matcher = MODISTileID.matcher(p.getName());
            if (!matcher.matches())
                return false;
            int h = Integer.parseInt(matcher.group(1));
            int v = Integer.parseInt(matcher.group(2));
            return h >= h1 && h < h2 && v >= v1 && v < v2;
        }
    };
    final Vector<Path> allMatchingFiles = new Vector<Path>();
    for (Path matchingPartition : matchingPartitions) {
        // Select all matching files
        FileStatus[] matchingFiles = fs.listStatus(matchingPartition, rangeFilter);
        for (FileStatus matchingFile : matchingFiles) {
            allMatchingFiles.add(matchingFile.getPath());
        }
    }
    //noinspection SizeReplaceableByIsEmpty
    if (allMatchingFiles.isEmpty())
        return null;

    final int resolution = AggregateQuadTree.getResolution(fs, allMatchingFiles.get(0));

    // 3- Query all matching files in parallel
    List<Node> threadsResults = Parallel.forEach(allMatchingFiles.size(),
            new RunnableRange<AggregateQuadTree.Node>() {
                @Override
                public Node run(int i1, int i2) {
                    Node threadResult = new AggregateQuadTree.Node();
                    for (int i_file = i1; i_file < i2; i_file++) {
                        Path matchingFile = allMatchingFiles.get(i_file);
                        try {
                            Matcher matcher = MODISTileID.matcher(matchingFile.getName());
                            matcher.matches(); // It has to match
                            int h = Integer.parseInt(matcher.group(1));
                            int v = Integer.parseInt(matcher.group(2));
                            // Clip the query region and normalize in this tile
                            Rectangle translated = spatialRange.translate(-h, -v);
                            int x1 = (int) (Math.max(translated.x1, 0) * resolution);
                            int y1 = (int) (Math.max(translated.y1, 0) * resolution);
                            int x2 = (int) (Math.min(translated.x2, 1.0) * resolution);
                            int y2 = (int) (Math.min(translated.y2, 1.0) * resolution);
                            AggregateQuadTree.Node fileResult = AggregateQuadTree.aggregateQuery(fs,
                                    matchingFile, new java.awt.Rectangle(x1, y1, (x2 - x1), (y2 - y1)));
                            threadResult.accumulate(fileResult);
                        } catch (Exception e) {
                            throw new RuntimeException("Error reading file " + matchingFile, e);
                        }
                    }
                    return threadResult;
                }
            });
    AggregateQuadTree.Node finalResult = new AggregateQuadTree.Node();
    for (Node threadResult : threadsResults) {
        finalResult.accumulate(threadResult);
    }
    numOfTreesTouchesInLastRequest = allMatchingFiles.size();
    return finalResult;
}
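The grid clipping above pairs Math.floor on the lower bounds with Math.ceil on the upper bounds, so every partially overlapped tile falls inside the [h1, h2) x [v1, v2) range. A reduced sketch with invented tile coordinates:

public class TileRange {
    public static void main(String[] args) {
        double x1 = 18.3, x2 = 20.7; // query range in tile units, e.g. MODIS h coordinates
        // floor the minimum and ceil the maximum so partial tiles at both edges are kept
        int h1 = (int) Math.floor(x1); // 18
        int h2 = (int) Math.ceil(x2);  // 21
        for (int h = h1; h < h2; h++)
            System.out.println("tile h=" + h); // 18, 19, 20
    }
}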
From source file:de.tud.kom.p2psim.impl.overlay.dht.kademlia2.setup.WorkloadGenerator.java
/**
 * Constructs a new WorkloadGenerator and builds
 * {@link WorkloadConfig#NUMBER_OF_DATA_ITEMS} random data items.
 *
 * @param conf
 *            the Config that contains Kademlia-wide configuration
 *            constants.
 */
public WorkloadGenerator(final Config conf) {
    config = conf;
    rnd = new RandomAdaptor(Simulator.getRandom());
    dataItems = new HashMap<KademliaOverlayKey, DHTObject>(
            (int) Math.ceil(config.getNumberOfDataItems() * 1.002), 0.999f);
    buildDataItems();
}
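The constructor pre-sizes the HashMap slightly above the expected item count (factor 1.002, load factor 0.999f), rounding up with Math.ceil so the capacity is never an underestimate. A standalone version of just the sizing, with an invented item count:

import java.util.HashMap;

public class PresizedMap {
    public static void main(String[] args) {
        int numberOfDataItems = 5000; // hypothetical stand-in for config.getNumberOfDataItems()
        // round the padded count up so the capacity is never an underestimate
        int capacity = (int) Math.ceil(numberOfDataItems * 1.002);
        HashMap<Long, String> items = new HashMap<>(capacity, 0.999f);
        System.out.println(capacity); // 5010
    }
}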
From source file:com.inmobi.conduit.distcp.tools.mapred.UniformSizeInputFormat.java
private List<InputSplit> getSplits(Configuration configuration, int numSplits, long totalSizeBytes)
        throws IOException {
    List<InputSplit> splits = new ArrayList<InputSplit>(numSplits);
    long nBytesPerSplit = (long) Math.ceil(totalSizeBytes * 1.0 / numSplits);

    FileStatus srcFileStatus = new FileStatus();
    Text srcRelPath = new Text();
    long currentSplitSize = 0;
    long lastSplitStart = 0;
    long lastPosition = 0;

    final Path listingFilePath = getListingFilePath(configuration);

    if (LOG.isDebugEnabled()) {
        LOG.debug("Average bytes per map: " + nBytesPerSplit + ", Number of maps: " + numSplits
                + ", total size: " + totalSizeBytes);
    }

    SequenceFile.Reader reader = null;
    try {
        reader = getListingFileReader(configuration);
        while (reader.next(srcRelPath, srcFileStatus)) {
            // If adding the current file would push this split past the per-map
            // byte limit, close the current split and start a new one for the file
            if (currentSplitSize + srcFileStatus.getLen() > nBytesPerSplit && lastPosition != 0) {
                FileSplit split = new FileSplit(listingFilePath, lastSplitStart,
                        lastPosition - lastSplitStart, null);
                if (LOG.isDebugEnabled()) {
                    LOG.debug("Creating split : " + split + ", bytes in split: " + currentSplitSize);
                }
                splits.add(split);
                lastSplitStart = lastPosition;
                currentSplitSize = 0;
            }
            currentSplitSize += srcFileStatus.getLen();
            lastPosition = reader.getPosition();
        }
        if (lastPosition > lastSplitStart) {
            FileSplit split = new FileSplit(listingFilePath, lastSplitStart, lastPosition - lastSplitStart,
                    null);
            if (LOG.isDebugEnabled()) {
                LOG.debug("Creating split : " + split + ", bytes in split: " + currentSplitSize);
            }
            splits.add(split);
        }
    } finally {
        IOUtils.closeStream(reader);
    }
    return splits;
}
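The split planner rounds the average bytes per map up; if it truncated instead, numSplits splits of the truncated size could sum to less than the total and leave a remainder unassigned. A quick check with invented sizes:

public class SplitSize {
    public static void main(String[] args) {
        long totalSizeBytes = 1_000_003L;
        int numSplits = 4;
        // multiply by 1.0 to force floating-point division before rounding up
        long nBytesPerSplit = (long) Math.ceil(totalSizeBytes * 1.0 / numSplits);
        System.out.println(nBytesPerSplit); // 250001, and 4 * 250001 >= 1000003
    }
}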