List of usage examples for java.lang.Integer.SIZE

public static final int SIZE
The number of bits used to represent an int value in two's complement binary form; its value is 32.
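Most of the examples below use the idiom Integer.SIZE / Byte.SIZE to compute the width of an int in bytes; since Java 8 the same value is also available directly as Integer.BYTES. A minimal illustration:

public class IntegerSizeDemo {
    public static void main(String[] args) {
        System.out.println(Integer.SIZE);             // 32 (bits in an int)
        System.out.println(Integer.SIZE / Byte.SIZE); // 4  (bytes in an int)
        System.out.println(Integer.BYTES);            // 4  (Java 8+ equivalent)
    }
}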
From source file:com.moscona.dataSpace.Numeric.java
@Override
public long sizeInBytes() {
    if (value == null) {
        return 2L;
    }
    if (value.getClass() == Double.class) {
        return 2L + Double.SIZE / 8;
    }
    if (value.getClass() == Float.class) {
        return 2L + Float.SIZE / 8;
    }
    if (value.getClass() == Long.class) {
        return 2L + Long.SIZE / 8;
    }
    if (value.getClass() == Integer.class) {
        return 2L + Integer.SIZE / 8;
    }
    if (value.getClass() == Short.class) {
        return 2L + Short.SIZE / 8;
    }
    if (value.getClass() == Byte.class) {
        return 2L + Byte.SIZE / 8;
    }
    return 2L;
}
From source file:org.moeaframework.core.variable.BinaryIntegerVariable.java
/**
 * Returns the minimum number of bits required to represent an integer
 * within the given bounds.
 *
 * @param lowerBound the lower bound
 * @param upperBound the upper bound
 * @return the minimum number of bits required to represent an integer
 *         within the given bounds
 */
public static final int getNumberOfBits(int lowerBound, int upperBound) {
    return Integer.SIZE - Integer.numberOfLeadingZeros(upperBound - lowerBound);
}
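For example, representing any integer in the range [0, 10] needs 4 bits: the difference is 10 (binary 1010), Integer.numberOfLeadingZeros(10) is 28, and Integer.SIZE - 28 = 4. A minimal check of the same formula (the class name is illustrative only):

public class NumberOfBitsCheck {
    public static void main(String[] args) {
        int lowerBound = 0, upperBound = 10;
        // 10 = 0b1010 -> 28 leading zeros in a 32-bit int -> 32 - 28 = 4
        int bits = Integer.SIZE - Integer.numberOfLeadingZeros(upperBound - lowerBound);
        System.out.println(bits); // prints 4
    }
}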
From source file:org.apache.hadoop.hive.serde2.compression.SnappyCompDe.java
/**
 * Compress a set of columns.
 *
 * The header contains a compressed array of data types.
 * The body contains compressed columns and their metadata.
 * The footer contains a compressed array of chunk sizes. The final four bytes of
 * the footer encode the byte size of that compressed array.
 *
 * @param colSet
 *
 * @return ByteBuffer representing the compressed set.
 */
@Override
public ByteBuffer compress(ColumnBuffer[] colSet) {

    // Many compression libraries allow you to avoid allocation of intermediate arrays.
    // To use these API, we need to preallocate the output container.

    // Reserve space for the header.
    int[] dataType = new int[colSet.length];
    int maxCompressedSize = Snappy.maxCompressedLength(4 * dataType.length);

    // Reserve space for the compressed nulls BitSet for each column.
    maxCompressedSize += colSet.length * Snappy.maxCompressedLength((colSet.length / 8) + 1);

    // Track the length of `List<Integer> compressedSize` which will be declared later.
    int uncompressedFooterLength = 1 + 2 * colSet.length;

    for (int colNum = 0; colNum < colSet.length; ++colNum) {
        // Reserve space for the compressed columns.
        dataType[colNum] = colSet[colNum].getType().toTType().getValue();

        switch (TTypeId.findByValue(dataType[colNum])) {
        case BOOLEAN_TYPE:
            maxCompressedSize += Integer.SIZE / Byte.SIZE; // This is for the encoded length.
            maxCompressedSize += Snappy.maxCompressedLength((colSet.length / 8) + 1);
            break;
        case TINYINT_TYPE:
            maxCompressedSize += Snappy.maxCompressedLength(colSet.length);
            break;
        case SMALLINT_TYPE:
            maxCompressedSize += Snappy.maxCompressedLength(colSet.length * Short.SIZE / Byte.SIZE);
            break;
        case INT_TYPE:
            maxCompressedSize += Snappy.maxCompressedLength(colSet.length * Integer.SIZE / Byte.SIZE);
            break;
        case BIGINT_TYPE:
            maxCompressedSize += Snappy.maxCompressedLength(colSet.length * Long.SIZE / Byte.SIZE);
            break;
        case DOUBLE_TYPE:
            maxCompressedSize += Snappy.maxCompressedLength(colSet.length * Double.SIZE / Byte.SIZE);
            break;
        case BINARY_TYPE:
            // Reserve space for the size of the compressed array of row sizes.
            maxCompressedSize += Snappy.maxCompressedLength(colSet.length * Integer.SIZE / Byte.SIZE);

            // Reserve space for the size of the compressed flattened bytes.
            for (ByteBuffer nextBuffer : colSet[colNum].toTColumn().getBinaryVal().getValues()) {
                maxCompressedSize += Snappy.maxCompressedLength(nextBuffer.limit());
            }

            // Add an additional value to the list of compressed chunk sizes (length of `rowSize` array).
            uncompressedFooterLength++;
            break;
        case STRING_TYPE:
            // Reserve space for the size of the compressed array of row sizes.
            maxCompressedSize += Snappy.maxCompressedLength(colSet.length * Integer.SIZE / Byte.SIZE);

            // Reserve space for the size of the compressed flattened bytes.
            for (String nextString : colSet[colNum].toTColumn().getStringVal().getValues()) {
                maxCompressedSize += Snappy
                        .maxCompressedLength(nextString.getBytes(StandardCharsets.UTF_8).length);
            }

            // Add an additional value to the list of compressed chunk sizes (length of `rowSize` array).
            uncompressedFooterLength++;
            break;
        default:
            throw new IllegalStateException("Unrecognized column type");
        }
    }

    // Reserve space for the footer.
    maxCompressedSize += Snappy.maxCompressedLength(uncompressedFooterLength * Integer.SIZE / Byte.SIZE);

    // Allocate the output container.
    ByteBuffer output = ByteBuffer.allocate(maxCompressedSize);

    // Allocate the footer. This goes in the footer because we don't know the chunk sizes until after
    // the columns have been compressed and written.
    ArrayList<Integer> compressedSize = new ArrayList<Integer>(uncompressedFooterLength);

    // Write to the output buffer.
    try {
        // Write the header.
        compressedSize.add(writePrimitives(dataType, output));

        // Write the compressed columns and metadata.
        for (int colNum = 0; colNum < colSet.length; colNum++) {
            switch (TTypeId.findByValue(dataType[colNum])) {
            case BOOLEAN_TYPE: {
                TBoolColumn column = colSet[colNum].toTColumn().getBoolVal();

                List<Boolean> bools = column.getValues();
                BitSet bsBools = new BitSet(bools.size());
                for (int rowNum = 0; rowNum < bools.size(); rowNum++) {
                    bsBools.set(rowNum, bools.get(rowNum));
                }

                compressedSize.add(writePrimitives(column.getNulls(), output));

                // BitSet won't write trailing zeroes so we encode the length
                output.putInt(column.getValuesSize());

                compressedSize.add(writePrimitives(bsBools.toByteArray(), output));
                break;
            }
            case TINYINT_TYPE: {
                TByteColumn column = colSet[colNum].toTColumn().getByteVal();
                compressedSize.add(writePrimitives(column.getNulls(), output));
                compressedSize.add(writeBoxedBytes(column.getValues(), output));
                break;
            }
            case SMALLINT_TYPE: {
                TI16Column column = colSet[colNum].toTColumn().getI16Val();
                compressedSize.add(writePrimitives(column.getNulls(), output));
                compressedSize.add(writeBoxedShorts(column.getValues(), output));
                break;
            }
            case INT_TYPE: {
                TI32Column column = colSet[colNum].toTColumn().getI32Val();
                compressedSize.add(writePrimitives(column.getNulls(), output));
                compressedSize.add(writeBoxedIntegers(column.getValues(), output));
                break;
            }
            case BIGINT_TYPE: {
                TI64Column column = colSet[colNum].toTColumn().getI64Val();
                compressedSize.add(writePrimitives(column.getNulls(), output));
                compressedSize.add(writeBoxedLongs(column.getValues(), output));
                break;
            }
            case DOUBLE_TYPE: {
                TDoubleColumn column = colSet[colNum].toTColumn().getDoubleVal();
                compressedSize.add(writePrimitives(column.getNulls(), output));
                compressedSize.add(writeBoxedDoubles(column.getValues(), output));
                break;
            }
            case BINARY_TYPE: {
                TBinaryColumn column = colSet[colNum].toTColumn().getBinaryVal();

                // Initialize the array of row sizes.
                int[] rowSizes = new int[column.getValuesSize()];
                int totalSize = 0;
                for (int rowNum = 0; rowNum < column.getValuesSize(); rowNum++) {
                    rowSizes[rowNum] = column.getValues().get(rowNum).limit();
                    totalSize += column.getValues().get(rowNum).limit();
                }

                // Flatten the data for Snappy for a better compression ratio.
                ByteBuffer flattenedData = ByteBuffer.allocate(totalSize);
                for (int rowNum = 0; rowNum < column.getValuesSize(); rowNum++) {
                    flattenedData.put(column.getValues().get(rowNum));
                }

                // Write nulls bitmap.
                compressedSize.add(writePrimitives(column.getNulls(), output));

                // Write the list of row sizes.
                compressedSize.add(writePrimitives(rowSizes, output));

                // Write the compressed, flattened data.
                compressedSize.add(writePrimitives(flattenedData.array(), output));
                break;
            }
            case STRING_TYPE: {
                TStringColumn column = colSet[colNum].toTColumn().getStringVal();

                // Initialize the array of row sizes.
                int[] rowSizes = new int[column.getValuesSize()];
                int totalSize = 0;
                for (int rowNum = 0; rowNum < column.getValuesSize(); rowNum++) {
                    rowSizes[rowNum] = column.getValues().get(rowNum).length();
                    totalSize += column.getValues().get(rowNum).length();
                }

                // Flatten the data for Snappy for a better compression ratio.
                StringBuilder flattenedData = new StringBuilder(totalSize);
                for (int rowNum = 0; rowNum < column.getValuesSize(); rowNum++) {
                    flattenedData.append(column.getValues().get(rowNum));
                }

                // Write nulls bitmap.
                compressedSize.add(writePrimitives(column.getNulls(), output));

                // Write the list of row sizes.
                compressedSize.add(writePrimitives(rowSizes, output));

                // Write the flattened data.
                compressedSize.add(
                        writePrimitives(flattenedData.toString().getBytes(StandardCharsets.UTF_8), output));
                break;
            }
            default:
                throw new IllegalStateException("Unrecognized column type");
            }
        }

        // Write the footer.
        output.putInt(writeBoxedIntegers(compressedSize, output));

    } catch (IOException e) {
        e.printStackTrace();
    }

    output.flip();
    return output;
}
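As the javadoc above notes, the final Integer.SIZE / Byte.SIZE (= 4) bytes of the returned buffer hold the byte size of the Snappy-compressed footer written just before them. A hedged sketch of how a reader might recover that footer, assuming the xerial Snappy API used above and that writeBoxedIntegers writes the footer as a single Snappy block; the class and method names are illustrative, not part of SnappyCompDe:

import java.io.IOException;
import java.nio.ByteBuffer;
import org.xerial.snappy.Snappy;

class FooterReaderSketch {
    // Returns the decompressed chunk-size footer of a buffer produced by compress().
    static byte[] readFooter(ByteBuffer compressed) throws IOException {
        byte[] bytes = new byte[compressed.remaining()];
        compressed.duplicate().get(bytes);

        // Last 4 bytes: length of the compressed footer.
        int intBytes = Integer.SIZE / Byte.SIZE;
        int footerLength = ByteBuffer.wrap(bytes, bytes.length - intBytes, intBytes).getInt();

        // The compressed footer sits immediately before that length field.
        byte[] compressedFooter = new byte[footerLength];
        System.arraycopy(bytes, bytes.length - intBytes - footerLength, compressedFooter, 0, footerLength);
        return Snappy.uncompress(compressedFooter);
    }
}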
From source file:com.brotherpowers.cameraview.AspectRatio.java
@Override
public int hashCode() {
    // assuming most sizes are <2^16, doing a rotate will give us perfect hashing
    return mY ^ ((mX << (Integer.SIZE / 2)) | (mX >>> (Integer.SIZE / 2)));
}
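Since Integer.SIZE / 2 is 16, the shift pair is simply a 16-bit rotation of mX. An equivalent formulation using the standard library, shown as a standalone method with x and y standing in for mX and mY:

static int aspectRatioHash(int x, int y) {
    // (x << 16) | (x >>> 16) is the same as Integer.rotateLeft(x, 16)
    return y ^ Integer.rotateLeft(x, Integer.SIZE / 2);
}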
From source file:org.openmrs.module.pharmacy.fragment.controller.AssociatedOrderViewFragmentController.java
public void controller(PageModel model, @RequestParam(value = "action", required = false) String action,
        @RequestParam(value = "pharmaOrderID", required = false) String pharmaOrderID,
        @RequestParam("patientId") Patient patient) {

    HashMap<Integer, List<drugorders>> associatedOrderExtn = new HashMap<>();
    HashMap<Integer, DrugOrder> associatedOrderMain = new HashMap<>();
    HashMap<Integer, drugorders> allOrdersExtn = new HashMap<>();
    HashMap<Integer, DrugOrder> allOrdersMain = new HashMap<>();
    HashMap<Integer, List<String>> otherOrders = new HashMap<>();
    HashMap<Integer, String> OrdererName = new HashMap<>();

    if (StringUtils.isNotBlank(action)) {
        try {
            if ("Confirm".equals(action)) {
                drugorders drugorder = Context.getService(drugordersService.class)
                        .getDrugOrderByOrderID(Integer.parseInt(pharmaOrderID));

                if (drugorder.getGroupId() != null) {
                    // Fetch all Orders that were ordered as a group with the recorded Order
                    List<drugorders> orderExtn = Context.getService(drugordersService.class)
                            .getDrugOrdersByGroupID(drugorder.getGroupId());
                    associatedOrderExtn.put(drugorder.getGroupId(), orderExtn);

                    for (drugorders oExtn : orderExtn) {
                        associatedOrderMain.put(oExtn.getOrderId(),
                                (DrugOrder) Context.getOrderService().getOrder(oExtn.getOrderId()));
                        otherOrders.put(oExtn.getOrderId(), pullAssociatedGroupOrders(oExtn));
                        Person person = Context.getOrderService().getOrder(oExtn.getOrderId()).getOrderer()
                                .getPerson();
                        OrdererName.put(oExtn.getOrderId(), person.getGivenName() + " " + person.getFamilyName());
                    }
                } else if (Context.getService(planordersService.class)
                        .getDrugOrderByOrderID(Integer.parseInt(pharmaOrderID)) != null) {
                    // Fetch all Orders that were ordered as a part of Med Plan with the recorded Order
                    List<planorders> planOrderList = Context.getService(planordersService.class)
                            .getDrugOrdersByPlanAndPatient(drugorder.getAssociatedDiagnosis(),
                                    Context.getPatientService().getPatient(drugorder.getPatientId()));
                    List<drugorders> orderExtn = new ArrayList<>();

                    for (planorders planOrder : planOrderList) {
                        associatedOrderMain.put(planOrder.getOrderId(),
                                (DrugOrder) Context.getOrderService().getOrder(planOrder.getOrderId()));
                        orderExtn.add(Context.getService(drugordersService.class)
                                .getDrugOrderByOrderID(planOrder.getOrderId()));
                    }
                    associatedOrderExtn.put(Integer.SIZE, orderExtn);

                    for (drugorders extn : orderExtn) {
                        otherOrders.put(extn.getOrderId(), pullAssociatedPlanOrders(extn));
                        Person person = Context.getOrderService().getOrder(extn.getOrderId()).getOrderer()
                                .getPerson();
                        OrdererName.put(extn.getOrderId(), person.getGivenName() + " " + person.getFamilyName());
                    }
                }

                // Fetch all other Orders placed for the given Patient that are not a part of the same group as recorded Order
                List<drugorders> allExtn = Context.getService(drugordersService.class)
                        .getDrugOrdersByPatient(patient);

                for (drugorders extn : allExtn) {
                    if (!associatedOrderMain.containsKey(extn.getOrderId())) {
                        allOrdersExtn.put(extn.getOrderId(), extn);
                        DrugOrder main = (DrugOrder) Context.getOrderService().getOrder(extn.getOrderId());
                        allOrdersMain.put(main.getOrderId(), main);
                    }

                    switch (extn.getOrderStatus()) {
                    case "Active-Group":
                        otherOrders.put(extn.getOrderId(), pullAssociatedGroupOrders(extn));
                        break;
                    case "Active-Plan":
                        otherOrders.put(extn.getOrderId(), pullAssociatedPlanOrders(extn));
                        break;
                    }

                    Person person = Context.getOrderService().getOrder(extn.getOrderId()).getOrderer()
                            .getPerson();
                    OrdererName.put(extn.getOrderId(), person.getGivenName() + " " + person.getFamilyName());
                }
            }
        } catch (NumberFormatException | APIException e) {
            System.out.println(e.toString());
        }
    }

    model.addAttribute("associatedOrderExtn", associatedOrderExtn);
    model.addAttribute("associatedOrderMain", associatedOrderMain);
    model.addAttribute("allOrdersExtn", allOrdersExtn);
    model.addAttribute("allOrdersMain", allOrdersMain);
    model.addAttribute("otherOrders", otherOrders);
    model.addAttribute("assocOrdererName", OrdererName);
}
From source file:org.wso2.carbon.analytics.data.commons.utils.AnalyticsCommonUtils.java
public static Object deserializeObject(byte[] source) {
    if (source == null) {
        return null;
    }
    /* skip the object size integer */
    try (Input input = new Input(Arrays.copyOfRange(source, Integer.SIZE / 8, source.length))) {
        Kryo kryo = kryoTL.get();
        return kryo.readClassAndObject(input);
    }
}
From source file:com.alibaba.jstorm.utils.JStormUtils.java
public static byte[] barr(Integer v) {
    byte[] byteArray = new byte[Integer.SIZE / 8];
    for (int i = 0; i < byteArray.length; i++) {
        int off = (byteArray.length - 1 - i) * 8;
        byteArray[i] = (byte) ((v >> off) & 0xFF);
    }
    return byteArray;
}
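barr(v) emits the four bytes of v in big-endian order, which matches the default byte order of ByteBuffer. A small check of that layout (independent of JStormUtils):

import java.nio.ByteBuffer;
import java.util.Arrays;

public class BarrLayoutCheck {
    public static void main(String[] args) {
        int v = 0x0A0B0C0D;
        // Same big-endian layout barr(v) produces, via ByteBuffer's default byte order.
        byte[] expected = ByteBuffer.allocate(Integer.SIZE / Byte.SIZE).putInt(v).array();
        System.out.println(Arrays.toString(expected)); // [10, 11, 12, 13]
    }
}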
From source file:ie.peternagy.jcrypto.algo.EllipticCurveWrapper.java
/**
 * Create raw header - includes version, keyId, crc
 *
 * @param dataCrc
 * @return
 */
public byte[] createRawHeader(long dataCrc) {
    try {
        byte[] keyId = getKeyId();
        ByteArrayOutputStream header = new ByteArrayOutputStream();
        header.write((byte) 100); // version
        header.write(ByteBuffer.allocate(Integer.SIZE / Byte.SIZE).putInt(keyId.length).array()); // key id length
        header.write(ByteBuffer.allocate(Long.SIZE / Byte.SIZE).putLong(dataCrc).array());
        header.write(keyId);

        return header.toByteArray();
    } catch (IOException ex) {
        Logger.getLogger(EllipticCurveWrapper.class.getName()).log(Level.SEVERE, null, ex);
    }
    return null;
}
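The header layout written above is: one version byte, a 4-byte key-id length (Integer.SIZE / Byte.SIZE), an 8-byte CRC (Long.SIZE / Byte.SIZE), then the key-id bytes. A hedged sketch of reading it back; the parse method is illustrative and not part of EllipticCurveWrapper:

import java.nio.ByteBuffer;

class RawHeaderParseSketch {
    static void parse(byte[] rawHeader) {
        ByteBuffer buf = ByteBuffer.wrap(rawHeader);
        byte version = buf.get();        // written as (byte) 100
        int keyIdLength = buf.getInt();  // 4-byte key id length
        long dataCrc = buf.getLong();    // 8-byte CRC
        byte[] keyId = new byte[keyIdLength];
        buf.get(keyId);
        System.out.printf("version=%d, crc=%d, keyId bytes=%d%n", version, dataCrc, keyIdLength);
    }
}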
From source file:org.wso2.carbon.analytics.data.commons.utils.AnalyticsCommonUtils.java
public static byte[] serializeObject(Object obj) {
    Kryo kryo = kryoTL.get();
    ByteArrayOutputStream byteOut = new ByteArrayOutputStream();
    try (Output out = new Output(byteOut)) {
        kryo.writeClassAndObject(out, obj);
        out.flush();
        byte[] data = byteOut.toByteArray();
        ByteBuffer result = ByteBuffer.allocate(data.length + Integer.SIZE / 8);
        result.putInt(data.length);
        result.put(data);
        return result.array();
    }
}
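serializeObject prefixes the Kryo payload with a 4-byte length, which is exactly the prefix that deserializeObject (shown earlier) skips with Arrays.copyOfRange(source, Integer.SIZE / 8, source.length). A round-trip sketch, assuming AnalyticsCommonUtils and its Kryo setup are available on the classpath:

import org.wso2.carbon.analytics.data.commons.utils.AnalyticsCommonUtils;

public class KryoRoundTripSketch {
    public static void main(String[] args) {
        byte[] bytes = AnalyticsCommonUtils.serializeObject("hello");
        // The first Integer.SIZE / 8 = 4 bytes hold the payload length.
        Object restored = AnalyticsCommonUtils.deserializeObject(bytes);
        System.out.println(restored); // hello
    }
}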
From source file:ie.peternagy.jcrypto.algo.AesWrapper.java
protected byte[] createHeader() {
    try {
        byte[] garbageByte = CryptoSecurityUtil.getSecureBytes(CryptoSecurityUtil.getRandomIntInRange(0, 768));
        byte[] baseKeyEnc = curve.doFinalWithHeader(baseKey, true);
        ByteArrayOutputStream header = new ByteArrayOutputStream();
        header.write((byte) 100);
        header.write(ByteBuffer.allocate(Integer.SIZE / Byte.SIZE).putInt(iv.length).array());
        header.write(ByteBuffer.allocate(Integer.SIZE / Byte.SIZE).putInt(salt.length).array());
        header.write(ByteBuffer.allocate(Integer.SIZE / Byte.SIZE).putInt(garbageByte.length).array());
        header.write(ByteBuffer.allocate(Integer.SIZE / Byte.SIZE).putInt(baseKeyEnc.length).array());
        header.write(iv);
        header.write(salt);
        header.write(garbageByte);
        header.write(baseKeyEnc); // encrypt with EC
        // include: long and crc32 for data

        return header.toByteArray();
    } catch (IOException ex) {
        Logger.getLogger(AesWrapper.class.getName()).log(Level.SEVERE, null, ex);
    }
    return null;
}