List of usage examples for java.lang Integer SIZE
public static final int SIZE
The number of bits used to represent an int value in two's complement binary form: 32.
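Before the examples, a minimal sketch of the constant itself and the bytes-per-int idiom the examples below rely on (Integer.BYTES requires Java 8+):

public class IntegerSizeDemo {
    public static void main(String[] args) {
        System.out.println(Integer.SIZE);             // 32: bits in an int
        System.out.println(Integer.SIZE / Byte.SIZE); // 4: bytes per int
        System.out.println(Integer.BYTES);            // 4: the Java 8+ shorthand
    }
}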
From source file:com.linkedin.pinot.core.startree.OffHeapStarTreeBuilder.java
public void init(StarTreeBuilderConfig builderConfig) throws Exception {
  schema = builderConfig.schema;
  timeColumnName = schema.getTimeColumnName();
  this.dimensionsSplitOrder = builderConfig.dimensionsSplitOrder;
  skipStarNodeCreationForDimensions = builderConfig.getSkipStarNodeCreationForDimensions();
  skipMaterializationForDimensions = builderConfig.getSkipMaterializationForDimensions();
  skipMaterializationCardinalityThreshold = builderConfig.getSkipMaterializationCardinalityThreshold();
  enableOffHeapFormat = builderConfig.isEnableOffHealpFormat();
  this.maxLeafRecords = builderConfig.maxLeafRecords;
  this.outDir = builderConfig.getOutDir();
  if (outDir == null) {
    outDir = new File(System.getProperty("java.io.tmpdir"),
        V1Constants.STAR_TREE_INDEX_DIR + "_" + DateTime.now());
  }
  LOG.info("Index output directory:{}", outDir);

  dimensionTypes = new ArrayList<>();
  dimensionNames = new ArrayList<>();
  dimensionNameToIndexMap = HashBiMap.create();
  dimensionNameToStarValueMap = new HashMap<>();
  dictionaryMap = new HashMap<>();

  // READ DIMENSIONS COLUMNS
  List<DimensionFieldSpec> dimensionFieldSpecs = schema.getDimensionFieldSpecs();
  for (int index = 0; index < dimensionFieldSpecs.size(); index++) {
    DimensionFieldSpec spec = dimensionFieldSpecs.get(index);
    String dimensionName = spec.getName();
    dimensionNames.add(dimensionName);
    dimensionNameToIndexMap.put(dimensionName, index);
    Object starValue;
    starValue = getAllStarValue(spec);
    dimensionNameToStarValueMap.put(dimensionName, starValue);
    dimensionTypes.add(spec.getDataType());
    HashBiMap<Object, Integer> dictionary = HashBiMap.create();
    dictionaryMap.put(dimensionName, dictionary);
  }

  // treat time column as just another dimension, only difference is that we will never split on
  // this dimension unless explicitly specified in split order
  if (timeColumnName != null) {
    dimensionNames.add(timeColumnName);
    TimeFieldSpec timeFieldSpec = schema.getTimeFieldSpec();
    dimensionTypes.add(timeFieldSpec.getDataType());
    int index = dimensionNameToIndexMap.size();
    dimensionNameToIndexMap.put(timeColumnName, index);
    Object starValue;
    starValue = getAllStarValue(timeFieldSpec);
    dimensionNameToStarValueMap.put(timeColumnName, starValue);
    HashBiMap<Object, Integer> dictionary = HashBiMap.create();
    dictionaryMap.put(schema.getTimeColumnName(), dictionary);
  }
  dimensionSizeBytes = dimensionNames.size() * Integer.SIZE / 8;
  this.numDimensions = dimensionNames.size();

  // READ METRIC COLUMNS
  this.metricNames = new ArrayList<>();
  this.metricNameToIndexMap = new HashMap<>();
  this.metricSizeBytes = 0;
  List<MetricFieldSpec> metricFieldSpecs = schema.getMetricFieldSpecs();
  for (int index = 0; index < metricFieldSpecs.size(); index++) {
    MetricFieldSpec spec = metricFieldSpecs.get(index);
    String metricName = spec.getName();
    metricNames.add(metricName);
    metricNameToIndexMap.put(metricName, index);
    metricSizeBytes += spec.getFieldSize();
  }
  numMetrics = metricNames.size();

  builderConfig.getOutDir().mkdirs();
  dataFile = new File(outDir, "star-tree.buf");
  LOG.info("StarTree output data file: {}", dataFile.getAbsolutePath());
  dataBuffer = new DataOutputStream(new BufferedOutputStream(new FileOutputStream(dataFile)));

  // INITIALIZE THE ROOT NODE
  this.starTreeRootIndexNode = new StarTreeIndexNode();
  this.starTreeRootIndexNode.setDimensionName(StarTreeIndexNodeInterf.ALL);
  this.starTreeRootIndexNode.setDimensionValue(StarTreeIndexNodeInterf.ALL);
  this.starTreeRootIndexNode.setLevel(0);
  LOG.info("dimensionNames:{}", dimensionNames);
  LOG.info("metricNames:{}", metricNames);
}
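The dimension side of each record is fixed-width: every dimension value is dictionary-encoded to an int id (note the HashBiMap&lt;Object, Integer&gt; dictionaries), so dimensionSizeBytes is simply dimensionNames.size() * Integer.SIZE / 8, i.e. 4 bytes per dimension.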
From source file:de.hpi.fgis.hdrs.Triple.java
/**
 * Estimate the total in-memory size of this triple, including headers and data.
 * This estimate is for 32-bit JVMs.
 * @return Estimated size of this triple in bytes.
 */
public int estimateSize() {
    return (9 * Integer.SIZE) / Byte.SIZE + bufferSize();
}
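The fixed overhead here works out to (9 * 32) / 8 = 36 bytes, i.e. nine int-sized slots, on top of whatever bufferSize() reports for the triple's variable-length data.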
From source file:de.rwhq.btree.InnerNode.java
private int getSizeOfPageId() {
    return Integer.SIZE / 8;
}
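Integer.SIZE / 8 evaluates to 4, the serialized footprint in bytes of one int page id. The same idiom recurs in the other InnerNode methods below; since Java 8, Integer.BYTES expresses the value directly.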
From source file:com.xsdn.main.util.Ip4Network.java
/**
 * Construct a new instance.
 *
 * @param bytes  A byte array which represents an IPv4 address.
 * @param prefix Prefix length that specifies network range.
 *               Note that zero means "no mask". So zero is treated as if
 *               the maximum prefix length is specified.
 * @throws NullPointerException
 *     {@code bytes} is {@code null}.
 * @throws IllegalArgumentException
 *     The given prefix length is invalid.
 * @throws IllegalArgumentException
 *     The given byte address does not represent an IPv4 address.
 */
public Ip4Network(byte[] bytes, int prefix) {
    super(prefix);
    int addr = NumberUtils.toInteger(bytes);
    int plen = getPrefixLength();
    int mask = getNetMask(plen);
    netMask = mask;
    if (plen == Integer.SIZE) {
        address = addr;
        byteAddress = bytes.clone();
    } else {
        address = addr & mask;
    }
}
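Integer.SIZE doubles here as the full IPv4 prefix length: with a /32 prefix the address is stored verbatim (and the original byte array cached), while any shorter prefix masks off the host bits.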
From source file:de.rwhq.btree.InnerNode.java
private int getOffsetForLeftPageIdOfKey(final int i) {
    return new KeyStruct(i).getOffset() - Integer.SIZE / 8;
}
From source file:com.xsdn.main.util.Ip4Network.java
/**
 * Construct a new instance.
 *
 * <p>
 * This constructor specifies 32 as the CIDR prefix length.
 * </p>
 *
 * @param iaddr An {@link InetAddress} instance which represents an IPv4
 *              address.
 * @throws NullPointerException
 *     {@code iaddr} is {@code null}.
 * @throws IllegalArgumentException
 *     The given {@link InetAddress} instance does not represent an IPv4
 *     address.
 */
public Ip4Network(InetAddress iaddr) {
    super(iaddr, Integer.SIZE);
}
From source file:cn.ac.ncic.mastiff.io.coding.RedBlackTreeStringReader.java
@Override
public byte[] ensureDecompressed() throws IOException {
  DataOutputBuffer transfer = new DataOutputBuffer();
  transfer.write(inBuf.getData(), 12, inBuf.getLength() - 12);
  DataInputBuffer dib = new DataInputBuffer();
  dib.reset(transfer.getData(), 0, transfer.getLength());
  int dictionarySize = dib.readInt();
  int length1 = dib.readInt();
  byte[] data = transfer.getData();
  transfer.close();
  dib.reset(data, Integer.SIZE + Integer.SIZE, length1);
  FlexibleEncoding.ORC.StreamName name =
      new FlexibleEncoding.ORC.StreamName(0, OrcProto.Stream.Kind.DICTIONARY_DATA);
  ByteBuffer inBuf1 = ByteBuffer.allocate(length1);
  inBuf1.put(dib.getData(), 0, dib.getLength());
  inBuf1.flip();
  InStream in = InStream.create("test1", inBuf1, null, dictionarySize);
  if (in.available() > 0) {
    dictionaryBuffer = new DynamicByteArray(64, in.available());
    dictionaryBuffer.readAll(in);
    in.close();
    // read the lengths google proto buffer
    name = new StreamName(1, OrcProto.Stream.Kind.LENGTH);
    dib.reset(data, 4 + 4 + length1, 4);
    int length2 = dib.readInt();
    dib.reset(data, 4 + 4 + length1 + 4, length2);
    // in = streams.get(name);
    ByteBuffer inBuf2 = ByteBuffer.allocate(length2);
    inBuf2.put(dib.getData(), 0, length2);
    inBuf2.flip();
    in = InStream.create("test2", inBuf2, null, dictionarySize);
    // IntegerReader lenReader = createIntegerReader(encodings.get(columnId)
    //     .getKind(), in, false);
    IntegerReader lenReader = createIntegerReader(OrcProto.ColumnEncoding.Kind.DIRECT_V2, in, false);
    int offset = 0;
    dictionaryOffsets = new int[dictionarySize + 1];
    for (int i = 0; i < dictionarySize; ++i) {
      dictionaryOffsets[i] = offset;
      offset += (int) lenReader.next();
    }
    dictionaryOffsets[dictionarySize] = offset;
    in.close();
    name = new FlexibleEncoding.ORC.StreamName(2, OrcProto.Stream.Kind.DATA);
    dib.reset(data, 4 + 4 + length1 + 4 + length2, 4);
    int length3 = dib.readInt();
    dib.reset(data, 4 + 4 + length1 + 4 + length2 + 4, length3);
    ByteBuffer inBuf3 = ByteBuffer.allocate(length3);
    inBuf3.put(dib.getData(), 0, length3);
    inBuf3.flip();
    in = InStream.create("test3", inBuf3, null, dictionarySize);
    reader = createIntegerReader(OrcProto.ColumnEncoding.Kind.DIRECT_V2, in, false);
  }
  inBuf.close();
  DataOutputBuffer decoding = new DataOutputBuffer();
  DataOutputBuffer offsets = new DataOutputBuffer();
  decoding.writeInt(decompressedSize);
  decoding.writeInt(numPairs);
  decoding.writeInt(startPos);
  int dataoffset = 12;
  String str;
  for (int i = 0; i < numPairs; i++) {
    str = readEachValue(null);
    decoding.writeUTF(str);
    // if (i < 5) {
    //   System.out.println("304 bin[i] " + str + " decoding " + decoding.size());
    // }
    dataoffset = decoding.size();
    offsets.writeInt(dataoffset);
  }
  System.out.println("315 offset.size() " + offsets.size() + " decoding.szie " + decoding.size());
  System.out.println("316 dataoffet " + dataoffset);
  decoding.write(offsets.getData(), 0, offsets.size());
  inBuf.close();
  offsets.close();
  dib.close();
  System.out.println("316 decoding " + decoding.size() + decoding.getLength() + " decoding.getData() "
      + decoding.getData().length);
  inBuf1.clear();
  return decoding.getData();
}
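One detail worth noticing in this excerpt: dib.reset(data, Integer.SIZE + Integer.SIZE, length1) passes a bit count (32 + 32 = 64) where a byte offset is expected, whereas the later reads address the same two leading ints as 4 + 4 bytes. The byte-consistent expression would be 2 * (Integer.SIZE / Byte.SIZE) = 8.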
From source file:de.rwhq.btree.InnerNode.java
public int minPageSize() {
    return Header.size() + 3 * keySerializer.getSerializedLength() + 4 * Integer.SIZE / 8;
}
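Reading off the formula: room for the page header, three serialized keys, and four 4-byte page ids. In a B-tree inner node, k keys reference k + 1 children, which is presumably why one more page id than key is reserved.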
From source file:com.xsdn.main.util.Ip4Network.java
/**
 * {@inheritDoc}
 */
@Override
public int getMaxPrefix() {
    return Integer.SIZE;
}
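Ip4Network consistently treats Integer.SIZE as the maximum CIDR prefix, since an IPv4 address occupies exactly one 32-bit int. A minimal sketch of how a prefix length maps to a netmask under that representation (maskFor is a hypothetical helper, not part of this class):

static int maskFor(int prefix) {
    // A /0 prefix selects no network bits; otherwise shift ones in from the top.
    // The guard matters: in Java, -1 << 32 is -1, not 0.
    return (prefix == 0) ? 0 : -1 << (Integer.SIZE - prefix);
}

For example, maskFor(24) yields 0xFFFFFF00 and maskFor(Integer.SIZE) yields 0xFFFFFFFF, the all-ones /32 mask.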
From source file:org.kiji.schema.FormattedEntityId.java
/**
 * Create an HBase row key, which is a byte array, from the given formatted kijiRowKey.
 * This method requires that the kijiRowKey argument is the correct length for the specified
 * format.
 * The following encoding will be used to ensure correct ordering:
 * Strings are UTF-8 encoded and terminated by a null byte. Strings cannot contain "\u0000".
 * Integers are exactly 4 bytes long.
 * Longs are exactly 8 bytes long.
 * Both integers and longs have the sign bit flipped so that their values are wrapped around to
 * create the correct lexicographic ordering (i.e. after converting to a byte array,
 * MIN_INT < 0 < MAX_INT).
 * Hashed components are exactly hash_size bytes long and are the first component of
 * the HBase key.
 * Except for the first, all components of a kijiRowKey can be null. However, to maintain
 * ordering, all components to the right of a null component must also be null. The nullable
 * index in the row key format specifies which component (and hence which following components)
 * are nullable.
 * By default, the hash only uses the first component, but this can be changed using the Range
 * Scan index.
 *
 * @param format The formatted row key format for this table.
 * @param kijiRowKey An ordered list of Objects of the key components.
 * @return A byte array representing the encoded HBase row key.
 */
private static byte[] makeHbaseRowKey(RowKeyFormat2 format, List<Object> kijiRowKey) {
  ArrayList<byte[]> hbaseKey = new ArrayList<byte[]>();
  final byte zeroDelim = 0;
  int pos;
  for (pos = 0; pos < kijiRowKey.size(); pos++) {
    // we have already done the validation check for null cascades.
    if (null == kijiRowKey.get(pos)) {
      continue;
    }
    byte[] tempBytes;
    switch (getType(kijiRowKey.get(pos))) {
    case STRING:
      if (((String) kijiRowKey.get(pos)).contains("\u0000")) {
        throw new EntityIdException("String component cannot contain \u0000");
      }
      try {
        hbaseKey.add(((String) kijiRowKey.get(pos)).getBytes("UTF-8"));
      } catch (UnsupportedEncodingException e) {
        LOG.error(e.toString());
        throw new EntityIdException(String.format("UnsupportedEncoding for component %d", pos));
      }
      break;
    case INTEGER:
      int temp = (Integer) kijiRowKey.get(pos);
      tempBytes = ByteBuffer.allocate(Integer.SIZE / Byte.SIZE).putInt(temp).array();
      tempBytes[0] = (byte) ((int) tempBytes[0] ^ (int) Byte.MIN_VALUE);
      hbaseKey.add(tempBytes);
      break;
    case LONG:
      long templong = (Long) kijiRowKey.get(pos);
      tempBytes = ByteBuffer.allocate(Long.SIZE / Byte.SIZE).putLong(templong).array();
      tempBytes[0] = (byte) ((int) tempBytes[0] ^ (int) Byte.MIN_VALUE);
      hbaseKey.add(tempBytes);
      break;
    default:
      throw new RuntimeException("Invalid code path");
    }
  }

  // hash stuff
  int hashUpto = format.getRangeScanStartIndex() - 1;
  ByteArrayOutputStream tohash = new ByteArrayOutputStream();
  ByteArrayOutputStream baos = new ByteArrayOutputStream();
  for (pos = 0; pos <= hashUpto && pos < hbaseKey.size(); pos++) {
    tohash.write(hbaseKey.get(pos), 0, hbaseKey.get(pos).length);
  }
  byte[] hashed = Arrays.copyOfRange(Hasher.hash(tohash.toByteArray()), 0, format.getSalt().getHashSize());
  baos.write(hashed, 0, hashed.length);

  // to materialize or not to materialize, that is the question
  if (format.getSalt().getSuppressKeyMaterialization()) {
    return baos.toByteArray();
  } else {
    for (pos = 0; pos < hbaseKey.size(); pos++) {
      baos.write(hbaseKey.get(pos), 0, hbaseKey.get(pos).length);
      if (format.getComponents().get(pos).getType() == ComponentType.STRING
          || format.getComponents().get(pos) == null) {
        // empty strings will be encoded as null, hence we need to delimit them too
        baos.write(zeroDelim);
      }
    }
    return baos.toByteArray();
  }
}
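The sign-bit flip in the INTEGER and LONG cases is what makes unsigned byte-wise comparison of the encoded keys match signed numeric order. A minimal standalone sketch of the idea, not Kiji code (Arrays.compareUnsigned requires Java 9+):

import java.nio.ByteBuffer;
import java.util.Arrays;

public class SignFlipDemo {
    // Encode an int as 4 big-endian bytes with the sign bit flipped,
    // so that unsigned lexicographic order matches signed int order.
    static byte[] encode(int value) {
        byte[] b = ByteBuffer.allocate(Integer.SIZE / Byte.SIZE).putInt(value).array();
        b[0] ^= Byte.MIN_VALUE; // flip the sign bit of the most significant byte
        return b;
    }

    public static void main(String[] args) {
        byte[] min = encode(Integer.MIN_VALUE);
        byte[] zero = encode(0);
        byte[] max = encode(Integer.MAX_VALUE);
        // HBase compares row keys as unsigned bytes; with the flip,
        // MIN_INT < 0 < MAX_INT holds under that comparison.
        System.out.println(Arrays.compareUnsigned(min, zero) < 0); // true
        System.out.println(Arrays.compareUnsigned(zero, max) < 0); // true
    }
}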