List of usage examples for `com.google.common.primitives.Longs.BYTES` (declared as `public static final int BYTES`).
To view the source code for `com.google.common.primitives.Longs.BYTES`, click the Source link.
From source file:google.registry.model.server.ServerSecret.java
/** Returns the value of this ServerSecret as a byte array. */ public byte[] asBytes() { return ByteBuffer.allocate(Longs.BYTES * 2).putLong(mostSignificant).putLong(leastSignificant).array(); }
From source file:alluxio.network.protocol.RPCBlockReadResponse.java
@Override public int getEncodedLength() { // 3 longs (mBLockId, mOffset, mLength) + 1 short (mStatus) return Longs.BYTES * 3 + Shorts.BYTES; }
From source file:com.metamx.druid.aggregation.HistogramAggregatorFactory.java
@Override public int getMaxIntermediateSize() { return Longs.BYTES * (breaks.length + 1) + Floats.BYTES * 2; }
From source file:io.druid.query.metadata.SegmentAnalyzer.java
public Map<String, ColumnAnalysis> analyze(StorageAdapter adapter, EnumSet<SegmentMetadataQuery.AnalysisType> analysisTypes) { Preconditions.checkNotNull(adapter, "Adapter cannot be null"); Map<String, ColumnAnalysis> columns = Maps.newTreeMap(); List<String> columnNames = getStorageAdapterColumnNames(adapter); int numRows = adapter.getNumRows(); for (String columnName : columnNames) { final ColumnCapabilities capabilities = adapter.getColumnCapabilities(columnName); final ColumnAnalysis analysis; /**// www . ja v a 2 s. c o m * StorageAdapter doesn't provide a way to get column values, so size is * not calculated for STRING and COMPLEX columns. */ ValueType capType = capabilities.getType(); switch (capType) { case LONG: analysis = lengthBasedAnalysisForAdapter(analysisTypes, capType.name(), capabilities, numRows, Longs.BYTES); break; case FLOAT: analysis = lengthBasedAnalysisForAdapter(analysisTypes, capType.name(), capabilities, numRows, NUM_BYTES_IN_TEXT_FLOAT); break; case STRING: analysis = new ColumnAnalysis(capType.name(), 0, analysisHasCardinality(analysisTypes) ? adapter.getDimensionCardinality(columnName) : 0, null); break; case COMPLEX: analysis = new ColumnAnalysis(capType.name(), 0, null, null); break; default: log.warn("Unknown column type[%s].", capType); analysis = ColumnAnalysis.error(String.format("unknown_type_%s", capType)); } columns.put(columnName, analysis); } columns.put(Column.TIME_COLUMN_NAME, lengthBasedAnalysisForAdapter(analysisTypes, ValueType.LONG.name(), null, numRows, NUM_BYTES_IN_TIMESTAMP)); return columns; }
From source file:co.cask.cdap.internal.io.ReflectionWriter.java
/** * Write the given object that has the given schema. * * @param object the object to write// w ww.ja va2 s.co m * @param objSchema the schema of the object to write * @throws IOException if there was an exception writing the object */ @SuppressWarnings("ConstantConditions") protected void write(WRITER writer, Object object, Schema objSchema) throws IOException { if (object != null) { if (seenRefs.contains(object)) { throw new IOException("Recursive reference not supported."); } if (objSchema.getType() == Schema.Type.RECORD) { seenRefs.add(object); } } switch (objSchema.getType()) { case NULL: writeNull(writer); break; case BOOLEAN: writeBool(writer, (Boolean) object); break; case INT: writeInt(writer, ((Number) object).intValue()); break; case LONG: writeLong(writer, ((Number) object).longValue()); break; case FLOAT: writeFloat(writer, (Float) object); break; case DOUBLE: writeDouble(writer, (Double) object); break; case STRING: writeString(writer, object.toString()); break; case BYTES: if (object instanceof ByteBuffer) { writeBytes(writer, (ByteBuffer) object); } else if (object instanceof UUID) { UUID uuid = (UUID) object; ByteBuffer buf = ByteBuffer.allocate(Longs.BYTES * 2); buf.putLong(uuid.getMostSignificantBits()).putLong(uuid.getLeastSignificantBits()); writeBytes(writer, (ByteBuffer) buf.flip()); } else { writeBytes(writer, (byte[]) object); } break; case ENUM: writeEnum(writer, object.toString(), objSchema); break; case ARRAY: if (object instanceof Collection) { writeArray(writer, (Collection<?>) object, objSchema.getComponentSchema()); } else { writeArray(writer, object, objSchema.getComponentSchema()); } break; case MAP: writeMap(writer, (Map<?, ?>) object, objSchema.getMapSchema()); break; case RECORD: writeRecord(writer, object, objSchema); break; case UNION: writeUnion(writer, object, objSchema); break; } }
From source file:io.druid.query.aggregation.last.FloatLastAggregatorFactory.java
@Override public AggregatorFactory getCombiningFactory() { return new FloatLastAggregatorFactory(name, name) { @Override//from ww w . jav a2 s .c o m public Aggregator factorize(ColumnSelectorFactory metricFactory) { final BaseObjectColumnValueSelector selector = metricFactory.makeColumnValueSelector(name); return new FloatLastAggregator(name, null, null) { @Override public void aggregate() { SerializablePair<Long, Float> pair = (SerializablePair<Long, Float>) selector.getObject(); if (pair.lhs >= lastTime) { lastTime = pair.lhs; lastValue = pair.rhs; } } }; } @Override public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory) { final BaseObjectColumnValueSelector selector = metricFactory.makeColumnValueSelector(name); return new FloatLastBufferAggregator(null, null) { @Override public void aggregate(ByteBuffer buf, int position) { SerializablePair<Long, Float> pair = (SerializablePair<Long, Float>) selector.getObject(); long lastTime = buf.getLong(position); if (pair.lhs >= lastTime) { buf.putLong(position, pair.lhs); buf.putFloat(position + Longs.BYTES, pair.rhs); } } @Override public void inspectRuntimeShape(RuntimeShapeInspector inspector) { inspector.visit("selector", selector); } }; } }; }
From source file:io.druid.query.aggregation.last.DoubleLastAggregatorFactory.java
@Override public AggregatorFactory getCombiningFactory() { return new DoubleLastAggregatorFactory(name, name) { @Override/* w w w .j a va 2 s .c om*/ public Aggregator factorize(ColumnSelectorFactory metricFactory) { final BaseObjectColumnValueSelector selector = metricFactory.makeColumnValueSelector(name); return new DoubleLastAggregator(name, null, null) { @Override public void aggregate() { SerializablePair<Long, Double> pair = (SerializablePair<Long, Double>) selector.getObject(); if (pair.lhs >= lastTime) { lastTime = pair.lhs; lastValue = pair.rhs; } } }; } @Override public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory) { final BaseObjectColumnValueSelector selector = metricFactory.makeColumnValueSelector(name); return new DoubleLastBufferAggregator(null, null) { @Override public void aggregate(ByteBuffer buf, int position) { SerializablePair<Long, Double> pair = (SerializablePair<Long, Double>) selector.getObject(); long lastTime = buf.getLong(position); if (pair.lhs >= lastTime) { buf.putLong(position, pair.lhs); buf.putDouble(position + Longs.BYTES, pair.rhs); } } @Override public void inspectRuntimeShape(RuntimeShapeInspector inspector) { inspector.visit("selector", selector); } }; } }; }
From source file:org.apache.kudu.util.Slice.java
/** * Gets a 64-bit long integer at the specified absolute {@code index} in * this buffer.// www. j a v a 2 s. c o m * * @throws IndexOutOfBoundsException if the specified {@code index} is less than {@code 0} or * {@code index + 8} is greater than {@code this.capacity} */ public long getLong(int index) { Preconditions.checkPositionIndexes(index, index + Longs.BYTES, this.length); index += offset; return ((long) data[index] & 0xff) | ((long) data[index + 1] & 0xff) << 8 | ((long) data[index + 2] & 0xff) << 16 | ((long) data[index + 3] & 0xff) << 24 | ((long) data[index + 4] & 0xff) << 32 | ((long) data[index + 5] & 0xff) << 40 | ((long) data[index + 6] & 0xff) << 48 | ((long) data[index + 7] & 0xff) << 56; }
From source file:io.druid.query.aggregation.first.FloatFirstAggregatorFactory.java
@Override public AggregatorFactory getCombiningFactory() { return new FloatFirstAggregatorFactory(name, name) { @Override//from ww w. j a v a 2s . c om public Aggregator factorize(ColumnSelectorFactory metricFactory) { final BaseObjectColumnValueSelector selector = metricFactory.makeColumnValueSelector(name); return new FloatFirstAggregator(name, null, null) { @Override public void aggregate() { SerializablePair<Long, Float> pair = (SerializablePair<Long, Float>) selector.getObject(); if (pair.lhs < firstTime) { firstTime = pair.lhs; firstValue = pair.rhs; } } }; } @Override public BufferAggregator factorizeBuffered(ColumnSelectorFactory metricFactory) { final BaseObjectColumnValueSelector selector = metricFactory.makeColumnValueSelector(name); return new FloatFirstBufferAggregator(null, null) { @Override public void aggregate(ByteBuffer buf, int position) { SerializablePair<Long, Float> pair = (SerializablePair<Long, Float>) selector.getObject(); long firstTime = buf.getLong(position); if (pair.lhs < firstTime) { buf.putLong(position, pair.lhs); buf.putFloat(position + Longs.BYTES, pair.rhs); } } @Override public void inspectRuntimeShape(RuntimeShapeInspector inspector) { inspector.visit("selector", selector); } }; } }; }
From source file:com.yandex.yoctodb.util.mutable.impl.LongArrayBitSet.java
@Override public boolean or(@NotNull final Buffer longArrayBitSetInByteBuffer, final long startPosition, final int bitSetSizeInLongs) { boolean notEmpty = false; long currentPosition = startPosition; assert usedWords == bitSetSizeInLongs; for (int i = 0; i < usedWords; i++) { final long currentWord = longArrayBitSetInByteBuffer.getLong(currentPosition); currentPosition += Longs.BYTES; final long word = words[i] | currentWord; words[i] = word;/* ww w . j av a 2 s .co m*/ if (word != 0) { notEmpty = true; } } return notEmpty; }