Example usage for java.util UUID getLeastSignificantBits

Introduction

This page collects example usages of java.util.UUID.getLeastSignificantBits() from open-source projects.

Prototype

public long getLeastSignificantBits() 

Document

Returns the least significant 64 bits of this UUID's 128-bit value.
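
This accessor, together with getMostSignificantBits(), splits a UUID into two longs, and the public UUID(long, long) constructor reverses the split; that round trip is what most of the examples below rely on. A minimal sketch using only the JDK:

import java.util.UUID;

public class UuidBitsRoundTrip {
    public static void main(String[] args) {
        UUID original = UUID.randomUUID();
        long msb = original.getMostSignificantBits();  // high 64 bits
        long lsb = original.getLeastSignificantBits(); // low 64 bits
        UUID rebuilt = new UUID(msb, lsb);             // recombine the two halves
        System.out.println(original.equals(rebuilt));  // prints true
    }
}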

Usage

From source file: org.hillview.dataset.RemoteDataSet.java

/**
 * Zip operation on two IDataSet objects that need to reside on the same remote server.
 */
@Override
public <S> Observable<PartialResult<IDataSet<Pair<T, S>>>> zip(final IDataSet<S> other) {
    if (!(other instanceof RemoteDataSet<?>)) {
        throw new RuntimeException("Unexpected type in Zip " + other);
    }
    final RemoteDataSet<S> rds = (RemoteDataSet<S>) other;

    // zip commands are not valid if the RemoteDataSet instances point to different
    // actor systems or different nodes.
    final HostAndPort leftAddress = this.serverEndpoint;
    final HostAndPort rightAddress = rds.serverEndpoint;
    if (!leftAddress.equals(rightAddress)) {
        throw new RuntimeException("Zip command invalid for RemoteDataSets "
                + "across different servers | left: " + leftAddress + ", right:" + rightAddress);
    }

    final ZipOperation zip = new ZipOperation(rds.remoteHandle);
    final byte[] serializedOp = SerializationUtils.serialize(zip);
    final UUID operationId = UUID.randomUUID();
    final Command command = Command.newBuilder().setIdsIndex(this.remoteHandle)
            .setSerializedOp(ByteString.copyFrom(serializedOp)).setHighId(operationId.getMostSignificantBits())
            .setLowId(operationId.getLeastSignificantBits()).build();
    final SerializedSubject<PartialResult<IDataSet<Pair<T, S>>>, PartialResult<IDataSet<Pair<T, S>>>> subj = createSerializedSubject();
    final StreamObserver<PartialResponse> responseObserver = new NewDataSetObserver<Pair<T, S>>(subj);
    return subj.unsubscribeOn(ExecutorUtils.getUnsubscribeScheduler()).doOnSubscribe(
            () -> this.stub.withDeadlineAfter(TIMEOUT, TimeUnit.MILLISECONDS).zip(command, responseObserver))
            .doOnUnsubscribe(() -> this.unsubscribe(operationId));
}
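
The Command carries the operation id as two protobuf long fields. On the receiving side (not shown on this page) the id can be rebuilt from the generated getters; a hedged sketch, assuming the Command message exposes the getHighId()/getLowId() accessors that protobuf derives from the setHighId()/setLowId() calls above:

// Hypothetical receiving side: recombine the two protobuf longs into the
// original operation id.
UUID operationId = new UUID(command.getHighId(), command.getLowId());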

From source file: com.cognitect.transit.TransitMPTest.java

public void testReadUUID() throws IOException {

    UUID uuid = UUID.randomUUID();
    final long hi64 = uuid.getMostSignificantBits();
    final long lo64 = uuid.getLeastSignificantBits();

    assertEquals(0, uuid.compareTo((UUID) readerOf("~u" + uuid.toString()).read()));

    List thing = new ArrayList() {
        {
            add("~#u");
            add(new ArrayList() {
                {
                    add(hi64);
                    add(lo64);
                }
            });
        }
    };

    assertEquals(0, uuid.compareTo((UUID) readerOf(thing).read()));
}
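
The tagged-array form works because the (hi64, lo64) pair fully determines the UUID; in plain JDK terms the same fact can be asserted directly:

// JDK-only sanity check: rebuilding from the two halves yields an equal UUID.
assertEquals(uuid, new UUID(hi64, lo64));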

From source file: org.apache.hadoop.hbase.regionserver.wal.HLogKey.java

public WALKey.Builder getBuilder(WALCellCodec.ByteStringCompressor compressor) throws IOException {
    WALKey.Builder builder = WALKey.newBuilder();
    if (compressionContext == null) {
        builder.setEncodedRegionName(HBaseZeroCopyByteString.wrap(this.encodedRegionName));
        builder.setTableName(HBaseZeroCopyByteString.wrap(this.tablename.getName()));
    } else {
        builder.setEncodedRegionName(
                compressor.compress(this.encodedRegionName, compressionContext.regionDict));
        builder.setTableName(compressor.compress(this.tablename.getName(), compressionContext.tableDict));
    }
    builder.setLogSequenceNumber(this.logSeqNum);
    builder.setWriteTime(writeTime);
    if (this.nonce != HConstants.NO_NONCE) {
        builder.setNonce(nonce);
    }
    if (this.nonceGroup != HConstants.NO_NONCE) {
        builder.setNonceGroup(nonceGroup);
    }
    HBaseProtos.UUID.Builder uuidBuilder = HBaseProtos.UUID.newBuilder();
    for (UUID clusterId : clusterIds) {
        uuidBuilder.setLeastSigBits(clusterId.getLeastSignificantBits());
        uuidBuilder.setMostSigBits(clusterId.getMostSignificantBits());
        builder.addClusterIds(uuidBuilder.build());
    }
    if (scopes != null) {
        for (Map.Entry<byte[], Integer> e : scopes.entrySet()) {
            ByteString family = (compressionContext == null) ? HBaseZeroCopyByteString.wrap(e.getKey())
                    : compressor.compress(e.getKey(), compressionContext.familyDict);
            builder.addScopes(
                    FamilyScope.newBuilder().setFamily(family).setScopeType(ScopeType.valueOf(e.getValue())));
        }
    }
    return builder;
}
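
Reading the cluster ids back reverses the mapping. A sketch, assuming walKey is the deserialized WALKey protobuf message and that the generated HBaseProtos.UUID exposes the getMostSigBits()/getLeastSigBits() getters matching the setters used above:

// Hypothetical inverse: rebuild java.util.UUID values from the protobuf message.
List<UUID> clusterIds = new ArrayList<>();
for (HBaseProtos.UUID pbUuid : walKey.getClusterIdsList()) {
    clusterIds.add(new UUID(pbUuid.getMostSigBits(), pbUuid.getLeastSigBits()));
}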

From source file: org.apache.hadoop.hbase.wal.WALKey.java

public org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.Builder getBuilder(
        WALCellCodec.ByteStringCompressor compressor) throws IOException {
    org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.Builder builder = org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey
            .newBuilder();
    if (compressionContext == null) {
        builder.setEncodedRegionName(ByteStringer.wrap(this.encodedRegionName));
        builder.setTableName(ByteStringer.wrap(this.tablename.getName()));
    } else {
        builder.setEncodedRegionName(
                compressor.compress(this.encodedRegionName, compressionContext.regionDict));
        builder.setTableName(compressor.compress(this.tablename.getName(), compressionContext.tableDict));
    }
    builder.setLogSequenceNumber(this.logSeqNum);
    builder.setWriteTime(writeTime);
    if (this.origLogSeqNum > 0) {
        builder.setOrigSequenceNumber(this.origLogSeqNum);
    }
    if (this.nonce != HConstants.NO_NONCE) {
        builder.setNonce(nonce);
    }
    if (this.nonceGroup != HConstants.NO_NONCE) {
        builder.setNonceGroup(nonceGroup);
    }
    HBaseProtos.UUID.Builder uuidBuilder = HBaseProtos.UUID.newBuilder();
    for (UUID clusterId : clusterIds) {
        uuidBuilder.setLeastSigBits(clusterId.getLeastSignificantBits());
        uuidBuilder.setMostSigBits(clusterId.getMostSignificantBits());
        builder.addClusterIds(uuidBuilder.build());
    }
    if (scopes != null) {
        for (Map.Entry<byte[], Integer> e : scopes.entrySet()) {
            ByteString family = (compressionContext == null) ? ByteStringer.wrap(e.getKey())
                    : compressor.compress(e.getKey(), compressionContext.familyDict);
            builder.addScopes(
                    FamilyScope.newBuilder().setFamily(family).setScopeType(ScopeType.valueOf(e.getValue())));
        }
    }
    return builder;
}

From source file: org.apache.hadoop.hbase.regionserver.wal.HLogKey.java

@Override
@Deprecated
public void write(DataOutput out) throws IOException {
    LOG.warn("HLogKey is being serialized to writable - only expected in test code");
    WritableUtils.writeVInt(out, VERSION.code);
    if (compressionContext == null) {
        Bytes.writeByteArray(out, this.encodedRegionName);
        Bytes.writeByteArray(out, this.tablename.getName());
    } else {
        Compressor.writeCompressed(this.encodedRegionName, 0, this.encodedRegionName.length, out,
                compressionContext.regionDict);
        Compressor.writeCompressed(this.tablename.getName(), 0, this.tablename.getName().length, out,
                compressionContext.tableDict);
    }
    out.writeLong(this.logSeqNum);
    out.writeLong(this.writeTime);
    // Don't need to write the clusters information as we are using protobufs from 0.95
    // Writing only the first clusterId for testing the legacy read
    Iterator<UUID> iterator = clusterIds.iterator();
    if (iterator.hasNext()) {
        out.writeBoolean(true);
        UUID clusterId = iterator.next();
        out.writeLong(clusterId.getMostSignificantBits());
        out.writeLong(clusterId.getLeastSignificantBits());
    } else {
        out.writeBoolean(false);
    }
}
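
A matching legacy read reverses this exactly; a minimal sketch against java.io.DataInput (a hypothetical fragment of the corresponding readFields, not shown on this page):

// The boolean flag written above says whether a cluster id follows;
// the two longs are then recombined in the order they were written.
if (in.readBoolean()) {
    clusterIds.add(new UUID(in.readLong(), in.readLong()));
}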

From source file: org.springframework.amqp.rabbit.junit.BrokerRunning.java

/**
 * Generate the connection id for the connection used by the rule's
 * connection factory.
 * @return the id.
 */
public String generateId() {
    UUID uuid = UUID.randomUUID();
    ByteBuffer bb = ByteBuffer.wrap(new byte[16]);
    bb.putLong(uuid.getMostSignificantBits()).putLong(uuid.getLeastSignificantBits());
    return "SpringBrokerRunning." + Base64Utils.encodeToUrlSafeString(bb.array()).replaceAll("=", "");
}
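
Since 16 bytes encode to 22 Base64 characters plus two '=' pads, the JDK's own encoder can produce the same id without the replaceAll; a sketch using only java.util.Base64 and java.nio.ByteBuffer:

// Equivalent id generation with the JDK encoder; withoutPadding() drops the
// trailing '=' that the original strips by hand.
ByteBuffer bb = ByteBuffer.wrap(new byte[16]);
bb.putLong(uuid.getMostSignificantBits()).putLong(uuid.getLeastSignificantBits());
String id = "SpringBrokerRunning." + Base64.getUrlEncoder().withoutPadding().encodeToString(bb.array());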

From source file: org.echocat.jomon.net.cluster.channel.multicast.MulticastClusterChannelIntegrationTest.java

@Override
@Nonnull
protected MulticastClusterChannel channel(@Nonnull UUID uuid) throws Exception {
    final MulticastClusterChannel channel = new MulticastClusterChannel(uuid);
    final NetworkInterface loopBackInterface = networkInterfaceRepository()
            .findOneBy(networkInterface().whichIsOfType(loopBack));
    assertThat(loopBackInterface, isNotNull());
    channel.setAddress(new InetSocketAddress(HOST, PORT), loopBackInterface);
    channel.register(getMessageHandler());
    channel.register(getStateHandler());
    channel.setName(uuid.getLeastSignificantBits() + "");
    channel.init();
    return channel;
}

From source file: org.apache.hadoop.hbase.client.Mutation.java

/**
 * Marks that the clusters with the given clusterIds have consumed the mutation
 *
 * @param clusterIds of the clusters that have consumed the mutation
 */
public Mutation setClusterIds(List<UUID> clusterIds) {
    ByteArrayDataOutput out = ByteStreams.newDataOutput();
    out.writeInt(clusterIds.size());
    for (UUID clusterId : clusterIds) {
        out.writeLong(clusterId.getMostSignificantBits());
        out.writeLong(clusterId.getLeastSignificantBits());
    }
    setAttribute(CONSUMED_CLUSTER_IDS, out.toByteArray());
    return this;
}
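
Guava's ByteStreams can read the attribute back. A hedged sketch of the inverse (hypothetical helper; getAttribute is the standard accessor for attributes stored with setAttribute):

// Hypothetical decode of the CONSUMED_CLUSTER_IDS attribute written above.
ByteArrayDataInput in = ByteStreams.newDataInput(getAttribute(CONSUMED_CLUSTER_IDS));
int count = in.readInt();
List<UUID> ids = new ArrayList<>(count);
for (int i = 0; i < count; i++) {
    ids.add(new UUID(in.readLong(), in.readLong()));
}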

From source file: org.echocat.jomon.net.cluster.channel.tcp.TcpClusterChannelIntegrationTest.java

@Override
@Nonnull
protected TcpClusterChannel channel(@Nonnull UUID uuid) throws Exception {
    final TcpClusterChannel channel = new TcpClusterChannel(uuid);
    final NetworkInterface loopBackInterface = networkInterfaceRepository()
            .findOneBy(networkInterface().whichIsOfType(loopBack));
    final int port = new FreeTcpPortDetector(loopBackInterface, 10000, 50000).detect();
    channel.setAddress(new InetSocketAddress(port), loopBackInterface);
    channel.register(getMessageHandler());
    channel.register(getStateHandler());
    channel.setName(uuid.getLeastSignificantBits() + "");
    return channel;
}

From source file: com.urbancode.terraform.main.Main.java

/**
 * Initializes Terraform so it can execute the given commands.
 * Here is the order of operations:
 * Parses the credentials file and verifies the given credentials.
 * Generates a random string for this environment, which is appended to the output xml file.
 * Parses the xml file.
 * Runs the specified command (create, destroy, etc).
 * @throws XmlParsingException
 * @throws IOException
 * @throws CredentialsException
 * @throws CreationException
 * @throws DestructionException
 * @throws RestorationException
 */
public void execute() throws XmlParsingException, IOException, CredentialsException, CreationException,
        DestructionException, RestorationException {
    TerraformContext context = null;
    try {
        // parse xml and set context
        context = parseContext(inputXmlFile);

        Credentials credentials = parseCredentials(credsFile);

        context.setCredentials(credentials);
        if (AllowedCommands.CREATE.getCommandName().equalsIgnoreCase(command)) {
            // create new file if creating a new environment
            UUID uuid = UUID.randomUUID();
            // encode the UUID with URL-safe Base64, then map '-' and '_' so only 0-9 a-z A-Z remain
            ByteBuffer bb = ByteBuffer.wrap(new byte[16]);
            bb.putLong(uuid.getMostSignificantBits());
            bb.putLong(uuid.getLeastSignificantBits());
            String suffix = Base64.encodeBase64URLSafeString(bb.array());
            suffix = suffix.replaceAll("-", "Y");
            suffix = suffix.replaceAll("_", "Z");
            suffix = suffix.substring(0, 4);
            if (context.getEnvironment() != null) {
                context.getEnvironment().addSuffixToEnvName(suffix);
                log.debug("UUID for env " + context.getEnvironment().getName() + " is " + suffix);
            } else {
                throw new NullPointerException("No environment on context!");
            }

            String name = context.getEnvironment().getName();
            log.debug("Output filename = " + name);
            outputXmlFile = new File("env-" + name + ".xml");

            log.debug("Calling create() on context");
            context.create();
        } else if (AllowedCommands.DESTROY.getCommandName().equalsIgnoreCase(command)) {
            String suffix = parseSuffix(context.getEnvironment().getName());
            context.getEnvironment().setSuffix(suffix);
            log.debug("found suffix " + suffix);
            // write out the environment file regardless of success or failure
            outputXmlFile = inputXmlFile;
            log.debug("Calling destroy() on context");
            context.destroy();
        } else if (AllowedCommands.SUSPEND.getCommandName().equalsIgnoreCase(command)) {
            outputXmlFile = inputXmlFile;
            log.debug("Calling restore() on context");
            log.info("Attempting to suspend power on all instances/VMs in the environment.");
            context.restore();
            if (context instanceof ContextVmware) {
                SuspendCommand newCommand = new SuspendCommand((ContextVmware) context);
                newCommand.execute();
            } else if (context instanceof ContextAWS) {
                com.urbancode.terraform.commands.aws.SuspendCommand newCommand = new com.urbancode.terraform.commands.aws.SuspendCommand(
                        (ContextAWS) context);
                newCommand.execute();
            } else {
                log.warn("Could not resolve context to call command \"" + command + "\"");
            }
        } else if (AllowedCommands.RESUME.getCommandName().equalsIgnoreCase(command)) {
            outputXmlFile = inputXmlFile;
            log.debug("Calling restore() on context");
            context.restore();
            log.info("Attempting to power on all instances/VMs in the environment.");
            if (context instanceof ContextVmware) {
                ResumeCommand newCommand = new ResumeCommand((ContextVmware) context);
                newCommand.execute();
            } else if (context instanceof ContextAWS) {
                com.urbancode.terraform.commands.aws.ResumeCommand newCommand = new com.urbancode.terraform.commands.aws.ResumeCommand(
                        (ContextAWS) context);
                newCommand.execute();
            } else {
                log.warn("Could not resolve context to call command \"" + command + "\"");
            }
        } else if (AllowedCommands.TAKE_SNAPSHOT.getCommandName().equalsIgnoreCase(command)) {
            outputXmlFile = inputXmlFile;
            log.debug("Calling restore() on context");
            context.restore();
            log.info("Attempting to take snapshots of all instances/VMs in the environment.");
            if (context instanceof ContextVmware) {
                TakeSnapshotCommand newCommand = new TakeSnapshotCommand((ContextVmware) context);
                newCommand.execute();
            } else if (context instanceof ContextAWS) {
                log.warn("Taking snapshots is not currently supported with Terraform and AWS.");
            } else {
                log.warn("Could not resolve context to call command \"" + command + "\"");
            }
        }
    } catch (ParserConfigurationException e1) {
        throw new XmlParsingException("ParserConfigurationException: " + e1.getMessage(), e1);
    } catch (SAXException e2) {
        throw new XmlParsingException("SAXException: " + e2.getMessage(), e2);
    } finally {
        if (context != null && context.doWriteContext() && outputXmlFile != null) {
            log.debug("Writing context out to " + outputXmlFile);
            writeEnvToXml(outputXmlFile, context);
        }
    }
}
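
The suffix block above can be isolated into a small helper; a self-contained sketch (hypothetical method name, using java.util.Base64 instead of commons-codec):

import java.nio.ByteBuffer;
import java.util.Base64;
import java.util.UUID;

// Mirrors the suffix logic in execute(): 16 UUID bytes, URL-safe Base64,
// '-' and '_' mapped into the alphanumeric range, first four chars kept.
static String randomSuffix() {
    UUID uuid = UUID.randomUUID();
    ByteBuffer bb = ByteBuffer.wrap(new byte[16]);
    bb.putLong(uuid.getMostSignificantBits());
    bb.putLong(uuid.getLeastSignificantBits());
    String s = Base64.getUrlEncoder().withoutPadding().encodeToString(bb.array());
    // '-' and '_' are the only non-alphanumeric characters URL-safe Base64 emits
    return s.replace('-', 'Y').replace('_', 'Z').substring(0, 4);
}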