Example usage for java.util Random nextLong

List of usage examples for java.util Random nextLong

Introduction

On this page you can find example usage of java.util.Random.nextLong.

Prototype

public long nextLong() 

Source Link

Document

Returns the next pseudorandom, uniformly distributed long value from this random number generator's sequence.

Usage

From source file:org.firstopen.singularity.admin.view.ECSpecBean.java

/**
 * Defines an EC spec under a randomly generated name and subscribes the
 * configured notification URI to it.
 *
 * @throws Exception if the define or subscribe call fails
 */
public void testMPR2010_AWID_define() throws Exception {
    Random random = new Random(System.currentTimeMillis());
    // Random suffix keeps spec names unique across repeated test runs.
    // Fixed: dropped the redundant `new String(...)` wrapper around the
    // concatenation result.
    String ecSpecGenName = "ecSpec" + random.nextLong();

    aSLSB.define(generateECSpecDocument(ecSpecGenName));

    aSLSB.subscribe(ecSpecGenName, notificationURI);
}

From source file:es.csic.iiia.planes.generator.HotspotFactory.java

/**
 * Builds a bivariate normal distribution whose mean is sampled uniformly
 * inside the scenario bounds and whose covariance comes from a lazily
 * created inverse-Wishart prior shared across calls.
 *
 * @param config scenario configuration (bounds, hotspot radius and
 *        freedom degrees)
 * @param r random source used for seeding and for the mean coordinates
 * @return a freshly seeded multivariate normal distribution
 */
@Override
public MultivariateNormalDistribution buildDistribution(Configuration config, Random r) {
    // Fixed: removed locals `w`/`h` that were assigned from the config but
    // never read (the mean below queries the config directly).
    double maxd = interpolator.value(config.getHotspotRadius(), config.getHotspotFreedomDegrees());
    double factor = 1 / maxd;

    // Lazily initialize the shared covariance prior on first use.
    if (covDistribution == null) {
        RealMatrix m = new Array2DRowRealMatrix(new double[][] { { factor, 0 }, { 0, factor } });
        covDistribution = new InverseWishartDistribution(m, config.getHotspotFreedomDegrees());
        covDistribution.reseedRandomGenerator(r.nextLong());
    }

    double[] means = new double[] { r.nextInt(config.getWidth()), r.nextInt(config.getHeight()), };
    double[][] covariance = getCovarianceMatrix();
    MultivariateNormalDistribution distribution = new MultivariateNormalDistribution(means, covariance);
    distribution.reseedRandomGenerator(r.nextLong());
    return distribution;
}

From source file:com.datasalt.pangool.tuplemr.mapred.lib.output.TestTupleInputOutputFormat.java

/**
 * Writes {@code generatedRows} tuples of random (int, string) pairs to a
 * tuple file, then reads them back split by split using the given
 * {@code maxSplitSize} and checks that the total row count matches.
 *
 * @param maxSplitSize maximum input split size handed to the input format
 * @param generatedRows number of random rows to write and expect back
 */
public void testSplits(long maxSplitSize, int generatedRows) throws IOException, InterruptedException,
        IllegalArgumentException, SecurityException, ClassNotFoundException, InstantiationException,
        IllegalAccessException, InvocationTargetException, NoSuchMethodException {
    logger.info("Testing maxSplitSize: " + maxSplitSize + " and generatedRows:" + generatedRows);
    FileSystem fS = FileSystem.get(getConf());
    // Fixed seed keeps the generated data deterministic across runs.
    Random r = new Random(1);
    Schema schema = new Schema("schema", Fields.parse("i:int,s:string"));
    ITuple tuple = new Tuple(schema);

    Path outPath = new Path(OUT);
    TupleFile.Writer writer = new TupleFile.Writer(FileSystem.get(getConf()), getConf(), outPath, schema);
    for (int i = 0; i < generatedRows; i++) {
        tuple.set("i", r.nextInt());
        tuple.set("s", r.nextLong() + "");
        writer.append(tuple);
    }
    writer.close();

    TupleInputFormat format = ReflectionUtils.newInstance(TupleInputFormat.class, getConf());
    Job job = new Job(getConf());
    FileInputFormat.setInputPaths(job, outPath);
    logger.info("Using max input split size: " + maxSplitSize);
    FileInputFormat.setMaxInputSplitSize(job, maxSplitSize);
    // NOTE(review): FileInputFormat.class looks odd here — the splits are
    // taken from the TupleInputFormat instance above, so confirm whether
    // this should be TupleInputFormat.class or is simply unused.
    job.setInputFormatClass(FileInputFormat.class);

    // Read all the splits and count. The number of rows read must equal
    // the number of rows written.
    int count = 0;
    for (InputSplit split : format.getSplits(job)) {
        TaskAttemptID attemptId = new TaskAttemptID(new TaskID(), 1);
        TaskAttemptContext attemptContext = TaskAttemptContextFactory.get(getConf(), attemptId);
        logger.info("Sampling split: " + split);
        RecordReader<ITuple, NullWritable> reader = format.createRecordReader(split, attemptContext);
        reader.initialize(split, attemptContext);
        while (reader.nextKeyValue()) {
            tuple = reader.getCurrentKey();
            count++;
        }
        reader.close();
    }

    assertEquals(generatedRows, count);

    // Clean up the temporary tuple file.
    HadoopUtils.deleteIfExists(fS, outPath);
}

From source file:org.daxplore.presenter.shared.Base64Test.java

/**
 * Test encoding and decoding of longs, which was added for the Daxplore
 * project./*from w  w  w.  j a v  a2  s. co m*/
 * @throws UnsupportedEncodingException 
 */
@Test
public void testLongRandom() throws UnsupportedEncodingException {
    org.apache.commons.codec.binary.Base64 apacheCoder = new org.apache.commons.codec.binary.Base64();

    Random rnd = new Random(0x8af3411e);
    for (int i = 0; i < 1000; i++) {
        long testLong;
        switch (i) {
        case 0:
            testLong = 0;
            break;
        case 1:
            testLong = -1;
            break;
        case 2:
            testLong = Long.MAX_VALUE;
            break;
        case 3:
            testLong = Long.MIN_VALUE;
            break;
        default:
            testLong = rnd.nextLong();
        }

        String e1 = Base64.encodeLong(testLong);
        long d1 = Base64.decodeLong(e1);
        assertEquals(testLong, d1);

        byte[] bytes = ByteBuffer.allocate(8).putLong(testLong).array();
        int firstRelevantByte = 0;
        for (; firstRelevantByte < bytes.length && bytes[firstRelevantByte] == 0; firstRelevantByte++) {
            // increase firstRelevantByte to reach non-zero byte or end 
        }
        bytes = Arrays.copyOfRange(bytes, firstRelevantByte, bytes.length);
        String e2 = new String(apacheCoder.encode(bytes), "UTF-8");

        bytes = apacheCoder.decode(e2.getBytes("UTF-8"));
        ByteBuffer bb = ByteBuffer.allocate(8);
        bb.position(8 - bytes.length);
        bb.put(bytes);
        assertEquals(testLong, bb.getLong(0));

        assertEquals(e2, e1);
    }
}

From source file:org.apache.solr.core.OpenCloseCoreStressTest.java

/**
 * Starts {@code numThreads} query threads, one per client, each seeded
 * with a value drawn from the shared random generator.
 */
public Queries(OpenCloseCoreStressTest OCCST, String url, List<HttpSolrClient> clients, int numThreads,
        Random random) {
    baseUrl = url;
    for (int i = 0; i < numThreads; ++i) {
        Thread worker = new OneQuery(OCCST, url, clients.get(i), random.nextLong());
        _threads.add(worker);
        worker.start();
    }
}

From source file:org.apache.hadoop.yarn.util.TestFSDownload.java

/**
 * Verifies that a resource marked PUBLIC but created under a restrictive
 * umask (077) is rejected: localization must fail with an IOException
 * rather than exposing a non-world-readable file as public.
 *
 * @throws IOException on unexpected file-system errors
 * @throws URISyntaxException if a resource URI is malformed
 * @throws InterruptedException if waiting for the download executor is
 *         interrupted
 */
@Test(timeout = 10000)
public void testDownloadBadPublic() throws IOException, URISyntaxException, InterruptedException {
    Configuration conf = new Configuration();
    // Restrictive umask: the created file will not be world-readable, so
    // it cannot legitimately be localized as PUBLIC.
    conf.set(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY, "077");
    FileContext files = FileContext.getLocalFSFileContext(conf);
    final Path basedir = files.makeQualified(new Path("target", TestFSDownload.class.getSimpleName()));
    files.mkdir(basedir, null, true);
    conf.setStrings(TestFSDownload.class.getName(), basedir.toString());

    Map<LocalResource, LocalResourceVisibility> rsrcVis = new HashMap<LocalResource, LocalResourceVisibility>();

    // Log the seed so a failing run can be reproduced deterministically.
    Random rand = new Random();
    long sharedSeed = rand.nextLong();
    rand.setSeed(sharedSeed);
    System.out.println("SEED: " + sharedSeed);

    Map<LocalResource, Future<Path>> pending = new HashMap<LocalResource, Future<Path>>();
    ExecutorService exec = Executors.newSingleThreadExecutor();
    LocalDirAllocator dirs = new LocalDirAllocator(TestFSDownload.class.getName());
    int size = 512;
    LocalResourceVisibility vis = LocalResourceVisibility.PUBLIC;
    Path path = new Path(basedir, "test-file");
    LocalResource rsrc = createFile(files, path, size, rand, vis);
    rsrcVis.put(rsrc, vis);
    Path destPath = dirs.getLocalPathForWrite(basedir.toString(), size, conf);
    destPath = new Path(destPath, Long.toString(uniqueNumberGenerator.incrementAndGet()));
    FSDownload fsd = new FSDownload(files, UserGroupInformation.getCurrentUser(), conf, destPath, rsrc);
    pending.put(rsrc, exec.submit(fsd));
    exec.shutdown();
    // Spin until the single download task has run to completion.
    while (!exec.awaitTermination(1000, TimeUnit.MILLISECONDS))
        ;
    Assert.assertTrue(pending.get(rsrc).isDone());

    try {
        for (Map.Entry<LocalResource, Future<Path>> p : pending.entrySet()) {
            // Future.get() rethrows the download failure; reaching the
            // next line means the bogus PUBLIC resource was localized.
            p.getValue().get();
            Assert.fail("We localized a file that is not public.");
        }
    } catch (ExecutionException e) {
        Assert.assertTrue(e.getCause() instanceof IOException);
    }
}

From source file:org.apache.syncope.core.notification.NotificationTest.java

/**
 * Regression test for SYNCOPE-492: a disabled notification must not
 * produce any NotificationTask when a matching user is created.
 *
 * @throws Exception if persistence, user creation or job execution fails
 */
@Test
public void issueSYNCOPE492() throws Exception {
    // 1. create suitable disabled notification for subsequent tests
    Notification notification = new Notification();
    notification.addEvent("[REST]:[UserController]:[]:[create]:[SUCCESS]");
    notification.setUserAbout(SyncopeClient.getUserSearchConditionBuilder().hasRoles(7L).query());
    notification.setSelfAsRecipient(true);

    notification.setRecipientAttrName("email");
    notification.setRecipientAttrType(IntMappingType.UserSchema);

    notification.getStaticRecipients().add("syncope492@syncope.apache.org");

    // Random sender/subject keep this notification distinguishable from
    // those created by other tests.
    Random random = new Random(System.currentTimeMillis());
    String sender = "syncopetest-" + random.nextLong() + "@syncope.apache.org";
    notification.setSender(sender);
    String subject = "Test notification " + random.nextLong();
    notification.setSubject(subject);
    notification.setTemplate("optin");
    // Disabled on purpose: no task should ever be generated from it.
    notification.setActive(false);

    Notification actual = notificationDAO.save(notification);
    assertNotNull(actual);

    notificationDAO.flush();

    // Snapshot the task count before triggering any notification logic.
    final int tasksNumberBefore = taskDAO.findAll(NotificationTask.class).size();

    // 2. create user
    UserTO userTO = UserTestITCase.getUniqueSampleTO(mailAddress);
    MembershipTO membershipTO = new MembershipTO();
    membershipTO.setRoleId(7);
    userTO.getMemberships().add(membershipTO);

    userController.create(userTO);

    // 3. force Quartz job execution
    notificationJob.execute(null);

    // 4. check if number of tasks is not incremented
    assertEquals(tasksNumberBefore, taskDAO.findAll(NotificationTask.class).size());
}

From source file:org.apache.hadoop.yarn.util.TestFSDownload.java

/**
 * Downloads five jar resources with alternating PRIVATE/APPLICATION
 * visibility and verifies that each localized path is a directory with
 * the permissions its visibility requires.
 *
 * @throws IOException if a download fails or a localized path cannot be
 *         inspected
 * @throws InterruptedException if waiting for the download executor is
 *         interrupted
 */
@Test(timeout = 10000)
public void testDirDownload() throws IOException, InterruptedException {
    Configuration conf = new Configuration();
    FileContext files = FileContext.getLocalFSFileContext(conf);
    final Path basedir = files.makeQualified(new Path("target", TestFSDownload.class.getSimpleName()));
    files.mkdir(basedir, null, true);
    conf.setStrings(TestFSDownload.class.getName(), basedir.toString());

    Map<LocalResource, LocalResourceVisibility> rsrcVis = new HashMap<LocalResource, LocalResourceVisibility>();

    // Log the seed so a failing run can be reproduced deterministically.
    Random rand = new Random();
    long sharedSeed = rand.nextLong();
    rand.setSeed(sharedSeed);
    System.out.println("SEED: " + sharedSeed);

    Map<LocalResource, Future<Path>> pending = new HashMap<LocalResource, Future<Path>>();
    ExecutorService exec = Executors.newSingleThreadExecutor();
    LocalDirAllocator dirs = new LocalDirAllocator(TestFSDownload.class.getName());
    for (int i = 0; i < 5; ++i) {
        LocalResourceVisibility vis = LocalResourceVisibility.PRIVATE;
        if (i % 2 == 1) {
            vis = LocalResourceVisibility.APPLICATION;
        }

        Path p = new Path(basedir, "dir" + i + ".jar");
        LocalResource rsrc = createJar(files, p, vis);
        rsrcVis.put(rsrc, vis);
        Path destPath = dirs.getLocalPathForWrite(basedir.toString(), conf);
        destPath = new Path(destPath, Long.toString(uniqueNumberGenerator.incrementAndGet()));
        FSDownload fsd = new FSDownload(files, UserGroupInformation.getCurrentUser(), conf, destPath, rsrc);
        pending.put(rsrc, exec.submit(fsd));
    }

    exec.shutdown();
    // Block until every queued download has finished.
    while (!exec.awaitTermination(1000, TimeUnit.MILLISECONDS))
        ;
    for (Future<Path> path : pending.values()) {
        Assert.assertTrue(path.isDone());
    }

    try {

        for (Map.Entry<LocalResource, Future<Path>> p : pending.entrySet()) {
            Path localized = p.getValue().get();
            FileStatus status = files.getFileStatus(localized);

            System.out.println("Testing path " + localized);
            // Fixed: use JUnit assertions instead of the `assert` keyword,
            // which is silently skipped unless the JVM runs with -ea.
            Assert.assertTrue(status.isDirectory());
            Assert.assertTrue(rsrcVis.containsKey(p.getKey()));

            verifyPermsRecursively(localized.getFileSystem(conf), files, localized, rsrcVis.get(p.getKey()));
        }
    } catch (ExecutionException e) {
        throw new IOException("Failed exec", e);
    }
}

From source file:org.apache.syncope.core.notification.NotificationTest.java

/**
 * Verifies that a notification with an empty "about" clause applies to any
 * user: creating a user must trigger an e-mail both from the Quartz job
 * and from an explicit execution of the generated task.
 *
 * @throws Exception if persistence, user creation or mail verification fails
 */
@Test
public void notifyByMailEmptyAbout() throws Exception {
    // Step 1: persist a notification with no "about" restriction.
    Notification notif = new Notification();
    notif.addEvent("[REST]:[UserController]:[]:[create]:[SUCCESS]");
    notif.setUserAbout(null);
    notif.setRecipients(SyncopeClient.getUserSearchConditionBuilder().hasRoles(8L).query());
    notif.setSelfAsRecipient(true);

    notif.setRecipientAttrName("email");
    notif.setRecipientAttrType(IntMappingType.UserSchema);

    // Randomized sender/subject make the resulting e-mail uniquely
    // identifiable in the test mailbox.
    Random rng = new Random(System.currentTimeMillis());
    String sender = "syncopetest-" + rng.nextLong() + "@syncope.apache.org";
    notif.setSender(sender);
    String subject = "Test notification " + rng.nextLong();
    notif.setSubject(subject);
    notif.setTemplate("optin");

    assertNotNull(notificationDAO.save(notif));

    notificationDAO.flush();

    // Step 2: create a user the notification applies to.
    UserTO userTO = UserTestITCase.getSampleTO(mailAddress);
    MembershipTO membership = new MembershipTO();
    membership.setRoleId(7);
    userTO.getMemberships().add(membership);

    userController.create(userTO);

    // Step 3: run the Quartz job and check the e-mail went out.
    notificationJob.execute(null);
    assertTrue(verifyMail(sender, subject));

    // Step 4: locate the generated NotificationTask by its sender.
    Long taskId = null;
    for (NotificationTask task : taskDAO.findAll(NotificationTask.class)) {
        if (sender.equals(task.getSender())) {
            taskId = task.getId();
        }
    }
    assertNotNull(taskId);

    // Step 5: re-execute the task directly and check the e-mail again.
    taskController.execute(taskId, false);
    assertTrue(verifyMail(sender, subject));
}

From source file:org.apache.syncope.core.notification.NotificationTest.java

/**
 * Regression test for SYNCOPE-192: with trace level NONE, executing a
 * notification task must still record a latest execution status while
 * keeping the persisted execution list empty.
 *
 * @throws Exception if persistence, user creation or mail verification fails
 */
@Test
public void issueSYNCOPE192() throws Exception {
    // Step 1: persist a notification with trace level NONE.
    Notification notif = new Notification();
    notif.addEvent("[REST]:[UserController]:[]:[create]:[SUCCESS]");
    notif.setUserAbout(SyncopeClient.getUserSearchConditionBuilder().hasRoles(7L).query());
    notif.setRecipients(SyncopeClient.getUserSearchConditionBuilder().hasRoles(8L).query());
    notif.setSelfAsRecipient(true);

    notif.setRecipientAttrName("email");
    notif.setRecipientAttrType(IntMappingType.UserSchema);

    // Randomized sender/subject make the resulting e-mail uniquely
    // identifiable in the test mailbox.
    Random rng = new Random(System.currentTimeMillis());
    String sender = "syncope192-" + rng.nextLong() + "@syncope.apache.org";
    notif.setSender(sender);
    String subject = "Test notification " + rng.nextLong();
    notif.setSubject(subject);
    notif.setTemplate("optin");
    notif.setTraceLevel(TraceLevel.NONE);

    assertNotNull(notificationDAO.save(notif));

    // Step 2: create a user matching the "about" clause.
    UserTO userTO = UserTestITCase.getSampleTO(mailAddress);
    MembershipTO membership = new MembershipTO();
    membership.setRoleId(7);
    userTO.getMemberships().add(membership);

    userController.create(userTO);

    // Step 3: run the Quartz job and verify the e-mail was sent.
    notificationJob.execute(null);
    assertTrue(verifyMail(sender, subject));

    // Step 4: locate the generated NotificationTask by its sender.
    Long taskId = null;
    for (NotificationTask notifTask : taskDAO.findAll(NotificationTask.class)) {
        if (sender.equals(notifTask.getSender())) {
            taskId = notifTask.getId();
        }
    }
    assertNotNull(taskId);

    // Step 5: the task must be flagged as executed with a recorded status,
    // but with no persisted executions (trace level NONE).
    NotificationTaskTO task = (NotificationTaskTO) taskController.read(taskId);
    assertNotNull(task);
    assertTrue(task.getExecutions().isEmpty());
    assertTrue(task.isExecuted());
    assertTrue(StringUtils.isNotBlank(task.getLatestExecStatus()));
}