Example usage for java.util Random setSeed

Introduction

On this page you can find example usages of java.util.Random.setSeed, collected from open-source projects.

Prototype

public synchronized void setSeed(long seed) 

Document

Sets the seed of this random number generator using a single long seed.
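As a quick, self-contained sketch before the real-world examples below (the SetSeedDemo class is purely illustrative): reseeding a generator with setSeed(seed) is equivalent to constructing new Random(seed), and two generators given the same seed produce identical sequences.

import java.util.Random;

// Illustrative demo class, not taken from any of the projects below.
public class SetSeedDemo {
    public static void main(String[] args) {
        Random r1 = new Random();
        r1.setSeed(42L); // equivalent to new Random(42L)
        Random r2 = new Random(42L);

        // Both generators now yield exactly the same pseudorandom sequence.
        for (int i = 0; i < 3; ++i) {
            System.out.println(r1.nextLong() + " == " + r2.nextLong());
        }
    }
}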

Usage

From source file:net.sf.mzmine.modules.peaklistmethods.peakpicking.adap3decompositionV1_5.ADAP3DecompositionV1_5SetupDialog.java

/**
 * Cluster list of PeakInfo based on the chromatographic shapes
 * @param peaks list of ADAP peaks
 * @param outClusters output of clusters
 * @param outText output of tooltip text
 * @param outColors output of colors
 */

private void shapeCluster(List<Peak> peaks, List<List<NavigableMap<Double, Double>>> outClusters,
        List<List<String>> outText, List<Double> outColors) {
    NumberFormat numberFormat = NumberFormat.getNumberInstance();

    Double edgeToHeightRatio = parameterSet.getParameter(ADAP3DecompositionV1_5Parameters.EDGE_TO_HEIGHT_RATIO)
            .getValue();
    Double deltaToHeightRatio = parameterSet
            .getParameter(ADAP3DecompositionV1_5Parameters.DELTA_TO_HEIGHT_RATIO).getValue();
    Boolean useIsShared = parameterSet.getParameter(ADAP3DecompositionV1_5Parameters.USE_ISSHARED).getValue();
    Double shapeSimThreshold = parameterSet.getParameter(ADAP3DecompositionV1_5Parameters.SHAPE_SIM_THRESHOLD)
            .getValue();
    Double minModelPeakSharpness = parameterSet
            .getParameter(ADAP3DecompositionV1_5Parameters.MIN_MODEL_SHARPNESS).getValue();
    List<Range<Double>> deprecatedMZValues = parameterSet
            .getParameter(ADAP3DecompositionV1_5Parameters.MZ_VALUES).getValue();

    if (edgeToHeightRatio == null || deltaToHeightRatio == null || useIsShared == null
            || shapeSimThreshold == null || minModelPeakSharpness == null || deprecatedMZValues == null)
        return;

    List<Peak> modelPeakCandidates = TwoStepDecomposition.filterPeaks(peaks, useIsShared, edgeToHeightRatio,
            deltaToHeightRatio, minModelPeakSharpness, deprecatedMZValues);

    if (modelPeakCandidates.isEmpty())
        return;

    List<List<Peak>> clusters = TwoStepDecomposition.getShapeClusters(modelPeakCandidates, shapeSimThreshold);

    outClusters.clear();
    outText.clear();
    outColors.clear();

    // A fixed seed keeps the generated cluster colors identical from run to run
    Random rand = new Random();
    rand.setSeed(0);

    int colorIndex = 0;
    final int numColors = 10;
    final double[] colors = new double[numColors];

    for (int i = 0; i < numColors; ++i)
        colors[i] = rand.nextDouble();

    for (List<Peak> cluster : clusters) {
        List<NavigableMap<Double, Double>> c = new ArrayList<>(cluster.size());

        List<String> texts = new ArrayList<>(cluster.size());

        for (Peak peak : cluster) {
            c.add(peak.getChromatogram());
            texts.add(peak.getInfo() + "\nSharpness: "
                    + numberFormat.format(FeatureTools.sharpnessYang(peak.getChromatogram())));
        }
        outClusters.add(c);
        outText.add(texts);

        outColors.add(colors[colorIndex % numColors]);
        ++colorIndex;
    }
}

From source file:org.apache.hadoop.io.compress.TestCodec.java

public void testGzipCompatibility() throws IOException {
    // Draw a random seed, reseed with it, and log it so a failing run can be reproduced
    Random r = new Random();
    long seed = r.nextLong();
    r.setSeed(seed);
    LOG.info("seed: " + seed);

    DataOutputBuffer dflbuf = new DataOutputBuffer();
    GZIPOutputStream gzout = new GZIPOutputStream(dflbuf);
    byte[] b = new byte[r.nextInt(128 * 1024 + 1)];
    r.nextBytes(b);
    gzout.write(b);
    gzout.close();

    DataInputBuffer gzbuf = new DataInputBuffer();
    gzbuf.reset(dflbuf.getData(), dflbuf.getLength());

    Configuration conf = new Configuration();
    conf.setBoolean("hadoop.native.lib", false);
    CompressionCodec codec = ReflectionUtils.newInstance(GzipCodec.class, conf);
    Decompressor decom = codec.createDecompressor();
    assertNotNull(decom);
    assertEquals(BuiltInGzipDecompressor.class, decom.getClass());
    InputStream gzin = codec.createInputStream(gzbuf, decom);

    dflbuf.reset();
    IOUtils.copyBytes(gzin, dflbuf, 4096);
    final byte[] dflchk = Arrays.copyOf(dflbuf.getData(), dflbuf.getLength());
    assertTrue(java.util.Arrays.equals(b, dflchk));
}

From source file:org.apache.hadoop.io.compress.TestCodec.java

void GzipConcatTest(Configuration conf, Class<? extends Decompressor> decomClass) throws IOException {
    Random r = new Random();
    long seed = r.nextLong();
    r.setSeed(seed);
    LOG.info(decomClass + " seed: " + seed);

    final int CONCAT = r.nextInt(4) + 3;
    final int BUFLEN = 128 * 1024;
    DataOutputBuffer dflbuf = new DataOutputBuffer();
    DataOutputBuffer chkbuf = new DataOutputBuffer();
    byte[] b = new byte[BUFLEN];
    for (int i = 0; i < CONCAT; ++i) {
        GZIPOutputStream gzout = new GZIPOutputStream(dflbuf);
        r.nextBytes(b);
        int len = r.nextInt(BUFLEN);
        int off = r.nextInt(BUFLEN - len);
        chkbuf.write(b, off, len);
        gzout.write(b, off, len);
        gzout.close();
    }
    final byte[] chk = Arrays.copyOf(chkbuf.getData(), chkbuf.getLength());

    CompressionCodec codec = ReflectionUtils.newInstance(GzipCodec.class, conf);
    Decompressor decom = codec.createDecompressor();
    assertNotNull(decom);
    assertEquals(decomClass, decom.getClass());
    DataInputBuffer gzbuf = new DataInputBuffer();
    gzbuf.reset(dflbuf.getData(), dflbuf.getLength());
    InputStream gzin = codec.createInputStream(gzbuf, decom);

    dflbuf.reset();
    IOUtils.copyBytes(gzin, dflbuf, 4096);
    final byte[] dflchk = Arrays.copyOf(dflbuf.getData(), dflbuf.getLength());
    assertTrue(java.util.Arrays.equals(chk, dflchk));
}

From source file:org.apache.hadoop.mapreduce.TestMapCollection.java

@Test
public void testRandom() throws Exception {
    Configuration conf = new Configuration();
    conf.setInt(Job.COMPLETION_POLL_INTERVAL_KEY, 100);
    Job job = Job.getInstance(conf);
    conf = job.getConfiguration();
    conf.setInt(MRJobConfig.IO_SORT_MB, 1);
    conf.setClass("test.mapcollection.class", RandomFactory.class, RecordFactory.class);
    final Random r = new Random();
    final long seed = r.nextLong();
    LOG.info("SEED: " + seed);
    r.setSeed(seed);
    conf.set(MRJobConfig.MAP_SORT_SPILL_PERCENT, Float.toString(Math.max(0.1f, r.nextFloat())));
    RandomFactory.setLengths(conf, r, 1 << 14);
    conf.setInt("test.spillmap.records", r.nextInt(500));
    conf.setLong("test.randomfactory.seed", r.nextLong());
    runTest("random", job);
}

From source file:org.apache.hadoop.mapreduce.TestMapCollection.java

@Test
public void testRandomCompress() throws Exception {
    Configuration conf = new Configuration();
    conf.setInt(Job.COMPLETION_POLL_INTERVAL_KEY, 100);
    Job job = Job.getInstance(conf);
    conf = job.getConfiguration();
    conf.setInt(MRJobConfig.IO_SORT_MB, 1);
    conf.setBoolean(MRJobConfig.MAP_OUTPUT_COMPRESS, true);
    conf.setClass("test.mapcollection.class", RandomFactory.class, RecordFactory.class);
    final Random r = new Random();
    final long seed = r.nextLong();
    LOG.info("SEED: " + seed);
    r.setSeed(seed);
    conf.set(MRJobConfig.MAP_SORT_SPILL_PERCENT, Float.toString(Math.max(0.1f, r.nextFloat())));
    RandomFactory.setLengths(conf, r, 1 << 14);
    conf.setInt("test.spillmap.records", r.nextInt(500));
    conf.setLong("test.randomfactory.seed", r.nextLong());
    runTest("randomCompress", job);
}

From source file:com.qut.middleware.crypto.impl.CryptoProcessorImpl.java

private X509Certificate generateV3Certificate(KeyPair pair, String certSubjectDN, Calendar before,
        Calendar expiry) throws CryptoException {
    X509V3CertificateGenerator cert = new X509V3CertificateGenerator();

    /* Set the certificate serial number to a random number */
    Random rand = new Random();
    rand.setSeed(System.currentTimeMillis());

    /* Generates a non-negative number below 2^31 - 1 as the serial */
    BigInteger serial = BigInteger.valueOf(rand.nextInt(Integer.MAX_VALUE));
    logger.info("Setting X509 Cert Serial to: " + serial);

    cert.setSerialNumber(serial);

    /* Set the certificate issuer */
    cert.setIssuerDN(new X500Principal(this.certIssuerDN));

    /* Set the start of valid period. */
    cert.setNotBefore(before.getTime());

    /* Set the certificate expiry date. */
    cert.setNotAfter(expiry.getTime());

    /* Set the subject */
    cert.setSubjectDN(new X500Principal(certSubjectDN));

    cert.setPublicKey(pair.getPublic());

    /* Signature algorithm; this may need to be changed if not all hosts have SHA-512 and RSA implementations */
    cert.setSignatureAlgorithm("SHA512withRSA");

    cert.addExtension(X509Extensions.BasicConstraints, true, new BasicConstraints(false));

    /* Only for signing */
    cert.addExtension(X509Extensions.KeyUsage, true,
            new KeyUsage(KeyUsage.digitalSignature | KeyUsage.keyCertSign));
    cert.addExtension(X509Extensions.ExtendedKeyUsage, true,
            new ExtendedKeyUsage(KeyPurposeId.id_kp_serverAuth));

    /* Set a contact email address for the issuer */
    cert.addExtension(X509Extensions.SubjectAlternativeName, false,
            new GeneralNames(new GeneralName(GeneralName.rfc822Name, this.certIssuerEmail)));

    logger.debug("Generating X509Certificate for key pair: " + pair);

    try {
        /* Use the BouncyCastle provider to actually generate the X509Certificate now */
        return cert.generateX509Certificate(pair.getPrivate(), "BC");
    } catch (InvalidKeyException e) {
        this.logger.error("InvalidKeyException thrown, " + e.getLocalizedMessage());
        this.logger.debug(e.toString());
        throw new CryptoException(e.getLocalizedMessage(), e);
    } catch (NoSuchProviderException e) {
        this.logger.error("NoSuchProviderException thrown, " + e.getLocalizedMessage());
        this.logger.debug(e.toString());
        throw new CryptoException(e.getLocalizedMessage(), e);
    } catch (SecurityException e) {
        this.logger.error("SecurityException thrown, " + e.getLocalizedMessage());
        this.logger.debug(e.toString());
        throw new CryptoException(e.getLocalizedMessage(), e);
    } catch (SignatureException e) {
        this.logger.error("SignatureException thrown, " + e.getLocalizedMessage());
        this.logger.debug(e.toString());
        throw new CryptoException(e.getLocalizedMessage(), e);
    }

}

From source file:org.apache.hadoop.yarn.util.TestFSDownload.java

@Test(timeout = 10000)
public void testDownloadBadPublic() throws IOException, URISyntaxException, InterruptedException {
    Configuration conf = new Configuration();
    conf.set(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY, "077");
    FileContext files = FileContext.getLocalFSFileContext(conf);
    final Path basedir = files.makeQualified(new Path("target", TestFSDownload.class.getSimpleName()));
    files.mkdir(basedir, null, true);
    conf.setStrings(TestFSDownload.class.getName(), basedir.toString());

    Map<LocalResource, LocalResourceVisibility> rsrcVis = new HashMap<LocalResource, LocalResourceVisibility>();

    Random rand = new Random();
    long sharedSeed = rand.nextLong();
    rand.setSeed(sharedSeed);
    System.out.println("SEED: " + sharedSeed);

    Map<LocalResource, Future<Path>> pending = new HashMap<LocalResource, Future<Path>>();
    ExecutorService exec = Executors.newSingleThreadExecutor();
    LocalDirAllocator dirs = new LocalDirAllocator(TestFSDownload.class.getName());
    int size = 512;
    LocalResourceVisibility vis = LocalResourceVisibility.PUBLIC;
    Path path = new Path(basedir, "test-file");
    LocalResource rsrc = createFile(files, path, size, rand, vis);
    rsrcVis.put(rsrc, vis);
    Path destPath = dirs.getLocalPathForWrite(basedir.toString(), size, conf);
    destPath = new Path(destPath, Long.toString(uniqueNumberGenerator.incrementAndGet()));
    FSDownload fsd = new FSDownload(files, UserGroupInformation.getCurrentUser(), conf, destPath, rsrc);
    pending.put(rsrc, exec.submit(fsd));
    exec.shutdown();
    while (!exec.awaitTermination(1000, TimeUnit.MILLISECONDS))
        ;
    Assert.assertTrue(pending.get(rsrc).isDone());

    try {
        for (Map.Entry<LocalResource, Future<Path>> p : pending.entrySet()) {
            p.getValue().get();
            Assert.fail("We localized a file that is not public.");
        }
    } catch (ExecutionException e) {
        Assert.assertTrue(e.getCause() instanceof IOException);
    }
}

From source file:org.apache.hadoop.yarn.util.TestFSDownload.java

@Test(timeout = 10000)
public void testDirDownload() throws IOException, InterruptedException {
    Configuration conf = new Configuration();
    FileContext files = FileContext.getLocalFSFileContext(conf);
    final Path basedir = files.makeQualified(new Path("target", TestFSDownload.class.getSimpleName()));
    files.mkdir(basedir, null, true);
    conf.setStrings(TestFSDownload.class.getName(), basedir.toString());

    Map<LocalResource, LocalResourceVisibility> rsrcVis = new HashMap<LocalResource, LocalResourceVisibility>();

    Random rand = new Random();
    long sharedSeed = rand.nextLong();
    rand.setSeed(sharedSeed);
    System.out.println("SEED: " + sharedSeed);

    Map<LocalResource, Future<Path>> pending = new HashMap<LocalResource, Future<Path>>();
    ExecutorService exec = Executors.newSingleThreadExecutor();
    LocalDirAllocator dirs = new LocalDirAllocator(TestFSDownload.class.getName());
    for (int i = 0; i < 5; ++i) {
        LocalResourceVisibility vis = LocalResourceVisibility.PRIVATE;
        if (i % 2 == 1) {
            vis = LocalResourceVisibility.APPLICATION;
        }

        Path p = new Path(basedir, "dir" + i + ".jar");
        LocalResource rsrc = createJar(files, p, vis);
        rsrcVis.put(rsrc, vis);
        Path destPath = dirs.getLocalPathForWrite(basedir.toString(), conf);
        destPath = new Path(destPath, Long.toString(uniqueNumberGenerator.incrementAndGet()));
        FSDownload fsd = new FSDownload(files, UserGroupInformation.getCurrentUser(), conf, destPath, rsrc);
        pending.put(rsrc, exec.submit(fsd));
    }

    exec.shutdown();
    while (!exec.awaitTermination(1000, TimeUnit.MILLISECONDS))
        ;
    for (Future<Path> path : pending.values()) {
        Assert.assertTrue(path.isDone());
    }

    try {

        for (Map.Entry<LocalResource, Future<Path>> p : pending.entrySet()) {
            Path localized = p.getValue().get();
            FileStatus status = files.getFileStatus(localized);

            System.out.println("Testing path " + localized);
            assert (status.isDirectory());
            assert (rsrcVis.containsKey(p.getKey()));

            verifyPermsRecursively(localized.getFileSystem(conf), files, localized, rsrcVis.get(p.getKey()));
        }
    } catch (ExecutionException e) {
        throw new IOException("Failed exec", e);
    }
}

From source file:org.apache.hadoop.yarn.util.TestFSDownload.java

@Test(timeout = 10000)
public void testDownload() throws IOException, URISyntaxException, InterruptedException {
    Configuration conf = new Configuration();
    conf.set(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY, "077");
    FileContext files = FileContext.getLocalFSFileContext(conf);
    final Path basedir = files.makeQualified(new Path("target", TestFSDownload.class.getSimpleName()));
    files.mkdir(basedir, null, true);
    conf.setStrings(TestFSDownload.class.getName(), basedir.toString());

    Map<LocalResource, LocalResourceVisibility> rsrcVis = new HashMap<LocalResource, LocalResourceVisibility>();

    Random rand = new Random();
    long sharedSeed = rand.nextLong();
    rand.setSeed(sharedSeed);
    System.out.println("SEED: " + sharedSeed);

    Map<LocalResource, Future<Path>> pending = new HashMap<LocalResource, Future<Path>>();
    ExecutorService exec = Executors.newSingleThreadExecutor();
    LocalDirAllocator dirs = new LocalDirAllocator(TestFSDownload.class.getName());
    int[] sizes = new int[10];
    for (int i = 0; i < 10; ++i) {
        sizes[i] = rand.nextInt(512) + 512;
        LocalResourceVisibility vis = LocalResourceVisibility.PRIVATE;
        if (i % 2 == 1) {
            vis = LocalResourceVisibility.APPLICATION;
        }
        Path p = new Path(basedir, "" + i);
        LocalResource rsrc = createFile(files, p, sizes[i], rand, vis);
        rsrcVis.put(rsrc, vis);
        Path destPath = dirs.getLocalPathForWrite(basedir.toString(), sizes[i], conf);
        destPath = new Path(destPath, Long.toString(uniqueNumberGenerator.incrementAndGet()));
        FSDownload fsd = new FSDownload(files, UserGroupInformation.getCurrentUser(), conf, destPath, rsrc);
        pending.put(rsrc, exec.submit(fsd));
    }

    exec.shutdown();
    while (!exec.awaitTermination(1000, TimeUnit.MILLISECONDS))
        ;
    for (Future<Path> path : pending.values()) {
        Assert.assertTrue(path.isDone());
    }

    try {
        for (Map.Entry<LocalResource, Future<Path>> p : pending.entrySet()) {
            Path localized = p.getValue().get();
            assertEquals(sizes[Integer.parseInt(localized.getName())], p.getKey().getSize());

            FileStatus status = files.getFileStatus(localized.getParent());
            FsPermission perm = status.getPermission();
            assertEquals("Cache directory permissions are incorrect", new FsPermission((short) 0755), perm);

            status = files.getFileStatus(localized);
            perm = status.getPermission();
            System.out
                    .println("File permission " + perm + " for rsrc vis " + p.getKey().getVisibility().name());
            assert (rsrcVis.containsKey(p.getKey()));
            Assert.assertTrue("Private file should be 500",
                    perm.toShort() == FSDownload.PRIVATE_FILE_PERMS.toShort());
        }
    } catch (ExecutionException e) {
        throw new IOException("Failed exec", e);
    }
}

From source file:org.apache.hadoop.yarn.util.TestFSDownload.java

private void downloadWithFileType(TEST_FILE_TYPE fileType)
        throws IOException, URISyntaxException, InterruptedException {
    Configuration conf = new Configuration();
    conf.set(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY, "077");
    FileContext files = FileContext.getLocalFSFileContext(conf);
    final Path basedir = files.makeQualified(new Path("target", TestFSDownload.class.getSimpleName()));
    files.mkdir(basedir, null, true);
    conf.setStrings(TestFSDownload.class.getName(), basedir.toString());

    Random rand = new Random();
    long sharedSeed = rand.nextLong();
    rand.setSeed(sharedSeed);
    System.out.println("SEED: " + sharedSeed);

    Map<LocalResource, Future<Path>> pending = new HashMap<LocalResource, Future<Path>>();
    ExecutorService exec = Executors.newSingleThreadExecutor();
    LocalDirAllocator dirs = new LocalDirAllocator(TestFSDownload.class.getName());

    int size = rand.nextInt(512) + 512;
    LocalResourceVisibility vis = LocalResourceVisibility.PRIVATE;
    Path p = new Path(basedir, "" + 1);
    String strFileName = "";
    LocalResource rsrc = null;
    switch (fileType) {
    case TAR:
        rsrc = createTarFile(files, p, size, rand, vis);
        break;
    case JAR:
        rsrc = createJarFile(files, p, size, rand, vis);
        rsrc.setType(LocalResourceType.PATTERN);
        break;
    case ZIP:
        rsrc = createZipFile(files, p, size, rand, vis);
        strFileName = p.getName() + ".ZIP";
        break;
    case TGZ:
        rsrc = createTgzFile(files, p, size, rand, vis);
        break;
    }
    Path destPath = dirs.getLocalPathForWrite(basedir.toString(), size, conf);
    destPath = new Path(destPath, Long.toString(uniqueNumberGenerator.incrementAndGet()));
    FSDownload fsd = new FSDownload(files, UserGroupInformation.getCurrentUser(), conf, destPath, rsrc);
    pending.put(rsrc, exec.submit(fsd));
    exec.shutdown();
    while (!exec.awaitTermination(1000, TimeUnit.MILLISECONDS))
        ;
    try {
        pending.get(rsrc).get(); // see if there was an Exception during download
        FileStatus[] filesstatus = files.getDefaultFileSystem().listStatus(basedir);
        for (FileStatus filestatus : filesstatus) {
            if (filestatus.isDirectory()) {
                FileStatus[] childFiles = files.getDefaultFileSystem().listStatus(filestatus.getPath());
                for (FileStatus childfile : childFiles) {
                    if (strFileName.endsWith(".ZIP") && childfile.getPath().getName().equals(strFileName)
                            && !childfile.isDirectory()) {
                        Assert.fail("Failure...After unzip, there should have been a"
                                + " directory formed with zip file name but found a file. "
                                + childfile.getPath());
                    }
                    if (childfile.getPath().getName().startsWith("tmp")) {
                        Assert.fail("Tmp File should not have been there " + childfile.getPath());
                    }
                }
            }
        }
    } catch (Exception e) {
        throw new IOException("Failed exec", e);
    }
}