List of usage examples for java.util.Random setSeed
public synchronized void setSeed(long seed)
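Per the Javadoc, setSeed resets the generator's internal state exactly as if it had been constructed with new Random(seed), so re-seeding replays the same sequence. Before the project examples, here is a minimal self-contained sketch of that contract (the class name SetSeedDemo is illustrative, not taken from any of the projects below):

import java.util.Random;

public class SetSeedDemo {
    public static void main(String[] args) {
        Random r = new Random();
        r.setSeed(42L);
        long first = r.nextLong();

        // Re-seeding rewinds the generator: the same seed
        // yields the same sequence of values again.
        r.setSeed(42L);
        System.out.println(first == r.nextLong()); // prints true
    }
}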
From source file: org.apache.hadoop.yarn.util.TestFSDownload.java

@Test(timeout = 60000)
public void testDownloadPublicWithStatCache()
        throws IOException, URISyntaxException, InterruptedException, ExecutionException {
    final Configuration conf = new Configuration();
    FileContext files = FileContext.getLocalFSFileContext(conf);
    Path basedir = files.makeQualified(new Path("target", TestFSDownload.class.getSimpleName()));

    // if test directory doesn't have ancestor permission, skip this test
    FileSystem f = basedir.getFileSystem(conf);
    assumeTrue(FSDownload.ancestorsHaveExecutePermissions(f, basedir, null));

    files.mkdir(basedir, null, true);
    conf.setStrings(TestFSDownload.class.getName(), basedir.toString());

    int size = 512;

    final ConcurrentMap<Path, AtomicInteger> counts = new ConcurrentHashMap<Path, AtomicInteger>();
    final CacheLoader<Path, Future<FileStatus>> loader = FSDownload.createStatusCacheLoader(conf);
    final LoadingCache<Path, Future<FileStatus>> statCache = CacheBuilder.newBuilder()
            .build(new CacheLoader<Path, Future<FileStatus>>() {
                public Future<FileStatus> load(Path path) throws Exception {
                    // increment the count
                    AtomicInteger count = counts.get(path);
                    if (count == null) {
                        count = new AtomicInteger(0);
                        AtomicInteger existing = counts.putIfAbsent(path, count);
                        if (existing != null) {
                            count = existing;
                        }
                    }
                    count.incrementAndGet();
                    // use the default loader
                    return loader.load(path);
                }
            });

    // test FSDownload.isPublic() concurrently
    final int fileCount = 3;
    List<Callable<Boolean>> tasks = new ArrayList<Callable<Boolean>>();
    for (int i = 0; i < fileCount; i++) {
        Random rand = new Random();
        long sharedSeed = rand.nextLong();
        rand.setSeed(sharedSeed);
        System.out.println("SEED: " + sharedSeed);
        final Path path = new Path(basedir, "test-file-" + i);
        createFile(files, path, size, rand);
        final FileSystem fs = path.getFileSystem(conf);
        final FileStatus sStat = fs.getFileStatus(path);
        tasks.add(new Callable<Boolean>() {
            public Boolean call() throws IOException {
                return FSDownload.isPublic(fs, path, sStat, statCache);
            }
        });
    }

    ExecutorService exec = Executors.newFixedThreadPool(fileCount);
    try {
        List<Future<Boolean>> futures = exec.invokeAll(tasks);
        // files should be public
        for (Future<Boolean> future : futures) {
            assertTrue(future.get());
        }
        // for each path exactly one file status call should be made
        for (AtomicInteger count : counts.values()) {
            assertSame(count.get(), 1);
        }
    } finally {
        exec.shutdown();
    }
}
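A recurring idiom in this and several of the Hadoop/Tez tests below: draw a random seed, log it, then pass it to setSeed, so a failing run can be reproduced exactly by hard-coding the logged seed. A stripped-down sketch of just that idiom (class and variable names are illustrative):

import java.util.Random;

public class ReproducibleTest {
    public static void main(String[] args) {
        Random rand = new Random();
        long sharedSeed = rand.nextLong(); // or a hard-coded value to replay a failure
        rand.setSeed(sharedSeed);
        System.out.println("SEED: " + sharedSeed); // log it so the run can be reproduced
        // ... generate test data from rand ...
    }
}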
From source file: eu.crisis_economics.abm.fund.FundTest.java

public void testHouseholdFundMarketInteraction() throws InsufficientFundsException {
    double totalInvestment;
    double[] householdDesiredInvestment = new double[N_HOUSEHOLDS];
    double totalLoanValue;
    int j;
    Random rand = new Random();
    rand.setSeed(12345);
    final MockClearingLoanMarket firstVirtualLoanMarket = new MockClearingLoanMarket(
            myFund, myBank, "Bonds", "3% Coupon Bond", 0.03, clearingHouse),
            secondVirtualLoanMarket = new MockClearingLoanMarket(
            myFund, myBank, "Bonds", "5% Coupon Bond", 0.05, clearingHouse);
    clearingHouse.addMarket(firstVirtualLoanMarket);
    clearingHouse.addMarket(secondVirtualLoanMarket);
    final List<ClearingStockMarket> stocks = new ArrayList<ClearingStockMarket>();
    System.out.println("Testing Household/MutualFund/Market interaction");
    for (j = 0; j < N_FIRMS; ++j) {
        myFund.portfolio.addStock(myFirms[j].getUniqueName());
        myFirms[j].setMarketValue(10.0 * rand.nextDouble());
        myFirms[j].setDividendPerShare(rand.nextDouble() * myFirms[j].getMarketValue());
        final ClearingStockMarket stockMarket = new ClearingStockMarket(myFirms[j], clearingHouse);
        stocks.add(stockMarket);
        clearingHouse.addMarket(stockMarket);
    }
    myFund.setLoansToInvestIn(new AbstractCollectionProvider<ClearingLoanMarket>() {
        @Override
        public Collection<ClearingLoanMarket> get() {
            return clearingHouse.getMarketsOfType(ClearingLoanMarket.class);
        }
    });
    myFund.setStocksToInvestIn(new AbstractCollectionProvider<ClearingStockMarket>() {
        @Override
        public Collection<ClearingStockMarket> get() {
            return clearingHouse.getMarketsOfType(ClearingStockMarket.class);
        }
    });
    for (int i = 0; i < ITERATIONS; ++i) {
        totalInvestment = 0.0;
        for (j = 0; j < N_HOUSEHOLDS; ++j) {
            householdDesiredInvestment[j] = myHouseholds[j].investmentRandomAmount();
            totalInvestment += householdDesiredInvestment[j];
        }
        firstVirtualLoanMarket.expireLoans();
        secondVirtualLoanMarket.expireLoans();
        myFund.preClearingProcessing();
        firstVirtualLoanMarket.process();
        secondVirtualLoanMarket.process();
        for (final ClearingStockMarket market : stocks)
            market.process();
        myFund.postClearingProcessing();
        totalLoanValue = 0.0;
        for (Loan loan : myFund.getAssetLoans()) {
            totalLoanValue += loan.getValue();
        }
        // cashWeight = myFund.getBalance()/(myFund.marketCap()+myFund.equityCapital());
        if (totalLoanValue < totalInvestment) {
            // Assert.assertEquals(cashWeight,
            //     Math.min(MutualFund.INITIAL_CASH_WEIGHT,
            //     (totalInvestment-totalLoanValue+myFund.equityCapital())/(totalInvestment+myFund.equityCapital())),
            //     1e-4);
            for (j = 0; j < N_HOUSEHOLDS; ++j) {
                Assert.assertEquals(myFund.getBalance(myHouseholds[j]), householdDesiredInvestment[j], 1e-8);
                Assert.assertEquals(myHouseholds[j].getEquity(), 200.0, 1e-8);
            }
        }
        Assert.assertEquals(myBank.getEquity(), 1000000.0, 1e-6);
    }
    double oldShareCount = myFund.mInvestmentAccount.getNumberOfEmittedShares();
    myFund.mInvestmentAccount.recountShareDistribution();
    double shareCountDrift = myFund.mInvestmentAccount.getNumberOfEmittedShares() - oldShareCount;
    System.out.println("Share count drift = " + shareCountDrift);
    myFund.portfolio.stockWeights().clear();
    System.out.println("Done testing Household/MutualFund/Market interaction");
}
From source file: org.apache.jackrabbit.oak.spi.blob.AbstractBlobStoreTest.java

private void doTest(int maxLength, int count) throws Exception {
    String[] s = new String[count * 2];
    Random r = new Random(0);
    for (int i = 0; i < s.length;) {
        byte[] data = new byte[r.nextInt(maxLength)];
        r.nextBytes(data);
        s[i++] = store.writeBlob(new ByteArrayInputStream(data));
        s[i++] = store.writeBlob(new ByteArrayInputStream(data));
    }
    // reset the seed and replay the identical sequence to verify the stored blobs
    r.setSeed(0);
    for (int i = 0; i < s.length;) {
        int expectedLen = r.nextInt(maxLength);
        byte[] expectedData = new byte[expectedLen];
        r.nextBytes(expectedData);
        assertEquals(expectedLen, store.getBlobLength(s[i++]));
        String id = s[i++];
        doTestRead(expectedData, expectedLen, id);
    }
}
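The pattern above is worth calling out: the test streams seeded random data into the store, then calls setSeed(0) to rewind the generator and regenerate the identical bytes during verification, so no expected copies need to be kept in memory. A stripped-down sketch of the same idea (names here are illustrative, not from the Oak test):

import java.util.Arrays;
import java.util.Random;

public class ReplayVerify {
    public static void main(String[] args) {
        Random r = new Random(0);
        byte[] written = new byte[64];
        r.nextBytes(written);              // "write" phase: generate and store data

        r.setSeed(0);                      // rewind to the initial state
        byte[] expected = new byte[64];
        r.nextBytes(expected);             // "verify" phase: regenerate the same bytes

        System.out.println(Arrays.equals(written, expected)); // prints true
    }
}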
From source file: edu.cornell.med.icb.goby.stats.TestStatistics.java

@Test
public void testFDR() {
    final Random randomEngine = new Random();
    randomEngine.setSeed(1013);
    final BonferroniAdjustment bonferroni = new BonferroniAdjustment();
    final BenjaminiHochbergAdjustment fdr = new BenjaminiHochbergAdjustment();
    final DifferentialExpressionResults list = new DifferentialExpressionResults();
    final String statId = "t-test-P-value";
    list.declareStatistic(statId);
    final int statIndex = list.getStatisticIndex(statId);
    final int numObservations = 100000;
    final double proportionOfNaN = .1;
    for (int i = 0; i < numObservations; i++) {
        final DifferentialExpressionInfo info = new DifferentialExpressionInfo("element-" + i);
        info.statistics.size(list.getNumberOfStatistics());
        final double random1 = randomEngine.nextDouble();
        final double random2 = randomEngine.nextDouble();
        info.statistics.set(statIndex, random1 < proportionOfNaN ? Double.NaN : random2);
        list.add(info);
    }
    final String secondPValueId = "another-p-value";
    list.declareStatistic(secondPValueId);
    final int statIndex2 = list.getStatisticIndex(secondPValueId);
    for (final DifferentialExpressionInfo info : list) {
        info.statistics.size(list.getNumberOfStatistics());
        info.statistics.set(statIndex2, randomEngine.nextDouble());
    }
    final NormalizationMethod normalizationMethod = new AlignedCountNormalization();
    bonferroni.adjust(list, normalizationMethod, statId, secondPValueId);
    fdr.adjust(list, normalizationMethod, statId, secondPValueId);
    final int index1 = list.getStatisticIndex("t-test-P-value-BH-FDR-q-value");
    final int index2 = list.getStatisticIndex(secondPValueId + "-BH-FDR-q-value");
    final double significanceThreshold = 0.05;
    int numRejectedHypothesesTest1 = 0;
    int numRejectedHypothesesTest2 = 0;
    for (final DifferentialExpressionInfo info : list) {
        final boolean test1 = info.statistics.getDouble(index1) > significanceThreshold;
        if (!test1) {
            numRejectedHypothesesTest1++;
        }
        final boolean test2 = info.statistics.getDouble(index2) > significanceThreshold;
        if (!test2) {
            numRejectedHypothesesTest2++;
        }
    }
    assertTrue("No q-value should be significant after FDR adjustment",
            numRejectedHypothesesTest1 < significanceThreshold * numObservations);
    assertTrue("No q-value should be significant after FDR adjustment",
            numRejectedHypothesesTest2 < significanceThreshold * numObservations);

    // p, adjusted_R_nocummin and adjusted_R are fixture arrays defined elsewhere
    // in the test class; toList is a helper from the same class
    final int n = p.length;
    for (int rank = p.length; rank >= 1; rank--) {
        final int index = rank - 1;
        assertEquals("rank: " + rank, adjusted_R_nocummin[index],
                p[index] * (((double) n) / (double) rank), 0.01);
    }
    {
        final DifferentialExpressionResults list3 = fdr.adjust(toList(p), "p-value");
        System.out.println("list3:" + list3);
        final int index = list3.getStatisticIndex("p-value-BH-FDR-q-value");
        for (final DifferentialExpressionInfo infoAdjusted : list3) {
            final int elementIndex = Integer.parseInt(infoAdjusted.getElementId().toString());
            assertEquals("adjusted p-values must match for i=" + infoAdjusted.getElementId(),
                    adjusted_R[elementIndex], infoAdjusted.statistics.get(index), 0.01);
        }
    }
}
From source file: sf.net.experimaestro.scheduler.SchedulerTest.java

@Test(description = "Run jobs generated at random", dataProvider = "complexDependenciesTestProvider")
public void test_complex_dependencies(ComplexDependenciesParameters p)
        throws ExperimaestroCannotOverwrite, IOException {
    Random random = new Random();
    long seed = p.seed == null ? random.nextLong() : p.seed;
    LOGGER.info("Seed is %d", seed);
    random.setSeed(seed);

    // Prepares directory and counter
    File jobDirectory = mkTestDir();
    ThreadCount counter = new ThreadCount();

    // Our set of jobs
    WaitingJob[] jobs = new WaitingJob[p.nbJobs];

    // --- Generate the dependencies

    // Number of potential dependencies
    int nbCouples = p.nbJobs * (p.nbJobs - 1) / 2;
    // Maximum number of dependencies
    final int maxDependencies = min(p.maxDeps, nbCouples);
    // The list of dependencies
    TreeSet<Link> dependencies = new TreeSet<>();
    // Number of generated dependencies
    int n = min(min((int) (long) (nbCouples * p.dependencyRatio * random.nextDouble()), Integer.MAX_VALUE),
            maxDependencies);
    long[] values = new long[n];
    // Draw n dependencies among nbCouples possible
    RandomSampler.sample(n, nbCouples, n, 0, values, 0, random);
    LOGGER.debug("Sampling %d values from %d", n, nbCouples);
    for (long v : values) {
        final Link link = new Link(v);
        dependencies.add(link);
        LOGGER.debug("LINK %d status %d [%d]", link.from, link.to, v);
        assert link.from < p.nbJobs;
        assert link.to < p.nbJobs;
        assert link.from < link.to;
    }

    // --- Select the jobs that will fail
    ResourceState[] states = new ResourceState[jobs.length];
    for (int i = 0; i < states.length; i++)
        states[i] = ResourceState.DONE;
    n = (int) max(p.minFailures, random.nextDouble() * p.failureRatio * jobs.length);
    long[] values2 = new long[n];
    RandomSampler.sample(n, jobs.length - p.minFailureId, n, p.minFailureId, values2, 0, random);
    for (int i = 0; i < n; i++)
        states[(int) values2[i]] = ResourceState.ERROR;

    // --- Generate token resource
    final TokenResource token;
    if (p.token > 0) {
        token = Transaction.evaluate((em, t) -> {
            final String path = format("scheduler_test/test_complex_dependency/%s", p.name);
            final TokenResource _token = new TokenResource(path, p.token);
            _token.save(t);
            return _token;
        });
    } else {
        token = null;
    }

    final MutableLong totalTime = new MutableLong();

    // --- Generate new jobs
    for (int i = 0; i < jobs.length; i++) {
        final int j = i;
        Transaction.run((em, t) -> {
            int waitingTime = random.nextInt(p.maxExecutionTime - p.minExecutionTime) + p.minExecutionTime;
            jobs[j] = new WaitingJob(counter, jobDirectory, "job" + j,
                    new Action(waitingTime, states[j] == ResourceState.DONE ? 0 : 1, 0));
            totalTime.add(jobs[j].totalTime() + JOB_PROCESSING_TIME);

            ArrayList<String> deps = new ArrayList<>();
            for (Link link : dependencies.subSet(new Link(j, 0), true, new Link(j, Integer.MAX_VALUE), true)) {
                assert j == link.to;
                jobs[j].addDependency(jobs[link.from].createDependency(null));
                if (states[link.from].isBlocking())
                    states[j] = ResourceState.ON_HOLD;
                deps.add(jobs[link.from].toString());
            }

            if (token != null) {
                jobs[j].addDependency(em.find(TokenResource.class, token.getId()).createDependency(null));
            }

            jobs[j].save(t);
            LOGGER.debug("Job [%s] created: final=%s, deps=%s", jobs[j], states[j], Output.toString(", ", deps));
        });
    }

    LOGGER.info("Waiting for jobs to finish (%d remaining) / total time = %dms",
            counter.getCount(), totalTime.longValue());
    waitToFinish(0, counter, jobs, totalTime.longValue(), 5);
    waitBeforeCheck();
    int count = counter.getCount();
    LOGGER.info("Finished waiting [%d]: %d jobs remaining", System.currentTimeMillis(), counter.getCount());
    if (count > 0) {
        LOGGER.error("Time out: %d jobs were not processed", count);
    }

    // --- Check
    LOGGER.info("Checking job states");
    int errors = 0;
    for (int i = 0; i < jobs.length; i++)
        errors += checkState(EnumSet.of(states[i]), jobs[i]);

    LOGGER.info("Checking job dependencies");
    for (Link link : dependencies) {
        if (states[link.from] == ResourceState.DONE && jobs[link.to].getState() == ResourceState.DONE)
            errors += checkSequence(false, true, jobs[link.from], jobs[link.to]);
    }

    Assert.assertTrue(errors == 0, "Detected " + errors + " errors after running jobs");
}
From source file: org.apache.hadoop.hbase.regionserver.TestPerColumnFamilyFlush.java

private void doPut(Table table, long memstoreFlushSize) throws IOException, InterruptedException {
    Region region = getRegionWithName(table.getName()).getFirst();
    // cf1 100B per row, cf2 200B per row and cf3 400B per row
    byte[] qf = Bytes.toBytes("qf");
    Random rand = new Random();
    byte[] value1 = new byte[100];
    byte[] value2 = new byte[200];
    byte[] value3 = new byte[400];
    for (int i = 0; i < 10000; i++) {
        Put put = new Put(Bytes.toBytes("row-" + i));
        // re-seed with the row index so each row's values are reproducible
        rand.setSeed(i);
        rand.nextBytes(value1);
        rand.nextBytes(value2);
        rand.nextBytes(value3);
        put.addColumn(FAMILY1, qf, value1);
        put.addColumn(FAMILY2, qf, value2);
        put.addColumn(FAMILY3, qf, value3);
        table.put(put);
        // slow down to let regionserver flush region.
        while (region.getMemstoreSize() > memstoreFlushSize) {
            Thread.sleep(100);
        }
    }
}
From source file: org.apache.hadoop.mapred.split.TestGroupedSplits.java

@Test(timeout = 10000)
public void testFormat() throws Exception {
    JobConf job = new JobConf(defaultConf);
    Random random = new Random();
    long seed = random.nextLong();
    LOG.info("seed = " + seed);
    random.setSeed(seed);
    localFs.delete(workDir, true);
    FileInputFormat.setInputPaths(job, workDir);

    final int length = 10000;
    final int numFiles = 10;
    createFiles(length, numFiles, random);

    // create a combined split for the files
    TextInputFormat wrappedFormat = new TextInputFormat();
    wrappedFormat.configure(job);
    TezGroupedSplitsInputFormat<LongWritable, Text> format =
            new TezGroupedSplitsInputFormat<LongWritable, Text>();
    format.setConf(job);
    format.setDesiredNumberOfSplits(1);
    format.setInputFormat(wrappedFormat);

    LongWritable key = new LongWritable();
    Text value = new Text();
    for (int i = 0; i < 3; i++) {
        int numSplits = random.nextInt(length / 20) + 1;
        LOG.info("splitting: requesting = " + numSplits);
        InputSplit[] splits = format.getSplits(job, numSplits);
        LOG.info("splitting: got = " + splits.length);

        // we should have a single split as the length is comfortably smaller than
        // the block size
        assertEquals("We got more than one splits!", 1, splits.length);
        InputSplit split = splits[0];
        assertEquals("It should be TezGroupedSplit", TezGroupedSplit.class, split.getClass());

        // check the split
        BitSet bits = new BitSet(length);
        LOG.debug("split= " + split);
        RecordReader<LongWritable, Text> reader = format.getRecordReader(split, job, voidReporter);
        try {
            int count = 0;
            while (reader.next(key, value)) {
                int v = Integer.parseInt(value.toString());
                LOG.debug("read " + v);
                if (bits.get(v)) {
                    LOG.warn("conflict with " + v + " at position " + reader.getPos());
                }
                assertFalse("Key in multiple partitions.", bits.get(v));
                bits.set(v);
                count++;
            }
            LOG.info("splits=" + split + " count=" + count);
        } finally {
            reader.close();
        }
        assertEquals("Some keys in no partition.", length, bits.cardinality());
    }
}
From source file: com.hichinaschool.flashcards.libanki.Collection.java

public int _dueForDid(long did, int due) {
    JSONObject conf = mDecks.confForDid(did);
    // in order due?
    try {
        if (conf.getJSONObject("new").getInt("order") == Sched.NEW_CARDS_DUE) {
            return due;
        } else {
            // random mode; seed with note ts so all cards of this note get
            // the same random number
            Random r = new Random();
            r.setSeed(due);
            return r.nextInt(Math.max(due, 1000) - 1) + 1;
        }
    } catch (JSONException e) {
        throw new RuntimeException(e);
    }
}
From source file: com.rks.musicx.services.MusicXService.java

@Override
public void shuffle() {
    if (playList.size() > 0) {
        Random rand = new Random();
        long speed = System.nanoTime();
        rand.setSeed(speed);
        Collections.shuffle(playList, rand);
        Log.d(TAG, "shuffle playlist");
    }
}
From source file: com.ichi2.libanki.Collection.java

public int _dueForDid(long did, int due) {
    JSONObject conf = mDecks.confForDid(did);
    // in order due?
    try {
        if (conf.getJSONObject("new").getInt("order") == Consts.NEW_CARDS_DUE) {
            return due;
        } else {
            // random mode; seed with note ts so all cards of this note get
            // the same random number
            Random r = new Random();
            r.setSeed(due);
            return r.nextInt(Math.max(due, 1000) - 1) + 1;
        }
    } catch (JSONException e) {
        throw new RuntimeException(e);
    }
}