List of usage examples for java.util.Random.nextDouble()
public double nextDouble()
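nextDouble() returns the next pseudorandom, uniformly distributed double in [0.0, 1.0) from the generator's sequence. Two idioms recur throughout the examples below: scaling the result onto another range, and comparing it against a probability threshold. A minimal sketch of the basic call and the scaling idiom (class and variable names are illustrative):

import java.util.Random;

public class NextDoubleDemo {
    public static void main(String[] args) {
        Random random = new Random();

        // Uniform value in [0.0, 1.0)
        double u = random.nextDouble();

        // Scale and shift to an arbitrary range [min, max)
        double min = 0.7, max = 5.3;
        double scaled = min + (max - min) * random.nextDouble();

        System.out.println(u + " " + scaled);
    }
}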
From source file:com.comphenix.xp.SampleRange.java
public int sampleInt(Random rnd) {
    /*
     * Imagine our range is 0.7 - 5.3:
     *
     *  0.7  1          2          3          4          5  5.3
     *   |---|----------|----------|----------|----------|---|
     *   |   |          |          |          |          |   |
     *   |   |          |          |          |          |   |
     *   |---|----------|----------|----------|----------|---|
     *
     * The integer part, 1 - 5, is easy. To get a random number between and
     * including 1 and 5, we simply get a random number between 0 and 4
     * and add one.
     *
     * The beginning, 0.7 - 1.0, covers 30% of an integer. One interpretation is
     * that this indicates the probability of getting that integer.
     *
     * So we end up with a 30% probability of getting 0, and a 5.3 - 5 = 30%
     * probability of getting 6.
     */
    int value = 0;

    // Convert the range to an integer equivalent.
    // Notice that we round to shrink the range.
    int a = (int) Math.ceil(start);
    int b = (int) Math.floor(end);

    // Special case
    if ((int) start == (int) end) {
        return sampleIntReduced(rnd);
    }

    // The decimal leftover
    double dA = a - start;
    double dB = end - b;

    // Sample an integer from the range [a, b] (inclusive)
    if (b > a) {
        value = a + rnd.nextInt(b - a + 1); // Add one since nextInt is exclusive
    }

    // The remainder is the probability of choosing the previous value
    if (dA > 0 && rnd.nextDouble() < dA)
        value--;

    // And here it is the probability of choosing the next value
    if (dB > 0 && rnd.nextDouble() < dB)
        value++;

    return value;
}
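The edge behaviour described in the comment can be checked empirically. Below is a standalone sketch with start and end hardcoded to 0.7 and 5.3 and the special cases trimmed away; it tallies how often each integer is produced:

import java.util.Random;

public class SampleRangeCheck {
    static final double START = 0.7, END = 5.3; // the range from the comment above

    static int sampleInt(Random rnd) {
        int a = (int) Math.ceil(START);   // 1
        int b = (int) Math.floor(END);    // 5
        double dA = a - START;            // ~0.3
        double dB = END - b;              // ~0.3
        int value = a + rnd.nextInt(b - a + 1);
        if (rnd.nextDouble() < dA) value--;
        if (rnd.nextDouble() < dB) value++;
        return value;
    }

    public static void main(String[] args) {
        Random rnd = new Random();
        int trials = 1_000_000;
        int[] counts = new int[8];
        for (int i = 0; i < trials; i++) {
            counts[sampleInt(rnd)]++;
        }
        // Some probability mass spills to 0 and 6 through the fractional edges.
        for (int v = 0; v <= 6; v++) {
            System.out.printf("%d: %.4f%n", v, counts[v] / (double) trials);
        }
    }
}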
From source file:org.apache.qpid.amqp_1_0.client.Respond.java
public void run() {
    try {
        _senders = new HashMap<String, Sender>();
        final String queue = getArgs()[0];
        String message = "";
        _conn = newConnection();

        if (isUseMultipleConnections()) {
            _conn2 = newConnection();
            _session2 = _conn2.createSession();
        }

        _session = _conn.createSession();
        _receiver = _session.createReceiver(queue, getMode());
        _txn = null;

        int credit = 0;
        int receivedCount = 0;
        _responseMsgId = UnsignedLong.ZERO;

        Random random = null;
        int batch = 0;
        List<Message> txnMessages = null;
        if (useTran()) {
            if (getRollbackRatio() != 0) {
                random = new Random();
            }
            batch = getBatchSize();
            _txn = _session.createSessionLocalTransaction();
            txnMessages = new ArrayList<Message>(batch);
        }

        for (int i = 0; receivedCount < getCount(); i++) {
            if (credit == 0) {
                if (getCount() - i <= getWindowSize()) {
                    credit = getCount() - i;
                } else {
                    credit = getWindowSize();
                }
                _receiver.setCredit(UnsignedInteger.valueOf(credit), false);
                if (!isBlock())
                    _receiver.drain();
            }

            Message m = isBlock()
                    ? (receivedCount == 0 ? _receiver.receive() : _receiver.receive(10000L))
                    : _receiver.receive(1000L);
            credit--;

            if (m == null) {
                if (useTran() && batch != getBatchSize()) {
                    _txn.commit();
                }
                break;
            }

            System.out.println("Received Message: " + m.getPayload());
            respond(m);

            if (useTran()) {
                txnMessages.add(m);
                if (--batch == 0) {
                    if (getRollbackRatio() == 0 || random.nextDouble() >= getRollbackRatio()) {
                        _txn.commit();
                        txnMessages.clear();
                        receivedCount += getBatchSize();
                    } else {
                        System.out.println("Random Rollback");
                        _txn.rollback();
                        double result;
                        do {
                            _txn = _session.createSessionLocalTransaction();
                            for (Message msg : txnMessages) {
                                respond(msg);
                            }
                            result = random.nextDouble();
                            if (result < getRollbackRatio()) {
                                _txn.rollback();
                            } else {
                                _txn.commit();
                                txnMessages.clear();
                                receivedCount += getBatchSize();
                            }
                        } while (result < getRollbackRatio());
                    }
                    _txn = _session.createSessionLocalTransaction();
                    batch = getBatchSize();
                }
            } else {
                receivedCount++;
            }
        }

        for (Sender s : _senders.values()) {
            s.close();
        }
        _receiver.close();
        _session.close();
        _conn.close();
        System.out.println("Received: " + receivedCount);
    } catch (Connection.ConnectionException e) {
        e.printStackTrace(); // TODO.
    } catch (Sender.SenderClosingException e) {
        e.printStackTrace(); // TODO.
    } catch (Sender.SenderCreationException e) {
        e.printStackTrace(); // TODO.
    } catch (AmqpErrorException e) {
        e.printStackTrace(); // TODO.
    }
}
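The nextDouble() calls here implement probabilistic fault injection: after each batch, the transaction is rolled back (and the batch replayed) with probability getRollbackRatio(). A minimal sketch of that decision pattern in isolation, with illustrative names rather than the Qpid API:

import java.util.Random;

public class RandomRollback {
    public static void main(String[] args) {
        Random random = new Random();
        double rollbackRatio = 0.25; // roll back ~25% of batches

        for (int batch = 0; batch < 10; batch++) {
            if (random.nextDouble() < rollbackRatio) {
                System.out.println("batch " + batch + ": rollback (replay the batch)");
            } else {
                System.out.println("batch " + batch + ": commit");
            }
        }
    }
}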
From source file:pt.fct.di.benchmarks.TPCW_Riak.database.TPCW_Riak_Executor.java
public void CustomerRegistration(String costumer_id) throws Exception {
    String name = (BenchmarkUtil.getRandomAString(8, 13) + " " + BenchmarkUtil.getRandomAString(8, 15));
    String[] names = name.split(" ");
    Random r = new Random();
    int random_int = r.nextInt(1000);
    String key = names[0] + "_" + (costumer_id);
    String pass = names[0].charAt(0) + names[1].charAt(0) + "" + random_int;
    String first_name = names[0];
    String last_name = names[1];
    int phone = r.nextInt(999999999 - 100000000) + 100000000;
    String email = key + "@" + BenchmarkUtil.getRandomAString(2, 9) + ".com";
    double discount = r.nextDouble();
    String adress = "Street: "
            + (BenchmarkUtil.getRandomAString(8, 15) + " " + BenchmarkUtil.getRandomAString(8, 15))
            + " number: " + r.nextInt(500);
    double C_BALANCE = 0.00;
    double C_YTD_PMT = (double) BenchmarkUtil.getRandomInt(0, 99999) / 100.0;
    GregorianCalendar cal = new GregorianCalendar();
    cal.add(Calendar.DAY_OF_YEAR, -1 * BenchmarkUtil.getRandomInt(1, 730));
    java.sql.Date C_SINCE = new java.sql.Date(cal.getTime().getTime());
    cal.add(Calendar.DAY_OF_YEAR, BenchmarkUtil.getRandomInt(0, 60));
    if (cal.after(new GregorianCalendar())) {
        cal = new GregorianCalendar();
    }
    java.sql.Date C_LAST_LOGIN = new java.sql.Date(cal.getTime().getTime());
    java.sql.Timestamp C_LOGIN = new java.sql.Timestamp(System.currentTimeMillis());
    cal = new GregorianCalendar();
    cal.add(Calendar.HOUR, 2);
    java.sql.Timestamp C_EXPIRATION = new java.sql.Timestamp(cal.getTime().getTime());
    cal = BenchmarkUtil.getRandomDate(1880, 2000);
    java.sql.Date C_BIRTHDATE = new java.sql.Date(cal.getTime().getTime());
    String C_DATA = BenchmarkUtil.getRandomAString(100, 500);
    String address_id = insertAdress();
    Customer c = new Customer(costumer_id, key, pass, last_name, first_name, phone + "", email,
            C_SINCE.toString(), C_LAST_LOGIN.toString(), C_LOGIN.toString(), C_EXPIRATION.toString(),
            C_BALANCE, C_YTD_PMT, C_BIRTHDATE.toString(), C_DATA, discount, address_id);
    insert(costumer_id, "customer", c);
}
From source file:SupervisedMetablocking.AbstractSupervisedMetablocking.java
protected void getTrainingSet(int iteration) {
    int trueMetadata = 0;
    Random random = new Random(iteration);
    int matchingInstances = (int) (SAMPLE_SIZE * duplicates.size() + 1);
    // Explicit cast so the ratio is computed in floating point.
    double nonMatchRatio = (double) matchingInstances / (validComparisons - duplicates.size());
    trainingSet = new HashSet<Comparison>(4 * matchingInstances);
    trainingInstances = new Instances("trainingSet", attributes, 2 * matchingInstances);
    trainingInstances.setClassIndex(noOfAttributes - 1);
    // counters = new double[entityIndex.getNoOfEntities()];
    // counters_entro = new double[entityIndex.getNoOfEntities()];
    int matching = 0;
    System.out.println("matchingInstances " + matchingInstances);
    for (AbstractBlock block : blocks) {
        ComparisonIterator iterator = block.getComparisonIterator();
        while (iterator.hasNext()) {
            Comparison comparison = iterator.next();
            final List<Integer> commonBlockIndices = entityIndex.getCommonBlockIndices(block.getBlockIndex(),
                    comparison);
            if (commonBlockIndices == null) {
                continue;
            }

            int match = NON_DUPLICATE; // false
            if (areMatching(comparison)) {
                matching++;
                if (random.nextDouble() < SAMPLE_SIZE) {
                    trueMetadata++;
                    match = DUPLICATE; // true
                } else {
                    continue;
                }
            } else if (nonMatchRatio <= random.nextDouble()) {
                continue;
            }

            trainingSet.add(comparison);
            System.out.println("match " + match + " ");
            Instance newInstance = getFeatures(match, commonBlockIndices, comparison, nonMatchRatio);
            for (int i = 0; i < 5; i++) {
                System.out.print(newInstance.valueSparse(i) + " ");
            }
            System.out.println();
            trainingInstances.add(newInstance);
        }
    }
    sampleMatches.add((double) trueMetadata);
    sampleNonMatches.add((double) (trainingSet.size() - trueMetadata));
    System.out.println("match " + trueMetadata + " " + (trainingSet.size() - trueMetadata)
            + " total dup " + matching);
}
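Both nextDouble() comparisons use the same Bernoulli-sampling idiom: nextDouble() < p keeps an item with probability p. Matches are kept with probability SAMPLE_SIZE and non-matches with probability nonMatchRatio, which balances the training set. A minimal sketch of the idiom on its own (names are illustrative):

import java.util.ArrayList;
import java.util.List;
import java.util.Random;

public class BernoulliSample {
    public static void main(String[] args) {
        Random random = new Random(42); // fixed seed for a reproducible sample
        double keepProbability = 0.1;   // keep roughly 10% of the items

        List<Integer> sample = new ArrayList<>();
        for (int item = 0; item < 100_000; item++) {
            if (random.nextDouble() < keepProbability) {
                sample.add(item);
            }
        }
        System.out.println("kept " + sample.size() + " of 100000");
    }
}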
From source file:se.llbit.chunky.renderer.scene.Scene.java
/**
 * Test if the ray should be killed (Russian Roulette)
 * @param depth
 * @param random
 * @return {@code true} if the ray needs to die now
 */
protected final boolean kill(int depth, Random random) {
    return depth >= rayDepth && random.nextDouble() < .5f;
}
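Russian roulette terminates a ray with probability 0.5 once it reaches the maximum depth, bounding path length without a hard cutoff. To keep the radiance estimate unbiased, a path tracer typically divides a surviving ray's contribution by the survival probability; the weighting in this sketch is that standard correction, not part of the Chunky snippet above:

import java.util.Random;

public class RussianRoulette {
    static final int RAY_DEPTH = 5;
    static final double KILL_PROBABILITY = 0.5;

    /** Returns the weight multiplier for a surviving ray, or -1 if the ray dies. */
    static double roulette(int depth, Random random) {
        if (depth < RAY_DEPTH) {
            return 1.0; // below the depth limit: always survive, no reweighting
        }
        if (random.nextDouble() < KILL_PROBABILITY) {
            return -1.0; // ray dies
        }
        return 1.0 / (1.0 - KILL_PROBABILITY); // compensate so the estimator stays unbiased
    }

    public static void main(String[] args) {
        Random random = new Random();
        for (int depth = 3; depth <= 7; depth++) {
            System.out.println("depth " + depth + ": weight " + roulette(depth, random));
        }
    }
}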
From source file:jhplot.H1D.java
/**
 * Fill the histogram with random numbers from a flat (uniform) distribution.
 * The seed is taken from the current time.
 * Generated values lie in [mean, mean + width), so mean=0 and width=1
 * give a flat distribution between 0 and 1.
 * @param TotNumber number of generated events
 * @param mean offset (lower edge) of the distribution
 * @param width width of the distribution
 */
public void fillRnd(int TotNumber, double mean, double width) {
    java.util.Random random = new java.util.Random();
    for (int i = 0; i < TotNumber; i++)
        h1.fill(width * random.nextDouble() + mean);
}
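Because the seed comes from the current time, every call fills the histogram differently. The same width * nextDouble() + mean scaling works with an explicitly seeded generator when a reproducible fill is needed, for example in tests; a minimal sketch independent of the jhplot API:

import java.util.Random;

public class ReproducibleFill {
    public static void main(String[] args) {
        double mean = 2.0, width = 3.0;      // values in [2.0, 5.0)
        Random random = new Random(12345L);  // fixed seed: identical sequence on every run

        double[] values = new double[5];
        for (int i = 0; i < values.length; i++) {
            values[i] = width * random.nextDouble() + mean;
        }
        for (double v : values) {
            System.out.println(v);
        }
    }
}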
From source file:SupervisedMetablocking.AbstractSupervisedMetablocking.java
private void randomSelection() {
    Random r = new Random();
    int[] levelSize = new int[150];
    int count = 0;
    ArrayList<Instance> list;
    for (int i = 0; i < testSet.size(); i++) {
        list = testSet.get(i);
        for (int j = 0; j < list.size(); j++) {
            count++;
        }
        levelSize[i] = count;
        count = 0;
    }
    // for (int i = 0; i < levelSize.length; i++) {
    //     System.out.println(levelSize[i]);
    // }
    trainingInstances = new Instances("trainingSet", attributes, 500);
    trainingInstances.setClassIndex(noOfAttributes - 1);
    // Double literal avoids integer division (2500 / 50000 == 0).
    double nonMatchRatio = 2500.0 / 50000;
    int trueMetadata = 0, falseP = 0;
    for (int i = 0; i < testSet.size(); i++) {
        list = testSet.get(i);
        count = 0;
        // Iterator<Instance> listit = list.iterator();
        int controle = 0;
        while (controle < list.size()) {
            count++;
            int temp = r.nextInt(levelSize[i]);
            if (temp < 30) {
                // System.out.println(levelSize[i] + " 20928 " + temp);
                Instance ins = list.get(controle++);
                int match = NON_DUPLICATE; // false
                if (ins.valueSparse(attributes.size() - 1) == 1) {
                    trueMetadata++;
                    if (r.nextDouble() < 0.30) {
                        // trueMetadata++;
                        match = DUPLICATE; // true
                    } else {
                        // continue;
                    }
                } else {
                    falseP++;
                }
                if (0.000005 <= r.nextDouble()) {
                    // continue;
                } else
                    falseP++;
                if (ins.valueSparse(attributes.size() - 1) == 0 && ins.valueSparse(0) > 100) {
                    continue;
                }
                trainingInstances.add(ins);
            }
            controle++;
        }
        // System.out.println("apagar");
        // System.out.println(list.size() + "--------\n\n\n");
    }
    System.out.println(" true " + trueMetadata + " falseP " + falseP);
}
From source file:edu.cornell.med.icb.goby.modes.TallyBasesMode.java
/**
 * Run the tally bases mode.
 *
 * @throws java.io.IOException error reading / writing
 */
@Override
public void execute() throws IOException {
    if (basenames.length != 2) {
        System.err.println("Exactly two basenames are supported at this time.");
        System.exit(1);
    }
    final CountsArchiveReader[] archives = new CountsArchiveReader[basenames.length];
    int i = 0;
    for (final String basename : basenames) {
        archives[i++] = new CountsArchiveReader(basename, alternativeCountArhive);
    }
    final CountsArchiveReader archiveA = archives[0];
    final CountsArchiveReader archiveB = archives[1];

    // keep only common reference sequences between the two input count archives.
    final ObjectSet<String> identifiers = new ObjectOpenHashSet<String>();
    identifiers.addAll(archiveA.getIdentifiers());
    identifiers.retainAll(archiveB.getIdentifiers());

    // find the optimal offset A vs B:
    final int offset = offsetString.equals("auto") ? optimizeOffset(archiveA, archiveB, identifiers)
            : Integer.parseInt(offsetString);
    System.out.println("offset: " + offset);

    final RandomAccessSequenceCache cache = new RandomAccessSequenceCache();
    if (cache.canLoad(genomeCacheFilename)) {
        try {
            cache.load(genomeCacheFilename);
        } catch (ClassNotFoundException e) {
            System.err.println("Cannot load cache from disk. Consider deleting the cache and rebuilding.");
            e.printStackTrace();
            System.exit(1);
        }
    } else {
        Reader reader = null;
        try {
            if (genomeFilename.endsWith(".fa") || genomeFilename.endsWith(".fasta")) {
                reader = new FileReader(genomeFilename);
                cache.loadFasta(reader);
            } else if (genomeFilename.endsWith(".fa.gz") || genomeFilename.endsWith(".fasta.gz")) {
                reader = new InputStreamReader(new GZIPInputStream(new FileInputStream(genomeFilename)));
                cache.loadFasta(reader);
            } else {
                System.err.println("The format of the input file is not supported at this time.");
                System.exit(1);
            }
        } finally {
            IOUtils.closeQuietly(reader);
        }
    }
    System.out.println("Will use genome cache basename: " + genomeCacheFilename);
    cache.save(genomeCacheFilename);

    final Random random = new Random(new Date().getTime());
    final double delta = cutoff;
    final int countThreshold = 30;
    final PrintStream output = new PrintStream(outputFilename);
    writeHeader(output, windowSize);

    for (final String referenceSequenceId : identifiers) {
        if (isReferenceIncluded(referenceSequenceId)) {
            final int referenceIndex = cache.getReferenceIndex(referenceSequenceId);
            if (referenceIndex != -1) {
                // sequence in cache.
                System.out.println("Processing sequence " + referenceSequenceId);
                final double sumA = getSumOfCounts(archiveA.getCountReader(referenceSequenceId));
                final double sumB = getSumOfCounts(archiveB.getCountReader(referenceSequenceId));
                final int referenceSize = cache.getSequenceSize(referenceIndex);
                // process this sequence:
                final AnyTransitionCountsIterator iterator = new AnyTransitionCountsIterator(
                        archiveA.getCountReader(referenceSequenceId),
                        new OffsetCountsReader(archiveB.getCountReader(referenceSequenceId), offset));

                while (iterator.hasNextTransition()) {
                    iterator.nextTransition();
                    final int position = iterator.getPosition();
                    final int countA = iterator.getCount(0);
                    final int countB = iterator.getCount(1);
                    if (countA + countB >= countThreshold) {
                        final double foldChange = Math.log1p(countA) - Math.log1p(countB)
                                - Math.log(sumA) + Math.log(sumB);
                        if (foldChange >= delta || foldChange <= -delta) {
                            if (random.nextDouble() < sampleRate) {
                                tallyPosition(cache, referenceIndex, position, foldChange, windowSize,
                                        referenceSize, referenceSequenceId, output, countA, countB, sumA, sumB);
                            }
                        }
                    }
                }
                iterator.close();
            }
        }
        output.flush();
    }
    output.close();
}
From source file:org.apache.accumulo.tserver.TabletServer.java
private static long jitter(long ms) {
    Random r = new Random();
    // add a random 10% wait
    return (long) ((1. + (r.nextDouble() / 10)) * ms);
}
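Stretching each wait by a random fraction keeps many clients from retrying in lockstep. A small usage sketch of the same computation; the retry loop is illustrative, not Accumulo code:

import java.util.Random;

public class JitterDemo {
    private static long jitter(long ms) {
        Random r = new Random();
        // stretch the wait by a random 0-10%
        return (long) ((1. + (r.nextDouble() / 10)) * ms);
    }

    public static void main(String[] args) throws InterruptedException {
        long baseDelayMs = 1000;
        for (int attempt = 0; attempt < 3; attempt++) {
            long wait = jitter(baseDelayMs);
            System.out.println("attempt " + attempt + ": sleeping " + wait + " ms");
            Thread.sleep(wait);
        }
    }
}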
From source file:sadl.run.datagenerators.SmacDataGeneratorMixed.java
private void run() throws IOException, InterruptedException {
    if (Files.notExists(outputDir)) {
        Files.createDirectories(outputDir);
    }
    Files.walk(outputDir).filter(p -> !Files.isDirectory(p)).forEach(p -> {
        try {
            logger.info("Deleting file {}", p);
            Files.delete(p);
        } catch (final Exception e) {
            e.printStackTrace();
        }
    });
    logger.info("Starting to learn TauPTA...");
    int k = 0;
    // parse timed sequences
    TimedInput trainingTimedSequences = TimedInput.parseAlt(Paths.get(dataString), 1);
    final boolean splitTimedEvents = true;
    if (splitTimedEvents) {
        final ButlaPdtaLearner butla = new ButlaPdtaLearner(10000, EventsCreationStrategy.SplitEvents,
                KDEFormelVariant.OriginalKDE);
        final Pair<TimedInput, Map<String, Event>> p = butla.splitEventsInTimedSequences(trainingTimedSequences);
        trainingTimedSequences = p.getKey();
    }
    final Random r = MasterSeed.nextRandom();
    final List<TimedWord> trainSequences = new ArrayList<>();
    final List<TimedWord> testSequences = new ArrayList<>();
    final TauPtaLearner learner = new TauPtaLearner();
    final TauPTA pta = learner.train(trainingTimedSequences);
    final DecimalFormat df = new DecimalFormat("00");
    // final Path p = Paths.get("pta_normal.dot");
    // pta.toGraphvizFile(outputDir.resolve(p), false);
    // final Process ps = Runtime.getRuntime().exec("dot -Tpdf -O " + outputDir.resolve(p));
    // System.out.println(outputDir.resolve(p));
    // ps.waitFor();
    logger.info("Finished TauPTA creation.");
    logger.info("Before inserting anomalies, normal PTA has {} states and {} transitions",
            pta.getStateCount(), pta.getTransitionCount());
    final List<TauPTA> abnormalPtas = new ArrayList<>();
    for (final AnomalyInsertionType type : AnomalyInsertionType.values()) {
        if (type != AnomalyInsertionType.NONE && type != AnomalyInsertionType.ALL) {
            final TauPTA anomaly = SerializationUtils.clone(pta);
            logger.info("inserting Anomaly Type {}", type);
            anomaly.makeAbnormal(type);
            abnormalPtas.add(anomaly);
            if (type == AnomalyInsertionType.TYPE_TWO) {
                anomaly.removeAbnormalSequences(pta);
            }
            logger.info("After inserting anomaly type {}, normal PTA has {} states and {} transitions",
                    type, pta.getStateCount(), pta.getTransitionCount());
        }
    }
    logger.info("After inserting all anomalies, normal PTA has {} states and {} transitions",
            pta.getStateCount(), pta.getTransitionCount());
    final TObjectIntMap<TauPTA> anomalyOccurences = new TObjectIntHashMap<>();
    final Random anomalyChooser = MasterSeed.nextRandom();
    while (k < SAMPLE_FILES) {
        trainSequences.clear();
        testSequences.clear();
        for (int i = 0; i < TRAIN_SIZE; i++) {
            trainSequences.add(pta.sampleSequence());
        }
        for (int i = 0; i < TEST_SIZE; i++) {
            if (r.nextDouble() < ANOMALY_PERCENTAGE) {
                boolean wasAnormal = false;
                TimedWord seq = null;
                final TauPTA chosen = CollectionUtils.chooseRandomObject(abnormalPtas, anomalyChooser);
                while (!wasAnormal) {
                    seq = chosen.sampleSequence();
                    wasAnormal = seq.isAnomaly();
                }
                anomalyOccurences.adjustOrPutValue(chosen, 1, 1);
                testSequences.add(seq);
            } else {
                testSequences.add(pta.sampleSequence());
            }
        }
        final TimedInput trainset = new TimedInput(trainSequences);
        final TimedInput testset = new TimedInput(testSequences);
        final Path outputFile = outputDir.resolve(Paths.get(df.format(k) + "_smac_mixed.txt"));
        try (BufferedWriter bw = Files.newBufferedWriter(outputFile, StandardCharsets.UTF_8)) {
            trainset.toFile(bw, true);
            bw.write('\n');
            bw.write(TRAIN_TEST_SEP);
            bw.write('\n');
            testset.toFile(bw, true);
        }
        logger.info("Wrote file #{} ({})", k, outputFile);
        k++;
    }
    for (final TauPTA anomaly : anomalyOccurences.keySet()) {
        logger.info("Anomaly {} was chosen {} times", anomaly.getAnomalyType(),
                anomalyOccurences.get(anomaly));
    }
}
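Two nextDouble() roles appear in the test-set loop: the mixture decision (draw from an abnormal PTA with probability ANOMALY_PERCENTAGE, from the normal PTA otherwise) and, inside the chosen abnormal model, resampling until the sequence is actually anomalous. The latter is plain rejection sampling; a generic sketch of that resample-until-accepted pattern, with illustrative names:

import java.util.Random;
import java.util.function.Predicate;
import java.util.function.Supplier;

public class RejectionSampling {
    /** Draw from the generator until the sample satisfies the predicate. */
    static <T> T sampleUntil(Supplier<T> generator, Predicate<T> accept) {
        T sample;
        do {
            sample = generator.get();
        } while (!accept.test(sample));
        return sample;
    }

    public static void main(String[] args) {
        Random r = new Random();
        // Toy example: keep drawing until we get a value above 0.9.
        double d = sampleUntil(r::nextDouble, v -> v > 0.9);
        System.out.println(d);
    }
}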