List of usage examples for java.util.Random.nextDouble()
public double nextDouble()
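Returns the next pseudorandom, uniformly distributed double value between 0.0 (inclusive) and 1.0 (exclusive) from this random number generator's sequence. Before the longer real-world examples below, a minimal self-contained sketch of the basic contract (the class name NextDoubleDemo is illustrative, not from any source file):

import java.util.Random;

public class NextDoubleDemo {
    public static void main(String[] args) {
        Random random = new Random();
        // nextDouble() returns a uniform value in [0.0, 1.0)
        double d = random.nextDouble();
        System.out.println("Uniform sample: " + d);
        // A common idiom: scale the sample to an arbitrary range [min, max)
        double min = 5.0, max = 10.0;
        double scaled = min + (max - min) * random.nextDouble();
        System.out.println("Sample in [5, 10): " + scaled);
    }
}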
From source file: info.varden.anatychia.Main.java
public static MaterialDataList materialList(SaveData save, ProgressUpdater pu, MaterialData[] filters) {
    Random random = new Random();
    boolean filtersNull = filters == null;
    pu.updated(0, 1);
    pu.updated(-3, 1);
    MaterialDataList mdl = new MaterialDataList();
    File saveDir = save.getLocation();
    File[] regionFolders = listRegionContainers(saveDir);
    int depth = Integer.MAX_VALUE;
    File shallowest = null;
    for (File f : regionFolders) {
        String path = f.getAbsolutePath();
        Pattern p = Pattern.compile(Pattern.quote(File.separator));
        Matcher m = p.matcher(path);
        int count = 0;
        while (m.find()) {
            count++;
        }
        if (count < depth) {
            depth = count;
            if (shallowest == null || f.getName().equalsIgnoreCase("region")) {
                shallowest = f;
            }
        }
    }
    pu.updated(-1, 1);
    ArrayList<File> regions = new ArrayList<File>();
    int tfs = 0;
    for (File f : regionFolders) {
        String dimName = f.getParentFile().getName();
        boolean deleted = false;
        if (f.equals(shallowest)) {
            dimName = "DIM0";
        }
        if (!filtersNull) {
            for (MaterialData type : filters) {
                if (type.getType() == MaterialType.DIMENSION && type.getName().equals(dimName)) {
                    System.out.println("Deleting: " + dimName);
                    deleted = recursiveDelete(f);
                }
            }
        }
        if (deleted)
            continue;
        mdl.increment(new MaterialData(MaterialType.DIMENSION, dimName, 1L));
        File[] r = f.listFiles(new FilenameFilter() {
            @Override
            public boolean accept(File dir, String name) {
                return name.endsWith(".mca");
            }
        });
        int max = r.length;
        int cur = 0;
        for (File valid : r) {
            cur++;
            try {
                BufferedInputStream bis = new BufferedInputStream(new FileInputStream(valid));
                byte[] offsetHeader = new byte[4096];
                bis.read(offsetHeader, 0, 4096);
                bis.close();
                ByteBuffer bb = ByteBuffer.wrap(offsetHeader);
                IntBuffer ib = bb.asIntBuffer();
                while (ib.remaining() > 0) {
                    if (ib.get() != 0) {
                        tfs++;
                    }
                }
                bb = null;
                ib = null;
            } catch (IOException ex) {
                Logger.getLogger(Main.class.getName()).log(Level.SEVERE, null, ex);
            }
            // tfs += Math.floor(valid.length() / 1000D);
            pu.updated(cur, max);
        }
        regions.addAll(Arrays.asList(r));
    }
    if (regions.size() <= 0) {
        pu.updated(1, 1);
        return mdl;
    }
    pu.updated(-2, 1);
    int fc = 0;
    int fs = 0;
    for (File region : regions) {
        fc++;
        // fs += Math.floor(region.length() / 1000D);
        try {
            RegionFile anvil = new RegionFile(region);
            for (int x = 0; x < 32; x++) {
                for (int z = 0; z < 32; z++) {
                    InputStream is = anvil.getChunkDataInputStream(x, z);
                    if (is == null)
                        continue;
                    NBTInputStream nbti = new NBTInputStream(is, CompressionMode.NONE);
                    CompoundTag root = (CompoundTag) nbti.readTag();
                    String rootName = root.getName();
                    CompoundTag level = (CompoundTag) root.getValue().get("Level");
                    Map<String, Tag> levelTags = level.getValue();
                    ListTag sectionTag = (ListTag) levelTags.get("Sections");
                    ArrayList<Tag> sections = new ArrayList<Tag>(sectionTag.getValue());
                    for (int i = 0; i < sections.size(); i++) {
                        mdl.setSectorsRelative(1);
                        CompoundTag sect = (CompoundTag) sections.get(i);
                        Map<String, Tag> sectTags = sect.getValue();
                        ByteArrayTag blockArray = (ByteArrayTag) sectTags.get("Blocks");
                        byte[] add = new byte[0];
                        boolean hasAdd = false;
                        if (sectTags.containsKey("Add")) {
                            hasAdd = true;
                            ByteArrayTag addArray = (ByteArrayTag) sectTags.get("Add");
                            add = addArray.getValue();
                        }
                        byte[] blocks = blockArray.getValue();
                        for (int j = 0; j < blocks.length; j++) {
                            short id;
                            byte aid = (byte) 0;
                            if (hasAdd) {
                                aid = ChunkFormat.Nibble4(add, j);
                                id = (short) ((blocks[j] & 0xFF) + (aid << 8));
                            } else {
                                id = (short) (blocks[j] & 0xFF);
                            }
                            if (!filtersNull) {
                                for (MaterialData type : filters) {
                                    if (type.getType() == MaterialType.BLOCK
                                            && type.getName().equals(String.valueOf(blocks[j] & 0xFF))
                                            && (type.getRemovalChance() == 1D
                                                    || random.nextDouble() < type.getRemovalChance())) {
                                        blocks[j] = (byte) 0;
                                        if (aid != 0) {
                                            add[j / 2] = (byte) (add[j / 2] & (j % 2 == 0 ? 0xF0 : 0x0F));
                                        }
                                        id = (short) 0;
                                    }
                                }
                            }
                            mdl.increment(new MaterialData(MaterialType.BLOCK, String.valueOf(id), 1L));
                        }
                        if (!filtersNull) {
                            HashMap<String, Tag> rSectTags = new HashMap<String, Tag>();
                            rSectTags.putAll(sectTags);
                            ByteArrayTag bat = new ByteArrayTag("Blocks", blocks);
                            rSectTags.put("Blocks", bat);
                            if (hasAdd) {
                                ByteArrayTag adt = new ByteArrayTag("Add", add);
                                rSectTags.put("Add", adt);
                            }
                            CompoundTag rSect = new CompoundTag(sect.getName(), rSectTags);
                            sections.set(i, rSect);
                        }
                    }
                    ListTag entitiesTag = (ListTag) levelTags.get("Entities");
                    ArrayList<Tag> entities = new ArrayList<Tag>(entitiesTag.getValue());
                    for (int i = entities.size() - 1; i >= 0; i--) {
                        CompoundTag entity = (CompoundTag) entities.get(i);
                        Map<String, Tag> entityTags = entity.getValue();
                        if (entityTags.containsKey("id")) {
                            StringTag idTag = (StringTag) entityTags.get("id");
                            String id = idTag.getValue();
                            boolean removed = false;
                            if (!filtersNull) {
                                for (MaterialData type : filters) {
                                    if (type.getType() == MaterialType.ENTITY
                                            && (type.getName().equals(id) || type.getName().equals(""))
                                            && (type.getRemovalChance() == 1D
                                                    || random.nextDouble() < type.getRemovalChance())) {
                                        if (type.fulfillsRequirements(entity)) {
                                            entities.remove(i);
                                            removed = true;
                                        }
                                    }
                                }
                            }
                            if (!removed) {
                                mdl.increment(new MaterialData(MaterialType.ENTITY, id, 1L));
                            }
                        }
                    }
                    nbti.close();
                    is.close();
                    if (!filtersNull) {
                        HashMap<String, Tag> rLevelTags = new HashMap<String, Tag>();
                        rLevelTags.putAll(levelTags);
                        ListTag rSectionTag = new ListTag("Sections", CompoundTag.class, sections);
                        rLevelTags.put("Sections", rSectionTag);
                        ListTag rEntityTag = new ListTag("Entities", CompoundTag.class, entities);
                        rLevelTags.put("Entities", rEntityTag);
                        final CompoundTag rLevel = new CompoundTag("Level", rLevelTags);
                        HashMap<String, Tag> rRootTags = new HashMap<String, Tag>() {
                            {
                                put("Level", rLevel);
                            }
                        };
                        CompoundTag rRoot = new CompoundTag(rootName, rRootTags);
                        OutputStream os = anvil.getChunkDataOutputStream(x, z);
                        NBTOutputStream nbto = new NBTOutputStream(os, CompressionMode.NONE);
                        nbto.writeTag(rRoot);
                        nbto.close();
                    }
                    fs++;
                    pu.updated(fs, tfs);
                }
            }
            anvil.close();
        } catch (Exception ex) {
            Logger.getLogger(Main.class.getName()).log(Level.SEVERE, null, ex);
        }
    }
    MaterialData[] data = mdl.toArray();
    System.out.println("FILES SCANNED: " + fc);
    for (MaterialData d : data) {
        System.out.println(d.getType().getName() + ": " + d.getName() + " (" + d.getQuantity() + ")");
    }
    return mdl;
}
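The core nextDouble() idiom in this example is probabilistic removal: a block or entity matching a filter is deleted only when a uniform sample falls below its configured removal chance, i.e. random.nextDouble() < type.getRemovalChance(). A minimal, self-contained sketch of that pattern (class and variable names here are illustrative, not from the source above):

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Random;

public class RemovalChanceDemo {
    public static void main(String[] args) {
        Random random = new Random();
        double removalChance = 0.25; // remove roughly 25% of items
        List<String> items = new ArrayList<>(Arrays.asList("a", "b", "c", "d", "e", "f", "g", "h"));
        // nextDouble() is uniform on [0.0, 1.0), so this comparison succeeds
        // with probability removalChance, independently for each item
        items.removeIf(item -> random.nextDouble() < removalChance);
        System.out.println("Survivors: " + items);
    }
}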
From source file: hex.Model.java
public boolean testJavaScoring(Frame data, Frame model_predictions, EasyPredictModelWrapper.Config config,
        double rel_epsilon, double abs_epsilon, double fraction) {
    ModelBuilder mb = ModelBuilder.make(_parms.algoName().toLowerCase(), null, null);
    boolean havePojo = mb.havePojo();
    boolean haveMojo = mb.haveMojo();
    Random rnd = RandomUtils.getRNG(data.byteSize());
    assert data.numRows() == model_predictions.numRows();
    Frame fr = new Frame(data);
    boolean computeMetrics = data.vec(_output.responseName()) != null
            && !data.vec(_output.responseName()).isBad();
    try {
        String[] warns = adaptTestForTrain(fr, true, computeMetrics);
        if (warns.length > 0)
            System.err.println(Arrays.toString(warns));
        // Output is in the model's domain, but needs to be mapped to the scored
        // dataset's domain.
        int[] omap = null;
        if (_output.isClassifier()) {
            Vec actual = fr.vec(_output.responseName());
            String[] sdomain = actual == null ? null : actual.domain(); // Scored/test domain; can be null
            String[] mdomain = model_predictions.vec(0).domain(); // Domain of predictions (union of test and train)
            if (sdomain != null && !Arrays.equals(mdomain, sdomain)) {
                omap = CategoricalWrappedVec.computeMap(mdomain, sdomain); // Map from model-domain to scoring-domain
            }
        }
        String modelName = JCodeGen.toJavaId(_key.toString());
        boolean preview = false;
        GenModel genmodel = null;
        Vec[] dvecs = fr.vecs();
        Vec[] pvecs = model_predictions.vecs();
        double[] features = null;
        int num_errors = 0;
        int num_total = 0;
        // First try internal POJO via fast double[] API
        if (havePojo) {
            try {
                String java_text = toJava(preview, true);
                Class clz = JCodeGen.compile(modelName, java_text);
                genmodel = (GenModel) clz.newInstance();
            } catch (IllegalArgumentException e) {
                e.printStackTrace();
                return true;
            } catch (Exception e) {
                e.printStackTrace();
                throw H2O.fail("Internal POJO compilation failed", e);
            }
            // Check that POJO has the expected interfaces
            for (Class<?> clz : getPojoInterfaces())
                if (!clz.isInstance(genmodel))
                    throw new IllegalStateException("POJO is expected to implement interface " + clz.getName());
            // Check some model metadata
            assert _output.responseName() == null || _output.responseName().equals(genmodel.getResponseName());
            features = MemoryManager.malloc8d(genmodel.nfeatures());
            double[] predictions = MemoryManager.malloc8d(genmodel.nclasses() + 1);
            // Compare predictions, counting mis-predicts
            for (int row = 0; row < fr.numRows(); row++) { // For all rows, single-threaded
                if (rnd.nextDouble() >= fraction)
                    continue;
                num_total++;
                // Native Java API
                for (int col = 0; col < features.length; col++) // Build feature set
                    features[col] = dvecs[col].at(row);
                genmodel.score0(features, predictions); // POJO predictions
                for (int col = _output.isClassifier() ? 1 : 0; col < pvecs.length; col++) { // Compare predictions
                    double d = pvecs[col].at(row); // Load internal scoring predictions
                    if (col == 0 && omap != null)
                        d = omap[(int) d]; // map categorical response to scoring domain
                    if (!MathUtils.compare(predictions[col], d, abs_epsilon, rel_epsilon)) {
                        if (num_errors++ < 10)
                            System.err.println("Predictions mismatch, row " + row + ", col "
                                    + model_predictions._names[col] + ", internal prediction=" + d
                                    + ", POJO prediction=" + predictions[col]);
                        break;
                    }
                }
            }
        }
        // EasyPredict API with POJO and/or MOJO
        for (int i = 0; i < 2; ++i) {
            if (i == 0 && !havePojo)
                continue;
            if (i == 1 && !haveMojo)
                continue;
            if (i == 1) { // MOJO
                final String filename = modelName + ".zip";
                StreamingSchema ss = new StreamingSchema(getMojo(), filename);
                try {
                    FileOutputStream os = new FileOutputStream(ss.getFilename());
                    ss.getStreamWriter().writeTo(os);
                    os.close();
                    genmodel = MojoModel.load(filename);
                    features = MemoryManager.malloc8d(genmodel._names.length);
                } catch (IOException e1) {
                    e1.printStackTrace();
                    throw H2O.fail("Internal MOJO loading failed", e1);
                } finally {
                    boolean deleted = new File(filename).delete();
                    if (!deleted)
                        Log.warn("Failed to delete the file");
                }
            }
            EasyPredictModelWrapper epmw = new EasyPredictModelWrapper(
                    config.setModel(genmodel).setConvertUnknownCategoricalLevelsToNa(true));
            RowData rowData = new RowData();
            BufferedString bStr = new BufferedString();
            for (int row = 0; row < fr.numRows(); row++) { // For all rows, single-threaded
                if (rnd.nextDouble() >= fraction)
                    continue;
                // Generate input row
                for (int col = 0; col < features.length; col++) {
                    if (dvecs[col].isString()) {
                        rowData.put(genmodel._names[col], dvecs[col].atStr(bStr, row).toString());
                    } else {
                        double val = dvecs[col].at(row);
                        rowData.put(genmodel._names[col],
                                genmodel._domains[col] == null ? (Double) val
                                        : Double.isNaN(val) ? val // missing categorical values are kept as NaN; the score0 logic passes it on to bitSetContains()
                                                : (int) val < genmodel._domains[col].length
                                                        ? genmodel._domains[col][(int) val]
                                                        : "UnknownLevel"); // unseen levels are treated as such
                    }
                }
                // Make a prediction
                AbstractPrediction p;
                try {
                    p = epmw.predict(rowData);
                } catch (PredictException e) {
                    num_errors++;
                    if (num_errors < 20) {
                        System.err.println("EasyPredict threw an exception when predicting row " + rowData);
                        e.printStackTrace();
                    }
                    continue;
                }
                // Convert model predictions and "internal" predictions into the same shape
                double[] expected_preds = new double[pvecs.length];
                double[] actual_preds = new double[pvecs.length];
                for (int col = 0; col < pvecs.length; col++) { // Compare predictions
                    double d = pvecs[col].at(row); // Load internal scoring predictions
                    if (col == 0 && omap != null)
                        d = omap[(int) d]; // map categorical response to scoring domain
                    double d2 = Double.NaN;
                    switch (genmodel.getModelCategory()) {
                    case AutoEncoder:
                        d2 = ((AutoEncoderModelPrediction) p).reconstructed[col];
                        break;
                    case Clustering:
                        d2 = ((ClusteringModelPrediction) p).cluster;
                        break;
                    case Regression:
                        d2 = ((RegressionModelPrediction) p).value;
                        break;
                    case Binomial:
                        BinomialModelPrediction bmp = (BinomialModelPrediction) p;
                        d2 = (col == 0) ? bmp.labelIndex : bmp.classProbabilities[col - 1];
                        break;
                    case Multinomial:
                        MultinomialModelPrediction mmp = (MultinomialModelPrediction) p;
                        d2 = (col == 0) ? mmp.labelIndex : mmp.classProbabilities[col - 1];
                        break;
                    case DimReduction:
                        d2 = ((DimReductionModelPrediction) p).dimensions[col];
                        break;
                    }
                    expected_preds[col] = d;
                    actual_preds[col] = d2;
                }
                // Verify the correctness of the prediction
                num_total++;
                for (int col = genmodel.isClassifier() ? 1 : 0; col < pvecs.length; col++) {
                    if (!MathUtils.compare(actual_preds[col], expected_preds[col], abs_epsilon, rel_epsilon)) {
                        num_errors++;
                        if (num_errors < 20) {
                            System.err.println((i == 0 ? "POJO" : "MOJO")
                                    + " EasyPredict Predictions mismatch for row " + row + ":" + rowData);
                            System.err.println("  Expected predictions: " + Arrays.toString(expected_preds));
                            System.err.println("  Actual predictions:   " + Arrays.toString(actual_preds));
                            System.err.println("Difference: " + Math.abs(expected_preds[expected_preds.length - 1]
                                    - actual_preds[actual_preds.length - 1]));
                        }
                        break;
                    }
                }
            }
        }
        if (num_errors != 0)
            System.err.println("Number of errors: " + num_errors
                    + (num_errors > 20 ? " (only first 20 are shown)" : "") + " out of " + num_total
                    + " rows tested.");
        return num_errors == 0;
    } finally {
        Frame.deleteTempFrameAndItsNonSharedVecs(fr, data); // Remove temp keys.
    }
}
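The nextDouble() idiom here is subsampling: "if (rnd.nextDouble() >= fraction) continue;" visits each row independently with probability fraction, so only about that share of rows pays for the expensive comparison, and no explicit sample set is ever materialized. A minimal standalone sketch of the pattern (all names illustrative):

import java.util.Random;

public class FractionSamplingDemo {
    public static void main(String[] args) {
        Random rnd = new Random(42); // fixed seed for reproducibility
        double fraction = 0.1;       // check roughly 10% of rows
        int numRows = 100_000;
        int tested = 0;
        for (int row = 0; row < numRows; row++) {
            if (rnd.nextDouble() >= fraction)
                continue; // skip this row
            tested++;     // the expensive per-row check would go here
        }
        System.out.println("Tested " + tested + " of " + numRows + " rows");
    }
}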
From source file: com.ericbarnhill.arrayMath.ArrayMath.java
/**
 * Returns a {@code double[]} containing uniformly distributed random values.
 *
 * @param fi {@code int} array dimension.
 * @return {@code double[]} array.
 *
 * @since 0.1
 */
public static double[] random(int fi) {
    double[] h = new double[fi];
    Random r = new Random();
    for (int i = 0; i < fi; i++) {
        h[i] = r.nextDouble();
    }
    return h;
}
From source file: com.ericbarnhill.arrayMath.ArrayMath.java
/**
 * Returns a {@code double[][]} containing uniformly distributed random values.
 *
 * @param fi {@code int} first array dimension.
 * @param fj {@code int} second array dimension.
 * @return {@code double[][]} array.
 *
 * @since 0.1
 */
public static double[][] random(int fi, int fj) {
    double[][] h = new double[fi][fj];
    Random r = new Random();
    for (int i = 0; i < fi; i++) {
        for (int j = 0; j < fj; j++) {
            h[i][j] = r.nextDouble();
        }
    }
    return h;
}
From source file: com.ericbarnhill.arrayMath.ArrayMath.java
/**
 * Returns a {@code double[][][]} containing uniformly distributed random values.
 *
 * @param fi {@code int} dimension of the first array level.
 * @param fj {@code int} dimension of the second array level.
 * @param fk {@code int} dimension of the third array level.
 * @return {@code double[][][]} array.
 *
 * @since 0.1
 */
public static double[][][] random(int fi, int fj, int fk) {
    double[][][] h = new double[fi][fj][fk];
    Random r = new Random();
    for (int i = 0; i < fi; i++) {
        for (int j = 0; j < fj; j++) {
            for (int k = 0; k < fk; k++) {
                h[i][j][k] = r.nextDouble();
            }
        }
    }
    return h;
}
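Usage of the three overloads above is straightforward; a sketch, assuming the ArrayMath class is on the classpath:

double[] v = ArrayMath.random(4);           // 4 uniform values in [0.0, 1.0)
double[][] m = ArrayMath.random(3, 5);      // 3x5 array of uniform values
double[][][] t = ArrayMath.random(2, 3, 4); // 2x3x4 array of uniform values

Note that each call constructs its own new Random(); that is fine for occasional use, but for tight loops or reproducible output a single shared (or explicitly seeded) Random instance would be preferable.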
From source file: erigo.filepump.FilePumpWorker.java
public void run() {
    String output_dir_name = pumpSettings.getOutputFolder();
    double files_per_sec = pumpSettings.getFilesPerSec();
    int total_num_files = pumpSettings.getTotNumFiles();
    FilePumpSettings.FileMode mode = pumpSettings.getMode();
    String ftpHost = pumpSettings.getFTPHost();
    String ftpUsername = pumpSettings.getFTPUser();
    String ftpPassword = pumpSettings.getFTPPassword();
    double desired_period = 1 / files_per_sec;
    double sleep_time_millis = desired_period * 1000;
    long time_used_in_last_filename = 0;
    Random random_generator = new Random();
    int random_range = 999999;
    if (mode == FilePumpSettings.FileMode.LOCAL_FILESYSTEM) {
        if (bJustWriteEndFile) {
            System.err.println("\nWrite \"end.txt\" file to " + output_dir_name);
        } else {
            System.err.println("\nWrite files to " + output_dir_name);
        }
    } else if (mode == FilePumpSettings.FileMode.FTP) {
        ftpClient = new FTPClient();
        try {
            login(ftpHost, ftpUsername, ftpPassword);
        } catch (Exception e) {
            System.err.println("Caught exception connecting to FTP server:\n" + e);
            return;
        }
        // Make sure we are only using "/" in output_dir_name
        output_dir_name = output_dir_name.replace('\\', '/');
        if (bJustWriteEndFile) {
            System.err.println("\nWrite \"end.txt\" file out using FTP: host = " + ftpHost + ", username = "
                    + ftpUsername + ", folder = " + output_dir_name);
        } else {
            System.err.println("\nFTP files: host = " + ftpHost + ", username = " + ftpUsername
                    + ", folder = " + output_dir_name);
        }
    } else if (mode == FilePumpSettings.FileMode.SFTP) {
        // Make sure output_dir_name starts with a "/"
        if (output_dir_name.charAt(0) != '/') {
            output_dir_name = "/" + output_dir_name;
        }
        manager = new StandardFileSystemManager();
        try {
            manager.init(); // Just use the default logger
            // manager.setTemporaryFileStore(new DefaultFileReplicator(new File("C:\\TEMP")));
            // Code to set SFTP configuration is largely copied from a submission to the following Stack Overflow post:
            // https://stackoverflow.com/questions/44763915/how-to-skip-password-prompt-during-sftp-using-commons-vfs
            // Sample author: Som, https://stackoverflow.com/users/6416340/som
            // License: Stack Overflow content is covered by the Creative Commons license,
            // https://creativecommons.org/licenses/by-sa/3.0/legalcode
            // Setup our SFTP configuration
            fileSystemOptions = new FileSystemOptions();
            SftpFileSystemConfigBuilder.getInstance().setStrictHostKeyChecking(fileSystemOptions, "no");
            // VFS file system root:
            // setting this parameter false = cause VFS to choose File System's Root as VFS's root
            // setting this parameter true = cause VFS to choose user's home directory as VFS's root
            SftpFileSystemConfigBuilder.getInstance().setUserDirIsRoot(fileSystemOptions, true);
            SftpFileSystemConfigBuilder.getInstance().setTimeout(fileSystemOptions, 10000);
            // The following line was used by the Stack Overflow post author to be able to skip a credentials prompt
            // SftpFileSystemConfigBuilder.getInstance().setPreferredAuthentications(fileSystemOptions, "publickey,keyboard-interactive,password");
        } catch (Exception e) {
            System.err.println("Caught exception setting up Apache Commons VFS manager for SFTP:\n" + e);
            e.printStackTrace();
            return;
        }
        // Make sure we are only using "/" in output_dir_name
        output_dir_name = output_dir_name.replace('\\', '/');
        // Create the base connection String
        // For example, for username "fooUser" and password "fooPW" trying to connect to 192.168.2.56
        // and put files in folder FooFolder:
        //     sftp://fooUser:fooPW@192.168.2.56/FooFolder
        // Note that up above we made sure that output_dir_name starts with "/"
        baseConnectionStr = "sftp://" + ftpUsername + ":" + ftpPassword + "@" + ftpHost + output_dir_name;
        if (bJustWriteEndFile) {
            System.err.println("\nWrite \"end.txt\" file out using SFTP: host = " + ftpHost + ", username = "
                    + ftpUsername + ", folder = " + output_dir_name);
        } else {
            System.err.println("\nSFTP files: host = " + ftpHost + ", username = " + ftpUsername
                    + ", folder = " + output_dir_name);
        }
    }
    //
    // If our only task is to send an end.txt file, go ahead and do it and then return
    //
    if (bJustWriteEndFile) {
        String filename = "end.txt";
        if (mode == FilePumpSettings.FileMode.FTP) {
            writeToFTP(output_dir_name, filename, 1);
        } else if (mode == FilePumpSettings.FileMode.SFTP) {
            writeToSFTP(filename, 1);
        } else {
            File full_filename = new File(output_dir_name, filename);
            FileWriter fw;
            try {
                fw = new FileWriter(full_filename, false);
            } catch (IOException e) {
                System.err.println("Caught IOException trying to create the FileWriter:\n" + e + "\n");
                e.printStackTrace();
                return;
            }
            PrintWriter pw = new PrintWriter(fw);
            pw.format("1\n");
            pw.close();
        }
        if (mode == FilePumpSettings.FileMode.FTP) {
            logout();
        } else if (mode == FilePumpSettings.FileMode.SFTP) {
            manager.close();
        }
        System.err.println("Wrote out \"end.txt\"");
        return;
    }
    //
    // Setup a periodic timer to update the file count on the GUI
    //
    TimerTask timerTask = new FileCountTimerTask(pumpGUI, this);
    // run timer task as daemon thread
    Timer timer = new Timer(true);
    timer.scheduleAtFixedRate(timerTask, 0, 5 * 1000);
    while (pumpGUI.bPumpRunning) {
        long start_time = System.currentTimeMillis();
        // Create the next file
        // Always have time move forward
        // NOTE: The computer's clock being adjusted backward could activate this code
        if (start_time <= time_used_in_last_filename) {
            while (true) {
                try {
                    Thread.sleep(1);
                } catch (InterruptedException ie) {
                    // nothing to do
                }
                start_time = System.currentTimeMillis();
                if (start_time > time_used_in_last_filename) {
                    break;
                }
            }
        }
        ++file_index;
        String filename = Long.toString(start_time) + "_" + Integer.toString(file_index) + ".txt";
        int random_num = (int) ((double) random_range * random_generator.nextDouble());
        if (mode == FilePumpSettings.FileMode.FTP) {
            writeToFTP(output_dir_name, filename, random_num);
        } else if (mode == FilePumpSettings.FileMode.SFTP) {
            writeToSFTP(filename, random_num);
        } else {
            File full_filename = new File(output_dir_name, filename);
            FileWriter fw;
            try {
                fw = new FileWriter(full_filename, false);
            } catch (IOException e) {
                System.err.println("Caught IOException trying to create the FileWriter:\n" + e + "\n");
                e.printStackTrace();
                break;
            }
            PrintWriter pw = new PrintWriter(fw);
            // Write out a random number to the file
            pw.format("%06d\n", random_num);
            pw.close();
        }
        if ((!pumpGUI.bPumpRunning) || (file_index == total_num_files)) {
            break;
        }
        // Sleep
        try {
            long actual_sleep_amount = (long) Math.round(sleep_time_millis);
            if (actual_sleep_amount > 0) {
                Thread.sleep(actual_sleep_amount);
            }
        } catch (InterruptedException ie) {
            // nothing to do
        }
        // Check how we are doing on timing and adjust the sleep time if needed
        long stop_time = System.currentTimeMillis();
        double time_err_secs = desired_period - (double) (stop_time - start_time) / 1000.0;
        // Adjust sleep_time_millis based on this timing error
        sleep_time_millis = sleep_time_millis + 0.25 * time_err_secs * 1000.0;
        // Smallest sleep time is 0
        if (sleep_time_millis < 0) {
            sleep_time_millis = 0.0;
        }
        time_used_in_last_filename = start_time;
    }
    if (mode == FilePumpSettings.FileMode.FTP) {
        logout();
    } else if (mode == FilePumpSettings.FileMode.SFTP) {
        manager.close();
    }
    timer.cancel();
    // Make sure the final file count is displayed in the GUI
    pumpGUI.updateNumFiles_nonEDT(file_index);
    // If we are exiting because the requested number of files have been
    // reached (ie, exiting of our own volition as opposed to someone else
    // canceling the run), then reset the user interface
    if (file_index == total_num_files) {
        pumpGUI.resetGUI_nonEDT();
    }
    if (!pumpGUI.bShowGUI) {
        System.err.print("\n");
    }
    System.err.println("Exiting FilePumpWorker; wrote out " + file_index + " files.");
}
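The nextDouble() idiom in this example derives a bounded int by scaling: (int) (range * random.nextDouble()) yields a value in [0, range). Random.nextInt(bound) produces the same uniform distribution more directly; a small comparison sketch (class name illustrative):

import java.util.Random;

public class BoundedIntDemo {
    public static void main(String[] args) {
        Random random = new Random();
        int range = 999999;
        // Scaling a uniform double, as in the example above: value in [0, range)
        int viaDouble = (int) (range * random.nextDouble());
        // The more direct equivalent for uniform bounded ints
        int viaNextInt = random.nextInt(range);
        System.out.printf("%06d %06d%n", viaDouble, viaNextInt);
    }
}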