Usage examples for java.lang.Double.NEGATIVE_INFINITY
public static final double NEGATIVE_INFINITY
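Before the library examples below, a minimal standalone sketch (not taken from any of the projects listed here) of how Double.NEGATIVE_INFINITY behaves in comparisons, arithmetic, and detection:

// Minimal standalone sketch of Double.NEGATIVE_INFINITY semantics.
public class NegativeInfinityDemo {
    public static void main(String[] args) {
        double negInf = Double.NEGATIVE_INFINITY;

        // Every finite double compares greater than negative infinity (NaN comparisons are always false).
        System.out.println(-1.0e308 > negInf);          // true
        System.out.println(negInf < Double.MIN_VALUE);  // true

        // Arithmetic: adding any finite value leaves it unchanged; negating it flips the sign.
        System.out.println(negInf + 1.0e100);           // -Infinity
        System.out.println(negInf * -1.0);              // Infinity

        // Detection: isInfinite(), or an exact == comparison (which works, unlike NaN).
        System.out.println(Double.isInfinite(negInf));           // true
        System.out.println(negInf == Double.NEGATIVE_INFINITY);  // true

        // Dividing a negative finite value by zero also produces it.
        System.out.println(-1.0 / 0.0);                 // -Infinity
    }
}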
From source file:ffx.xray.parsers.MTZWriter.java
/**
 * <p>
 * write</p>
 */
public void write() {
    ByteOrder byteOrder = ByteOrder.nativeOrder();
    FileOutputStream fileOutputStream;
    DataOutputStream dataOutputStream;
    try {
        if (logger.isLoggable(Level.INFO)) {
            StringBuilder sb = new StringBuilder();
            sb.append(format("\n Writing MTZ HKL file: \"%s\"\n", fileName));
            logger.info(sb.toString());
        }

        fileOutputStream = new FileOutputStream(fileName);
        dataOutputStream = new DataOutputStream(fileOutputStream);

        byte bytes[] = new byte[80];
        int offset = 0;
        int writeLen = 0;
        int iMapData;
        float fMapData;

        // Header.
        StringBuilder sb = new StringBuilder();
        sb.append("MTZ ");
        dataOutputStream.writeBytes(sb.toString());

        // Header offset.
        int headerOffset = n * nCol + 21;
        ByteBuffer byteBuffer = ByteBuffer.wrap(bytes);
        byteBuffer.order(byteOrder).putInt(headerOffset);

        // Machine code: double, float, int, uchar
        // 0x4441 for LE, 0x1111 for BE
        if (ByteOrder.nativeOrder().equals(ByteOrder.LITTLE_ENDIAN)) {
            iMapData = 0x4441;
        } else {
            iMapData = 0x1111;
        }
        byteBuffer.order(byteOrder).putInt(iMapData);
        dataOutputStream.write(bytes, offset, 8);

        sb = new StringBuilder();
        sb.append(" ");
        sb.setLength(68);
        dataOutputStream.writeBytes(sb.toString());

        // Data.
        Vector<String> colname = new Vector<>(nCol);
        char colType[] = new char[nCol];
        double res[] = new double[2];
        res[0] = Double.POSITIVE_INFINITY;
        res[1] = Double.NEGATIVE_INFINITY;
        float colMinMax[][] = new float[nCol][2];
        for (int i = 0; i < nCol; i++) {
            colMinMax[i][0] = Float.POSITIVE_INFINITY;
            colMinMax[i][1] = Float.NEGATIVE_INFINITY;
        }
        ReflectionSpline sigmaASpline = new ReflectionSpline(reflectionList, refinementData.sigmaa.length);

        int col = 0;
        colname.add("H"); colType[col++] = 'H';
        colname.add("K"); colType[col++] = 'H';
        colname.add("L"); colType[col++] = 'H';
        writeLen += 12;
        if (mtzType != MTZType.FCONLY) {
            colname.add("FO"); colType[col++] = 'F';
            colname.add("SIGFO"); colType[col++] = 'Q';
            colname.add("FreeR"); colType[col++] = 'I';
            writeLen += 12;
        }
        if (mtzType != MTZType.DATAONLY) {
            colname.add("Fs"); colType[col++] = 'F';
            colname.add("PHIFs"); colType[col++] = 'P';
            colname.add("Fc"); colType[col++] = 'F';
            colname.add("PHIFc"); colType[col++] = 'P';
            writeLen += 16;
        }
        if (mtzType == MTZType.ALL) {
            colname.add("FOM"); colType[col++] = 'W';
            colname.add("PHIW"); colType[col++] = 'P';
            colname.add("SigmaAs"); colType[col++] = 'F';
            colname.add("SigmaAw"); colType[col++] = 'Q';
            colname.add("FWT"); colType[col++] = 'F';
            colname.add("PHWT"); colType[col++] = 'P';
            colname.add("DELFWT"); colType[col++] = 'F';
            colname.add("PHDELWT"); colType[col++] = 'P';
            writeLen += 32;
        }

        for (HKL ih : reflectionList.hkllist) {
            col = 0;
            int i = ih.index();
            // Skip the 0 0 0 reflection.
            if (ih.h() == 0 && ih.k() == 0 && ih.l() == 0) { continue; }
            double ss = Crystal.invressq(crystal, ih);
            res[0] = min(ss, res[0]);
            res[1] = max(ss, res[1]);

            // HKL first (3)
            fMapData = ih.h();
            colMinMax[col][0] = min(fMapData, colMinMax[0][0]);
            colMinMax[col][1] = max(fMapData, colMinMax[0][1]);
            byteBuffer.rewind();
            byteBuffer.order(byteOrder).putFloat(fMapData);
            col++;
            fMapData = ih.k();
            colMinMax[col][0] = min(fMapData, colMinMax[1][0]);
            colMinMax[col][1] = max(fMapData, colMinMax[1][1]);
            byteBuffer.order(byteOrder).putFloat(fMapData);
            col++;
            fMapData = ih.l();
            colMinMax[col][0] = min(fMapData, colMinMax[2][0]);
            colMinMax[col][1] = max(fMapData, colMinMax[2][1]);
            byteBuffer.order(byteOrder).putFloat(fMapData);
            col++;

            if (mtzType != MTZType.FCONLY) {
                // F/sigF (2)
                fMapData = (float) refinementData.getF(i);
                if (!isNaN(fMapData)) {
                    colMinMax[col][0] = min(fMapData, colMinMax[col][0]);
                    colMinMax[col][1] = max(fMapData, colMinMax[col][1]);
                }
                byteBuffer.order(byteOrder).putFloat(fMapData);
                col++;
                fMapData = (float) refinementData.getSigF(i);
                if (!isNaN(fMapData)) {
                    colMinMax[col][0] = min(fMapData, colMinMax[col][0]);
                    colMinMax[col][1] = max(fMapData, colMinMax[col][1]);
                }
                byteBuffer.order(byteOrder).putFloat(fMapData);
                col++;
                // Free R (1)
                fMapData = (float) refinementData.getFreeR(i);
                if (!isNaN(fMapData)) {
                    colMinMax[col][0] = min(fMapData, colMinMax[col][0]);
                    colMinMax[col][1] = max(fMapData, colMinMax[col][1]);
                }
                byteBuffer.order(byteOrder).putFloat(fMapData);
                col++;
            }

            if (mtzType == MTZType.FCONLY) {
                // Fs (2)
                fMapData = (float) refinementData.fsF(i);
                colMinMax[col][0] = min(fMapData, colMinMax[col][0]);
                colMinMax[col][1] = max(fMapData, colMinMax[col][1]);
                byteBuffer.order(byteOrder).putFloat(fMapData);
                col++;
                fMapData = (float) toDegrees(refinementData.fsPhi(i));
                colMinMax[col][0] = min(fMapData, colMinMax[col][0]);
                colMinMax[col][1] = max(fMapData, colMinMax[col][1]);
                byteBuffer.order(byteOrder).putFloat(fMapData);
                col++;
                // Fc (unscaled!) (2)
                fMapData = (float) refinementData.fcF(i);
                if (!isNaN(fMapData)) {
                    colMinMax[col][0] = min(fMapData, colMinMax[col][0]);
                    colMinMax[col][1] = max(fMapData, colMinMax[col][1]);
                }
                byteBuffer.order(byteOrder).putFloat(fMapData);
                col++;
                fMapData = (float) Math.toDegrees(refinementData.fcPhi(i));
                if (!isNaN(fMapData)) {
                    colMinMax[col][0] = min(fMapData, colMinMax[col][0]);
                    colMinMax[col][1] = max(fMapData, colMinMax[col][1]);
                }
                byteBuffer.order(byteOrder).putFloat(fMapData);
                col++;
            }

            if (mtzType == MTZType.ALL) {
                // Fs (2)
                fMapData = (float) refinementData.fsF(i);
                colMinMax[col][0] = min(fMapData, colMinMax[col][0]);
                colMinMax[col][1] = max(fMapData, colMinMax[col][1]);
                byteBuffer.order(byteOrder).putFloat(fMapData);
                col++;
                fMapData = (float) toDegrees(refinementData.fsPhi(i));
                colMinMax[col][0] = min(fMapData, colMinMax[col][0]);
                colMinMax[col][1] = max(fMapData, colMinMax[col][1]);
                byteBuffer.order(byteOrder).putFloat(fMapData);
                col++;
                // Fctot (2)
                fMapData = (float) refinementData.fcTotF(i);
                if (!isNaN(fMapData)) {
                    colMinMax[col][0] = min(fMapData, colMinMax[col][0]);
                    colMinMax[col][1] = max(fMapData, colMinMax[col][1]);
                }
                byteBuffer.order(byteOrder).putFloat(fMapData);
                col++;
                fMapData = (float) toDegrees(refinementData.fcTotPhi(i));
                if (!isNaN(fMapData)) {
                    colMinMax[col][0] = Math.min(fMapData, colMinMax[col][0]);
                    colMinMax[col][1] = Math.max(fMapData, colMinMax[col][1]);
                }
                byteBuffer.order(byteOrder).putFloat(fMapData);
                col++;
                // FOM/phase (2)
                fMapData = (float) refinementData.fomphi[i][0];
                if (!isNaN(fMapData)) {
                    colMinMax[col][0] = Math.min(fMapData, colMinMax[col][0]);
                    colMinMax[col][1] = Math.max(fMapData, colMinMax[col][1]);
                }
                byteBuffer.order(byteOrder).putFloat(fMapData);
                col++;
                fMapData = (float) toDegrees(refinementData.fomphi[i][1]);
                colMinMax[col][0] = min(fMapData, colMinMax[col][0]);
                colMinMax[col][1] = max(fMapData, colMinMax[col][1]);
                byteBuffer.order(byteOrder).putFloat(fMapData);
                col++;
                // Spline setup.
                double fh = spline.f(ss, refinementData.spline);
                double sa = sigmaASpline.f(ss, refinementData.sigmaa);
                double wa = sigmaASpline.f(ss, refinementData.sigmaw);
                // sigmaA/w (2)
                fMapData = (float) sa;
                if (!isNaN(fMapData)) {
                    colMinMax[col][0] = min(fMapData, colMinMax[col][0]);
                    colMinMax[col][1] = max(fMapData, colMinMax[col][1]);
                }
                byteBuffer.order(byteOrder).putFloat(fMapData);
                col++;
                fMapData = (float) wa;
                if (!isNaN(fMapData)) {
                    colMinMax[col][0] = min(fMapData, colMinMax[col][0]);
                    colMinMax[col][1] = max(fMapData, colMinMax[col][1]);
                }
                byteBuffer.order(byteOrder).putFloat(fMapData);
                col++;
                // Map coeffs (4).
                fMapData = (float) refinementData.FoFc2F(i);
                colMinMax[col][0] = min(fMapData, colMinMax[col][0]);
                colMinMax[col][1] = max(fMapData, colMinMax[col][1]);
                byteBuffer.order(byteOrder).putFloat(fMapData);
                col++;
                fMapData = (float) toDegrees(refinementData.FoFc2Phi(i));
                colMinMax[col][0] = min(fMapData, colMinMax[col][0]);
                colMinMax[col][1] = max(fMapData, colMinMax[col][1]);
                byteBuffer.order(byteOrder).putFloat(fMapData);
                col++;
                fMapData = (float) refinementData.foFc1F(i);
                colMinMax[col][0] = min(fMapData, colMinMax[col][0]);
                colMinMax[col][1] = max(fMapData, colMinMax[col][1]);
                byteBuffer.order(byteOrder).putFloat(fMapData);
                col++;
                fMapData = (float) toDegrees(refinementData.foFc1Phi(i));
                colMinMax[col][0] = min(fMapData, colMinMax[col][0]);
                colMinMax[col][1] = max(fMapData, colMinMax[col][1]);
                byteBuffer.order(byteOrder).putFloat(fMapData);
                col++;
            }
            dataOutputStream.write(bytes, offset, writeLen);
        }

        // Header.
        sb = new StringBuilder();
        sb.append("VERS MTZ:V1.1 ");
        while (sb.length() < 80) { sb.append(" "); }
        dataOutputStream.writeBytes(sb.toString());

        Date now = new Date();
        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss ");
        sb = new StringBuilder();
        sb.append("TITLE FFX output: " + sdf.format(now));
        while (sb.length() < 80) { sb.append(" "); }
        dataOutputStream.writeBytes(sb.toString());

        sb = new StringBuilder();
        sb.append(String.format("NCOL %8d %12d %8d", nCol, n, 0));
        while (sb.length() < 80) { sb.append(" "); }
        dataOutputStream.writeBytes(sb.toString());

        sb = new StringBuilder();
        sb.append("SORT 0 0 0 0 0 ");
        while (sb.length() < 80) { sb.append(" "); }
        dataOutputStream.writeBytes(sb.toString());

        sb = new StringBuilder();
        char cdata = spaceGroup.shortName.charAt(0);
        if (cdata == 'H') {
            cdata = 'R';
        }
        sb.append(String.format("SYMINF %3d %2d %c %5d %22s %5s", spaceGroup.getNumberOfSymOps(),
                spaceGroup.numPrimitiveSymEquiv, cdata, spaceGroup.number,
                "'" + spaceGroup.shortName + "'", spaceGroup.pointGroupName));
        while (sb.length() < 80) { sb.append(" "); }
        dataOutputStream.writeBytes(sb.toString());

        for (int i = 0; i < spaceGroup.symOps.size(); i++) {
            sb = new StringBuilder();
            sb.append("SYMM ");
            SymOp symop = spaceGroup.symOps.get(i);
            sb.append(symop.toXYZString());
            while (sb.length() < 80) { sb.append(" "); }
            dataOutputStream.writeBytes(sb.toString());
        }

        sb = new StringBuilder();
        sb.append(String.format("RESO %8.6f%13s%8.6f", res[0], " ", res[1]));
        while (sb.length() < 80) { sb.append(" "); }
        dataOutputStream.writeBytes(sb.toString());

        sb = new StringBuilder();
        sb.append("VALM NAN ");
        while (sb.length() < 80) { sb.append(" "); }
        dataOutputStream.writeBytes(sb.toString());

        sb = new StringBuilder();
        sb.append("NDIF 1 ");
        while (sb.length() < 80) { sb.append(" "); }
        dataOutputStream.writeBytes(sb.toString());

        sb = new StringBuilder();
        sb.append("PROJECT 1 project ");
        while (sb.length() < 80) { sb.append(" "); }
        dataOutputStream.writeBytes(sb.toString());

        sb = new StringBuilder();
        sb.append("CRYSTAL 1 crystal ");
        while (sb.length() < 80) { sb.append(" "); }
        dataOutputStream.writeBytes(sb.toString());

        sb = new StringBuilder();
        sb.append("DATASET 1 dataset ");
        while (sb.length() < 80) { sb.append(" "); }
        dataOutputStream.writeBytes(sb.toString());

        for (int j = 0; j < nCol; j++) {
            sb = new StringBuilder();
            sb.append(String.format("COLUMN %-30s %c %17.4f %17.4f 1",
                    colname.get(j), colType[j], colMinMax[j][0], colMinMax[j][1]));
            dataOutputStream.writeBytes(sb.toString());
        }

        sb = new StringBuilder();
        sb.append(String.format("CELL %10.4f %9.4f %9.4f %9.4f %9.4f %9.4f ",
                crystal.a, crystal.b, crystal.c, crystal.alpha, crystal.beta, crystal.gamma));
        while (sb.length() < 80) { sb.append(" "); }
        dataOutputStream.writeBytes(sb.toString());

        sb = new StringBuilder();
        sb.append(String.format("DCELL %9d %10.4f %9.4f %9.4f %9.4f %9.4f %9.4f ",
                1, crystal.a, crystal.b, crystal.c, crystal.alpha, crystal.beta, crystal.gamma));
        while (sb.length() < 80) { sb.append(" "); }
        dataOutputStream.writeBytes(sb.toString());

        sb = new StringBuilder();
        sb.append("DWAVEL 1 1.00000 ");
        while (sb.length() < 80) { sb.append(" "); }
        dataOutputStream.writeBytes(sb.toString());

        sb = new StringBuilder();
        sb.append("END ");
        while (sb.length() < 80) { sb.append(" "); }
        dataOutputStream.writeBytes(sb.toString());

        sb = new StringBuilder();
        sb.append("MTZENDOFHEADERS ");
        while (sb.length() < 80) { sb.append(" "); }
        dataOutputStream.writeBytes(sb.toString());

        dataOutputStream.close();
    } catch (Exception e) {
        String message = "Fatal exception evaluating structure factors.\n";
        logger.log(Level.SEVERE, message, e);
    }
}
From source file:com.aliasi.stats.BinomialDistribution.java
/**
 * Returns the log (base 2) probability of the specified outcome. The
 * probability is determined by the likelihood of the specified number of
 * successes out of the number of trials for this distribution. See the
 * documentation for the method {@link #probability(long)} for an exact
 * definition.
 *
 * @param outcome Number of successes.
 * @return Probability of specified number of successes.
 */
public double log2Probability(long outcome) {
    if (outcome < 0 || outcome > maxOutcome()) {
        return Double.NEGATIVE_INFINITY;
    }
    return log2BinomialCoefficient(mNumTrials, outcome)
            + (((double) outcome) * mBernoulliDistribution.log2Probability(1l))
            + (((double) (mNumTrials - outcome)) * mBernoulliDistribution.log2Probability(0l));
}
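Returning NEGATIVE_INFINITY for an impossible outcome is the usual log-domain convention, since log2(0) is negative infinity. A small standalone sketch of that convention (hypothetical class, not part of LingPipe):

// Hypothetical sketch: log-domain probabilities use NEGATIVE_INFINITY for "probability zero".
class Log2Prob {
    /** Returns log2 of a probability; p == 0 maps to Double.NEGATIVE_INFINITY. */
    static double log2(double p) {
        return Math.log(p) / Math.log(2.0); // Math.log(0.0) is already -Infinity
    }

    static double jointLog2(double[] probs) {
        double sum = 0.0;
        for (double p : probs) {
            sum += log2(p); // a single zero factor drives the whole sum to -Infinity
        }
        return sum;
    }

    public static void main(String[] args) {
        System.out.println(log2(0.0));                                // -Infinity
        System.out.println(jointLog2(new double[] {0.5, 0.25, 0.0})); // -Infinity
        System.out.println(Math.pow(2.0, Double.NEGATIVE_INFINITY));  // 0.0: back to probability zero
    }
}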
From source file:com.androzic.navigation.NavigationService.java
private void clearNavigation() {
    navWaypoint = null;
    prevWaypoint = null;
    navRoute = null;
    navDirection = 0;
    navCurrentRoutePoint = -1;
    navProximity = routeProximity;
    navDistance = 0.0;
    navBearing = 0.0;
    navTurn = 0;
    navVMG = 0.0;
    navETE = Integer.MAX_VALUE;
    navCourse = 0.0;
    navXTK = Double.NEGATIVE_INFINITY;
    vmgav = null;
    avvmg = 0.0;
}
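Here NEGATIVE_INFINITY serves as a "no cross-track error yet" sentinel; unlike NaN, it can be tested with a plain == comparison. A hedged sketch of that idiom with hypothetical field and method names:

// Hypothetical sketch of the "unset" sentinel idiom used for navXTK above.
class CrossTrackState {
    private double xtk = Double.NEGATIVE_INFINITY; // sentinel: not navigating / no fix yet

    void update(double crossTrackKm) {
        xtk = crossTrackKm;
    }

    void clear() {
        xtk = Double.NEGATIVE_INFINITY;
    }

    String describe() {
        // == works for infinities; a NaN sentinel would need Double.isNaN() instead.
        if (xtk == Double.NEGATIVE_INFINITY) {
            return "XTK: ---";
        }
        return String.format("XTK: %.2f km", xtk);
    }
}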
From source file:main.java.edu.mit.compbio.qrf.QRF_spark.java
public void doMain(String[] args) throws Exception {
    CmdLineParser parser = new CmdLineParser(this);
    // parser.setUsageWidth(80);
    try {
        if (help || args.length < 2)
            throw new CmdLineException(USAGE);
        parser.parseArgument(args);
    } catch (CmdLineException e) {
        System.err.println(e.getMessage());
        // print the list of available options
        parser.printUsage(System.err);
        System.err.println();
        return;
    }

    // read input bed file, for each row,
    String modelFile = arguments.get(0);
    String inputFile = arguments.get(1);

    initiate();

    SparkConf sparkConf = new SparkConf().setAppName("QRF_spark");
    JavaSparkContext sc = new JavaSparkContext(sparkConf);

    JavaRDD<LabeledPoint> inputData = sc.textFile(inputFile).map(new Function<String, LabeledPoint>() {
        @Override
        public LabeledPoint call(String line) throws Exception {
            String[] tmp = line.split(sep);
            double[] ds = new double[featureCols.size()];
            for (int i = 0; i < featureCols.size(); i++) {
                ds[i] = Double.parseDouble(tmp[featureCols.get(i) - 1]);
            }
            return new LabeledPoint(Double.parseDouble(tmp[labelCol - 1]), Vectors.dense(ds));
        }
    });

    // Prepare training documents, which are labeled.
    if (train) {
        JavaRDD<LabeledPoint>[] splits = inputData.randomSplit(folds, seed);
        Map<Integer, Integer> categoricalFeaturesInfo = new HashMap<Integer, Integer>();
        RandomForestModel bestModel = null;
        double bestR2 = Double.NEGATIVE_INFINITY;
        double bestMse = Double.MAX_VALUE;
        double mseSum = 0.0;
        RegressionMetrics rmBest = null;
        for (int i = 0; i < kFold; i++) {
            JavaRDD<LabeledPoint> testData = splits[i];
            JavaRDD<LabeledPoint> trainingData = null;
            for (int j = 0; j < kFold; j++) {
                if (j == i)
                    continue;
                if (trainingData != null) {
                    // union() returns a new RDD, so the result must be reassigned.
                    trainingData = trainingData.union(splits[j]);
                } else {
                    trainingData = splits[j];
                }
            }
            final RandomForestModel model = RandomForest.trainRegressor(trainingData, categoricalFeaturesInfo,
                    numTrees, featureSubsetStrategy, impurity, maxDepth, maxBins, seed);
            // Evaluate model on test instances and compute test error
            RDD<Tuple2<Object, Object>> predictionAndLabel = testData
                    .map(new Function<LabeledPoint, Tuple2<Object, Object>>() {
                        @Override
                        public Tuple2<Object, Object> call(LabeledPoint p) {
                            return new Tuple2<Object, Object>(model.predict(p.features()), p.label());
                        }
                    }).rdd();
            RegressionMetrics rm = new RegressionMetrics(predictionAndLabel);
            double r2 = rm.r2();
            mseSum += rm.meanSquaredError();
            if (r2 > bestR2) {
                bestModel = model;
                rmBest = rm;
                bestR2 = r2;
                bestMse = rm.meanSquaredError();
            }
        }
        log.info("After cross validation, best model's MSE is: " + bestMse + "\tMean MSE is: " + mseSum / kFold
                + "\tVariance explained: " + rmBest.explainedVariance() + "\tR2: " + rmBest.r2());
        bestModel.save(sc.sc(), modelFile);
    } else {
        if (outputFile == null)
            throw new IllegalArgumentException(
                    "Need to provide output file name in -outputFile for Non training mode !!");
        // load trained model
        log.info("Loading model ...");
        final RandomForestModel model = RandomForestModel.load(sc.sc(), modelFile);
        inputData.map(new Function<LabeledPoint, String>() {
            @Override
            public String call(LabeledPoint p) {
                double predict = model.predict(p.features());
                String tmp = null;
                for (double s : p.features().toArray()) {
                    if (tmp == null) {
                        tmp = String.valueOf(s);
                    } else {
                        tmp = tmp + "\t" + String.valueOf(s);
                    }
                }
                return tmp + "\t" + p.label() + "\t" + predict;
            }
        }).saveAsTextFile(outputFile + ".tmp");
        log.info("Merging files ...");
        File[] listOfFiles = new File(outputFile + ".tmp").listFiles();
        OutputStream output = new BufferedOutputStream(new FileOutputStream(outputFile, true));
        for (File f : listOfFiles) {
            if (f.isFile() && f.getName().startsWith("part-")) {
                InputStream input = new BufferedInputStream(new FileInputStream(f));
                IOUtils.copy(input, output);
                IOUtils.closeQuietly(input);
            }
        }
        IOUtils.closeQuietly(output);
        FileUtils.deleteDirectory(new File(outputFile + ".tmp"));
        // Evaluate model on test instances and compute test error
        RDD<Tuple2<Object, Object>> predictionAndLabel = inputData
                .map(new Function<LabeledPoint, Tuple2<Object, Object>>() {
                    @Override
                    public Tuple2<Object, Object> call(LabeledPoint p) {
                        return new Tuple2<Object, Object>(model.predict(p.features()), p.label());
                    }
                }).rdd();
        RegressionMetrics rm = new RegressionMetrics(predictionAndLabel);
        log.info("For the test dataset, MSE is: " + rm.meanSquaredError() + "\tVariance explained: "
                + rm.explainedVariance() + "\tR2: " + rm.r2());
    }
    finish();
}
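Initializing bestR2 to NEGATIVE_INFINITY guarantees that the first fold's model is accepted no matter how poor its score. Stripped of the Spark specifics, the selection loop looks roughly like this sketch (hypothetical generic types, not part of the project above):

import java.util.List;
import java.util.function.ToDoubleFunction;

// Hypothetical sketch of the "keep the best-scoring candidate" pattern from the k-fold loop.
class BestPicker {
    static <M> M pickBest(List<M> candidates, ToDoubleFunction<M> score) {
        M best = null;
        double bestScore = Double.NEGATIVE_INFINITY; // any real score beats the initial value
        for (M candidate : candidates) {
            double s = score.applyAsDouble(candidate);
            if (Double.isNaN(s)) {
                continue; // NaN never compares greater anyway, but skip explicitly for clarity
            }
            if (s > bestScore) {
                bestScore = s;
                best = candidate;
            }
        }
        return best; // null only if the list was empty or every score was NaN
    }
}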
From source file:ch.epfl.leb.sass.models.fluorophores.commands.internal.FluorophoreReceiverIT.java
/**
 * Test of generateFluorophoresGrid3D method, of class FluorophoreReceiver.
 */
@Test
public void testGenerateFluorophoresGrid3D() {
    GenerateFluorophoresGrid3D.Builder fluorBuilder = new GenerateFluorophoresGrid3D.Builder();
    fluorBuilder.spacing(4); // Number of fluorophores
    fluorBuilder.zLow(0);
    fluorBuilder.zHigh(5.0);

    // Create the set of fluorophores.
    fluorBuilder.camera(camera).psfBuilder(psfBuilder).fluorDynamics(fluorDynamics).illumination(illumination);
    FluorophoreCommand fluorCommand = fluorBuilder.build();
    List<Fluorophore> fluorophores = fluorCommand.generateFluorophores();
    assertEquals(49, fluorophores.size());

    double minZ = Double.POSITIVE_INFINITY;
    double maxZ = Double.NEGATIVE_INFINITY;
    for (Fluorophore f : fluorophores) {
        if (f.getZ() < minZ)
            minZ = f.getZ();
        if (f.getZ() > maxZ)
            maxZ = f.getZ();
    }
    assertEquals(5.0, maxZ, 0.0001);
    assertEquals(0.0, minZ, 0.0001);
}
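The manual min/max scan over the z coordinates could equally be written as a stream reduction, where NEGATIVE_INFINITY is the identity element for max and POSITIVE_INFINITY for min. A sketch under the assumption that the coordinates are finite doubles (Java 9+ for List.of):

import java.util.List;

// Sketch: NEGATIVE_INFINITY as the identity element of a max reduction.
class ZRange {
    static double maxZ(List<Double> zs) {
        return zs.stream().reduce(Double.NEGATIVE_INFINITY, Math::max);
    }

    static double minZ(List<Double> zs) {
        return zs.stream().reduce(Double.POSITIVE_INFINITY, Math::min);
    }

    public static void main(String[] args) {
        List<Double> zs = List.of(0.0, 2.5, 5.0);
        System.out.println(maxZ(zs)); // 5.0
        System.out.println(minZ(zs)); // 0.0
        // With an empty list the identities come straight back, signalling "no data".
        System.out.println(maxZ(List.of())); // -Infinity
    }
}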
From source file:net.sf.maltcms.chromaui.charts.Chromatogram1DChartProvider.java
/**
 *
 * @param fragments
 * @param ticvar
 * @param useRT
 * @return
 */
public XYPlot provide1DCoPlot(List<IFileFragment> fragments, String ticvar, boolean useRT) {
    final String satVar = "scan_acquisition_time";
    DefaultXYZDataset cd = new DefaultXYZDataset();
    int rowIdx = 0;
    double min = 0;
    double max = 1;
    double minRT = Double.POSITIVE_INFINITY;
    double maxRT = Double.NEGATIVE_INFINITY;
    int y = 0;
    for (IFileFragment f : fragments) {
        double[] domainValues = null;
        if (useRT) {
            domainValues = (double[]) f.getChild(satVar).getArray().get1DJavaArray(double.class);
        } else {
            domainValues = (double[]) f.getChild("scan_index").getArray().get1DJavaArray(double.class);
        }
        double[] tic = (double[]) f.getChild(ticvar).getArray().get1DJavaArray(double.class);
        double maxtic = MathTools.max(tic);
        double mintic = MathTools.min(tic);
        double[][] values = new double[3][tic.length];
        for (int i = 0; i < tic.length; i++) {
            values[0][i] = domainValues[i];
            values[1][i] = y;
            values[2][i] = Math.sqrt((tic[i] - mintic) / (maxtic - mintic));
        }
        y++;
        cd.addSeries(f.getName(), values);
    }
    // ArrayDouble.D1 a = new ArrayDouble.D1(npoints);
    // int offset = 0;
    // for (IFileFragment f : t) {
    //     Array tic = f.getChild(ticvar).getArray();
    //     int len = tic.getShape()[0];
    //     Array.arraycopy(tic, 0, a, offset, len);
    //     offset += len;
    // }
    // histogram with fixed binsize
    // fill intensities into adequate bin, raise count in bin by one
    // afterwards, relative frequency within a bin gives a normalization
    // coefficient
    XYBlockRenderer xyb = new XYBlockRenderer();
    GradientPaintScale ps = new GradientPaintScale(ImageTools.createSampleTable(256), min, max,
            ImageTools.rampToColorArray(new ColorRampReader().readColorRamp("res/colorRamps/bcgyr.csv")));
    xyb.setPaintScale(ps);
    final String[] colnames = new String[fragments.size()];
    for (int i = 0; i < colnames.length; i++) {
        colnames[i] = StringTools.removeFileExt(fragments.get(i).getName());
    }
    NumberAxis na = null;
    if (useRT) {
        na = new NumberAxis("time [s]");
        na.setAutoRangeIncludesZero(false);
        na.setLowerMargin(0);
        na.setUpperMargin(0);
    } else {
        na = new NumberAxis("scan index");
    }
    // na.setVerticalTickLabels(true);
    XYPlot xyp = new XYPlot(cd, na, new SymbolAxis("chromatogram", colnames), xyb);
    xyb.setBlockWidth(1);
    xyp.setBackgroundPaint(Color.BLACK);
    xyb.setBaseToolTipGenerator(new XYZToolTipGenerator() {
        @Override
        public String generateToolTip(XYZDataset xyzd, int i, int i1) {
            return colnames[xyzd.getY(i, i1).intValue()] + " @" + xyzd.getXValue(i, i1) + " = "
                    + xyzd.getZValue(i, i1);
        }

        @Override
        public String generateToolTip(XYDataset xyd, int i, int i1) {
            if (xyd instanceof XYZDataset) {
                return generateToolTip((XYZDataset) xyd, i, i1);
            }
            return colnames[xyd.getY(i, i1).intValue()] + ":" + xyd.getXValue(i, i1);
        }
    });
    return xyp;
}
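The normalization Math.sqrt((tic[i] - mintic) / (maxtic - mintic)) quietly relies on floating-point division semantics: a zero denominator does not throw, it produces an infinity, and 0/0 produces NaN. A small standalone sketch of that behavior with a guard (hypothetical helper, not part of Maltcms):

// Sketch: double division by zero yields infinities (or NaN for 0/0) rather than throwing.
class Normalize {
    static double relative(double value, double min, double max) {
        double range = max - min;
        if (range == 0.0) {
            return 0.0; // guard: all values identical, avoid 0/0 (NaN) and x/0 (±Infinity)
        }
        return (value - min) / range;
    }

    public static void main(String[] args) {
        System.out.println(-3.0 / 0.0);              // -Infinity
        System.out.println(0.0 / 0.0);               // NaN
        System.out.println(relative(5.0, 5.0, 5.0)); // 0.0 thanks to the guard
    }
}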
From source file:edu.cmu.tetrad.search.Ling.java
private void maxMappings(final DoubleMatrix2D matrix, final double min, final double max, final double[][] W,
        final List<Mapping> allMappings) {
    final int numNodes = W.length;

    for (int i = 0; i < numNodes; i++) {
        double maxScore = Double.NEGATIVE_INFINITY;
        double[] maxRow = new double[numNodes];

        for (Mapping mapping : mappingsForRow(i, allMappings)) {
            W[mapping.getI()][mapping.getJ()] = 0;
        }

        try {
            optimizeNonGaussianity(i, matrix, W, allMappings);
            // optimizeOrthogonality(i, min, max, W, allMappings, W.length);
        } catch (IllegalStateException e) {
            e.printStackTrace();
            continue;
        }

        double v = ngFullData(i, matrix, W);

        if (Double.isNaN(v)) continue;
        if (v >= 9999) continue;

        double[] row = new double[numNodes];
        for (int k = 0; k < numNodes; k++) row[k] = W[i][k];

        if (v > maxScore) {
            maxRow = row;
        }

        for (int k = 0; k < numNodes; k++) W[i][k] = maxRow[k];
    }
}
From source file:edu.jhu.hlt.parma.inference.transducers.BackoffConditionalEditModel.java
public void train(AnnotatedString[] xs, AnnotatedString[] ys, double[] weights, int nrestart) {
    assert (xs.length == ys.length);
    assert (ys.length == weights.length);
    assert (nrestart > 0);

    boolean converged = false;
    double ll = Double.NEGATIVE_INFINITY, prevll = Double.NEGATIVE_INFINITY;
    int iter = 0;

    System.err.println("running EM...");
    while (!converged && iter < MIN_EM_ITER) {
        ll = em_step(xs, ys, weights);
        System.err.println("Iter " + iter + ": LL=" + ll);
        if (1.0 - (ll / prevll) < 0.0001) {
            converged = true;
        }
        prevll = ll;
        iter++;
        if (iter > MAX_EM_ITER) break;
    }
}
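Starting prevll at NEGATIVE_INFINITY makes the relative-improvement test 1.0 - (ll / prevll) evaluate to 1.0 on the first iteration, because a finite value divided by negative infinity is a signed zero; the loop therefore cannot declare convergence before it has two real log-likelihoods to compare. A compact sketch of that convergence test in isolation (hypothetical step supplier, not the parma API):

import java.util.function.DoubleSupplier;

// Sketch of an EM-style convergence test seeded with NEGATIVE_INFINITY.
class EmLoop {
    static double run(DoubleSupplier emStep, double tol, int maxIter) {
        double prevll = Double.NEGATIVE_INFINITY;
        double ll = Double.NEGATIVE_INFINITY;
        for (int iter = 0; iter < maxIter; iter++) {
            ll = emStep.getAsDouble(); // one E+M pass, returns the new log-likelihood
            // First pass: ll / prevll is a signed zero, so the "improvement" is 1.0 and we keep going.
            if (1.0 - (ll / prevll) < tol) {
                break;
            }
            prevll = ll;
        }
        return ll;
    }
}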
From source file:net.sf.click.jquery.examples.util.JQInlineValidationHelper.java
public void renderValue(HtmlStringBuffer buffer, double min, double max) {
    if (min == Double.NEGATIVE_INFINITY && max == Double.POSITIVE_INFINITY) {
        return;
    }
    if (buffer.length() > 0) {
        buffer.append(",");
    } else {
        buffer.append("validate[");
    }
    if (min != Double.NEGATIVE_INFINITY) {
        buffer.append("minvalue[").append(min).append("]");
    }
    if (max != Double.POSITIVE_INFINITY) {
        buffer.append("maxvalue[").append(max).append("]");
    }
}
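Treating NEGATIVE_INFINITY and POSITIVE_INFINITY as "no lower bound" and "no upper bound" defaults lets a single range type cover open-ended intervals, as the method above does. A hedged sketch of that convention (hypothetical class, unrelated to the Click library):

// Hypothetical sketch: infinities as "unbounded" defaults for a numeric range check.
class Bounds {
    private final double min;
    private final double max;

    Bounds(double min, double max) {
        this.min = min;
        this.max = max;
    }

    static Bounds unbounded() {
        return new Bounds(Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY);
    }

    boolean hasLowerBound() { return min != Double.NEGATIVE_INFINITY; }
    boolean hasUpperBound() { return max != Double.POSITIVE_INFINITY; }

    boolean contains(double value) {
        // With the unbounded defaults both comparisons are always true for finite values.
        return value >= min && value <= max;
    }
}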
From source file:com.rapidminer.operator.preprocessing.NoiseOperator.java
@Override
public List<ParameterType> getParameterTypes() {
    List<ParameterType> types = super.getParameterTypes();

    ParameterType type = new ParameterTypeInt(PARAMETER_RANDOM_ATTRIBUTES,
            "Adds this number of random attributes.", 0, Integer.MAX_VALUE, 0);
    type.setExpert(false);
    types.add(type);

    type = new ParameterTypeDouble(PARAMETER_LABEL_NOISE,
            "Add this percentage of a numerical label range as a normal distributed noise or probability for a nominal label change.",
            0.0d, Double.POSITIVE_INFINITY, 0.05d);
    type.setExpert(false);
    types.add(type);

    types.add(new ParameterTypeDouble(PARAMETER_DEFAULT_ATTRIBUTE_NOISE,
            "The standard deviation of the default attribute noise.", 0.0d, Double.POSITIVE_INFINITY, 0.0d));

    types.add(new ParameterTypeList(PARAMETER_NOISE, "List of noises for each attributes.",
            new ParameterTypeAttribute("attribute", "To this attribute noise is added.", getExampleSetInputPort()),
            new ParameterTypeDouble(PARAMETER_NOISE,
                    "The strength of gaussian noise, which is added to this attribute.", 0.0d,
                    Double.POSITIVE_INFINITY, 0.05d)));

    type = new ParameterTypeDouble(PARAMETER_OFFSET, "Offset added to the values of each random attribute",
            Double.NEGATIVE_INFINITY, Double.POSITIVE_INFINITY, 0.0d);
    types.add(type);

    type = new ParameterTypeDouble(PARAMETER_LINEAR_FACTOR,
            "Linear factor multiplicated with the values of each random attribute", 0.0d,
            Double.POSITIVE_INFINITY, 1.0d);
    types.add(type);

    types.addAll(RandomGenerator.getRandomGeneratorParameters(this));
    return types;
}