List of usage examples for java.lang.Math.floor
public static double floor(double a)
From source file:com.qtplaf.library.util.NumberUtils.java
/** Numeric helpers (excerpt from com.qtplaf.library.util.NumberUtils). */
public class NumberUtils {

    /**
     * Floors a number at the given number of decimal places.
     * <p>
     * The decimal count may be negative, in which case the number is floored
     * to tens, hundreds, etc. (e.g. {@code floor(1234.5, -2) == 1200.0}).
     *
     * @param number   the source number
     * @param decimals the number of decimal places to keep (may be negative)
     * @return {@code number} floored at {@code decimals} decimal places
     */
    public static double floor(double number, int decimals) {
        // Scale up, drop the fraction, scale back down. The scale factor is
        // computed once and reused for both directions.
        final double scale = Math.pow(10, decimals);
        return Math.floor(number * scale) / scale;
    }
}
From source file:marytts.util.string.StringUtils.java
public static String[] indexedNameGenerator(String preName, int numFiles, int startIndex, String postName, String extension) {// w w w.j a va 2s.com int numDigits = 0; if (numFiles > 0) numDigits = (int) Math.floor(Math.log10(startIndex + numFiles - 1)); return indexedNameGenerator(preName, numFiles, startIndex, postName, extension, numDigits); }
From source file:org.adempiere.apps.graph.PerformanceIndicator.java
/**
 * Builds the meter (gauge) chart that visualizes this performance goal.
 * <p>
 * Side effects: sets {@code m_text} (goal name plus percent or actual
 * measure), the component tool-tip, the foreground color, and {@code m_line}
 * (the performance-line width, capped at {@code s_width100}).
 * The gauge intervals are taken from the goal's {@code MColorSchema} marks.
 *
 * @return the assembled {@link JFreeChart} (never null)
 */
private JFreeChart createChart() {
    JFreeChart chart = null;
    // Set Text: "<name>: <percent>%" for target goals, "<name>: <actual>" otherwise.
    StringBuffer text = new StringBuffer(m_goal.getName());
    if (m_goal.isTarget())
        text.append(": ").append(m_goal.getPercent()).append("%");
    else
        text.append(": ").append(s_format.format(m_goal.getMeasureActual()));
    m_text = text.toString();
    // ToolTip: "<description>: <actual> of <target>" (parts included when available).
    text = new StringBuffer();
    if (m_goal.getDescription() != null)
        text.append(m_goal.getDescription()).append(": ");
    text.append(s_format.format(m_goal.getMeasureActual()));
    if (m_goal.isTarget())
        text.append(" ").append(Msg.getMsg(Env.getCtx(), "of")).append(" ")
                .append(s_format.format(m_goal.getMeasureTarget()));
    setToolTipText(text.toString());
    //
    //setBackground(m_goal.getColor());
    setForeground(GraphUtil.getForeground(getBackground()));
    // Performance Line: width proportional to goal performance, capped at 100%.
    int percent = m_goal.getPercent();
    if (percent > 100) // draw 100% line
        m_line = s_width100;
    else // draw Performance Line
        m_line = s_width100 * m_goal.getGoalPerformanceDouble();
    String title = m_text; // NOTE(review): never read below — candidate for removal.
    DefaultValueDataset data = new DefaultValueDataset((float) m_goal.getPercent());
    MeterPlot plot = new MeterPlot(data);
    // Build up to four colored gauge intervals from the color schema's marks.
    MColorSchema colorSchema = m_goal.getColorSchema();
    int rangeLo = 0;
    int rangeHi = 0;
    for (int i = 1; i <= 4; i++) {
        switch (i) {
        case 1:
            rangeHi = colorSchema.getMark1Percent();
            break;
        case 2:
            rangeHi = colorSchema.getMark2Percent();
            break;
        case 3:
            rangeHi = colorSchema.getMark3Percent();
            break;
        case 4:
            rangeHi = colorSchema.getMark4Percent();
            break;
        }
        // 9999 is a sentinel for "open-ended": extend the previous bound by 50%.
        if (rangeHi == 9999)
            rangeHi = (int) Math.floor(rangeLo * 1.5);
        // Only add non-empty intervals; marks that don't increase the bound are skipped.
        if (rangeLo < rangeHi) {
            plot.addInterval(new MeterInterval("Normal", //label
                    new Range(rangeLo, rangeHi), //range
                    colorSchema.getColor(rangeHi), new BasicStroke(7.0f), //Color.lightGray
                    new Color(-13091716) //Color.gray
            ));
            rangeLo = rangeHi;
        }
    }
    // Gauge spans from 0 to the last interval's upper bound.
    plot.setRange(new Range(0, rangeLo));
    plot.setDialBackgroundPaint(new Color(-13091716));//Color.GRAY);
    plot.setUnits(m_goal.getName());
    plot.setDialShape(DialShape.CHORD);//CIRCLE);
    //plot.setDialBackgroundPaint(new GradientPaint(0, 0, m_goal.getColor(), 0, 1000, Color.black));
    plot.setNeedlePaint(Color.white);
    // NOTE(review): tick size 2000 on a 0..~100+ range effectively hides ticks — confirm intent.
    plot.setTickSize(2000);
    plot.setTickLabelFont(new Font("SansSerif", Font.BOLD, 12));
    plot.setTickLabelPaint(Color.white);
    plot.setInsets(new RectangleInsets(1.0, 2.0, 3.0, 4.0));
    chart = new JFreeChart(m_text, new Font("SansSerif", Font.BOLD, 15), plot, false);
    return chart;
}
From source file:de.codesourcery.planning.swing.DateAxis.java
/**
 * Computes the horizontal pixel span occupied by the given date range on this axis.
 * <p>
 * The requested range is first clamped to the axis window
 * {@code [startDate, startDate + duration]}; the clamped range is then mapped
 * to x-coordinates via the callback's bounding box and the axis scaling factor.
 * The returned box has y = 0 and height = 0 (callers only use the x extent).
 *
 * @param callback supplies the drawing area's bounding box
 * @param range    the date range to map (may extend outside the axis window)
 * @return bounding box whose x/width describe the clamped range's pixel span
 */
public BoundingBox getBoundingBoxFor(ITimelineCallback callback, DateRange range) {
    // Clamp the range start to the axis origin.
    final Date start;
    if (range.getStartDate().before(startDate)) {
        start = startDate;
    } else {
        start = range.getStartDate();
    }
    // Clamp the range end to the end of the axis window.
    final Date end;
    final Date endDate = duration.addTo(startDate);
    if (range.getEndDate().after(endDate)) {
        end = endDate;
    } else {
        end = range.getEndDate();
    }
    final Duration realDuration = new Duration(start, end);
    final BoundingBox box = callback.getBoundingBox();
    // BUG FIX: this division used to be by 1000.0f (float). Millisecond deltas
    // exceed float's 24-bit mantissa after ~4.6 hours, so offsets far from the
    // axis origin were rounded to the wrong second. Divide in double instead.
    final long startSeconds = (long) Math.floor((start.getTime() - this.startDate.getTime()) / 1000.0);
    final double scalingFactor = getXScalingFactor(box);
    final int startX = (int) (box.getX() + Math.floor(scalingFactor * startSeconds));
    final int endX = (int) (startX + Math.floor(scalingFactor * realDuration.toSeconds()));
    return new BoundingBox(startX, 0, endX, 0);
}
From source file:de.dfki.madm.anomalydetection.operator.statistical_based.RobustPCAOperator.java
/**
 * Computes a robust-PCA anomaly score for every example in the input set.
 * <p>
 * Pipeline: (1) PCA over the full set; (2) rank examples by Mahalanobis
 * distance (MD) in PC space and drop the top {@code 1 - normProb} fraction as
 * outliers; (3) re-run PCA on the remaining ("robust") examples; (4) score all
 * original examples against the robust model using the principal components
 * selected by the reduction parameters, normalized by a chi-squared quantile.
 * Adds an "outlier" attribute to the input set and delivers it.
 *
 * @throws OperatorException if the input set is invalid or no PC matches the thresholds
 */
@Override
public void doWork() throws OperatorException {
    // check whether all attributes are numerical
    ExampleSet exampleSet = exampleSetInput.getData(ExampleSet.class);
    Tools.onlyNonMissingValues(exampleSet, "PCA");
    Tools.onlyNumericalAttributes(exampleSet, "PCA");
    // Get normal probability.
    double normProb = getParameterAsDouble(PARAMETER_OUTLIER_PROBABILITY);
    // Number of instances to treat as outliers during the robust re-estimation.
    int olInst = exampleSet.size() - (int) Math.floor(exampleSet.size() * normProb);
    log("Ignoring " + olInst + " anomalyous instances for robustness.");
    // The robust estimate is based on removing top outliers first based on Mahalanobis distance (MD).
    // Since MD is the same as the outlier score when using all PCs, the PCA is done twice:
    // First with all examples, second with top-outliers removed (robust)
    // First PCA for outlier removal
    // create covariance matrix
    Matrix covarianceMatrix = CovarianceMatrix.getCovarianceMatrix(exampleSet);
    // EigenVector and EigenValues of the covariance matrix
    EigenvalueDecomposition eigenvalueDecomposition = covarianceMatrix.eig();
    // create and deliver results
    double[] eigenvalues = eigenvalueDecomposition.getRealEigenvalues();
    Matrix eigenvectorMatrix = eigenvalueDecomposition.getV();
    double[][] eigenvectors = eigenvectorMatrix.getArray();
    PCAModel model = new PCAModel(exampleSet, eigenvalues, eigenvectors);
    // Perform transformation
    ExampleSet res = model.apply((ExampleSet) exampleSet.clone());
    // Compute simple list with MDs and sort according to MD.
    // Each element is {mahalanobisDistance, exampleIndex}.
    List<double[]> l = new LinkedList<double[]>();
    double eIdx = 0;
    for (Example example : res) {
        double md = 0.0;
        int aNr = 0;
        for (Attribute attr : example.getAttributes()) {
            double pcscore = example.getValue(attr);
            // MD in PC space: sum of squared scores, each scaled by its eigenvalue.
            md += (pcscore * pcscore) / model.getEigenvalue(aNr);
            aNr++;
        }
        double[] x = { md, eIdx };
        l.add(x);
        eIdx++;
    }
    // Sort descending by MD so the largest outliers come first.
    Collections.sort(l, new Comparator<double[]>() {
        public int compare(double[] first, double[] second) {
            return Double.compare(second[0], first[0]);
        }
    });
    // Out of the list, create array with outlier-indexes and array (mapping) with good instances.
    Iterator<double[]> iter = l.iterator();
    int[] olMapping = new int[olInst];
    for (int i = 0; i < olInst; i++) {
        olMapping[i] = (int) ((double[]) iter.next())[1];
    }
    Arrays.sort(olMapping);
    int[] mapping = new int[exampleSet.size() - olInst];
    int olc = 0;
    int ctr = 0;
    for (int i = 0; i < exampleSet.size(); i++) {
        if (olc == olInst) {
            // Add last elements after last outlier
            mapping[ctr++] = i;
            continue;
        }
        if (olMapping[olc] != i) {
            mapping[ctr++] = i;
        } else {
            olc++;
        }
    }
    ExampleSet robustExampleSet = new MappedExampleSet(exampleSet, mapping); // creates a new example set without the top outliers.
    // ---
    // Second PCA (robust)
    covarianceMatrix = CovarianceMatrix.getCovarianceMatrix(robustExampleSet);
    eigenvalueDecomposition = covarianceMatrix.eig();
    // create and deliver results
    eigenvalues = eigenvalueDecomposition.getRealEigenvalues();
    eigenvectorMatrix = eigenvalueDecomposition.getV();
    eigenvectors = eigenvectorMatrix.getArray();
    // Apply on original set
    model = new PCAModel(exampleSet, eigenvalues, eigenvectors);
    // Perform transformation
    res = model.apply((ExampleSet) exampleSet.clone());
    // Sort eigenvalues (descending, to match the usual "top PC first" ordering).
    Arrays.sort(eigenvalues);
    ArrayUtils.reverse(eigenvalues);
    // if necessary reduce nbr of dimensions ...
    int reductionType = getParameterAsInt(PARAMETER_REDUCTION_TYPE);
    // pcList collects the (0-based) indexes of the principal components to score with.
    List<Integer> pcList = new ArrayList<Integer>();
    if (reductionType == PCS_ALL) {
        for (int i = 0; i < exampleSet.getAttributes().size(); i++) {
            pcList.add(i);
        }
    }
    if (reductionType == PCS_TOP || reductionType == PCS_BOTH) {
        //top
        switch (getParameterAsInt(PARAMETER_TOP_METHODS)) {
        case PCS_TOP_FIX:
            // Fixed number of leading components.
            for (int i = 0; i < getParameterAsInt(PARAMETER_NUMBER_OF_COMPONENTS_TOP); i++) {
                pcList.add(i);
            }
            break;
        case PCS_TOP_VAR:
            // Leading components until the cumulative variance threshold is reached.
            double var = getParameterAsDouble(PARAMETER_VARIANCE_THRESHOLD);
            boolean last = false;
            for (int i = 0; i < exampleSet.getAttributes().size(); i++) {
                if (model.getCumulativeVariance(i) < var) {
                    pcList.add(i);
                } else if (!last) { // we need to add another PC to meet the minimum requirement.
                    last = true;
                    pcList.add(i);
                }
            }
            break;
        }
    }
    if (reductionType == PCS_LOWER || reductionType == PCS_BOTH) {
        //lower
        switch (getParameterAsInt(PARAMETER_LOW_METHODS)) {
        case PCS_LOW_FIX:
            // Fixed number of trailing components.
            for (int i = exampleSet.getAttributes().size()
                    - getParameterAsInt(PARAMETER_NUMBER_OF_COMPONENTS_LOW); i < exampleSet.getAttributes()
                            .size(); i++) {
                pcList.add(i);
            }
            break;
        case PCS_LOW_VAL:
            // Trailing components whose eigenvalue falls below the threshold
            // (guarding against duplicates already added by the "top" pass).
            double val = getParameterAsDouble(PARAMETER_VALUE_THRESHOLD);
            for (int i = 0; i < eigenvalues.length; i++) {
                if (eigenvalues[i] <= val) {
                    if (pcList.size() == 0) {
                        pcList.add(i);
                    } else if (pcList.get(pcList.size() - 1).intValue() < i) {
                        pcList.add(i);
                    }
                }
            }
            break;
        }
    }
    int[] opcs = ArrayUtils.toPrimitive(pcList.toArray(new Integer[pcList.size()]));
    if (opcs.length == 0) {
        throw new UserError(this,
                "Parameters thresholds are selected such that they did not match any principal component. Lower variance or increase eigenvalue threshold.");
    }
    if (opcs.length == exampleSet.getAttributes().size()) {
        log("Using all PCs for score.");
    } else {
        log("Using following PCs for score: " + Arrays.toString(opcs));
    }
    // Normalize by Chi-Dist with d degrees of freedom
    double scoreNormalizer = 1.0;
    ChiSquaredDistributionImpl chi = new ChiSquaredDistributionImpl(opcs.length);
    try {
        scoreNormalizer = chi.inverseCumulativeProbability(normProb);
    } catch (MathException e) {
        // NOTE(review): failure is only logged and the normalizer stays 1.0 —
        // scores are then un-normalized; confirm this best-effort fallback is intended.
        System.err.println(e);
    }
    log("Normalizing score with chi cumulative propability: " + scoreNormalizer);
    // compute scores
    Attribute scoreAttr = AttributeFactory.createAttribute("outlier", Ontology.REAL);
    exampleSet.getExampleTable().addAttribute(scoreAttr);
    exampleSet.getAttributes().setOutlier(scoreAttr);
    for (int exNr = 0; exNr < exampleSet.size(); exNr++) {
        Example orig = exampleSet.getExample(exNr);
        Example pc = res.getExample(exNr);
        double oscore = 0.0;
        int aNr = 0;
        ctr = 0;
        for (Attribute attr : pc.getAttributes()) {
            if (ctr < opcs.length && opcs[ctr] != aNr) {
                // we skip this dimension
                aNr++;
                continue;
            }
            double pcscore = pc.getValue(attr);
            oscore += (pcscore * pcscore) / model.getEigenvalue(aNr);
            aNr++;
            ctr++;
        }
        orig.setValue(scoreAttr, oscore / scoreNormalizer);
    }
    exampleSetOutput.deliver(exampleSet);
}
From source file:naftoreiclag.villagefive.util.math.Vec2.java
/**
 * Floors both components of this vector in place.
 *
 * @return this vector, to allow call chaining
 */
public Vec2 floorLocal() {
    // In-place variant: rounds each coordinate down to the nearest whole number.
    y = Math.floor(y);
    x = Math.floor(x);
    return this;
}
From source file:edu.stanford.rsl.conrad.segmentation.HistogramFeatureExtractor.java
/**
 * Extracts histogram-based features for the patch centered at (x, y).
 * <p>
 * Builds a value histogram over a (2*patchSize+1)^2 neighborhood (clipped at
 * the grid borders), per channel for multi-channel grids, and derives
 * {@code HistoFeatures} features from each histogram via
 * {@code extractHistogramFeatures}. The last element of the returned array is
 * the label from {@code labelGrid} at (x, y).
 *
 * @param x column of the center pixel
 * @param y row of the center pixel
 * @return feature vector; layout is [channel0 features, channel1 features, ..., label]
 */
@Override
public double[] extractFeatureAtIndex(int x, int y) {
    double[] result;
    if (this.dataGrid instanceof MultiChannelGrid2D) {
        MultiChannelGrid2D multiChannelGrid = (MultiChannelGrid2D) this.dataGrid;
        // One feature group per channel, plus one trailing slot for the label.
        result = new double[(HistoFeatures * multiChannelGrid.getNumberOfChannels() + 1)];
        for (int c = 0; c < multiChannelGrid.getNumberOfChannels(); c++) {
            double[] histo = new double[numberOfBins + 1];
            for (int j = y - patchSize; j <= y + patchSize; j++) {
                if (j < 0)
                    continue;
                if (j >= this.dataGrid.getSize()[1])
                    continue;
                for (int i = x - patchSize; i <= x + patchSize; i++) {
                    if (i < 0)
                        continue;
                    if (i >= this.dataGrid.getSize()[0])
                        continue;
                    // NOTE(review): cellLength is loop-invariant and could be hoisted.
                    float cellLength = (max - min) / (numberOfBins);
                    // float value = dataGrid.getAtIndex(i, j);
                    double value = multiChannelGrid.getChannel(c).getAtIndex(i, j);
                    // Clamp out-of-range values into the first/last bin.
                    int coord = (int) Math.floor((value - min) / cellLength);
                    if (coord > numberOfBins) {
                        coord = numberOfBins;
                    }
                    if (coord < 0)
                        coord = 0;
                    histo[coord]++;
                }
            }
            // histo[numberOfBins] = labelGrid.getAtIndex(x, y);
            double[] singleVector = extractHistogramFeatures(histo);
            for (int k = 0; k < HistoFeatures; k++) {
                result[(c * HistoFeatures) + k] = singleVector[k];
            }
        }
    } else {
        // Single-channel path: same histogram over the one data grid.
        result = new double[(HistoFeatures + 1)];
        double[] histo = new double[numberOfBins + 1];
        for (int j = y - patchSize; j <= y + patchSize; j++) {
            if (j < 0)
                continue;
            if (j >= this.dataGrid.getSize()[1])
                continue;
            for (int i = x - patchSize; i <= x + patchSize; i++) {
                if (i < 0)
                    continue;
                if (i >= this.dataGrid.getSize()[0])
                    continue;
                float cellLength = (max - min) / (numberOfBins);
                float value = dataGrid.getAtIndex(i, j);
                int coord = (int) Math.floor((value - min) / cellLength);
                if (coord > numberOfBins) {
                    coord = numberOfBins;
                }
                if (coord < 0)
                    coord = 0;
                histo[coord]++;
            }
        }
        // NOTE(review): this reassignment discards the (HistoFeatures + 1)-sized
        // array allocated above. If extractHistogramFeatures returns exactly
        // HistoFeatures elements (as the multi-channel loop suggests), the label
        // write below overwrites the LAST FEATURE instead of appending — verify
        // the return length and, if so, copy into the pre-allocated array instead.
        result = extractHistogramFeatures(histo);
    }
    // Final slot carries the ground-truth label for this pixel.
    result[result.length - 1] = labelGrid.getAtIndex(x, y);
    return result;
}
From source file:P251.graphPanel.java
/** Bins and adds to the data series named name. if a data series with that name already exists, if no series with name exists a new data series is created. Binning is useful if/*from w w w . j a va2s . c om*/ there are many redundant points in the data. The data is binned between [bottom,top). Data out side of this range are discarded. Plots points at the center of bins that non-zero counts. @param x the x coordinate of all of the data @param data the x-coordinate data to be binned @param name name of the data series @param bottom the minimum value of the data to be binned, any data less than bottom is discarded @param top the maximum value of the data to be binned, any value equal to or greater than this value will be discarded @param binCount the number of bins to use, more bins gives finer resolution @see #addDataBinning( double [] data, double y, String name,double bottom, double top,int binCount) */ public void addDataBinning(double x, double[] data, String name, double bottom, double top, int binCount) { XYSeries tmp_series; try { tmp_series = dataset.getSeries(name); } catch (org.jfree.data.UnknownKeyException e) { tmp_series = new XYSeries(name); dataset.addSeries(tmp_series); } // add a catch here to deal with if the name is wrong // make histogram of values int hist[] = new int[binCount]; for (double val : data) { // watch the logic here, protecting bounds if (val >= bottom && val < top) { ++hist[(int) Math.floor(binCount * val)]; } } // pick out non-zero entries,add those entries to series for (int j = 0; j < binCount; ++j) { if (hist[j] > 0) { tmp_series.add(x, ((double) j) / binCount + (top - bottom) / (2 * binCount), false); } } tmp_series.fireSeriesChanged(); }
From source file:ark.data.annotation.DataSet.java
/** * /* w w w.j a v a 2s . co m*/ * @param distribution * @param random * @return a random partition of the dataset with whose sets have sizes given * by the distribution * */ public List<DataSet<D, L>> makePartition(double[] distribution, Random random) { List<Integer> dataPermutation = constructRandomDataPermutation(random); List<DataSet<D, L>> partition = new ArrayList<DataSet<D, L>>(distribution.length); int offset = 0; for (int i = 0; i < distribution.length; i++) { int partSize = (int) Math.floor(this.data.size() * distribution[i]); if (i == distribution.length - 1 && offset + partSize < this.data.size()) partSize = this.data.size() - offset; DataSet<D, L> part = new DataSet<D, L>(this.datumTools, this.labelMapping); for (int j = offset; j < offset + partSize; j++) { part.add(this.data.get(dataPermutation.get(j))); } offset += partSize; partition.add(part); } return partition; }
From source file:dk.dma.msinm.web.OsmStaticMap.java
/**
 * Renders the base OSM map by fetching and stitching the tiles that cover the
 * requested viewport.
 * <p>
 * Computes the tile range around ({@code centerX}, {@code centerY}) (in tile
 * units), the pixel offsets that align the fractional center with the image
 * center, then draws each fetched tile. Tiles that fail to load are logged and
 * skipped, leaving a blank square. Side effect: writes {@code ctx.offsetX} /
 * {@code ctx.offsetY}.
 *
 * @param ctx the map-rendering context (size, center, zoom, tile size)
 * @return the assembled map image
 */
public BufferedImage createBaseMap(MapImageCtx ctx) {
    BufferedImage image = new BufferedImage(ctx.width, ctx.height, BufferedImage.TYPE_INT_RGB);
    Graphics2D g2 = image.createGraphics();
    try {
        // BUG FIX: `ctx.width / ctx.tileSize` was int/int division, truncating the
        // viewport's tile span BEFORE halving (e.g. 500/256 -> 1 instead of 1.95),
        // which could clip edge tiles and leave blank strips. Divide in double.
        double tilesAcross = ctx.width / (double) ctx.tileSize;
        double tilesDown = ctx.height / (double) ctx.tileSize;
        int startX = (int) Math.floor(ctx.centerX - tilesAcross / 2.0);
        int startY = (int) Math.floor(ctx.centerY - tilesDown / 2.0);
        int endX = (int) Math.ceil(ctx.centerX + tilesAcross / 2.0);
        int endY = (int) Math.ceil(ctx.centerY + tilesDown / 2.0);
        // Pixel offset of the fractional part of the center, shifted so the
        // center tile lands in the middle of the image.
        ctx.offsetX = -Math.floor((ctx.centerX - Math.floor(ctx.centerX)) * ctx.tileSize);
        ctx.offsetY = -Math.floor((ctx.centerY - Math.floor(ctx.centerY)) * ctx.tileSize);
        ctx.offsetX += Math.floor(ctx.width / 2.0);
        ctx.offsetY += Math.floor(ctx.height / 2.0);
        ctx.offsetX += Math.floor(startX - Math.floor(ctx.centerX)) * ctx.tileSize;
        ctx.offsetY += Math.floor(startY - Math.floor(ctx.centerY)) * ctx.tileSize;
        for (int x = startX; x <= endX; x++) {
            for (int y = startY; y <= endY; y++) {
                String url = String.format(OSM_URL, ctx.zoom, x, y);
                log.info("Fetching " + url);
                try {
                    BufferedImage tileImage = fetchTile(url);
                    double destX = (x - startX) * ctx.tileSize + ctx.offsetX;
                    double destY = (y - startY) * ctx.tileSize + ctx.offsetY;
                    g2.drawImage(tileImage, (int) destX, (int) destY, ctx.tileSize, ctx.tileSize, null);
                    image.flush();
                } catch (Exception e) {
                    // Best-effort: a failed tile leaves a blank square rather than aborting the map.
                    log.warn("Failed loading image " + url);
                }
            }
        }
    } finally {
        // FIX: release the graphics context (was never disposed).
        g2.dispose();
    }
    return image;
}