List of usage examples for java.lang.Double.NEGATIVE_INFINITY
public static final double NEGATIVE_INFINITY — a constant holding the negative infinity of type double.
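Most of the examples below use Double.NEGATIVE_INFINITY as the seed value for a running maximum, so that any finite value encountered replaces it. A minimal standalone sketch of that idiom (illustrative only, not taken from the projects below):

double max = Double.NEGATIVE_INFINITY;
for (double value : new double[] { 3.2, -7.5, 0.0 }) {
    if (value > max) {
        max = value;   // any finite value compares greater than negative infinity
    }
}
// max == 3.2; if the array were empty, max would still be NEGATIVE_INFINITY,
// which callers typically detect with Double.isInfinite(max)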
From source file:com.rapidminer.gui.plotter.charts.Abstract2DChartPlotter.java
private void prepareNominalData() {
    this.nominal = true;
    dataSet = new DefaultXYDataset();

    if (axis[X_AXIS] >= 0 && axis[Y_AXIS] >= 0) {
        Map<String, List<double[]>> dataCollection = new LinkedHashMap<String, List<double[]>>();
        Map<String, List<String>> idCollection = new LinkedHashMap<String, List<String>>();

        synchronized (dataTable) {
            if (colorColumn >= 0) {
                for (int v = 0; v < dataTable.getNumberOfValues(colorColumn); v++) {
                    dataCollection.put(dataTable.mapIndex(colorColumn, v), new LinkedList<double[]>());
                    idCollection.put(dataTable.mapIndex(colorColumn, v), new LinkedList<String>());
                }
            }

            Iterator<DataTableRow> i = this.dataTable.iterator();
            int index = 0;
            while (i.hasNext()) {
                DataTableRow row = i.next();
                double xValue = row.getValue(axis[X_AXIS]);
                double yValue = row.getValue(axis[Y_AXIS]);
                double colorValue = Double.NaN;
                if (colorColumn >= 0) {
                    colorValue = row.getValue(colorColumn);
                }

                // TM: removed check
                // if (!Double.isNaN(xValue) && !Double.isNaN(yValue)) {
                addPoint(dataCollection, idCollection, row.getId(), xValue, yValue, colorValue);
                // }

                index++;
            }
        }

        double minX = Double.POSITIVE_INFINITY;
        double maxX = Double.NEGATIVE_INFINITY;
        double minY = Double.POSITIVE_INFINITY;
        double maxY = Double.NEGATIVE_INFINITY;

        Iterator<Map.Entry<String, List<double[]>>> i = dataCollection.entrySet().iterator();
        while (i.hasNext()) {
            Map.Entry<String, List<double[]>> entry = i.next();
            List<double[]> dataList = entry.getValue();
            Iterator<double[]> j = dataList.iterator();
            while (j.hasNext()) {
                double[] current = j.next();
                minX = MathFunctions.robustMin(minX, current[X_AXIS]);
                maxX = MathFunctions.robustMax(maxX, current[X_AXIS]);
                minY = MathFunctions.robustMin(minY, current[Y_AXIS]);
                maxY = MathFunctions.robustMax(maxY, current[Y_AXIS]);
            }
        }

        Random jitterRandom = new Random(2001);
        double oldXRange = maxX - minX;
        double oldYRange = maxY - minY;

        if (Double.isInfinite(oldXRange) || Double.isNaN(oldXRange)) {
            oldXRange = 0;
        }
        if (Double.isInfinite(oldYRange) || Double.isNaN(oldYRange)) {
            oldYRange = 0;
        }

        i = dataCollection.entrySet().iterator();
        while (i.hasNext()) {
            Map.Entry<String, List<double[]>> entry = i.next();
            String seriesName = entry.getKey();
            List<double[]> dataList = entry.getValue();
            double[][] data = new double[2][dataList.size()];

            int listCounter = 0;
            Iterator<double[]> j = dataList.iterator();
            while (j.hasNext()) {
                double[] current = j.next();
                data[X_AXIS][listCounter] = current[X_AXIS];
                data[Y_AXIS][listCounter] = current[Y_AXIS];

                if (this.jitterAmount > 0) {
                    double pertX = oldXRange * (jitterAmount / 200.0d) * jitterRandom.nextGaussian();
                    double pertY = oldYRange * (jitterAmount / 200.0d) * jitterRandom.nextGaussian();
                    data[X_AXIS][listCounter] += pertX;
                    data[Y_AXIS][listCounter] += pertY;
                }

                listCounter++;
            }
            ((DefaultXYDataset) dataSet).addSeries(seriesName, data);
        }

        int seriesCounter = 0;
        Iterator<List<String>> v = idCollection.values().iterator();
        while (v.hasNext()) {
            List<String> idList = v.next();
            int itemCounter = 0;
            Iterator<String> j = idList.iterator();
            while (j.hasNext()) {
                idMap.put(new SeriesAndItem(seriesCounter, itemCounter++), j.next());
            }
            seriesCounter++;
        }
    }
}
From source file:org.apache.axis2.databinding.utils.ConverterUtil.java
public static double convertToDouble(String s) {
    if ((s == null) || s.equals("")) {
        return Double.NaN;
    }
    if (s.startsWith("+")) {
        s = s.substring(1);
    }
    if (POSITIVE_INFINITY.equals(s)) {
        return Double.POSITIVE_INFINITY;
    } else if (NEGATIVE_INFINITY.equals(s)) {
        return Double.NEGATIVE_INFINITY;
    }
    return Double.parseDouble(s);
}
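A brief usage sketch for the converter above. It assumes the string constants POSITIVE_INFINITY and NEGATIVE_INFINITY declared in ConverterUtil hold the XML Schema lexical forms "INF" and "-INF"; the input values are made up for illustration:

double a = ConverterUtil.convertToDouble("-INF");  // Double.NEGATIVE_INFINITY
double b = ConverterUtil.convertToDouble("+2.5");  // 2.5, the leading '+' is stripped first
double c = ConverterUtil.convertToDouble("");      // Double.NaN for null or empty input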
From source file:darks.learning.word2vec.Word2Vec.java
/**
 * Calculate the similarity between two word lists with weights, using a statistical approach.
 *
 * @param sources Source words list
 * @param sourceWeights Source words weight
 * @param targets Target words list
 * @param targetWeights Target words weight
 * @return Similar score
 */
private double distanceStatistic(Collection<String> sources, Map<String, Double> sourceWeights,
        Collection<String> targets, Map<String, Double> targetWeights) {
    double m = sources.size();
    double n = targets.size();
    DoubleMatrix similarMatrix = DoubleMatrix.zeros(sources.size(), targets.size());

    double sum1 = 0;
    int i = 0;
    int j = 0;
    for (String w1 : sources) {
        j = 0;
        double max = Double.NEGATIVE_INFINITY;
        for (String w2 : targets) {
            double s = similarMatrix.get(i, j);
            if (Double.compare(s, 0) == 0) {
                s = distance(w1, w2);
                similarMatrix.put(i, j, s);
            }
            max = Math.max(max, s);
            j++;
        }
        Double weight = sourceWeights == null ? null : sourceWeights.get(w1);
        if (weight != null) {
            m += weight - 1;
            max *= weight;
        }
        sum1 += max;
        i++;
    }
    double v1 = sum1 / m;

    double sum2 = 0;
    i = 0;
    j = 0;
    for (String w2 : targets) {
        i = 0;
        double max = Double.NEGATIVE_INFINITY;
        for (String w1 : sources) {
            double s = similarMatrix.get(i, j);
            if (Double.compare(s, 0) == 0) {
                s = distance(w1, w2);
                similarMatrix.put(i, j, s);
            }
            max = Math.max(max, s);
            i++;
        }
        Double weight = targetWeights == null ? null : targetWeights.get(w2);
        if (weight != null) {
            n += weight - 1;
            max *= weight;
        }
        sum2 += max;
        j++;
    }
    double v2 = sum2 / (double) n;
    return (v1 + v2) / 2.D;
}
From source file:io.github.karols.hocr4j.Page.java
@Nullable
private Line findLineMaximizingImpl(@Nonnull Function<Line, Double> scoreFunction, @Nullable Bounded header,
        boolean slightlyBelow) {
    Line result = null;
    double maxScore = Double.NEGATIVE_INFINITY;
    for (Area a : areas) {
        for (Paragraph p : a) {
            for (Line l : p) {
                Double thisScore = scoreFunction.apply(l);
                if (thisScore == null || thisScore < 0) {
                    continue;
                }
                if (header != null) {
                    thisScore += 0.1;
                    double multiplier = header.getBounds().distance(l.bounds) + bounds.getHeight() / 10.0;
                    multiplier = Math.sqrt(multiplier);
                    multiplier = Math.sqrt(multiplier);
                    thisScore /= multiplier;
                    if (slightlyBelow && header.getBounds().isBelow(l.bounds)) {
                        thisScore /= 2;
                    }
                }
                if (thisScore > maxScore) {
                    result = l;
                    maxScore = thisScore;
                }
            }
        }
    }
    return result;
}
From source file:org.jax.pubarray.server.restful.GraphingResource.java
/**
 * Create a graph for the given configuration
 * @param graphConfiguration
 *          the key
 * @return
 *          the graph
 */
@SuppressWarnings("unchecked")
private JFreeChart createProbeIntensityGraph(ProbeIntensityGraphConfiguration graphConfiguration) {
    try {
        Connection connection = this.getConnection();

        String[] probeIds = graphConfiguration.getProbeIds();
        double[][] probeDataRows = new double[probeIds.length][];
        for (int i = 0; i < probeIds.length; i++) {
            probeDataRows[i] = this.persistenceManager.getDataRowForProbeID(connection, probeIds[i]);
        }

        TableColumnMetadata orderBy = graphConfiguration.getOrderProbesBy();
        final List<Comparable> orderByItems;
        if (orderBy != null) {
            LOG.info("We are ordering by: " + orderBy);
            orderByItems = this.persistenceManager.getDesignDataColumn(connection, orderBy);
        } else {
            orderByItems = null;
        }

        TableMetadata metadata = this.persistenceManager.getDataTableMetadata(connection);

        final CategoryDataset categoryDataset;
        if (graphConfiguration.getGroupReplicates()) {
            switch (graphConfiguration.getGroupedGraphType()) {
            case BOX_PLOT: {
                categoryDataset = new DefaultBoxAndWhiskerCategoryDataset();
            }
                break;

            case SCATTER_PLOT: {
                categoryDataset = new DefaultMultiValueCategoryDataset();
            }
                break;

            default:
                throw new IllegalArgumentException(
                        "don't know how to deal with plot type: " + graphConfiguration.getGroupedGraphType());
            }
        } else {
            categoryDataset = new DefaultCategoryDataset();
        }

        // iterate through all of the selected probesets
        List<QualifiedColumnMetadata> termsOfInterest = Arrays.asList(graphConfiguration.getTermsOfInterest());
        for (int rowIndex = 0; rowIndex < probeDataRows.length; rowIndex++) {
            double[] currRow = probeDataRows[rowIndex];
            assert currRow.length == metadata.getColumnMetadata().length - 1;

            // should we log2 transform the data?
            if (graphConfiguration.getLog2TransformData()) {
                for (int i = 0; i < currRow.length; i++) {
                    currRow[i] = Math.log(currRow[i]) / LOG2_FACTOR;
                }
            }

            // iterate through the columns in the data table (each column
            // represents a different array)
            List<ComparableContainer<Double, Comparable>> rowElemList = new ArrayList<ComparableContainer<Double, Comparable>>();
            for (int colIndex = 0; colIndex < currRow.length; colIndex++) {
                // we use +1 indexing here because we want to skip over
                // the probesetId metadata and get right to the
                // array intensity metadata
                TableColumnMetadata colMeta = metadata.getColumnMetadata()[colIndex + 1];

                // check to see if we need to skip this data
                if (!graphConfiguration.getIncludeDataFromAllArrays()) {
                    // if it's one of the "terms of interest" we will keep
                    // it. we're using a brute force search here
                    boolean keepThisOne = false;
                    for (QualifiedColumnMetadata termOfInterest : termsOfInterest) {
                        if (termOfInterest.getTableName().equals(metadata.getTableName())
                                && termOfInterest.getName().equals(colMeta.getName())) {
                            keepThisOne = true;
                            break;
                        }
                    }

                    if (!keepThisOne) {
                        continue;
                    }
                }

                final String columnName = colMeta.getName();
                final Comparable columnKey;
                if (orderByItems == null) {
                    columnKey = columnName;
                } else {
                    // the ordering will be done on the selected
                    // "order by" design criteria
                    columnKey = new ComparableContainer<String, Comparable>(columnName,
                            orderByItems.get(colIndex), !graphConfiguration.getGroupReplicates());

                    // TODO remove me!!!!
                    System.out.println("For array " + columnName + " the order by " + "value is: "
                            + orderByItems.get(colIndex));
                    // end of remove me
                }

                rowElemList
                        .add(new ComparableContainer<Double, Comparable>(currRow[colIndex], columnKey, false));
            }

            Collections.sort(rowElemList);

            if (graphConfiguration.getGroupReplicates()) {
                switch (graphConfiguration.getGroupedGraphType()) {
                case BOX_PLOT: {
                    DefaultBoxAndWhiskerCategoryDataset dataset = (DefaultBoxAndWhiskerCategoryDataset) categoryDataset;
                    for (int i = 0; i < rowElemList.size(); i++) {
                        List<Double> groupList = new ArrayList<Double>();
                        groupList.add(rowElemList.get(i).getElement());
                        Comparable colKey = rowElemList.get(i).getComparable();

                        i++;
                        for (; i < rowElemList.size() && rowElemList.get(i).getComparable().equals(colKey); i++) {
                            groupList.add(rowElemList.get(i).getElement());
                        }
                        i--;

                        dataset.add(groupList, probeIds[rowIndex], colKey);
                    }
                }
                    break;

                case SCATTER_PLOT: {
                    DefaultMultiValueCategoryDataset dataset = (DefaultMultiValueCategoryDataset) categoryDataset;
                    for (int i = 0; i < rowElemList.size(); i++) {
                        List<Double> groupList = new ArrayList<Double>();
                        groupList.add(rowElemList.get(i).getElement());
                        Comparable colKey = rowElemList.get(i).getComparable();

                        i++;
                        for (; i < rowElemList.size() && rowElemList.get(i).getComparable().equals(colKey); i++) {
                            groupList.add(rowElemList.get(i).getElement());
                        }
                        i--;

                        dataset.add(groupList, probeIds[rowIndex], colKey);
                    }
                }
                    break;
                }
            } else {
                DefaultCategoryDataset dataset = (DefaultCategoryDataset) categoryDataset;
                for (ComparableContainer<Double, Comparable> rowElem : rowElemList) {
                    dataset.addValue(rowElem.getElement(), probeIds[rowIndex], rowElem.getComparable());
                }
            }
        }

        CategoryAxis xAxis = new CategoryAxis();
        if (graphConfiguration.getGroupReplicates() && orderBy != null) {
            xAxis.setLabel(orderBy.getName());
        } else {
            if (orderBy != null) {
                xAxis.setLabel("Arrays (Ordered By " + orderBy.getName() + ")");
            } else {
                xAxis.setLabel("Arrays");
            }
        }
        xAxis.setCategoryLabelPositions(CategoryLabelPositions.UP_90);

        final NumberAxis yAxis;
        if (graphConfiguration.getLog2TransformData()) {
            yAxis = new NumberAxis("log2(Intensity)");
        } else {
            yAxis = new NumberAxis("Intensity");
        }
        yAxis.setAutoRange(true);
        yAxis.setAutoRangeIncludesZero(false);

        // TODO: this is a HACK to deal with auto-range bug in JFreeChart
        // which occurs when doing the grouped scatter plot
        if (graphConfiguration.getGroupReplicates()
                && graphConfiguration.getGroupedGraphType() == GroupedGraphType.SCATTER_PLOT) {
            double minVal = Double.POSITIVE_INFINITY;
            double maxVal = Double.NEGATIVE_INFINITY;
            for (double[] dataRow : probeDataRows) {
                for (double datum : dataRow) {
                    if (datum > maxVal) {
                        maxVal = datum;
                    }
                    if (datum < minVal) {
                        minVal = datum;
                    }
                }

                if (minVal != Double.POSITIVE_INFINITY && maxVal != Double.NEGATIVE_INFINITY
                        && minVal != maxVal) {
                    yAxis.setAutoRange(false);
                    double margin = (maxVal - minVal) * 0.02;
                    Range yRange = new Range(minVal - margin, maxVal + margin);
                    yAxis.setRange(yRange);
                }
            }
        }
        // END HACK

        final CategoryItemRenderer renderer;
        if (graphConfiguration.getGroupReplicates()) {
            switch (graphConfiguration.getGroupedGraphType()) {
            case BOX_PLOT: {
                BoxAndWhiskerRenderer boxRenderer = new BoxAndWhiskerRenderer();
                boxRenderer.setMaximumBarWidth(0.03);
                renderer = boxRenderer;
            }
                break;

            case SCATTER_PLOT: {
                renderer = new ScatterRenderer();
            }
                break;

            default:
                throw new IllegalArgumentException(
                        "don't know how to deal with plot type: " + graphConfiguration.getGroupedGraphType());
            }
        } else {
            renderer = new LineAndShapeRenderer();
        }

        Plot plot = new CategoryPlot(categoryDataset, xAxis, yAxis, renderer);

        return new JFreeChart("Intensity Values", plot);
    } catch (SQLException ex) {
        LOG.log(Level.SEVERE, "failed to generate image", ex);
        return null;
    }
}
From source file:com.bigml.histogram.Histogram.java
/**
 * Returns a <code>SumResult</code> object which contains the
 * density estimate at the point <code>p</code> along
 * with the density for the targets.
 *
 * @param p the density estimate point
 */
@SuppressWarnings("unchecked")
public SumResult<T> extendedDensity(double p) {
    T emptyTarget = (T) _bins.first().getTarget().init();

    double countDensity;
    T targetDensity;

    Bin<T> exact = _bins.get(p);
    if (p < _minimum || p > _maximum) {
        countDensity = 0;
        targetDensity = (T) emptyTarget.clone();
    } else if (p == _minimum && p == _maximum) {
        countDensity = Double.POSITIVE_INFINITY;
        targetDensity = emptyTarget;
    } else if (exact != null) {
        double higher = Math.nextAfter(p, Double.POSITIVE_INFINITY);
        double lower = Math.nextAfter(p, Double.NEGATIVE_INFINITY);

        SumResult<T> lowerResult = extendedDensity(lower);
        SumResult<T> higherResult = extendedDensity(higher);
        countDensity = (lowerResult.getCount() + higherResult.getCount()) / 2;
        targetDensity = (T) lowerResult.getTargetSum().clone().sum(higherResult.getTargetSum()).mult(0.5);
    } else {
        Bin<T> lowerBin = _bins.lower(p);
        if (lowerBin == null) {
            lowerBin = new Bin<T>(_minimum, 0, (T) emptyTarget.clone());
        }

        Bin<T> higherBin = _bins.higher(p);
        if (higherBin == null) {
            higherBin = new Bin<T>(_maximum, 0, (T) emptyTarget.clone());
        }

        double bDiff = p - lowerBin.getMean();
        double pDiff = higherBin.getMean() - lowerBin.getMean();
        double bpRatio = bDiff / pDiff;

        NumericTarget countTarget = (NumericTarget) computeDensity(bpRatio, lowerBin.getMean(),
                higherBin.getMean(), new NumericTarget(lowerBin.getCount()),
                new NumericTarget(higherBin.getCount()));
        countDensity = countTarget.getSum();

        targetDensity = (T) computeDensity(bpRatio, lowerBin.getMean(), higherBin.getMean(),
                lowerBin.getTarget(), higherBin.getTarget());
    }

    return new SumResult<T>(countDensity, targetDensity);
}
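In the histogram above, Double.NEGATIVE_INFINITY only serves as the direction argument to Math.nextAfter, which steps to the adjacent representable double just below p. A standalone illustration of that behavior (values chosen arbitrarily):

double p = 1.0;
double justBelow = Math.nextAfter(p, Double.NEGATIVE_INFINITY); // the largest double less than 1.0
double justAbove = Math.nextAfter(p, Double.POSITIVE_INFINITY); // the smallest double greater than 1.0
// the histogram evaluates the density at both neighbors and averages them,
// which sidesteps the discontinuity at an exact bin mean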
From source file:com.cognitect.transit.TransitMPTest.java
public void testWriteReadSpecialNumbers() throws Exception {
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    Writer w = TransitFactory.writer(TransitFactory.Format.MSGPACK, out);
    w.write(Double.NaN);
    w.write(Float.NaN);
    w.write(Double.POSITIVE_INFINITY);
    w.write(Float.POSITIVE_INFINITY);
    w.write(Double.NEGATIVE_INFINITY);
    w.write(Float.NEGATIVE_INFINITY);
    ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray());
    Reader r = TransitFactory.reader(TransitFactory.Format.MSGPACK, in);
    assert ((Double) r.read()).isNaN();
    assert ((Double) r.read()).isNaN();
    assertEquals(Double.POSITIVE_INFINITY, (Double) r.read());
    assertEquals(Double.POSITIVE_INFINITY, (Double) r.read());
    assertEquals(Double.NEGATIVE_INFINITY, (Double) r.read());
    assertEquals(Double.NEGATIVE_INFINITY, (Double) r.read());
}
From source file:afest.datastructures.tree.decision.erts.grower.AERTGrower.java
/**
 * Create a split on the given attribute by choosing a uniformly random threshold in [min, max).
 * @param <T> Type of ITrainingPoints used by the Extra Trees.
 * @param set set containing the points in which we choose the threshold.
 * @param attribute attribute to pick the threshold for.
 * @return a split on the given attribute by choosing a uniformly random threshold in [min, max).
 */
private <T extends ITrainingPoint<R, O>> ERTSplit<R> createSplit(Collection<T> set, R attribute) {
    double min = Double.POSITIVE_INFINITY;
    double max = Double.NEGATIVE_INFINITY;
    for (T aT : set) {
        double value = aT.getValue(attribute);
        if (value > max) {
            max = value;
        }
        if (value < min) {
            min = value;
        }
    }
    if (Double.isInfinite(max)) {
        max = Double.MAX_VALUE;
    }
    if (Double.isInfinite(min)) {
        min = -Double.MAX_VALUE;
    }
    max = max - Double.MIN_VALUE;
    min = min + Double.MIN_VALUE;
    double threshold = fRandom.nextDouble() * (max - min) + min;
    ERTSplit<R> split = new ERTSplit<R>(attribute, threshold);
    return split;
}
From source file:hu.ppke.itk.nlpg.purepos.decoder.AbstractDecoder.java
private Map<NGram<Integer>, Map<Integer, Pair<Double, Double>>> getNextForSeenToken(
        final Set<NGram<Integer>> prevTagsSet, IProbabilityModel<Integer, String> wordProbModel, String wordForm,
        boolean isSpec, Collection<Integer> tags, Collection<Integer> anals) {
    Collection<Integer> tagset = filterTagsWithMorphology(tags, anals, wordProbModel.getContextMapper());

    Map<NGram<Integer>, Map<Integer, Pair<Double, Double>>> ret = new HashMap<NGram<Integer>, Map<Integer, Pair<Double, Double>>>();
    for (NGram<Integer> prevTags : prevTagsSet) {
        Map<Integer, Pair<Double, Double>> tagProbs = new HashMap<Integer, Pair<Double, Double>>();
        for (Integer tag : tagset) {
            Double tagProb = model.getTagTransitionModel().getLogProb(prevTags.toList(), tag);
            List<Integer> actTags = new ArrayList<Integer>(prevTags.toList());
            actTags.add(tag);
            Double emissionProb = wordProbModel.getLogProb(actTags, wordForm);
            if (tagProb == Double.NEGATIVE_INFINITY)
                tagProb = UNKOWN_TAG_TRANSITION;
            if (emissionProb == Double.NEGATIVE_INFINITY)
                emissionProb = UNKNOWN_TAG_WEIGHT;
            tagProbs.put(tag, new ImmutablePair<Double, Double>(tagProb, emissionProb));
        }
        ret.put(prevTags, tagProbs);
    }
    return ret;
}
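The decoder above treats Double.NEGATIVE_INFINITY as the signal that a log-probability corresponds to an unseen event and swaps in a finite fallback weight. A minimal standalone illustration of why that sentinel appears (not project code; the fallback value is made up):

double logProb = Math.log(0.0);                // -Infinity: the event was never observed
if (logProb == Double.NEGATIVE_INFINITY) {
    logProb = -99.0;                           // hypothetical finite floor, so later score sums stay comparable
}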
From source file:com.rapidminer.gui.graphs.TransitionGraphCreator.java
private void updateGraph() {
    // remove old edges if available
    Iterator<String> e = edgeLabelMap.keySet().iterator();
    while (e.hasNext()) {
        graph.removeEdge(e.next());
    }
    edgeLabelMap.clear();
    edgeStrengthMap.clear();

    // remove old vertices if available
    Iterator<String> v = vertexLabelMap.keySet().iterator();
    while (v.hasNext()) {
        graph.removeVertex(v.next());
    }
    vertexLabelMap.clear();

    String sourceFilterName = null;
    if (sourceFilter.getSelectedIndex() > 0) {
        sourceFilterName = ((SourceId) sourceFilter.getSelectedItem()).getId();
    }

    List<SortableEdge> sortableEdges = new LinkedList<SortableEdge>();
    if (sourceFilterName == null) {
        for (Example example : exampleSet) {
            String source = example.getValueAsString(sourceAttribute);
            String target = example.getValueAsString(targetAttribute);

            double strength = 1.0d;
            if (strengthAttribute != null) {
                strength = example.getValue(strengthAttribute);
            }

            String type = null;
            if (typeAttribute != null) {
                type = example.getValueAsString(typeAttribute);
            }

            String edgeName = null;
            if (type != null) {
                edgeName = type;
            } else {
                edgeName = strength + "";
            }

            sortableEdges
                    .add(new SortableEdge(source, target, edgeName, strength, SortableEdge.DIRECTION_INCREASE));
        }
    } else {
        List<String> sources = new LinkedList<String>();
        sources.add(sourceFilterName);
        int hop = 1;
        int maxHops = (Integer) numberOfHops.getValue();
        do {
            List<String> newSources = new LinkedList<String>();
            for (String currentSourceFilterName : sources) {
                for (Example example : exampleSet) {
                    String source = example.getValueAsString(sourceAttribute);

                    if (currentSourceFilterName != null) {
                        if (!currentSourceFilterName.equals(source)) {
                            continue;
                        }
                    }

                    String target = example.getValueAsString(targetAttribute);

                    double strength = 1.0d;
                    if (strengthAttribute != null) {
                        strength = example.getValue(strengthAttribute);
                    }

                    String type = null;
                    if (typeAttribute != null) {
                        type = example.getValueAsString(typeAttribute);
                    }

                    String edgeName = null;
                    if (type != null) {
                        edgeName = type;
                    } else {
                        edgeName = strength + "";
                    }

                    sortableEdges.add(new SortableEdge(source, target, edgeName, strength,
                            SortableEdge.DIRECTION_INCREASE));

                    newSources.add(target);
                }
            }
            sources.clear();

            hop++;
            if (hop > maxHops) {
                sources = null;
            } else {
                sources = newSources;
            }
        } while (sources != null);
    }

    Collections.sort(sortableEdges);

    // determine used vertices
    Set<String> allVertices = new HashSet<String>();
    int numberOfEdges = edgeSlider.getValue();
    int counter = 0;
    for (SortableEdge sortableEdge : sortableEdges) {
        if (counter > numberOfEdges) {
            break;
        }

        allVertices.add(sortableEdge.getFirstVertex());
        allVertices.add(sortableEdge.getSecondVertex());

        counter++;
    }

    // add all used vertices to graph
    for (String vertex : allVertices) {
        graph.addVertex(vertex);

        String description = getNodeDescription(vertex);
        if (description == null) {
            vertexLabelMap.put(vertex, vertex);
        } else {
            vertexLabelMap.put(vertex, description);
        }
    }

    counter = 0;
    double minStrength = Double.POSITIVE_INFINITY;
    double maxStrength = Double.NEGATIVE_INFINITY;
    Map<String, Double> strengthMap = new HashMap<String, Double>();
    for (SortableEdge sortableEdge : sortableEdges) {
        if (counter > numberOfEdges) {
            break;
        }

        String idString = edgeFactory.create();
        graph.addEdge(idString, sortableEdge.getFirstVertex(), sortableEdge.getSecondVertex(), EdgeType.DIRECTED);
        edgeLabelMap.put(idString, Tools.formatIntegerIfPossible(sortableEdge.getEdgeValue()));

        double strength = sortableEdge.getEdgeValue();

        minStrength = Math.min(minStrength, strength);
        maxStrength = Math.max(maxStrength, strength);

        strengthMap.put(idString, strength);

        counter++;
    }

    for (Entry<String, Double> entry : strengthMap.entrySet()) {
        edgeStrengthMap.put(entry.getKey(),
                (strengthMap.get(entry.getKey()) - minStrength) / (maxStrength - minStrength));
    }
}