List of usage examples for java.lang.Double.NEGATIVE_INFINITY
public static final double NEGATIVE_INFINITY
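Double.NEGATIVE_INFINITY holds the negative infinity of type double (the same value as -1.0 / 0.0). In the examples below it appears mainly as an "unbounded" sentinel value or as the seed of a running maximum. A minimal standalone sketch of its basic behaviour (not taken from any of the source files below):

public class NegativeInfinityDemo {
    public static void main(String[] args) {
        double negInf = Double.NEGATIVE_INFINITY;                // same value as -1.0 / 0.0
        System.out.println(negInf < -Double.MAX_VALUE);          // true: smaller than every finite double
        System.out.println(negInf + 1.0e308);                    // -Infinity: adding a finite value does not change it
        System.out.println(negInf * -1.0);                       // Infinity
        System.out.println(1.0 / negInf);                        // -0.0
        System.out.println(Double.isInfinite(negInf));           // true
        System.out.println(negInf == Double.NEGATIVE_INFINITY);  // true: unlike NaN, it is equal to itself
        System.out.println(Math.max(negInf, -42.0));             // -42.0: why it works as a running-maximum seed
    }
}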
From source file:com.joptimizer.optimizers.LPStandardConverterTest.java
/**
 * Standardization of a problem of the form:
 * min(c) s.t.
 *   G.x < h
 *   A.x = b
 *   lb <= x <= ub
 */
public void testCGhAbLbUb1() throws Exception {
    log.debug("testCGhAbLbUb1");
    String problemId = "1";

    double[] c = Utils.loadDoubleArrayFromFile(
            "lp" + File.separator + "standardization" + File.separator + "c" + problemId + ".txt");
    double[][] G = Utils.loadDoubleMatrixFromFile(
            "lp" + File.separator + "standardization" + File.separator + "G" + problemId + ".csv", ",".charAt(0));
    double[] h = Utils.loadDoubleArrayFromFile(
            "lp" + File.separator + "standardization" + File.separator + "h" + problemId + ".txt");
    double[][] A = Utils.loadDoubleMatrixFromFile(
            "lp" + File.separator + "standardization" + File.separator + "A" + problemId + ".csv", ",".charAt(0));
    double[] b = Utils.loadDoubleArrayFromFile(
            "lp" + File.separator + "standardization" + File.separator + "b" + problemId + ".txt");
    double[] lb = Utils.loadDoubleArrayFromFile(
            "lp" + File.separator + "standardization" + File.separator + "lb" + problemId + ".txt");
    double[] ub = Utils.loadDoubleArrayFromFile(
            "lp" + File.separator + "standardization" + File.separator + "ub" + problemId + ".txt");
    double[] expectedSol = Utils.loadDoubleArrayFromFile(
            "lp" + File.separator + "standardization" + File.separator + "sol" + problemId + ".txt");
    double expectedValue = Utils.loadDoubleArrayFromFile(
            "lp" + File.separator + "standardization" + File.separator + "value" + problemId + ".txt")[0];
    //double expectedTolerance = Utils.loadDoubleArrayFromFile("lp"+File.separator+"standardization"+File.separator+"tolerance"+problemId+".txt")[0];
    double expectedTolerance = MatrixUtils.createRealMatrix(A)
            .operate(MatrixUtils.createRealVector(expectedSol))
            .subtract(MatrixUtils.createRealVector(b)).getNorm();

    //standard form conversion
    double unboundedLBValue = Double.NEGATIVE_INFINITY;//in the file the unbounded lb are -Infinity values (not the default value)
    double unboundedUBValue = Double.POSITIVE_INFINITY;//in the file the unbounded ub are +Infinity values
    LPStandardConverter lpConverter = new LPStandardConverter(unboundedLBValue, unboundedUBValue);
    lpConverter.toStandardForm(c, G, h, A, b, lb, ub);

    int n = lpConverter.getStandardN();
    int s = lpConverter.getStandardS();
    c = lpConverter.getStandardC().toArray();
    A = lpConverter.getStandardA().toArray();
    b = lpConverter.getStandardB().toArray();
    lb = lpConverter.getStandardLB().toArray();
    ub = lpConverter.getStandardUB().toArray();
    log.debug("n : " + n);
    log.debug("s : " + s);
    log.debug("c : " + ArrayUtils.toString(c));
    log.debug("A : " + ArrayUtils.toString(A));
    log.debug("b : " + ArrayUtils.toString(b));
    log.debug("lb : " + ArrayUtils.toString(lb));
    log.debug("ub : " + ArrayUtils.toString(ub));

    //check consistency
    assertEquals(G.length, s);
    assertEquals(s + lpConverter.getOriginalN(), n);
    assertEquals(lb.length, n);
    assertEquals(ub.length, n);

    //check constraints
    RealMatrix GOrig = new Array2DRowRealMatrix(G);
    RealVector hOrig = new ArrayRealVector(h);
    RealMatrix AStandard = new Array2DRowRealMatrix(A);
    RealVector bStandard = new ArrayRealVector(b);
    RealVector expectedSolVector = new ArrayRealVector(expectedSol);
    RealVector Gxh = GOrig.operate(expectedSolVector).subtract(hOrig);//G.x - h
    RealVector slackVariables = new ArrayRealVector(s);
    for (int i = 0; i < s; i++) {
        slackVariables.setEntry(i, 0. - Gxh.getEntry(i));//the difference from 0
        assertTrue(slackVariables.getEntry(i) >= 0.);
    }
    RealVector sol = slackVariables.append(expectedSolVector);
    RealVector Axmb = AStandard.operate(sol).subtract(bStandard);
    assertEquals(0., Axmb.getNorm(), expectedTolerance);

    // Utils.writeDoubleArrayToFile(new double[]{s}, "target" + File.separator + "standardS"+problemId+".txt");
    // Utils.writeDoubleArrayToFile(c, "target" + File.separator + "standardC"+problemId+".txt");
    // Utils.writeDoubleMatrixToFile(A, "target" + File.separator + "standardA"+problemId+".txt");
    // Utils.writeDoubleArrayToFile(b, "target" + File.separator + "standardB"+problemId+".txt");
    // Utils.writeDoubleArrayToFile(lb, "target" + File.separator + "standardLB"+problemId+".txt");
    // Utils.writeDoubleArrayToFile(ub, "target" + File.separator + "standardUB"+problemId+".txt");
}
From source file:eagle.security.userprofile.impl.UserProfileAnomalyKDEEvaluator.java
@Override
public List<MLCallbackResult> detect(final String user, final String algorithm,
        UserActivityAggModel userActivity, UserProfileKDEModel aModel) {
    List<MLCallbackResult> mlPredictionOutputList = new ArrayList<MLCallbackResult>();
    RealMatrix inputData = userActivity.matrix();
    double[] probabilityEstimation = new double[inputData.getRowDimension()];
    for (int i = 0; i < probabilityEstimation.length; i++)
        probabilityEstimation[i] = 1.0;

    boolean[][] anomalyFeature = new boolean[inputData.getRowDimension()][inputData.getColumnDimension()];
    for (int i = 0; i < anomalyFeature.length; i++) {
        for (int j = 0; j < anomalyFeature[i].length; j++) {
            anomalyFeature[i][j] = false;
        }
    }

    if (aModel == null) {
        LOG.info("No model available for this user, returning");
        return null;
    }

    Map<String, String> context = new HashMap<String, String>() {
        {
            put(UserProfileConstants.USER_TAG, user);
            put(UserProfileConstants.ALGORITHM_TAG, algorithm);
        }
    };

    for (int i = 0; i < inputData.getRowDimension(); i++) {
        List<String> cmds = JavaConversions.seqAsJavaList(userActivity.cmdTypes());
        if (inputData.getColumnDimension() != cmds.size()) {
            LOG.error("Test data is not with same dimension as training, aborting...");
            return null;
        } else {
            UserCommandStatistics[] listStats = aModel.statistics();
            for (int j = 0; j < inputData.getColumnDimension(); j++) {
                // LOG.info("mean for j=" + j + " is:" + listStats[j].getMean());
                // LOG.info("stddev for j=" + j + " is:" + listStats[j].getStddev());
                if (listStats[j].isLowVariant()) {
                    // LOG.info(listStats[j].getCommandName() + " is low variant for user: " + user);
                    if (inputData.getEntry(i, j) > listStats[j].getMean()) {
                        probabilityEstimation[i] *= Double.NEGATIVE_INFINITY;
                        anomalyFeature[i][j] = true;
                    }
                } else {
                    double stddev = listStats[j].getStddev();
                    double mean = listStats[j].getMean();
                    double sqrt2PI = Math.sqrt(2.0 * Math.PI);
                    double denominatorFirstPart = sqrt2PI * stddev;
                    double squareMeanNormal = Math.pow((inputData.getEntry(i, j) - mean), 2);
                    double twoPowStandardDev = Math.pow(stddev, 2);
                    double twoTimesTwoPowStandardDev = 2.0 * twoPowStandardDev;
                    double tempVal = ((1.00 / denominatorFirstPart)
                            * (Math.exp(-(squareMeanNormal / twoTimesTwoPowStandardDev))));
                    probabilityEstimation[i] *= tempVal;
                    if ((inputData.getEntry(i, j) - mean) > 2 * stddev)
                        anomalyFeature[i][j] = true;
                }
            }
        }
    }

    for (int i = 0; i < probabilityEstimation.length; i++) {
        MLCallbackResult callBackResult = new MLCallbackResult();
        callBackResult.setContext(context);
        if (probabilityEstimation[i] < aModel.maxProbabilityEstimate()) {
            callBackResult.setAnomaly(true);
            for (int col = 0; col < anomalyFeature[i].length; col++) {
                if (anomalyFeature[i][col] == true) {
                    callBackResult.setFeature(aModel.statistics()[col].getCommandName());
                }
            }
        } else {
            callBackResult.setAnomaly(false);
        }
        callBackResult.setTimestamp(userActivity.timestamp());
        List<String> datapoints = new ArrayList<String>();
        double[] rowVals = userActivity.matrix().getRow(i);
        for (double rowVal : rowVals)
            datapoints.add(rowVal + "");
        callBackResult.setDatapoints(datapoints);
        callBackResult.setId(user);
        callBackResult.setAlgorithm(UserProfileConstants.KDE_ALGORITHM);
        mlPredictionOutputList.add(callBackResult);
    }
    return mlPredictionOutputList;
}
From source file:com.aliyun.odps.ship.common.RecordConverter.java
/**
 * Convert a tunnel record to a byte[][] array.
 */
public byte[][] format(Record r) throws UnsupportedEncodingException {
    int cols = schema.getColumns().size();
    byte[][] line = new byte[cols][];
    byte[] colValue = null;
    for (int i = 0; i < cols; i++) {
        OdpsType t = schema.getColumn(i).getType();
        switch (t) {
        case BIGINT: {
            Long v = r.getBigint(i);
            colValue = v == null ? null : v.toString().getBytes(defaultCharset);
            break;
        }
        case DOUBLE: {
            Double v = r.getDouble(i);
            if (v == null) {
                colValue = null;
            } else if (v.equals(Double.POSITIVE_INFINITY) || v.equals(Double.NEGATIVE_INFINITY)) {
                colValue = v.toString().getBytes(defaultCharset);
            } else {
                colValue = doubleFormat.format(v).replaceAll(",", "").getBytes(defaultCharset);
            }
            break;
        }
        case DATETIME: {
            Date v = r.getDatetime(i);
            if (v == null) {
                colValue = null;
            } else {
                colValue = dateFormatter.format(v).getBytes(defaultCharset);
            }
            break;
        }
        case BOOLEAN: {
            Boolean v = r.getBoolean(i);
            colValue = v == null ? null : v.toString().getBytes(defaultCharset);
            break;
        }
        case STRING: {
            byte[] v = r.getBytes(i);
            if (v == null) {
                colValue = null;
            } else if (Util.isIgnoreCharset(charset)) {
                colValue = v;
            } else {
                // data at ODPS side is always utf-8
                colValue = new String(v, Constants.REMOTE_CHARSET).getBytes(charset);
            }
            break;
        }
        case DECIMAL: {
            BigDecimal v = r.getDecimal(i);
            colValue = v == null ? null : v.toPlainString().getBytes(defaultCharset);
            break;
        }
        default:
            throw new RuntimeException("Unknown column type: " + t);
        }
        if (colValue == null) {
            line[i] = nullBytes;
        } else {
            line[i] = colValue;
        }
    }
    return line;
}
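The DOUBLE branch above writes infinities with Double.toString() and only applies doubleFormat to finite values. A likely reason (a sketch under assumptions; the snippet does not show doubleFormat's actual pattern, so the pattern below is hypothetical) is that java.text.DecimalFormat renders infinities with the DecimalFormatSymbols infinity string rather than the "Infinity" literal:

import java.text.DecimalFormat;

public class InfinityFormatDemo {
    public static void main(String[] args) {
        // Hypothetical pattern, for illustration only.
        DecimalFormat doubleFormat = new DecimalFormat("#,##0.0#####");
        System.out.println(doubleFormat.format(Double.NEGATIVE_INFINITY)); // "-∞" (negative prefix + infinity symbol)
        System.out.println(Double.toString(Double.NEGATIVE_INFINITY));     // "-Infinity"
        System.out.println(doubleFormat.format(1234567.25));               // "1,234,567.25"
    }
}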
From source file:edu.jhuapl.bsp.detector.OpenMath.java
public static double max(double[] in) {
    if (in != null) {
        double maxValue = Double.NEGATIVE_INFINITY; // seed so that any finite entry replaces it
        for (int i = 0; i < in.length; i++) {
            maxValue = Math.max(in[i], maxValue);
        }
        return maxValue;
    }
    // note: a null input returns POSITIVE_INFINITY
    return Double.POSITIVE_INFINITY;
}
From source file:org.jfree.data.RangeTest.java
/**
 * Simple tests for the contains() method.
 */
@Test
public void testContains() {
    Range r1 = new Range(0.0, 1.0);
    assertFalse(r1.contains(Double.NaN));
    assertFalse(r1.contains(Double.NEGATIVE_INFINITY));
    assertFalse(r1.contains(-1.0));
    assertTrue(r1.contains(0.0));
    assertTrue(r1.contains(0.5));
    assertTrue(r1.contains(1.0));
    assertFalse(r1.contains(2.0));
    assertFalse(r1.contains(Double.POSITIVE_INFINITY));
}
From source file:ArrayUtil.java
// Returns the indices of all entries equal to the maximum value of the array.
public static int[] multimaxIndex(double[] d) {
    int[] maxpos = new int[d.length];
    int lastpos = 0;
    double maxval = Double.NEGATIVE_INFINITY;
    for (int i = 0; i < d.length; i++) {
        if (d[i] > maxval) {
            maxval = d[i];
            maxpos = new int[d.length - i];
            lastpos = 0;
            maxpos[lastpos++] = i;
        } else if (d[i] == maxval) {
            maxpos[lastpos++] = i;
        }
    }
    int[] r = new int[lastpos];
    System.arraycopy(maxpos, 0, r, 0, lastpos);
    return r;
}
From source file:edu.cornell.med.icb.learning.libsvm.LibSvmClassifier.java
public double predict(final ClassificationModel trainingModel, final ClassificationProblem problem,
        final int instanceIndex, final double[] probabilities) {
    final svm_model model = getNativeModel(trainingModel);
    if (svm.svm_check_probability_model(model) == 1) {
        LOG.debug("estimating probabilities");
        final svm_problem nativeProblem = getNativeProblem(problem);
        if (LOG.isTraceEnabled()) {
            printNodes(instanceIndex, nativeProblem);
        }
        // The SVM was trained to estimate probabilities. Return estimated probabilities.
        final double decision = svm.svm_predict_probability(getNativeModel(trainingModel),
                nativeProblem.x[instanceIndex], probabilities);
        if (LOG.isDebugEnabled()) {
            LOG.debug("decision values: " + ArrayUtils.toString(probabilities));
        }
        return decision;
    } else {
        // Regular SVM was not trained to estimate probability. Report the decision function in place of
        // estimated probabilities.
        LOG.debug("substituting decision values for probabilities. The SVM was not trained to estimate probabilities.");
        final svm_problem nativeProblem = getNativeProblem(problem);
        if (LOG.isTraceEnabled()) {
            printNodes(instanceIndex, nativeProblem);
        }
        svm.svm_predict_values(getNativeModel(trainingModel), nativeProblem.x[instanceIndex], probabilities);
        probabilities[0] = Math.abs(probabilities[0]);
        probabilities[1] = Double.NEGATIVE_INFINITY; // make sure probs[0] is the max of the two values.
        if (LOG.isDebugEnabled()) {
            LOG.debug("decision values: " + ArrayUtils.toString(probabilities));
        }
        final double decision = svm.svm_predict(getNativeModel(trainingModel),
                getNativeProblem(problem).x[instanceIndex]);
        if (LOG.isDebugEnabled()) {
            LOG.debug("decision: " + decision);
        }
        return decision;
    }
}
From source file:gedi.util.math.stat.distributions.NormalMixtureDistribution.java
@Override
public double getSupportLowerBound() {
    return Double.NEGATIVE_INFINITY;
}
From source file:com.joptimizer.util.MPSParserTest.java
/**
 * This is the PILOT4 netlib problem.
 */
public void testMps3() throws Exception {
    log.debug("testMps3");
    String problemId = "3";
    File f = Utils.getClasspathResourceAsFile("lp" + File.separator + "mps" + File.separator + problemId + ".mps");

    double unboundedLBValue = Double.NEGATIVE_INFINITY;
    double unboundedUBValue = Double.POSITIVE_INFINITY;
    double unspecifiedLBValue = 0;
    double unspecifiedUBValue = unboundedUBValue;
    MPSParser p = new MPSParser(unspecifiedLBValue, unspecifiedUBValue, unboundedLBValue, unboundedUBValue);
    p.parse(f);

    int n = p.getN();
    int meq = p.getMeq();
    int mieq = p.getMieq();
    log.debug("name: " + p.getName());
    log.debug("n : " + n);
    log.debug("meq : " + meq);
    log.debug("mieq: " + mieq);
    log.debug("rows: " + (meq + mieq));
    log.debug("lb : " + ArrayUtils.toString(p.getLb().toArray()));
    log.debug("ub : " + ArrayUtils.toString(p.getUb().toArray()));
    assertEquals(n, 1000);
    assertEquals(meq, 287);
    assertEquals(mieq, 123);

    List<String> unboundedVariables = Arrays.asList(new String[] { "XCRO01", "XROP01", "XGAS01", "XELE01",
            "XAGR01", "XMNG01", "XCMP01", "XFDS01", "XPPR01", "XSCG01", "XMET01", "XTEX01", "XLUM01", "XFAP01",
            "XMFG01", "XTAW01", "XTRD01", "XFIN01", "XSVC01", "XTRE01", "XMAC01" });
    List<String> variablesNames = p.getVariablesNames();
    for (int i = 0; i < n; i++) {
        String variable = variablesNames.get(i);
        if (unboundedVariables.contains(variable)) {
            //these variables are stated to be unbounded in this mps model
            assertEquals(unboundedLBValue, p.getLb().getQuick(i));
            assertEquals(unboundedUBValue, p.getUb().getQuick(i));
        } else if ("PLWU01".equalsIgnoreCase(variable)) {
            //this variable has no explicit bounds in this mps model
            assertEquals(unspecifiedLBValue, p.getLb().getQuick(i));
            assertEquals(unspecifiedUBValue, p.getUb().getQuick(i));
        }
    }
}
From source file:beast.math.MathUtils.java
/**
 * @param logpdf array of unnormalised log probabilities
 * @return a sample according to an unnormalised probability distribution
 *
 * Use this if probabilities are rounding to zero when converted to real space
 */
public static int randomChoiceLogPDF(double[] logpdf) {
    double scalingFactor = Double.NEGATIVE_INFINITY;
    for (double aLogpdf : logpdf) {
        if (aLogpdf > scalingFactor) {
            scalingFactor = aLogpdf;
        }
    }
    if (scalingFactor == Double.NEGATIVE_INFINITY) {
        throw new Error("randomChoiceLogPDF falls through -- all -INF components in input distribution");
    }
    for (int j = 0; j < logpdf.length; j++) {
        logpdf[j] = logpdf[j] - scalingFactor;
    }
    double[] pdf = new double[logpdf.length];
    for (int j = 0; j < logpdf.length; j++) {
        pdf[j] = Math.exp(logpdf[j]);
    }
    return randomChoicePDF(pdf);
}
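The javadoc's note about probabilities "rounding to zero" refers to floating-point underflow: Math.exp of a very negative log value returns 0.0, so subtracting the maximum (found by seeding the scan with Double.NEGATIVE_INFINITY) before exponentiating preserves the ratios between components. A small standalone illustration with hypothetical values, not taken from the BEAST source:

public class LogScaleDemo {
    public static void main(String[] args) {
        double[] logpdf = { -1000.0, -1001.0, -1002.0 };
        // Naive conversion underflows: every component becomes 0.0.
        for (double lp : logpdf) {
            System.out.println(Math.exp(lp)); // 0.0
        }
        // Subtracting the maximum first keeps the ratios and avoids underflow.
        double max = Double.NEGATIVE_INFINITY;
        for (double lp : logpdf) {
            max = Math.max(max, lp);
        }
        for (double lp : logpdf) {
            System.out.println(Math.exp(lp - max)); // 1.0, ~0.3679, ~0.1353
        }
    }
}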